- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""SVM classification demo (Python Machine Learning Cookbook, Chapter 3).

Loads a two-class 2-D dataset, plots the raw points, trains an RBF-kernel
SVM on a 75/25 train/test split, visualizes the decision boundary on both
splits, and prints classification reports.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities  # project-local helpers: load_data(), plot_classifier()

# Load input data: each row of the text file is two feature values plus a
# class label (0 or 1).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled black squares for class 0, hollow squares
# for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# One display name per distinct label value found in y.
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures created above (required with non-interactive backends).
plt.show()