import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])
# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')
###############################################
# Train/test split and SVM training
# Note: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Pick one kernel; the linear and polynomial alternatives are left commented out
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')
y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Display the input data, training and test figures
plt.show()
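The script imports the book's utilities module (utilities.py from the Cookbook's Chapter03 folder), which is not included in this post. For readers who don't have that file, below is a rough sketch of stand-in helpers, assuming data_multivar.txt holds comma-separated rows with the feature columns first and the class label last; this is only a guess at the helpers' behaviour, not the book's actual implementation.

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumption: comma-separated rows, feature columns first, class label last
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title=''):
    # Predict over a dense grid covering the data, shade the decision regions,
    # then overlay the actual points coloured by their class label
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=60, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

Saving these two functions as utilities.py next to the main script should be enough to run it end to end, although the plots will not match the book's figures exactly.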