import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"
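The script depends on two helpers from the book's repository, utilities.load_data and utilities.plot_classifier, which are not shown above. The following is only a hypothetical sketch of what they might look like, inferred from how the script calls them; the repository's actual implementations may differ.

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumed file format: comma-separated rows, features first, class label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title):
    # Evaluate the fitted classifier on a dense 2D grid and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01), np.arange(y_min, y_max, 0.01))
    Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, Z, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black')
    plt.show()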
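Note that the listing is written for Python 2 and an old scikit-learn release: sklearn.cross_validation was removed in scikit-learn 0.20 (train_test_split now lives in sklearn.model_selection), and the print statements use Python 2 syntax. A minimal Python 3 adaptation of the same pipeline, assuming data_multivar.txt is a comma-separated file with the label in the last column, could look like this:

import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

# Load the data: features in all but the last column, class label in the last one
data = np.loadtxt('data_multivar.txt', delimiter=',')
X, y = data[:, :-1], data[:, -1]

# Same split and RBF-kernel SVM as the original script
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)
classifier = SVC(kernel='rbf')
classifier.fit(X_train, y_train)

# Report performance on the held-out test set
target_names = ['Class-' + str(int(i)) for i in set(y)]
print(classification_report(y_test, classifier.predict(X_test), target_names=target_names))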