import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# Note: sklearn.cross_validation was removed in scikit-learn 0.20;
# the same train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Uncomment one of the following lines to try a different kernel
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Show all figures when running as a script
plt.show()
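The script imports a local utilities module that is not included in this post. For anyone who wants to run it stand-alone, here is a minimal sketch of what load_data and plot_classifier could look like. The comma-separated file layout with the class label in the last column, and the mesh-grid style of the boundary plot, are assumptions based on how the helpers are called above, not the original module.

# utilities.py - minimal sketch (assumptions noted above)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumption: each line is comma-separated, features first, label last
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title):
    # Evaluate the classifier on a dense grid to shade its decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    # Overlay the actual points, colored by their true class
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())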