- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 555
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an RBF-kernel SVM classifier on 2-D multivariate data.

Loads labeled points from a text file (via the project-local `utilities`
module), visualizes the two classes, fits an SVC, plots the decision
boundaries on the train and test splits, and prints classification reports.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities  # project-local helper: load_data() and plot_classifier()

# Load input data (two feature columns + one label column per row).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# Sort the labels so names line up with classification_report's label order
# (iterating a bare set has no guaranteed order).
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")