- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Nonlinear SVM classification demo (Python Machine Learning Cookbook, Ch. 3).

Loads a two-class 2-D dataset, plots it, trains an SVC with an RBF kernel
on a 75/25 train/test split, visualizes the decision boundary on both
splits, and prints classification reports.

Requires the book's local `utilities` module (load_data, plot_classifier).
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data.
# NOTE(review): hard-coded absolute Windows path — adjust for your machine.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y' (binary labels 0/1 assumed).
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels from the book — uncomment one to experiment:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance on both splits.
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")