import numpy as np
import matplotlib.pyplot as plt

import utilities  # helper module from the cookbook repository (load_data, plot_classifier)

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Pick one kernel configuration for the SVM
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

# Predict on the held-out test set and visualize the decision boundary
y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()