import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"
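
The listing above is the Python 2 / pre-0.18 scikit-learn code from the book. On current versions, sklearn.cross_validation has been removed (train_test_split now lives in sklearn.model_selection) and print is a function, so the changed calls look roughly like this minimal sketch, assuming Python 3 and scikit-learn >= 0.20; it also adds plt.show(), which the script needs to actually display the figures when run non-interactively:

# Minimal sketch of the changed calls on Python 3 / scikit-learn >= 0.20
from sklearn.model_selection import train_test_split  # replaces sklearn.cross_validation

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

classifier = SVC(kernel='rbf')
classifier.fit(X_train, y_train)
y_test_pred = classifier.predict(X_test)

# print is a function in Python 3
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

plt.show()  # display the figures when run as a plain script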
" _. K$ G% j" V; b# |
2 O+ j D9 r: S( c( M |
|
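The script also depends on the book repository's utilities.py. If you don't have that file, a rough stand-in is sketched below; it assumes data_multivar.txt is comma-separated with the class label in the last column, and it draws the decision regions on a mesh grid rather than reproducing the book's exact plotting code, so it is an approximation, not the original module.

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumption: comma-separated rows, class label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title):
    # Predict on a mesh grid covering the data range and draw the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.contourf(xx, yy, Z, cmap=plt.cm.gray, alpha=0.3)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())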