- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on a 2-D two-class dataset.

Loads the data, visualizes the two classes, fits an SVC, plots the
decision boundary on the train and test splits, and prints
classification reports for both.

Requires the companion ``utilities`` module from the cookbook
(``load_data`` and ``plot_classifier``) and the ``data_multivar.txt``
dataset at ``input_file``.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (feature matrix X and integer class labels y).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black',
            edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None',
            edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# sklearn.model_selection is the supported replacement.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the original recipe.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures created above (plt.show() was missing, so running
# this as a script would produce no visible output).
plt.show()