import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Display all figures
plt.show()
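The three params dictionaries in the listing (two of them commented out) correspond to the linear, polynomial, and RBF kernels from the recipe. As a usage sketch rather than part of the original script, the same train/test split can be reused to print one report per kernel instead of editing params by hand; it assumes X_train, X_test, y_train, y_test and target_names are already defined as above.

from sklearn.svm import SVC
from sklearn.metrics import classification_report

# Fit and evaluate one SVM per kernel configuration on the same split.
for params in ({'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}):
    clf = SVC(**params).fit(X_train, y_train)
    print("Kernel settings:", params)
    print(classification_report(y_test, clf.predict(X_test), target_names=target_names))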