- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads labelled points from a text file (via the project-local `utilities`
module), visualises the two classes, fits an RBF-kernel SVC on a 75/25
train/test split, plots the decision boundaries, and prints
classification reports for both splits.
"""

import numpy as np
import matplotlib.pyplot as plt
# NOTE: `sklearn.cross_validation` was removed in scikit-learn 0.20;
# `model_selection` is the current home of train_test_split.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|