import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
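utilities here is the helper module that ships with the book's code repository and is not included in this post. If you don't have it, a minimal stand-in for load_data could look like the sketch below, assuming data_multivar.txt holds one sample per line as comma-separated values with the class label in the last column (an assumption about the file format, not confirmed by the post).

def load_data(input_file):
    # Hypothetical replacement for utilities.load_data:
    # read comma-separated rows, treat the last column as the label.
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y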
###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data: filled squares for class 0, hollow squares for class 1
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')
###############################################
# Train/test split and SVM training
# (sklearn.cross_validation was removed in newer scikit-learn releases;
#  train_test_split now lives in sklearn.model_selection)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
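utilities.plot_classifier is another book helper that isn't shown in the post. As a rough sketch (not the book's exact implementation), a 2-D decision-boundary plot can be built by predicting on a mesh grid that covers the feature range:

def plot_classifier(classifier, X, y, title):
    # Sketch of a decision-boundary plot for 2-D data: predict on a grid,
    # shade the regions by predicted class, then overlay the data points.
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.show()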
###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")
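The commented-out params above show the three kernels this recipe tries (linear, degree-3 polynomial, RBF). One quick way to compare them, as an illustrative addition rather than part of the original script, is cross-validation on the training split:

from sklearn.model_selection import cross_val_score

for params in [{'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}]:
    scores = cross_val_score(SVC(**params), X_train, y_train, cv=5)
    print(params, '-> mean CV accuracy: %.3f' % scores.mean())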