# NOTE(review): forum-scrape profile residue (user stats, credits, sign-in
# banner) removed from the top of this file — it was not part of the program
# and made the file invalid Python. The SVM example begins below.
# SVM classification example (Python Machine Learning Cookbook, Ch. 3):
# load 2-D two-class data, visualize it, train an SVC with an RBF kernel,
# plot the decision boundary on train/test splits, and print per-class
# precision/recall/F1 reports.
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (rows of two features in X, binary labels in y)
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# FIX: sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# One display name per distinct label value, e.g. 'Class-0', 'Class-1'
target_names = ['Class-' + str(int(i)) for i in set(y)]
# FIX: Python-2 print statements converted to print() calls (valid on 2 and 3).
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")