- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load the multivariate dataset: X is the feature matrix, y the class labels
# (0/1, as used by the class-separation step below).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels from the original recipe, kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# One display name per distinct label value (labels look numeric per int(i)).
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures created above (no-op garbled in the scraped original;
# without it the plots never appear in a non-interactive session).
plt.show()