import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (sklearn.cross_validation was removed in scikit-learn 0.20; use model_selection instead)
from sklearn import model_selection
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = model_selection.train_test_split(X, y, test_size=0.25, random_state=5)

# Kernel choice: uncomment one of the alternatives to compare decision boundaries
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Show all figures
plt.show()
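The script above imports a `utilities` module from the book's Chapter03 code that isn't included in the post. As a rough stand-in, assuming `data_multivar.txt` is a comma-separated file with the two features followed by the class label, the two helpers the script calls could be sketched like this (the 0.01 grid step and the colour maps are my own choices, not necessarily the book's):

# Hypothetical stand-in for the book's utilities module (not part of the original post).
# Assumes data_multivar.txt holds comma-separated rows: feature1, feature2, label.
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Read comma-separated features plus a trailing label column
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title):
    # Evaluate the classifier on a dense grid and shade its decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With these two helpers on the path, the script loads the data, fits an RBF-kernel SVC on a 75/25 train/test split, plots the decision regions for both splits, and prints a classification report for each; swapping in the commented-out 'linear' or 'poly' params reuses the same plotting code to compare kernels.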