- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""SVM classification demo (Python ML Cookbook, Ch. 3).

Loads a 2-D two-class dataset, visualizes it, trains an RBF-kernel SVC on a
75/25 train/test split, plots the decision boundary on both splits, and prints
classification reports.  Requires the book's local `utilities` module for
`load_data` and `plot_classifier`.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (two columns of features, one label column)
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
# 'none' (lowercase) is the documented matplotlib value for an unfilled marker.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels from the book, kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures; without this, nothing renders in non-interactive mode.
# (Harmless if utilities.plot_classifier already shows figures — TODO confirm.)
plt.show()