- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data: utilities.load_data returns the feature matrix X and
# label vector y parsed from the text file (two features + a binary label
# per row, judging by the 2-D scatter plots below).
# NOTE(review): hard-coded absolute Windows path — adjust for your machine.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled black squares = class 0,
# hollow squares = class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
# the same train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with (uncomment one):
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# One display name per distinct label value (e.g. 'Class-0', 'Class-1').
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|