import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (sklearn.cross_validation has been removed from recent scikit-learn releases;
#  train_test_split now lives in sklearn.model_selection)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")
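Note that the script imports a local utilities module from the book's Chapter03 folder, which is not shown in this post. As a rough, assumed sketch of what its load_data and plot_classifier helpers do (reading a comma-separated file whose last column is the label, and drawing decision regions on a mesh grid), something like the following would make the script self-contained; the actual helpers in the Python-Machine-Learning-Cookbook repository may differ in detail.

# Hypothetical stand-in for the book's utilities.py (an assumption, not the original file)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes a comma-separated text file whose last column is the class label
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the classifier on a dense mesh grid and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.show()

The commented-out params lines in the main script switch the SVC between a linear, polynomial (degree 3) and RBF kernel; the RBF kernel is the one left active because this dataset is not linearly separable.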