import numpy as np
import matplotlib.pyplot as plt

import utilities  # helper module shipped with the book's code (a minimal stand-in is sketched below)

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# Note: sklearn.cross_validation has been removed; use sklearn.model_selection instead
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all the figures created above
plt.show()
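The post switches between linear, polynomial, and RBF kernels by commenting lines in and out. If you want to make that choice less arbitrary, a quick cross-validated comparison on the training split can help; the loop below is only an illustrative sketch using scikit-learn's cross_val_score and is not part of the original post.

from sklearn.model_selection import cross_val_score
from sklearn.svm import SVC

# Compare the three kernels mentioned above with 5-fold cross-validation
for params in ({'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}):
    scores = cross_val_score(SVC(**params), X_train, y_train, cv=5, scoring='accuracy')
    print(params, '-> mean accuracy: %.3f' % scores.mean())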