- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads a two-class dataset from a text file, visualizes the raw points,
fits an SVC (RBF kernel by default), plots the learned decision boundary
on both the training and test splits, and prints classification reports.

Requires the project-local `utilities` module for `load_data` and
`plot_classifier`.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (two feature columns + class label per row).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: `sklearn.cross_validation` was removed in scikit-learn 0.20;
# `model_selection` is the supported replacement with the same API.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with (from the original recipe):
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance (precision / recall / F1 per class).
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|