- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
# SVM classification demo (scikit-learn):
# load 2-D labelled data, visualize the two classes, train an SVC,
# plot the decision boundary on train/test splits, and print
# per-class precision/recall reports.
#
# NOTE(review): reconstructed from a forum paste polluted with scrape
# artifacts; the injected line-noise tokens were stripped.
import numpy as np
import matplotlib.pyplot as plt

import utilities  # project-local helper: load_data(), plot_classifier()

# Load input data: utilities.load_data is expected to return the feature
# matrix X and label vector y — TODO confirm exact return types.
# Raw string avoids backslash-escape pitfalls in the Windows path
# (same runtime value as the original doubled backslashes).
input_file = r'D:\1.Modeling material\Py_Study\2.code_model\Python-Machine-Learning-Cookbook\Python-Machine-Learning-Cookbook-master\Chapter03\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y' using boolean masks
# (vectorized equivalent of the per-index list comprehensions).
X = np.asarray(X)
y = np.asarray(y)
class_0 = X[y == 0]
class_1 = X[y == 1]

# Plot the input data: filled squares for class 0, hollow for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black',
            edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None',
            edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
# model_selection is the supported home of train_test_split.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
# params = {'kernel': 'linear'}
# params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance.
from sklearn.metrics import classification_report

# sorted() guarantees the display names line up with the label order
# classification_report uses (sorted unique labels); a bare set() does not.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred,
                            target_names=target_names))
print("#" * 30 + "\n")
|
|