- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads labelled points from a text file, visualises the two classes,
fits an RBF-kernel SVC on a 75/25 train/test split, plots the decision
boundary via the project-local ``utilities`` helpers, and prints
classification reports for both splits.

Reconstructed from a paste garbled with forum artifacts; also ported
from Python 2 (`print` statements) to Python 3 and from the removed
``sklearn.cross_validation`` module to ``sklearn.model_selection``.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data.
# NOTE(review): hard-coded absolute Windows path — adjust for your checkout.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled black squares, class 1 as
# hollow squares, so the two classes are distinguishable in grayscale.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# `sklearn.cross_validation` was removed in scikit-learn 0.20;
# `model_selection.train_test_split` is the drop-in replacement.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the original text:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# One display name per distinct label value, e.g. 'Class-0', 'Class-1'.
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")