- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 558
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads a two-class dataset, visualizes it, fits an SVC (RBF kernel by
default), plots the decision boundary on the train and test splits via
the project-local ``utilities`` helpers, and prints classification
reports for both splits.
"""
import numpy as np
import matplotlib.pyplot as plt
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities  # project-local helpers: load_data, plot_classifier

# Load input data (two feature columns X and an integer label column y).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance on both splits.
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Render all figures created above (the originals were never shown).
plt.show()