import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"
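The listing depends on the book's `utilities` helper module, which is not included in the post. Below is a minimal sketch of the two helpers it calls, assuming `data_multivar.txt` is a comma-separated file whose last column is the class label; the actual `utilities.py` shipped with the book's repository may differ in detail.

# utilities.py -- hypothetical sketch of the helpers used above;
# the book's real module may differ
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumed format: comma-separated, all columns but the last are
    # features, the last column is the class label
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the classifier on a grid covering the data, shade the
    # predicted regions, then overlay the actual points
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.contourf(xx, yy, mesh_output, cmap=plt.cm.gray, alpha=0.8)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())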
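Note that this is Python 2 / old scikit-learn code: `print` is used as a statement, and `sklearn.cross_validation` was deprecated in scikit-learn 0.18 and removed in 0.20 in favour of `sklearn.model_selection`. Roughly, to run it on Python 3 with a current scikit-learn you would change the split and the prints along these lines, and add `plt.show()` at the end if `plot_classifier` does not display the figures itself:

# Python 3 / modern scikit-learn equivalents of the affected lines
from sklearn.model_selection import train_test_split

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

plt.show()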