import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"
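The script depends on the book's companion utilities module, which isn't included in the post. For anyone who wants to run it standalone, here is a minimal sketch of what the two helpers could look like; the file format (comma-separated values, class label in the last column) and the mesh-grid decision-boundary plot are assumptions based only on how load_data and plot_classifier are called above, not the book's exact code:

# utilities.py -- hypothetical stand-in for the book's helper module
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assume a comma-separated text file with the class label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the trained classifier on a dense mesh grid and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.show()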
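One caveat for anyone running this today: the listing follows the book's Python 2 and pre-0.20 scikit-learn style. In Python 3, print is a function, and sklearn.cross_validation has been removed in favour of sklearn.model_selection, so the affected lines would roughly become:

from sklearn.model_selection import train_test_split

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

The rest of the script (SVC, classification_report, the matplotlib calls) should work unchanged on recent versions.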