import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"
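The script relies on a utilities module from the book's repository that is not included in this post. Below is a minimal sketch of what that module needs to provide, inferred only from how load_data and plot_classifier are called above; the function bodies are assumptions, not the book's actual code.

# Sketch of the missing helpers, inferred from the calls above.
# Not the original utilities.py from the book's repository.
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes each line of the data file is comma-separated: feature1,feature2,label
    X, y = [], []
    with open(input_file, 'r') as f:
        for line in f:
            data = [float(item) for item in line.strip().split(',')]
            X.append(data[:-1])
            y.append(data[-1])
    return np.array(X), np.array(y)

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Evaluate the trained classifier on a dense grid to shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, s=60, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())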
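Note that the listing targets Python 2 and an older scikit-learn: the print statements have no parentheses, and sklearn.cross_validation was deprecated in 0.18 and removed in 0.20. Assuming Python 3 and a current scikit-learn, the affected lines would look roughly like this (plt.show() added so the figures actually render when run as a script):

# Modernized equivalent of the split, training, and reporting steps (assumes Python 3, scikit-learn >= 0.20)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

classifier = SVC(kernel='rbf')
classifier.fit(X_train, y_train)
y_test_pred = classifier.predict(X_test)

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

plt.show()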