import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
# (sklearn.cross_validation has been removed; train_test_split now lives in sklearn.model_selection)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Choose the kernel: uncomment one of the alternatives to compare decision boundaries
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# Sort the labels so the names line up with classification_report's label order
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Display the plots
plt.show()
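The script imports `utilities`, the helper module that ships with the book's code repository. If only the snippet above was copied, the two helpers it calls can be stubbed out roughly as follows; the function names and call signatures match the usage above, but the details (a comma-separated input file with the class label in the last column, the 0.01 mesh step, the colour maps) are assumptions for illustration rather than the repository's exact code.

# utilities.py -- minimal stand-in for the book's helper module (assumed implementation)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Load a comma-separated file: features in all columns but the last, label in the last
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title):
    # Define the plotting range with a small margin around the 2-D data
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0

    # Evaluate the classifier on a dense grid covering that range
    step = 0.01
    x_vals, y_vals = np.meshgrid(np.arange(x_min, x_max, step),
                                 np.arange(y_min, y_max, step))
    output = classifier.predict(np.c_[x_vals.ravel(), y_vals.ravel()])
    output = output.reshape(x_vals.shape)

    # Draw the predicted regions and overlay the actual points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(x_vals, y_vals, output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired, edgecolors='black', linewidth=1)
    plt.xlim(x_vals.min(), x_vals.max())
    plt.ylim(y_vals.min(), y_vals.max())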