-
Notifications
You must be signed in to change notification settings - Fork 2
/
plotting.py
85 lines (79 loc) · 4.82 KB
/
plotting.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
import seaborn as sns
import matplotlib.pyplot as plt
import numpy as np
# # import pandas as pd
# #
# # files = [
# # "LSTM128_train",
# # "LSTM128_val",
# # "LSTM16_train",
# # "LSTM16_val"
# # ]
# #
# # sns.set()
# # for file in files:
# # csv = pd.read_csv(file)
# # sns.lineplot(x = "Step",y ="Value", data=csv, legend=False)
# # plt.title("accuracy on training and validation sets")
# # plt.legend(title='dataset', loc='lower right', labels=["LSTM 128 training", "LSTM 128 validation", "LSTM 16 training", "LSTM 16 validation"])
# # plt.ylabel("Accuracy")
# # plt.xlabel("Epoch")
# # plt.show()
def plot_svm_pooling():
    """Plot SVM accuracy against the number of pooled pointclouds.

    x holds the pool sizes, y the measured accuracy, and L/U the lower and
    upper bounds drawn as a shaded confidence band around the curve.
    Displays the figure with ``plt.show()``; returns None.
    """
    x = [1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 41, 43, 45, 47, 49]
    y = [0.7134427636519687, 0.7453138946742041, 0.7559694364851958, 0.768979057591623, 0.7817427385892116, 0.7916251246261217, 0.79463243873979, 0.7978723404255319, 0.7976011994002998, 0.7940199335548173, 0.8029197080291971, 0.822, 0.8060344827586207, 0.7916666666666666, 0.8098765432098766, 0.828125, 0.7944444444444444, 0.7971014492753623, 0.7926829268292683, 0.8038585209003215, 0.7796610169491526, 0.8049645390070922, 0.791970802919708, 0.7946768060836502, 0.8267716535433071]
    L = [0.7041771613641951, 0.7303101451121229, 0.7371151456211453, 0.7471868692907248, 0.7575450243798127, 0.7654007352019234, 0.7662998303701309, 0.7676890426037043, 0.7654450152461627, 0.7598986811330217, 0.7675547771281378, 0.7860548114500956, 0.7676043568634217, 0.7508822283371844, 0.7688174130475844, 0.7871876193316981, 0.7496965051191326, 0.7515013886251797, 0.7455469515168244, 0.7561338434626973, 0.728936558008109, 0.7547499843855315, 0.740036338779895, 0.7417834891789822, 0.7754546270532428]
    U = [0.7225325063091491, 0.7597575208331068, 0.7738862913523745, 0.7894221872580033, 0.8041498075151903, 0.815624206585045, 0.8203354842112816, 0.8250278489769646, 0.8263490588677214, 0.8244126017050357, 0.8340672914810175, 0.8530351132926648, 0.8394388938905615, 0.8273096689684666, 0.8451124969082482, 0.8625624119472012, 0.8329748613797477, 0.8361581192011843, 0.8330425974874925, 0.84416829180227, 0.8231956683942544, 0.8469821810939838, 0.8358316424750426, 0.8390857681134286, 0.8683518451845609]
    sns.set()  # match the seaborn styling used by plot_svm_averaging
    fig, ax = plt.subplots()
    ax.plot(x, y)
    # Shade the [L, U] confidence band around the accuracy curve.
    ax.fill_between(x, L, U, color='b', alpha=.1)
    plt.title("SVM with pooled pointclouds")
    plt.xlabel("pooled pointclouds")
    plt.ylabel("Accuracy")
    plt.show()
def plot_svm_averaging():
    """Plot accuracy of averaged SVM predictions vs. number of averaged results.

    The embedded table has one row per experiment with four columns:
    (number of averaged results, mean accuracy, lower bound, upper bound).
    The lower/upper bounds are drawn as a shaded confidence band.
    Displays the figure with ``plt.show()``; returns None.
    """
    from matplotlib.ticker import MaxNLocator  # the x axis holds integer counts

    # Reshape the flat literal into its natural (rows, 4) table instead of
    # decoding it with stride slices (data[0::4], data[1::4], ...).
    data = np.array([1 , 0.7969034608378871 , 0.7720914714240136 , 0.8196452024690152 ,
                     2 , 0.8319327731092437 , 0.8083606738161696 , 0.8531322321051397 ,
                     3 , 0.8563218390804598 , 0.8337373747757308 , 0.8762937026711797 ,
                     4 , 0.8584070796460177 , 0.8356295632885438 , 0.8784872015636225 ,
                     5 , 0.8727272727272727 , 0.8505162370807605 , 0.8920569303495062 ,
                     6 , 0.8858921161825726 , 0.8642713864485786 , 0.9044495576888599 ,
                     7 , 0.8829787234042553 , 0.8608536491303282 , 0.9019863313253107 ,
                     8 , 0.9029443838604144 , 0.8820693007281012 , 0.920457554379427 ,
                     9 , 0.9120267260579065 , 0.8917017291427427 , 0.9288416077844281 ,
                     10 , 0.9215909090909091 , 0.9019420430923379 , 0.9375750361246151 ,
                     11 , 0.9351100811123986 , 0.9166706105728504 , 0.9496931199087395 ,
                     12 , 0.9422850412249706 , 0.9245153155694085 , 0.9560703926330046 ,
                     13 , 0.9461077844311377 , 0.9286482440434486 , 0.9594814402563435 ,
                     14 , 0.9574209245742092 , 0.9413593002762735 , 0.9692270985633538 ,
                     15 , 0.9654320987654321 , 0.9504934435520893 , 0.9759769295505443 ,
                     16 , 0.967459324155194 , 0.9527471230793547 , 0.9776980996888858 ,
                     17 , 0.9670050761421319 , 0.9520935623140347 , 0.9773854284629069 ,
                     18 , 0.972972972972973 , 0.9590364692912706 , 0.9822557632063117 ,
                     19 , 0.9751958224543081 , 0.9615852418259684 , 0.9840640105736228 ,
                     20 , 0.9735099337748344 , 0.9594384285607912 , 0.9827873706346989 ,
                     21 , 0.9771812080536912 , 0.963761621076855 , 0.9857050405576538 ,
                     22 , 0.9823129251700681 , 0.9699755651144849 , 0.9896349054886054 ,
                     23 , 0.9820689655172414 , 0.9695647685859607 , 0.9894915413158123 ,
                     24 , 0.9818181818181818 , 0.9691425754326175 , 0.9893441556058915 ,
                     25 , 0.9787234042553191 , 0.9651936464129421 , 0.9870644242746888 ,
                     26 , 0.9813218390804598 , 0.968307266264106 , 0.9890524207841863 ,
                     27 , 0.9796215429403202 , 0.9660863302587779 , 0.9878228359990646 ,
                     28 , 0.9823008849557522 , 0.9693203680930924 , 0.9898468863169747 ,
                     29 , 0.9790732436472347 , 0.9651819833332831 , 0.9874941509874178 ,
                     ]).reshape(-1, 4)
    # Columns: count of averaged results, accuracy, lower bound, upper bound.
    x, y, L, U = data.T
    sns.set()
    fig, ax = plt.subplots()
    ax.plot(x, y)
    # Shade the [L, U] confidence band around the accuracy curve.
    ax.fill_between(x, L, U, color='b', alpha=.1)
    plt.title("Accuracy of averaged SVM predictions")
    plt.xlabel("number of averaged results")
    plt.ylabel("Accuracy")
    ax.xaxis.set_major_locator(MaxNLocator(integer=True))  # integer ticks only
    plt.show()
plot_svm_averaging()