
Commit
Update all.
victorkich committed Jul 1, 2021
1 parent baaf2db commit 95f2ead
Showing 7,464 changed files with 445 additions and 1,454,343 deletions.
The diff you're trying to view is too large. We only load the first 3000 changed files.
248 changes: 248 additions & 0 deletions data.csv
@@ -0,0 +1,248 @@
,epoch,step,reward,done,action,evaluation_state
0,0.0,0.0,0.0,1.0,2.0,1.0
1,1.0,0.0,0.0,0.0,2.0,1.0
2,1.0,1.0,0.0,0.0,2.0,1.0
3,1.0,2.0,0.0,0.0,2.0,1.0
4,1.0,3.0,0.0,0.0,2.0,1.0
5,1.0,4.0,0.0,0.0,2.0,1.0
6,1.0,5.0,0.0,0.0,2.0,1.0
7,1.0,6.0,0.0,0.0,1.0,1.0
8,1.0,7.0,0.0,0.0,1.0,1.0
9,1.0,8.0,0.0,0.0,1.0,1.0
10,1.0,9.0,0.0,0.0,0.0,1.0
11,1.0,10.0,0.0,0.0,2.0,1.0
12,1.0,11.0,0.0,0.0,0.0,1.0
13,1.0,12.0,0.0,0.0,2.0,1.0
14,1.0,13.0,0.0,0.0,1.0,1.0
15,1.0,14.0,0.0,0.0,1.0,1.0
16,1.0,15.0,0.0,0.0,1.0,1.0
17,1.0,16.0,0.0,0.0,0.0,1.0
18,1.0,17.0,0.0,0.0,2.0,1.0
19,1.0,18.0,0.0,0.0,0.0,1.0
20,1.0,19.0,0.0,0.0,2.0,1.0
21,1.0,20.0,0.0,0.0,2.0,1.0
22,1.0,21.0,0.0,0.0,2.0,1.0
23,1.0,22.0,0.0,0.0,2.0,1.0
24,1.0,23.0,0.0,0.0,2.0,1.0
25,1.0,24.0,0.0,0.0,2.0,1.0
26,1.0,25.0,0.0,0.0,2.0,1.0
27,1.0,26.0,0.0,0.0,2.0,1.0
28,1.0,27.0,0.0,0.0,2.0,1.0
29,1.0,28.0,0.0,0.0,2.0,1.0
30,1.0,29.0,0.0,0.0,1.0,1.0
31,1.0,30.0,0.0,0.0,1.0,1.0
32,1.0,31.0,0.0,0.0,2.0,1.0
33,1.0,32.0,0.0,1.0,2.0,1.0
34,2.0,0.0,0.0,0.0,2.0,1.0
35,2.0,1.0,0.0,0.0,2.0,1.0
36,2.0,2.0,0.0,0.0,2.0,1.0
37,2.0,3.0,0.0,0.0,2.0,1.0
38,2.0,4.0,0.0,0.0,2.0,1.0
39,2.0,5.0,0.0,0.0,1.0,1.0
40,2.0,6.0,0.0,0.0,2.0,1.0
41,2.0,7.0,0.0,0.0,1.0,1.0
42,2.0,8.0,0.0,0.0,1.0,1.0
43,2.0,9.0,0.0,0.0,0.0,1.0
44,2.0,10.0,0.0,0.0,2.0,1.0
45,2.0,11.0,0.0,0.0,0.0,1.0
46,2.0,12.0,0.0,0.0,2.0,1.0
47,2.0,13.0,0.0,1.0,0.0,1.0
48,3.0,0.0,0.0,0.0,2.0,1.0
49,3.0,1.0,0.0,0.0,2.0,1.0
50,3.0,2.0,0.0,0.0,2.0,1.0
51,3.0,3.0,0.0,0.0,2.0,1.0
52,3.0,4.0,0.0,0.0,2.0,1.0
53,3.0,5.0,0.0,0.0,2.0,1.0
54,3.0,6.0,0.0,0.0,1.0,1.0
55,3.0,7.0,0.0,0.0,1.0,1.0
56,3.0,8.0,0.0,0.0,1.0,1.0
57,3.0,9.0,0.0,0.0,0.0,1.0
58,3.0,10.0,0.0,0.0,2.0,1.0
59,3.0,11.0,0.0,0.0,0.0,1.0
60,3.0,12.0,0.0,0.0,2.0,1.0
61,3.0,13.0,0.0,0.0,1.0,1.0
62,3.0,14.0,0.0,0.0,2.0,1.0
63,3.0,15.0,0.0,0.0,1.0,1.0
64,3.0,16.0,0.0,0.0,0.0,1.0
65,3.0,17.0,0.0,0.0,0.0,1.0
66,3.0,18.0,0.0,0.0,0.0,1.0
67,3.0,19.0,0.0,0.0,2.0,1.0
68,3.0,20.0,0.0,1.0,1.0,1.0
69,4.0,0.0,0.0,0.0,2.0,1.0
70,4.0,1.0,0.0,0.0,2.0,1.0
71,4.0,2.0,0.0,0.0,2.0,1.0
72,4.0,3.0,0.0,0.0,2.0,1.0
73,4.0,4.0,0.0,0.0,2.0,1.0
74,4.0,5.0,0.0,0.0,1.0,1.0
75,4.0,6.0,0.0,0.0,1.0,1.0
76,4.0,7.0,0.0,0.0,1.0,1.0
77,4.0,8.0,0.0,0.0,0.0,1.0
78,4.0,9.0,0.0,1.0,0.0,1.0
79,5.0,0.0,0.0,0.0,2.0,1.0
80,5.0,1.0,0.0,0.0,2.0,1.0
81,5.0,2.0,0.0,0.0,2.0,1.0
82,5.0,3.0,0.0,0.0,2.0,1.0
83,5.0,4.0,0.0,0.0,2.0,1.0
84,5.0,5.0,0.0,0.0,1.0,1.0
85,5.0,6.0,0.0,0.0,1.0,1.0
86,5.0,7.0,0.0,0.0,0.0,1.0
87,5.0,8.0,0.0,0.0,0.0,1.0
88,5.0,9.0,0.0,1.0,0.0,1.0
89,6.0,0.0,0.0,0.0,2.0,1.0
90,6.0,1.0,0.0,0.0,2.0,1.0
91,6.0,2.0,0.0,0.0,2.0,1.0
92,6.0,3.0,0.0,0.0,2.0,1.0
93,6.0,4.0,0.0,0.0,2.0,1.0
94,6.0,5.0,0.0,0.0,1.0,1.0
95,6.0,6.0,0.0,0.0,1.0,1.0
96,6.0,7.0,0.0,0.0,1.0,1.0
97,6.0,8.0,0.0,0.0,0.0,1.0
98,6.0,9.0,0.0,1.0,0.0,1.0
99,7.0,0.0,0.0,0.0,2.0,1.0
100,7.0,1.0,0.0,0.0,2.0,1.0
101,7.0,2.0,0.0,0.0,2.0,1.0
102,7.0,3.0,0.0,0.0,2.0,1.0
103,7.0,4.0,0.0,0.0,2.0,1.0
104,7.0,5.0,0.0,0.0,1.0,1.0
105,7.0,6.0,0.0,0.0,2.0,1.0
106,7.0,7.0,0.0,0.0,1.0,1.0
107,7.0,8.0,0.0,0.0,1.0,1.0
108,7.0,9.0,0.0,0.0,0.0,1.0
109,7.0,10.0,0.0,0.0,2.0,1.0
110,7.0,11.0,0.0,0.0,0.0,1.0
111,7.0,12.0,0.0,0.0,2.0,1.0
112,7.0,13.0,0.0,1.0,0.0,1.0
113,8.0,0.0,0.0,0.0,2.0,1.0
114,8.0,1.0,0.0,0.0,2.0,1.0
115,8.0,2.0,0.0,0.0,2.0,1.0
116,8.0,3.0,0.0,0.0,2.0,1.0
117,8.0,4.0,0.0,0.0,2.0,1.0
118,8.0,5.0,0.0,0.0,1.0,1.0
119,8.0,6.0,0.0,0.0,2.0,1.0
120,8.0,7.0,0.0,0.0,1.0,1.0
121,8.0,8.0,0.0,0.0,1.0,1.0
122,8.0,9.0,0.0,0.0,0.0,1.0
123,8.0,10.0,0.0,0.0,0.0,1.0
124,8.0,11.0,0.0,0.0,1.0,1.0
125,8.0,12.0,0.0,0.0,0.0,1.0
126,8.0,13.0,0.0,1.0,1.0,1.0
127,9.0,0.0,0.0,0.0,2.0,1.0
128,9.0,1.0,0.0,0.0,2.0,1.0
129,9.0,2.0,0.0,0.0,2.0,1.0
130,9.0,3.0,0.0,0.0,2.0,1.0
131,9.0,4.0,0.0,0.0,2.0,1.0
132,9.0,5.0,0.0,0.0,1.0,1.0
133,9.0,6.0,0.0,0.0,2.0,1.0
134,9.0,7.0,0.0,0.0,1.0,1.0
135,9.0,8.0,0.0,0.0,1.0,1.0
136,9.0,9.0,0.0,0.0,0.0,1.0
137,9.0,10.0,0.0,0.0,2.0,1.0
138,9.0,11.0,0.0,0.0,0.0,1.0
139,9.0,12.0,0.0,0.0,2.0,1.0
140,9.0,13.0,0.0,0.0,1.0,1.0
141,9.0,14.0,0.0,0.0,2.0,1.0
142,9.0,15.0,0.0,0.0,1.0,1.0
143,9.0,16.0,0.0,0.0,0.0,1.0
144,9.0,17.0,0.0,0.0,0.0,1.0
145,9.0,18.0,0.0,0.0,0.0,1.0
146,9.0,19.0,0.0,0.0,2.0,1.0
147,9.0,20.0,0.0,1.0,1.0,1.0
148,10.0,0.0,0.0,0.0,2.0,1.0
149,10.0,1.0,0.0,0.0,2.0,1.0
150,10.0,2.0,0.0,0.0,2.0,1.0
151,10.0,3.0,0.0,0.0,2.0,1.0
152,10.0,4.0,0.0,0.0,2.0,1.0
153,10.0,5.0,0.0,0.0,1.0,1.0
154,10.0,6.0,0.0,0.0,2.0,1.0
155,10.0,7.0,0.0,0.0,1.0,1.0
156,10.0,8.0,0.0,0.0,1.0,1.0
157,10.0,9.0,0.0,0.0,0.0,1.0
158,10.0,10.0,0.0,0.0,2.0,1.0
159,10.0,11.0,0.0,0.0,0.0,1.0
160,10.0,12.0,0.0,0.0,2.0,1.0
161,10.0,13.0,0.0,0.0,0.0,1.0
162,10.0,14.0,0.0,1.0,1.0,1.0
163,11.0,0.0,0.0,0.0,2.0,1.0
164,11.0,1.0,0.0,0.0,2.0,1.0
165,11.0,2.0,0.0,0.0,2.0,1.0
166,11.0,3.0,0.0,0.0,2.0,1.0
167,11.0,4.0,0.0,0.0,2.0,1.0
168,11.0,5.0,0.0,0.0,1.0,1.0
169,11.0,6.0,0.0,0.0,1.0,1.0
170,11.0,7.0,0.0,0.0,1.0,1.0
171,11.0,8.0,0.0,1.0,0.0,1.0
172,12.0,0.0,0.0,0.0,2.0,1.0
173,12.0,1.0,0.0,0.0,2.0,1.0
174,12.0,2.0,0.0,0.0,2.0,1.0
175,12.0,3.0,0.0,0.0,2.0,1.0
176,12.0,4.0,0.0,0.0,2.0,1.0
177,12.0,5.0,0.0,0.0,1.0,1.0
178,12.0,6.0,0.0,0.0,2.0,1.0
179,12.0,7.0,0.0,0.0,1.0,1.0
180,12.0,8.0,0.0,0.0,2.0,1.0
181,12.0,9.0,0.0,0.0,0.0,1.0
182,12.0,10.0,0.0,0.0,2.0,1.0
183,12.0,11.0,0.0,0.0,0.0,1.0
184,12.0,12.0,0.0,0.0,2.0,1.0
185,12.0,13.0,0.0,0.0,1.0,1.0
186,12.0,14.0,0.0,0.0,1.0,1.0
187,12.0,15.0,0.0,0.0,1.0,1.0
188,12.0,16.0,0.0,0.0,0.0,1.0
189,12.0,17.0,0.0,0.0,2.0,1.0
190,12.0,18.0,0.0,0.0,0.0,1.0
191,12.0,19.0,0.0,0.0,2.0,1.0
192,12.0,20.0,0.0,0.0,2.0,1.0
193,12.0,21.0,0.0,0.0,0.0,1.0
194,12.0,22.0,0.0,0.0,2.0,1.0
195,12.0,23.0,0.0,0.0,0.0,1.0
196,12.0,24.0,0.0,0.0,2.0,1.0
197,12.0,25.0,0.0,0.0,2.0,1.0
198,12.0,26.0,0.0,0.0,2.0,1.0
199,12.0,27.0,0.0,0.0,2.0,1.0
200,12.0,28.0,0.0,0.0,2.0,1.0
201,12.0,29.0,0.0,0.0,1.0,1.0
202,12.0,30.0,0.0,1.0,2.0,1.0
203,13.0,0.0,0.0,0.0,2.0,1.0
204,13.0,1.0,0.0,0.0,2.0,1.0
205,13.0,2.0,0.0,0.0,2.0,1.0
206,13.0,3.0,0.0,0.0,2.0,1.0
207,13.0,4.0,0.0,0.0,2.0,1.0
208,13.0,5.0,0.0,0.0,1.0,1.0
209,13.0,6.0,0.0,0.0,2.0,1.0
210,13.0,7.0,0.0,0.0,1.0,1.0
211,13.0,8.0,0.0,0.0,1.0,1.0
212,13.0,9.0,0.0,0.0,0.0,1.0
213,13.0,10.0,0.0,0.0,0.0,1.0
214,13.0,11.0,0.0,0.0,1.0,1.0
215,13.0,12.0,0.0,0.0,2.0,1.0
216,13.0,13.0,0.0,0.0,0.0,1.0
217,13.0,14.0,0.0,1.0,1.0,1.0
218,14.0,0.0,0.0,0.0,2.0,1.0
219,14.0,1.0,0.0,0.0,2.0,1.0
220,14.0,2.0,0.0,0.0,2.0,1.0
221,14.0,3.0,0.0,0.0,2.0,1.0
222,14.0,4.0,0.0,0.0,2.0,1.0
223,14.0,5.0,0.0,0.0,2.0,1.0
224,14.0,6.0,0.0,0.0,1.0,1.0
225,14.0,7.0,0.0,0.0,1.0,1.0
226,14.0,8.0,0.0,0.0,0.0,1.0
227,14.0,9.0,0.0,0.0,2.0,1.0
228,14.0,10.0,0.0,0.0,2.0,1.0
229,14.0,11.0,0.0,0.0,2.0,1.0
230,14.0,12.0,0.0,1.0,2.0,1.0
231,15.0,0.0,0.0,1.0,2.0,1.0
232,16.0,0.0,0.0,1.0,2.0,1.0
233,17.0,0.0,0.0,1.0,2.0,1.0
234,18.0,0.0,0.0,1.0,2.0,1.0
235,19.0,0.0,0.0,1.0,2.0,1.0
236,20.0,0.0,0.0,1.0,2.0,1.0
237,21.0,0.0,0.0,1.0,2.0,1.0
238,22.0,0.0,0.0,1.0,2.0,1.0
239,23.0,0.0,0.0,1.0,2.0,1.0
240,24.0,0.0,0.0,1.0,2.0,1.0
241,25.0,0.0,0.0,1.0,2.0,1.0
242,26.0,0.0,0.0,1.0,2.0,1.0
243,27.0,0.0,0.0,1.0,2.0,1.0
244,28.0,0.0,0.0,1.0,2.0,1.0
245,29.0,0.0,0.0,1.0,2.0,1.0
246,30.0,0.0,0.0,1.0,2.0,1.0
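
Each row of data.csv is one environment step: an unnamed index column, then epoch (the episode counter), step, reward, done, action, and evaluation_state. As a quick orientation for anyone consuming this log, the sketch below loads it with pandas and counts steps per episode, roughly mirroring what data_visualization.py does; treating done == 1.0 as the episode terminator is an assumption read off the header row and the data, not something this commit documents.

import pandas as pd

# Load the per-step log; the first, unnamed CSV column is just a row index.
df = pd.read_csv('data.csv', index_col=0)

# Assumption: 'epoch' numbers episodes and 'done' == 1.0 marks the last step
# of each one, so the step count per epoch can stand in for episode length.
steps_per_episode = df.groupby('epoch')['step'].max() + 1
print(steps_per_episode.head())

# 'evaluation_state' splits the rows into two kinds of rollouts; the rows
# shown in this commit all carry 1.0, while data_visualization.py keeps
# only the evaluation_state == 0 rows when computing rewards.
print(df['evaluation_state'].value_counts())
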
31 changes: 16 additions & 15 deletions data_visualization.py
@@ -1,47 +1,48 @@
-from matplotlib import pyplot as plt
 from matplotlib.animation import FuncAnimation
+from matplotlib import pyplot as plt
 from scipy.signal import lfilter
 from tqdm import tqdm
 import pandas as pd
 import numpy as np
 import time


 def animate(i):
     global count
     global first

     if first:
-        time.sleep(0)
+        time.sleep(2)
         first = False
     if count < len(rewards):
-        count = count + 50
+        count = count + 5
     elif count > len(rewards):
         count = len(rewards)

     ax.clear()
-    ax.plot(num_epochs[:count], rewards[:count], color='b', linestyle='-',
-            linewidth=1, label='Real Rewards')
-    ax.plot(num_epochs[:count], mean[:count], color='r', linestyle='-',
-            linewidth=1, label='Filtered Rewards')
-    ax.set_title('Reward per Epoch', size=20)
-    ax.legend(loc=2, prop={'size':20})
-    ax.set_xlabel('Epoch')
+    ax.plot(num_epochs[:count], rewards[:count], color='b', linestyle='-', linewidth=1, label='Real Rewards', alpha=0.5)
+    ax.plot(num_epochs[:count], mean[:count], color='r', linestyle='-', linewidth=2, label='Filtered Rewards')
+    ax.set_title('Reward per Episode', size=20)
+    ax.legend(loc=2, prop={'size': 20})
+    ax.set_xlabel('Episode')
     ax.set_ylabel('Reward')
     ax.set_xlim([0, len(rewards)+50])
     ax.set_ylim([0, max(rewards)+5])


 df = pd.read_csv('data.csv')[1:-4]
-filter = df["evaluation_state"]==0
-df.where(filter, inplace = True)
+filter = df["evaluation_state"] == 0
+df.where(filter, inplace=True)
 df.dropna(inplace=True)
 epochs = pd.unique(df.epoch)
 print('Computing Real Rewards: ')
-rewards = [len(df[df.epoch == epochs[i]])-1 for i in tqdm(range(epochs.size))]
+rewards = np.array([len(df[df.epoch == epochs[i]])-1 for i in tqdm(range(epochs.size))]) - 5
 num_epochs = np.arange(len(rewards))

-n = 15 # the larger n is, the smoother curve will be
+n = 25 # the larger n is, the smoother curve will be
 b = [1.0 / n] * n
 a = 1
-mean = lfilter(b,a,rewards)
+mean = lfilter(b, a, rewards)

 print('Plotting the graphs: ')
 first = True
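
For readers unfamiliar with the smoothing step above: lfilter with b = [1.0 / n] * n and a = 1 is an n-point causal moving average (an FIR filter with n equal taps and no feedback), so raising n from 15 to 25 widens the averaging window. A self-contained sketch, with a synthetic reward array standing in for the per-episode step counts the script derives from data.csv:

import numpy as np
from scipy.signal import lfilter

n = 25                     # averaging window, as set in this commit
b = [1.0 / n] * n          # n equal FIR taps
a = 1                      # no feedback: a plain moving average

# Synthetic stand-in for the per-episode step counts computed by the script.
rewards = np.random.randint(5, 35, size=200).astype(float)
mean = lfilter(b, a, rewards)   # the curve plotted as 'Filtered Rewards'

Because lfilter starts from zero initial conditions, the first n or so points of the filtered curve are biased low relative to the raw rewards.
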
(remainder of data_visualization.py and the other changed files not shown)
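
The part of the script that actually creates the figure and drives animate() is outside the shown portion of this diff. As a purely illustrative, self-contained sketch of that FuncAnimation pattern (none of the names, frame counts, or figure settings below are taken from this repository):

from matplotlib import pyplot as plt
from matplotlib.animation import FuncAnimation
import numpy as np

data = np.cumsum(np.random.randn(200))   # toy curve to animate

def animate(i):
    # Redraw the axes up to frame i, the same role animate() plays above.
    ax.clear()
    ax.plot(data[:i + 1], color='b', linewidth=1)
    ax.set_xlabel('Episode')
    ax.set_ylabel('Reward')

fig, ax = plt.subplots()
ani = FuncAnimation(fig, animate, frames=len(data), interval=50)
plt.show()
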
