-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathbidir_gru.py
109 lines (89 loc) · 2.56 KB
/
bidir_gru.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
from keras import layers, models
import numpy as np
import os
# Import data
# Load the Jena climate CSV: the first column is a timestamp, the remaining
# columns are numeric weather measurements.
fname = os.path.join(os.getcwd(), 'data', 'jena_climate_2009_2016.csv')
with open(fname) as f:  # context manager: file is closed even if parsing fails
    lines = f.read().split('\n')
headers = lines[0].split(',')
# Drop the header row and any empty trailing line (a file ending in '\n'
# yields a final '' element, which would crash float('')).
lines = [line for line in lines[1:] if line]
float_data = np.zeros((len(lines), len(headers) - 1))
for i, line in enumerate(lines):
    # Skip the timestamp column; keep only the numeric measurements.
    float_data[i, :] = [float(x) for x in line.split(',')[1:]]
# Normalize using statistics from the training slice only (first 200k rows)
# so no validation/test information leaks into preprocessing.
mean = float_data[:200000].mean(axis=0)
float_data -= mean
std = float_data[:200000].std(axis=0)
float_data /= std
# Build generator function
def generator(data, shuffle, step, lookback, min_index, max_index, delay, batch_size=128):
    """Endlessly yield ``(samples, targets)`` batches from a timeseries array.

    samples: array of shape ``(batch, lookback // step, n_features)`` — each
        entry is a window of past observations sampled every ``step`` points,
        ending just before its row index.
    targets: the value of column 1 ``delay`` points after each window's end.

    With ``shuffle`` true, window end-points are drawn uniformly at random
    from ``[min_index + lookback, max_index)``; otherwise they advance
    sequentially and wrap around once the usable range is exhausted.
    """
    if max_index is None:
        max_index = len(data) - delay - 1
    cursor = min_index + lookback
    while True:
        if shuffle:
            rows = np.random.randint(
                min_index + lookback, max_index, size=batch_size)
        else:
            # Wrap the cursor before it would run past the usable range.
            if cursor + batch_size >= max_index:
                cursor = min_index + lookback
            rows = np.arange(cursor, min(cursor + batch_size, max_index))
            cursor += len(rows)
        # One strided window per end-point; column 1 holds the target value.
        samples = np.stack([data[row - lookback:row:step] for row in rows])
        targets = np.array([data[row + delay][1] for row in rows])
        yield (samples, targets)
# Create training, validation and test generators
lookback = 1440   # look back 10 days (one observation every 10 minutes)
delay = 144       # predict the temperature 24 hours into the future
batch_size = 128
step = 6          # subsample to one observation per hour
gru_width = 32
# Number of validation BATCHES needed to see the whole validation span once.
# (Counting samples instead of batches would make validation ~128x too long.)
val_steps = (300000 - 200001 - lookback) // batch_size
train_gen = generator(float_data,
                      shuffle=True,
                      step=step,
                      lookback=lookback,
                      min_index=0,
                      max_index=200000,
                      delay=delay,
                      batch_size=batch_size)
# Validation and test data must NOT be shuffled: evaluation should be one
# deterministic pass over each held-out span.
validation_gen = generator(float_data,
                           shuffle=False,
                           step=step,
                           lookback=lookback,
                           min_index=200001,
                           max_index=300000,
                           delay=delay,
                           batch_size=batch_size)
test_gen = generator(float_data,
                     shuffle=False,
                     step=step,
                     lookback=lookback,
                     min_index=300001,  # start past the validation span to avoid overlap
                     max_index=None,
                     delay=delay,
                     batch_size=batch_size)
# Build and train a bidirectional GRU regressor on the normalized series.
model = models.Sequential()
model.add(layers.Bidirectional(
    layers.GRU(
        gru_width,
        dropout=0.1,            # dropout on the inputs
        recurrent_dropout=0.5,  # dropout on the recurrent state
        activation='relu'),
    # (None, n_features): accepts sequences of any length
    input_shape=(None, float_data.shape[-1])))
# Single linear output: the predicted (normalized) temperature.
model.add(layers.Dense(1))
model.compile(optimizer='rmsprop', loss='mae', metrics=['mae'])
# fit_generator was deprecated and removed in modern Keras/TF2;
# Model.fit accepts Python generators directly.
history = model.fit(train_gen,
                    steps_per_epoch=500,
                    epochs=20,
                    validation_data=validation_gen,
                    validation_steps=val_steps)
# Plot training vs. validation loss per epoch.
import matplotlib.pyplot as plt  # local import: plt was used but never imported

# Keras returns a History object; the metric curves live in its .history dict.
loss = history.history['loss']
val_loss = history.history['val_loss']
# One point per epoch, inclusive of the last (arange's stop is exclusive).
epochs = np.arange(1, len(loss) + 1)
# plt.plot has no 'title' kwarg; use label + legend, and title the figure.
plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.ylabel('MAE')
plt.legend()
plt.show()