@@ -61,7 +61,7 @@ def _built_RNN(self):
 
         with tf.variable_scope('output_layer'):
             # cell_outputs_reshaped (BATCH*TIME_STEP, CELL_SIZE)
-            cell_outputs_reshaped = tf.reshape(tf.concat(1, self.cell_outputs), [-1, self._cell_size])
+            cell_outputs_reshaped = tf.reshape(tf.concat(self.cell_outputs, 1), [-1, self._cell_size])
             Wo = self._weight_variable((self._cell_size, self._output_size))
             bo = self._bias_variable((self._output_size,))
             product = tf.matmul(cell_outputs_reshaped, Wo) + bo
@@ -76,13 +76,13 @@ def _built_RNN(self):
         self._cost = mse_sum_across_time
         self._cost_ave_time = self._cost / self._time_steps
 
-        with tf.name_scope('trian'):
+        with tf.variable_scope('trian'):
            self._lr = tf.convert_to_tensor(self._lr)
            self.train_op = tf.train.AdamOptimizer(self._lr).minimize(self._cost)
 
     @staticmethod
-    def ms_error(y_pre, y_target):
-        return tf.square(tf.sub(y_pre, y_target))
+    def ms_error(y_target, y_pre):
+        return tf.square(tf.subtract(y_target, y_pre))
 
     @staticmethod
     def _weight_variable(shape, name='weights'):
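For reference, a minimal sketch (not taken from this commit) of the TensorFlow >= 1.0 call signatures the diff migrates to. Here outputs is a hypothetical stand-in for self.cell_outputs, assumed to be a list of per-step tensors of shape (batch, cell_size):

import tensorflow as tf

# Hypothetical stand-in for self.cell_outputs: 5 time steps, batch=4, cell_size=8.
outputs = [tf.zeros((4, 8)) for _ in range(5)]

# Old API (pre-1.0): tf.concat(concat_dim, values)
# New API (>= 1.0):  tf.concat(values, axis)
merged = tf.concat(outputs, 1)        # shape (4, 40)
flat = tf.reshape(merged, [-1, 8])    # shape (20, 8), i.e. (batch * time, cell_size)

# tf.sub was likewise renamed to tf.subtract in TensorFlow 1.0.
squared_error = tf.square(tf.subtract(flat, tf.zeros_like(flat)))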