
Commit

wip...
jmvalin committed Jul 23, 2018
1 parent 1914ceb commit 57f36eb
Showing 2 changed files with 17 additions and 5 deletions.
7 changes: 5 additions & 2 deletions src/gatedconv.py
@@ -1,6 +1,6 @@
 from keras import backend as K
 from keras.engine.topology import Layer
-from keras.layers import activations, initializers, regularizers, constraints, InputSpec, Conv1D
+from keras.layers import activations, initializers, regularizers, constraints, InputSpec, Conv1D, Dense
 import numpy as np
 
 class GatedConv(Conv1D):
@@ -42,13 +42,16 @@ def __init__(self, filters,
         self.out_dims = filters
         self.nongate_activation = activations.get(activation)
 
-    def call(self, inputs, memory=None):
+    def call(self, inputs, cond=None, memory=None):
         if memory is None:
             mem = K.zeros((K.shape(inputs)[0], self.mem_size, K.shape(inputs)[-1]))
         else:
             mem = K.variable(K.cast_to_floatx(memory))
         inputs = K.concatenate([mem, inputs], axis=1)
         ret = super(GatedConv, self).call(inputs)
+        if cond is not None:
+            d = Dense(2*self.out_dims, use_bias=False, activation='linear')
+            ret = ret + d(cond)
         ret = self.nongate_activation(ret[:, :, :self.out_dims]) * activations.sigmoid(ret[:, :, self.out_dims:])
         if self.return_memory:
             ret = ret, inputs[:, :self.mem_size, :]
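The change to call() adds optional WaveNet-style conditioning: when a conditioning tensor is passed, it is projected by a bias-free linear Dense layer, added to the convolution's pre-activation, and the result is split in half and combined as tanh(filter half) * sigmoid(gate half). Below is a minimal standalone sketch of that computation built from plain Keras layers rather than the GatedConv class; n_filters, feat_dim, and the tensor names are illustrative assumptions, not repository code.

# Minimal sketch of the conditional gated activation (assumed sizes/names).
from keras import backend as K
from keras.models import Model
from keras.layers import Input, Conv1D, Dense, Lambda

n_filters = 64   # plays the role of self.out_dims
feat_dim = 80    # conditioning feature dimension (assumed)

x = Input(shape=(None, 1))           # raw sample stream
c = Input(shape=(None, feat_dim))    # per-step conditioning features

# One causal convolution emits both halves of the gated activation.
pre = Conv1D(2 * n_filters, 2, padding='causal')(x)
# Bias-free linear projection of the conditioning, added pre-activation,
# mirroring Dense(2*self.out_dims, use_bias=False) in the diff above.
pre = Lambda(lambda t: t[0] + t[1])([pre, Dense(2 * n_filters, use_bias=False)(c)])
# tanh(filter half) * sigmoid(gate half)
out = Lambda(lambda t: K.tanh(t[:, :, :n_filters]) * K.sigmoid(t[:, :, n_filters:]))(pre)

model = Model([x, c], out)

One caveat, and plausibly part of why the commit is labeled wip: the diff instantiates the Dense projection inside call(), so in Keras its weights are not registered with the enclosing GatedConv layer; the sketch builds the projection in the model graph instead.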
15 changes: 12 additions & 3 deletions src/wavenet.py
@@ -4,6 +4,7 @@
 from keras.models import Model
 from keras.layers import Input, LSTM, CuDNNGRU, Dense, Embedding, Reshape, Concatenate, Lambda, Conv1D, Add, Multiply, Bidirectional, MaxPooling1D, Activation
 from keras import backend as K
+from keras.initializers import VarianceScaling
 from mdense import MDense
 import numpy as np
 import h5py
@@ -34,12 +35,20 @@ def new_wavenet_model(fftnet=False):
     rfeat = rep(cfeat)
     #tmp = Concatenate()([pcm, rfeat])
     tmp = pcm
+    init = VarianceScaling(scale=1.5,mode='fan_avg',distribution='uniform')
     for k in range(10):
         res = tmp
-        tmp = Concatenate()([tmp, rfeat])
         dilation = 9-k if fftnet else k
-        c = GatedConv(units, 2, dilation_rate=2**dilation, activation='tanh')
-        tmp = Dense(units, activation='relu')(c(tmp))
+        '''#tmp = Concatenate()([tmp, rfeat])
+        c = GatedConv(units, 2, dilation_rate=2**dilation, activation='tanh', kernel_initializer=init)
+        tmp = Dense(units, activation='relu')(c(tmp, cond=rfeat))'''
+
+        tmp = Concatenate()([tmp, rfeat])
+        c1 = CausalConv(units, 2, dilation_rate=2**dilation, activation='tanh')
+        c2 = CausalConv(units, 2, dilation_rate=2**dilation, activation='sigmoid')
+        tmp = Multiply()([c1(tmp), c2(tmp)])
+        tmp = Dense(units, activation='relu')(tmp)
 
         if k != 0:
             tmp = Add()([tmp, res])
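In wavenet.py the per-block GatedConv (kept only inside the commented-out triple-quoted string, together with the new cond=rfeat hookup and the kernel_initializer) is replaced by an explicit pair of dilated causal convolutions, one tanh and one sigmoid, whose outputs are multiplied: the same gated activation, built from two parallel layers. Each block then applies a relu Dense and a residual Add, skipped at k == 0 where the input is still the raw pcm and has a different channel count; note that init is defined but, in the committed code, only referenced by the commented-out path. A self-contained sketch of the block stack follows; plain Conv1D(padding='causal') stands in for the repository's CausalConv, the sizes are assumed, and unlike the commit the sketch also wires the initializer into the live convolutions.

# Sketch of the dilated gated block stack (assumed sizes; Conv1D with
# padding='causal' stands in for the repo's CausalConv layer).
from keras.models import Model
from keras.layers import Input, Conv1D, Dense, Concatenate, Multiply, Add
from keras.initializers import VarianceScaling

units = 64
feat_dim = 80
fftnet = False   # the FFTNet variant reverses the dilation schedule

pcm = Input(shape=(None, 1))            # raw sample stream
rfeat = Input(shape=(None, feat_dim))   # features repeated to the sample rate

init = VarianceScaling(scale=1.5, mode='fan_avg', distribution='uniform')

tmp = pcm
for k in range(10):
    res = tmp
    dilation = 9 - k if fftnet else k
    # Conditioning features are concatenated onto each block's input.
    tmp = Concatenate()([tmp, rfeat])
    # Two parallel dilated causal convolutions implement the gate:
    # tanh branch (filter) times sigmoid branch (gate).
    c1 = Conv1D(units, 2, dilation_rate=2**dilation, padding='causal',
                activation='tanh', kernel_initializer=init)
    c2 = Conv1D(units, 2, dilation_rate=2**dilation, padding='causal',
                activation='sigmoid', kernel_initializer=init)
    tmp = Multiply()([c1(tmp), c2(tmp)])
    tmp = Dense(units, activation='relu')(tmp)
    # Residual connection; skipped on the first block, whose input (pcm)
    # does not yet have `units` channels.
    if k != 0:
        tmp = Add()([tmp, res])

model = Model([pcm, rfeat], tmp)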
