Commit 6c4534d: Fixes, improvements
layers small refactoring, simple growing mutation
Qwinpin committed Jun 18, 2019 (1 parent: 454483b)
Showing 7 changed files with 173 additions and 69 deletions.
4 changes: 2 additions & 2 deletions neuvol/constants.py
@@ -92,13 +92,13 @@

'max_pool': {
'input_rank': [3],
- 'pool_size': [i for i in range(2, 16)],
+ 'pool_size': [i for i in range(1, 16, 2)],
'strides': [i for i in range(2, 8)],
'padding': ['valid', 'same']},

'max_pool2': {
'input_rank': [4],
- 'pool_size': [i for i in range(2, 16)],
+ 'pool_size': [i for i in range(1, 16, 2)],
'strides': [i for i in range(2, 8)],
'padding': ['valid', 'same']},

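The switch to odd-only pool sizes pairs with the new align term added to the pooling shape math later in this commit (see layer.py and reshaper.py below): for odd kernels the adjusted formulas match Keras's documented output lengths, while even kernels would come out one short under 'valid' padding. A quick standalone check of that arithmetic, assuming Keras's rules of (n - k) // s + 1 for 'valid' and ceil(n / s) for 'same':

import math

def commit_formula(n, pool_size, strides, padding):
    # mirrors the updated calculate_shape arithmetic from this commit
    align = 1 if pool_size % 2 == 0 else 0
    if padding == 'same':
        return (n + 2 * (pool_size // 2) - pool_size) // strides + 1 - align
    return (n - pool_size) // strides + 1 - align

def keras_length(n, pool_size, strides, padding):
    # Keras's documented pooling output length
    return math.ceil(n / strides) if padding == 'same' else (n - pool_size) // strides + 1

# the new odd-only sizes, range(1, 16, 2), agree everywhere; even sizes would not
assert all(commit_formula(n, k, s, p) == keras_length(n, k, s, p)
           for n in range(16, 64) for k in range(1, 16, 2)
           for s in range(2, 8) for p in ('valid', 'same'))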
73 changes: 62 additions & 11 deletions neuvol/individs/individ_base.py
@@ -77,11 +77,12 @@ def _random_init(self):

def _random_init_architecture(self):
"""
Init structure of the individ
"""
if self._architecture:
self._architecture = []
input_layer = Layer('input', **self.options)
architecture = Structure(input_layer)

- return ...
+ return architecture

def _random_init_training(self):
"""
@@ -106,15 +107,16 @@ def _random_init_data_processing(self):
def layers_imposer(self, net_tail, head, layers_map, arch_map):
net = net_tail
source = head
+ print(source)

try:
target = arch_map[source]
except KeyError:
- return [net], ''
+ return net, ''

# if the next layer is a merger - return its net tail
if target[0][0] == 'm':
- return [net], target[0]
+ return net, target[0]

if len(target) > 1:
buffer_tails = {branch: layers_map[branch](net) for branch in target}
@@ -146,15 +148,16 @@ def layers_imposer(self, net_tail, head, layers_map, arch_map):

if 'f' in target[0]:
net = layers_map[target](net)
- return [net], target[0]
+ return net, target[0]

if len(target) == 1:
new_head = target[0]
- net = layers_map[target[0]](net[0])
+ net = layers_map[target[0]](net)
+ print(net.shape)

net, new_head = self.layers_imposer(net, new_head, layers_map, arch_map)

- return [net], new_head
+ return net, new_head

def init_tf_graph(self):
"""
@@ -164,9 +167,9 @@
raise Exception('Non initialized net')

starter = 'root'
- network_input = self._architecture.layers[starter]
+ network_input = self._architecture.layers[starter].init_layer()

- network_graph = self.layers_imposer(network_input, 'root', self._architecture.layers, self._architecture.tree)
+ network_graph, _ = self.layers_imposer(network_input, 'root', self._architecture.layers, self._architecture.tree)

model = Model(inputs=[network_input], outputs=[network_graph])
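The hunks above change layers_imposer to thread a bare Keras tensor instead of a one-element list, and init_tf_graph now unpacks the (net, head) pair and calls init_layer() on the root. A minimal standalone sketch of the single-successor traversal as it behaves after this change; treating arch_map as a dict from node name to successor list and layers_map as a dict of callables is an assumption read off the code above:

def impose(net, head, layers_map, arch_map):
    successors = arch_map.get(head)
    if not successors:
        # no successors: the branch is finished, hand back the bare tensor
        return net, ''
    if successors[0].startswith('m'):
        # merge nodes ('m...') are resolved by the caller, which first
        # collects the tails of every incoming branch
        return net, successors[0]
    # single successor: apply the layer and recurse on the bare tensor
    net = layers_map[successors[0]](net)
    return impose(net, successors[0], layers_map, arch_map)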

@@ -375,7 +378,7 @@ def random_init_architecture(self):
"""
Public method for calling the random architecture initialisation
"""
- return self._random_init_architecture()
+ self._random_init_architecture()

def random_init_data_processing(self):
"""
@@ -409,3 +412,51 @@ def architecture(self, architecture):
Set a new architecture
"""
self._architecture = architecture

+ def add_layer(self, layer, branch, branch_out=None):
+     self._architecture.add_layer(layer, branch, branch_out=branch_out)
+
+ def merge_branches(self, layer, branches=None):
+     self._architecture.merge_branches(layer, branches=branches)
+
+ def split_branch(self, left_layer, right_layer, branch):
+     self._architecture.split_branch(left_layer, right_layer, branch)
+
+ def recalculate_shapes(self):
+     self._architecture.recalculate_shapes()
+
+ @property
+ def branchs_end(self):
+     return self._architecture.branchs_end
+
+ @property
+ def tree(self):
+     return self._architecture.tree
+
+ @property
+ def layers(self):
+     return self._architecture.layers
+
+ @property
+ def layers_indexes(self):
+     return self._architecture.layers_indexes
+
+ @property
+ def layers_indexes_reverse(self):
+     return self._architecture.layers_indexes_reverse
+
+ @property
+ def layers_counter(self):
+     return self._architecture.layers_counter
+
+ @property
+ def finisher(self):
+     return self._architecture.finisher
+
+ @property
+ def current_depth(self):
+     return self._architecture.current_depth
+
+ @property
+ def branch_count(self):
+     return self._architecture.branch_count
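The block of forwarders added above turns the individ into a thin facade over its Structure, so mutation code never reaches into _architecture directly. A sketch of how a call site reads with the new surface; the constructor call is a placeholder, not the project's real signature:

individ = IndividText(stage=0, **options)               # hypothetical construction
individ.random_init()

individ.add_layer(Layer('dense', **options), branch=1)  # forwarded to Structure.add_layer
individ.recalculate_shapes()                            # forwarded to Structure
print(individ.branch_count, individ.current_depth)      # read-only properties, same forwarding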
18 changes: 9 additions & 9 deletions neuvol/individs/individ_text.py
@@ -12,7 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from .individ_base import IndividBase
- from ..layer.block import Block
+ from ..layer.block import Layer
+ from .structure import StructureText


class IndividText(IndividBase):
@@ -29,20 +30,19 @@ def _random_init_architecture(self):
At first, we set the probabilities pool, and then we change
this uniform distribution according to the previous layer
"""
- architecture = super()._random_init_architecture()
+ input_layer = Layer('input', **self.options)
+ embed = Layer('embedding', **self.options)

- # Push embedding for texts after input layer and before others
- block = Block('embedding', layers_number=1, **self.options)
- architecture.insert(1, block)
+ architecture = StructureText(input_layer, embed)

return architecture

def _random_init_data_processing(self):
+ if not self._architecture:
+     raise Exception('Not initialized yet')

"""
Init structure of the individ
"""
data_tmp = {}
- data_tmp['vocabular'] = self._architecture[1].config['vocabular']
+ data_tmp['vocabular'] = self._architecture.layers['embedding'].config['vocabular']
data_tmp['sentences_length'] = self.options.get('shape', [10])[0]
data_tmp['classes'] = self.options.get('classes', 2)

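The text individ now builds its embedding as a first-class Layer inside a StructureText instead of splicing a Block into a flat list, and preprocessing reads the vocabulary size from the named layer rather than from position 1. A rough sketch of the resulting flow under that reading:

input_layer = Layer('input', **options)    # shape and rank come from options
embed = Layer('embedding', **options)      # carries 'vocabular' in its config
architecture = StructureText(input_layer, embed)

# lookups are by name now, not by list index
vocab = architecture.layers['embedding'].config['vocabular']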
6 changes: 3 additions & 3 deletions neuvol/individs/structure/structure.py
@@ -76,14 +76,14 @@ def add_layer(self, layer, branch, branch_out=None):
# for instance, shape is already known for embedding, input, reshape layers
new_shape = layer.config.get('shape', None)
if new_shape is None:
- layer.config['rank'], layer.config['shape'] = calculate_shape(add_to_object, layer)
+ layer.config['rank'], layer.config['shape'] = layer.calculate_rank(add_to_object), layer.calculate_shape(add_to_object)

modifier_reshaper = reshaper(add_to_object, layer)
if modifier_reshaper is not None:
# now we want to connect new layer through the reshaper
add_to = self.add_layer(modifier_reshaper, branch, branch_out=branch_out)
add_to_object = self.layers[add_to]
- layer.config['rank'], layer.config['shape'] = calculate_shape(add_to_object, layer)
+ layer.config['rank'], layer.config['shape'] = layer.calculate_rank(add_to_object), layer.calculate_shape(add_to_object)

# if not None - we want to create a new branch
if branch_out is not None:
@@ -229,7 +229,7 @@ def __init__(self, root, embedding):
self.tree['root'] = ['embedding']
self.branchs_end[1] = 'embedding'

- embedding.config['rank'], embedding.config['shape'] = calculate_shape(root, embedding)
+ embedding.config['rank'], embedding.config['shape'] = embedding.calculate_rank(root), embedding.calculate_shape(root)
self.layers['root'] = root
self.layers['embedding'] = embedding

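Rank and shape propagation move from the free calculate_shape(prev_layer, layer) helper onto the layer objects themselves, so each subclass owns its rule and structure.py no longer needs type-specific branches. A toy illustration of the dispatch pattern (not the project's real classes):

class ToyLayerBase:
    def calculate_rank(self, prev):
        # default rule: rank is inherited from the predecessor
        return prev.config['rank']

class ToyFlatten(ToyLayerBase):
    def calculate_rank(self, prev):
        # flatten collapses everything after the batch axis
        return 2

# the structure code stays generic either way:
#     layer.config['rank'] = layer.calculate_rank(prev_layer)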
92 changes: 54 additions & 38 deletions neuvol/layer/layer.py
@@ -21,37 +21,12 @@
from ..utils import dump


- class Layer:
-     """
-     Factory of layers instances
-     """
-     def __init__(self, layer_type=None, previous_layer=None, next_layer=None, **kwargs):
-         if layer_type == 'input':
-             return LayerInput(layer_type=None, previous_layer=None, next_layer=None, **kwargs)
-         elif layer_type == 'lstm':
-             return LayerLSTM(layer_type=None, previous_layer=None, next_layer=None, **kwargs)
-         elif layer_type == 'bi':
-             return LayerBiLSTM(layer_type=None, previous_layer=None, next_layer=None, **kwargs)
-         elif layer_type == 'cnn':
-             return LayerCNN1D(layer_type=None, previous_layer=None, next_layer=None, **kwargs)
-         elif layer_type == 'cnn2':
-             return LayerCNN2D(layer_type=None, previous_layer=None, next_layer=None, **kwargs)
-         elif layer_type == 'max_pool':
-             return LayerMaxPool1D(layer_type=None, previous_layer=None, next_layer=None, **kwargs)
-         elif layer_type == 'max_pool2':
-             return LayerMaxPool2D(layer_type=None, previous_layer=None, next_layer=None, **kwargs)
-         elif layer_type == 'dense':
-             return LayerDense(layer_type=None, previous_layer=None, next_layer=None, **kwargs)
-         elif layer_type == 'embedding':
-             return LayerEmbedding(layer_type=None, previous_layer=None, next_layer=None, **kwargs)
-         elif layer_type == 'flatten':
-             return LayerFlatten(layer_type=None, previous_layer=None, next_layer=None, **kwargs)
-         elif layer_type == 'concat':
-             return LayerConcat(layer_type=None, previous_layer=None, next_layer=None, **kwargs)
-         elif layer_type == 'reshape':
-             return LayerReshape(layer_type=None, previous_layer=None, next_layer=None, **kwargs)
-         else:
-             raise TypeError()
+ def Layer(layer_type=None, previous_layer=None, next_layer=None, **kwargs):
+     print(layer_type)
+     if layer_type in LAYERS_MAP:
+         return LAYERS_MAP[layer_type](layer_type=layer_type, previous_layer=previous_layer, next_layer=next_layer, **kwargs)
+     else:
+         raise TypeError()
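The table-driven factory replaces the if/elif ladder and fixes a latent bug on the way out: every branch of the removed class passed layer_type=None to the constructors (and returning an instance from __init__ would raise a TypeError at runtime anyway), while the new function forwards the real arguments. Defining LAYERS_MAP at the bottom of the module works because the name is only resolved when Layer() is called. A usage sketch, with prev and options standing in for real objects:

dense = Layer('dense', previous_layer=prev, **options)  # -> LayerDense instance
Layer('made_up_type')                                   # -> raises TypeError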


class LayerBase:
@@ -81,6 +56,9 @@ def _init_parameters(self):
for parameter in variables:
self.config[parameter] = Distribution.layer_parameters(self.type, parameter)

+ def _check_compatibility(self):
+     pass

def calculate_shape(self, previous_layer):
"""
Shape calculator for the output
@@ -232,6 +210,14 @@ def init_layer(self):

return layer_tf

+ def _check_compatibility(self):
+     super()._check_compatibility()
+     if self.config['dilation_rate'] > 1:
+         self.config['strides'] = 1
+
+     elif self.config['dilation_rate'] == 1 and self.config['padding'] == 'causal':
+         self.config['padding'] = 'same'
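The new hook encodes two rules: strided convolution is mutually exclusive with dilation (a real Keras constraint), and non-dilated 'causal' padding is rewritten to 'same' here. A sketch of the first rule; recent Keras builds raise at construction time, older ones at call time:

from tensorflow.keras.layers import Conv1D  # assuming the TF-bundled Keras

try:
    # the incompatible pair that _check_compatibility resets strides to avoid
    Conv1D(filters=8, kernel_size=3, strides=2, dilation_rate=2)
except ValueError as err:
    print(err)  # strides > 1 is incompatible with dilation_rate > 1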

def calculate_shape(self, previous_layer):
previous_shape = previous_layer.shape
filters = self.config['filters']
@@ -257,14 +243,15 @@ def calculate_shape(self, previous_layer):
out = [((i - kernel_size + (2 * (kernel_size // 2))) // strides + 1 - align) for i in previous_shape[1:-1]]

elif padding == 'causal':
- out = [(i - kernel_size - (kernel_size - 1) * (dilation_rate - 1)) // strides + 1 - align
-        for i in previous_shape[1:-1]]
+ out = [i for i in previous_shape[1:-1]]
+ # out = [(i - kernel_size - (kernel_size - 1) * (dilation_rate - 1)) // strides + 1 - align
+ #        for i in previous_shape[1:-1]]

for i in out:
# if some dimension of the layer is too small - change the padding
if i <= 0:
self.config['padding'] = 'same'
- shape = self.calculate_shape(previous_shape)
+ shape = self.calculate_shape(previous_layer)
return shape

shape = (None, *out, filters)
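The 'causal' branch now simply copies the spatial dims. That matches Keras's causal padding when strides is 1, which the compatibility hook above guarantees whenever dilation is active: the input is left-padded by (kernel_size - 1) * dilation_rate, exactly offsetting the dilated kernel's reach. A quick check of that identity under those assumptions:

def causal_out_len(n, kernel_size, dilation_rate, strides=1):
    pad = (kernel_size - 1) * dilation_rate          # left padding added by Keras
    effective_kernel = (kernel_size - 1) * dilation_rate + 1
    return (n + pad - effective_kernel) // strides + 1

assert all(causal_out_len(n, k, d) == n
           for n in range(8, 64) for k in (1, 3, 5, 7) for d in (1, 2, 4))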
@@ -300,11 +287,23 @@ def calculate_shape(self, previous_layer):
kernel_size = self.config['pool_size']
strides = self.config['strides']
padding = self.config['padding']
+ if kernel_size % 2 == 0:
+     align = 1
+ else:
+     align = 0

if padding == 'same':
- out = [((i + 2*(kernel_size // 2) - kernel_size) // strides + 1) for i in previous_shape[1:-1]]
+ out = [((i + 2*(kernel_size // 2) - kernel_size) // strides + 1 - align) for i in previous_shape[1:-1]]
else:
- out = [((i - kernel_size) // strides + 1) for i in previous_shape[1:-1]]
+ print('WOPWOWPOW')
+ out = [((i - kernel_size) // strides + 1 - align) for i in previous_shape[1:-1]]

+ for i in out:
+     # if some dimension of the layer is too small - change the padding
+     if i <= 0:
+         self.config['padding'] = 'same'
+         shape = self.calculate_shape(previous_layer)
+         return shape

shape = (None, *out, previous_shape[-1])

@@ -347,7 +346,7 @@ def calculate_shape(self, previous_layer):
class LayerInput(LayerBase):
def _init_parameters(self):
self.config['shape'] = self.options['shape']
- self.config['rank'] = self.options['rank']
+ self.config['rank'] = len(self.options['shape']) + 1

def init_layer(self):
layer_tf = Input(
@@ -358,7 +357,7 @@ def init_layer(self):

class LayerEmbedding(LayerSpecialBase):
def _init_parameters(self):
- return super()._init_parameters()
+ super()._init_parameters()
self.config['sentences_length'] = self.options['shape'][0]

def init_layer(self):
@@ -414,3 +413,20 @@ def init_layer(self):
layer_tf = Dropout(rate=self.config['rate'])

return layer_tf


+ LAYERS_MAP = {
+     'input': LayerInput,
+     'lstm': LayerLSTM,
+     'bi': LayerBiLSTM,
+     'cnn': LayerCNN1D,
+     'cnn2': LayerCNN2D,
+     'max_pool': LayerMaxPool1D,
+     'max_pool2': LayerMaxPool2D,
+     'dense': LayerDense,
+     'embedding': LayerEmbedding,
+     'flatten': LayerFlatten,
+     'concat': LayerConcat,
+     'reshape': LayerReshape,
+     'dropout': LayerDropout
+ }
11 changes: 7 additions & 4 deletions neuvol/layer/reshaper.py
@@ -79,11 +79,15 @@ def calculate_shape(prev_layer, layer):
kernel_size = layer.config['pool_size']
strides = layer.config['strides']
padding = layer.config['padding']
+ if kernel_size % 2 == 0:
+     align = 1
+ else:
+     align = 0

if padding == 'same':
- out = [((i + 2*(kernel_size // 2) - kernel_size) // strides + 1) for i in prev_shape[1:-1]]
+ out = [((i + 2*(kernel_size // 2) - kernel_size) // strides + 1 - align) for i in prev_shape[1:-1]]
else:
- out = [((i - kernel_size) // strides + 1) for i in prev_shape[1:-1]]
+ out = [((i - kernel_size) // strides + 1 - align) for i in prev_shape[1:-1]]

shape = (None, *out, prev_shape[-1])

@@ -120,11 +124,10 @@ def reshaper_shape(difference, prev_layer):
# difference + 1
# (None, 12, 124, 124, 10) -> rank 2 -> (None, 12*124*124*10)
# (None, 12, 124, 124, 10) -> rank 4 -> (None, 12, 124*124, 10)
- print(prev_layer.config['shape'], difference)
new_shape = (
None,
*prev_layer.config['shape'][1:-(difference + 2)],
- np.prod(prev_layer.config['shape'][-(difference + 1): -1]),
+ np.prod(prev_layer.config['shape'][-(difference + 2): -1]),
prev_layer.config['shape'][-1])

elif difference < 0:
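A worked check of the corrected slice, reusing the example from the comment above (difference = 1 when collapsing rank 5 to rank 4):

import numpy as np

shape = (None, 12, 124, 124, 10)
difference = 1
new_shape = (None,
             *shape[1:-(difference + 2)],           # (12,)
             np.prod(shape[-(difference + 2):-1]),  # 124 * 124 = 15376
             shape[-1])                             # 10
print(new_shape)  # (None, 12, 15376, 10): rank drops by one, element count preserved
# the old slice [-(difference + 1):-1] multiplied only the last spatial dim,
# silently dropping one 124 from the product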