Format code
Dobiasd committed Mar 9, 2019
1 parent aad42ba commit 2379597
Showing 2 changed files with 20 additions and 18 deletions.
10 changes: 6 additions & 4 deletions keras_export/convert_model.py
@@ -385,7 +385,8 @@ def show_gru_layer(layer):
 
 
 def transform_cudnn_weights(input_weights, recurrent_weights, n_gates):
-    return transform_kernels(input_weights, n_gates, transform_input_kernel), transform_kernels(recurrent_weights, n_gates, transform_recurrent_kernel)
+    return transform_kernels(input_weights, n_gates, transform_input_kernel), \
+        transform_kernels(recurrent_weights, n_gates, transform_recurrent_kernel)
 
 
 def show_cudnn_lstm_layer(layer):
@@ -440,16 +441,17 @@ def get_transform_func(layer):
     return input_transform_func, recurrent_transform_func, bias_transform_func
 
 
-
 def show_bidirectional_layer(layer):
     """Serialize Bidirectional layer to dict"""
     forward_weights = layer.forward_layer.get_weights()
     assert len(forward_weights) == 2 or len(forward_weights) == 3
-    forward_input_transform_func, forward_recurrent_transform_func, forward_bias_transform_func = get_transform_func(layer.forward_layer)
+    forward_input_transform_func, forward_recurrent_transform_func, forward_bias_transform_func = get_transform_func(
+        layer.forward_layer)
 
     backward_weights = layer.backward_layer.get_weights()
     assert len(backward_weights) == 2 or len(backward_weights) == 3
-    backward_input_transform_func, backward_recurrent_transform_func, backward_bias_transform_func = get_transform_func(layer.backward_layer)
+    backward_input_transform_func, backward_recurrent_transform_func, backward_bias_transform_func = get_transform_func(
+        layer.backward_layer)
 
     result = {'forward_weights': encode_floats(forward_input_transform_func(forward_weights[0])),
               'forward_recurrent_weights': encode_floats(forward_recurrent_transform_func(forward_weights[1])),
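
Note: transform_kernels, transform_input_kernel, and transform_recurrent_kernel are defined elsewhere in convert_model.py and are not touched by this commit. As a rough, illustrative sketch only (assumed behavior, not the file's actual code), helpers of this kind split the concatenated per-gate CuDNN kernels, transform each gate's block, and re-concatenate:

import numpy as np

def transform_kernels(kernels, n_gates, transform):
    # Split the concatenated kernel matrix into one block per gate,
    # apply the given transform to each block, and stitch them back together.
    return np.hstack([transform(k) for k in np.hsplit(kernels, n_gates)])

def transform_input_kernel(kernel):
    return kernel.T  # illustrative transform; the real one depends on CuDNN's weight layout

def transform_recurrent_kernel(kernel):
    return kernel.T  # illustrative transform

# Example with hypothetical sizes: 4 gates (LSTM), input_dim=5, units=3
input_w = np.random.rand(5, 4 * 3)
recurrent_w = np.random.rand(3, 4 * 3)
new_input_w = transform_kernels(input_w, 4, transform_input_kernel)
new_recurrent_w = transform_kernels(recurrent_w, 4, transform_recurrent_kernel)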
28 changes: 14 additions & 14 deletions keras_export/generate_test_models.py
@@ -347,12 +347,12 @@ def get_test_model_lstm():
     inputs = [Input(shape=s) for s in input_shapes]
     outputs = []
 
-    for input in inputs:
+    for inp in inputs:
         lstm_sequences = LSTM(
             units=8,
             recurrent_activation='relu',
             return_sequences=True
-        )(input)
+        )(inp)
         lstm_regular = LSTM(
             units=3,
             recurrent_activation='sigmoid',
@@ -366,7 +366,7 @@ def get_test_model_lstm():
                 recurrent_activation='hard_sigmoid',
                 return_sequences=True
             )
-        )(input)
+        )(inp)
         lstm_bidi = Bidirectional(
             LSTM(
                 units=6,
@@ -381,21 +381,21 @@ def get_test_model_lstm():
             # run GPU-enabled mode if GPU is available
             lstm_gpu_regular = keras.layers.CuDNNLSTM(
                 units=3
-            )(input)
+            )(inp)
 
             lstm_gpu_bidi = Bidirectional(
                 keras.layers.CuDNNLSTM(
                     units=3
                 )
-            )(input)
+            )(inp)
         else:
             # fall back to equivalent regular LSTM for CPU-only mode
             lstm_gpu_regular = LSTM(
                 units=3,
                 activation='tanh',
                 recurrent_activation='sigmoid',
                 use_bias=True
-            )(input)
+            )(inp)
 
             lstm_gpu_bidi = Bidirectional(
                 LSTM(
@@ -404,7 +404,7 @@ def get_test_model_lstm():
                     units=3,
                     activation='tanh',
                     recurrent_activation='sigmoid',
-            )(input)
+            )(inp)
         outputs.append(lstm_gpu_regular)
         outputs.append(lstm_gpu_bidi)

@@ -430,14 +430,14 @@ def get_test_model_gru():
     inputs = [Input(shape=s) for s in input_shapes]
     outputs = []
 
-    for input in inputs:
+    for inp in inputs:
         gru_sequences = GRU(
             units=8,
             recurrent_activation='relu',
             reset_after=True,
             return_sequences=True,
             use_bias=True
-        )(input)
+        )(inp)
         gru_regular = GRU(
             units=3,
             recurrent_activation='sigmoid',
@@ -455,7 +455,7 @@ def get_test_model_gru():
                 return_sequences=True,
                 use_bias=True
             )
-        )(input)
+        )(inp)
         gru_bidi = Bidirectional(
             GRU(
                 units=6,
@@ -472,13 +472,13 @@ def get_test_model_gru():
             # run GPU-enabled mode if GPU is available
             gru_gpu_regular = keras.layers.CuDNNGRU(
                 units=3
-            )(input)
+            )(inp)
 
             gru_gpu_bidi = Bidirectional(
                 keras.layers.CuDNNGRU(
                     units=3
                 )
-            )(input)
+            )(inp)
         else:
             # fall back to equivalent regular GRU for CPU-only mode
             gru_gpu_regular = GRU(
@@ -487,7 +487,7 @@ def get_test_model_gru():
                 units=3,
                 recurrent_activation='sigmoid',
                 reset_after=True,
                 use_bias=True
-            )(input)
+            )(inp)
 
             gru_gpu_bidi = Bidirectional(
                 GRU(
@@ -497,7 +497,7 @@ def get_test_model_gru():
                     reset_after=True,
                     use_bias=True
                 )
-            )(input)
+            )(inp)
         outputs.append(gru_gpu_regular)
         outputs.append(gru_gpu_bidi)

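Note: the "# run GPU-enabled mode if GPU is available" branches above sit inside a GPU check that this diff does not show. A hypothetical version of such a guard, using the TensorFlow 1.x test API (the script's actual check may differ):

import tensorflow as tf

def gpu_is_available():
    # Returns True when a CUDA-capable GPU is visible, which is what the
    # CuDNNLSTM/CuDNNGRU layers require; otherwise the plain LSTM/GRU
    # fallback with activation='tanh', recurrent_activation='sigmoid',
    # use_bias=True mirrors the fixed CuDNN configuration.
    return tf.test.is_gpu_available()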
