When converting a SimpleRNN from Keras using relay.frontend.from_keras, _convert_simple_rnn() throws an error for any model with more than one timestep.
Steps to reproduce:
inp = keras.layers.Input(shape=(2, 28))
out = keras.layers.SimpleRNN(28, return_sequences=True)(inp)
model = keras.models.Model(inp, out)
model.summary()
mod, params = tvm.relay.frontend.from_keras(model, {'input_1': (1, 2, 28)})
A simple fix is to split the input across the timesteps, similar to how the _convert_lstm() function handles them.
def _convert_simple_rnn(inexpr, keras_layer, etab):
    """Convert a Keras SimpleRNN layer to Relay operations.

    Unrolls the recurrence over the time dimension, applying the cell once
    per timestep (mirroring ``_convert_lstm``) instead of assuming a single
    step — the previous implementation failed for any model with more than
    one timestep.

    Parameters
    ----------
    inexpr : relay.Expr or list of relay.Expr
        The layer input. When not a list, a zero-initialized hidden state
        is created and paired with it as ``[data, state]``.
    keras_layer : keras.layers.SimpleRNN
        The layer being converted; supplies the weights, unit count and
        activation.
    etab : ExprTable
        Expression table used to register weight/state constants.

    Returns
    -------
    list of relay.Expr
        ``[output, output]`` — the final hidden state reshaped to the
        layer's output shape (duplicated so it can also serve as the state
        fed into a following recurrent layer).
    """
    _check_data_format(keras_layer)
    if not isinstance(inexpr, list):
        # No explicit initial state was supplied: start from zeros.
        buf = np.zeros((1, keras_layer.units), 'float32')
        prev_op = etab.new_const(buf)
        inexpr = [inexpr, prev_op]
    in_data = inexpr[0]
    output = inexpr[1]
    # Replace dynamic (None) dims with 1 so the shape is fully static.
    in_shape = tuple(dim if dim else 1 for dim in _as_list(keras_layer.input_shape)[0])
    weight_list = keras_layer.get_weights()
    # Keras stores kernels as (input_dim, units); nn.dense expects
    # (units, input_dim), hence the transpose.
    kernel_weight = etab.new_const(weight_list[0].transpose([1, 0]))
    recurrent_weight = etab.new_const(weight_list[1].transpose([1, 0]))
    in_bias = etab.new_const(weight_list[2])
    units = list(weight_list[0].shape)[1]
    time_steps = in_shape[1]
    # Drop the batch axis (assumes batch size 1 — TODO confirm) and split
    # the remaining (time, features) tensor into one slice per timestep.
    in_data = _op.squeeze(in_data, axis=[0])
    in_data = _op.split(in_data, indices_or_sections=time_steps, axis=0)
    # Unroll the recurrence: h_t = act(x_t @ W + h_{t-1} @ U + b).
    for data in in_data:
        ixh1 = _op.nn.dense(data, kernel_weight, units=units)
        ixh2 = _op.nn.bias_add(
            _op.nn.dense(output, recurrent_weight, units=units), bias=in_bias
        )
        output = ixh1 + ixh2
        output = _convert_activation(output, keras_layer, None)
    out_shape = tuple(dim if dim else 1 for dim in _as_list(keras_layer.output_shape)[0])
    output = _op.reshape(output, newshape=out_shape)
    return [output, output]