From 277f31a1d7b2600c2d87106f812cfecaa3a3d125 Mon Sep 17 00:00:00 2001
From: dorhar
Date: Mon, 1 Feb 2021 13:30:35 +0100
Subject: [PATCH] convert softmax lambda to keras layer

---
 onnx2keras/activation_layers.py | 9 ++-------
 1 file changed, 2 insertions(+), 7 deletions(-)

diff --git a/onnx2keras/activation_layers.py b/onnx2keras/activation_layers.py
index 78a585a6..55be903b 100644
--- a/onnx2keras/activation_layers.py
+++ b/onnx2keras/activation_layers.py
@@ -141,14 +141,9 @@ def convert_softmax(node, params, layers, lambda_func, node_name, keras_name):
 
     input_0 = ensure_tf_type(layers[node.input[0]], name="%s_const" % keras_name)
 
-    def target_layer(x, axis=params['axis']):
-        import tensorflow as tf
-        return tf.nn.softmax(x, axis=axis)
-
-    lambda_layer = keras.layers.Lambda(target_layer, name=keras_name)
-    layers[node_name] = lambda_layer(input_0)
+    softmax_layer = keras.layers.Softmax(axis=params['axis'], name=keras_name)
+    layers[node_name] = softmax_layer(input_0)
     layers[node_name].set_shape(layers[node_name].shape)
-    lambda_func[keras_name] = target_layer
 
 
 def convert_prelu(node, params, layers, lambda_func, node_name, keras_name):
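
For reference, a minimal sketch (not part of the patch) of why the swap is safe: keras.layers.Softmax computes a softmax along the given axis, so the built-in layer should reproduce the output of the removed tf.nn.softmax Lambda. The axis value below is a hypothetical stand-in for params['axis'].

```python
# Sanity check: built-in Softmax layer vs. the tf.nn.softmax Lambda it replaces.
import numpy as np
import tensorflow as tf
from tensorflow import keras

x = np.random.rand(2, 3, 4).astype("float32")
axis = 1  # hypothetical stand-in for params['axis']

lambda_out = keras.layers.Lambda(lambda t: tf.nn.softmax(t, axis=axis))(x)  # old path
layer_out = keras.layers.Softmax(axis=axis)(x)                              # new path

np.testing.assert_allclose(lambda_out.numpy(), layer_out.numpy(), rtol=1e-6)
print("Lambda and Softmax layer outputs match")
```

A side effect of the change, visible in the diff, is that the converter no longer needs to register the closure in lambda_func (the `lambda_func[keras_name] = target_layer` line is dropped), since a built-in Keras layer serializes and reloads without carrying custom Python code.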