Export a text_classifier_learner to ONNX

Hello everyone,

I tried to export a text_classifier_learner to ONNX. It’s an intent classification model.
When I tried to export the model, I got this error:
ValueError: too many values to unpack (expected 2)
(Full stack trace at the end)

torch_out = torch.onnx.export(learn.model, dummy_input, "/content/model.onnx", export_params=True)

Does anybody know how to export a text_classifier_learner model to ONNX?

If that’s not possible, I’m looking for a way to export the model for use with TensorFlow.js. If you’ve got any leads, I’m all ears.

Full stack trace:

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-48-68dbfcc0ca5f> in <module>()
----> 1 torch_out = torch.onnx.export(learn.model, dummy_input, "/content/model.onnx", export_params=True)

14 frames
/usr/local/lib/python3.6/dist-packages/torch/onnx/__init__.py in export(model, args, f, export_params, verbose, training, input_names, output_names, aten, export_raw_ir, operator_export_type, opset_version, _retain_param_name, do_constant_folding, example_outputs, strip_doc_string, dynamic_axes, keep_initializers_as_inputs, custom_opsets, enable_onnx_checker, use_external_data_format)
    166                         do_constant_folding, example_outputs,
    167                         strip_doc_string, dynamic_axes, keep_initializers_as_inputs,
--> 168                         custom_opsets, enable_onnx_checker, use_external_data_format)
    169 
    170 

/usr/local/lib/python3.6/dist-packages/torch/onnx/utils.py in export(model, args, f, export_params, verbose, training, input_names, output_names, aten, export_raw_ir, operator_export_type, opset_version, _retain_param_name, do_constant_folding, example_outputs, strip_doc_string, dynamic_axes, keep_initializers_as_inputs, custom_opsets, enable_onnx_checker, use_external_data_format)
     67             dynamic_axes=dynamic_axes, keep_initializers_as_inputs=keep_initializers_as_inputs,
     68             custom_opsets=custom_opsets, enable_onnx_checker=enable_onnx_checker,
---> 69             use_external_data_format=use_external_data_format)
     70 
     71 

/usr/local/lib/python3.6/dist-packages/torch/onnx/utils.py in _export(model, args, f, export_params, verbose, training, input_names, output_names, operator_export_type, export_type, example_outputs, propagate, opset_version, _retain_param_name, do_constant_folding, strip_doc_string, dynamic_axes, keep_initializers_as_inputs, fixed_batch_size, custom_opsets, add_node_names, enable_onnx_checker, use_external_data_format)
    486                                                         example_outputs, propagate,
    487                                                         _retain_param_name, val_do_constant_folding,
--> 488                                                         fixed_batch_size=fixed_batch_size)
    489 
    490         # TODO: Don't allocate a in-memory string for the protobuf

/usr/local/lib/python3.6/dist-packages/torch/onnx/utils.py in _model_to_graph(model, args, verbose, training, input_names, output_names, operator_export_type, example_outputs, propagate, _retain_param_name, do_constant_folding, _disable_torch_constant_prop, fixed_batch_size)
    332             model.graph, tuple(in_vars), False, propagate)
    333     else:
--> 334         graph, torch_out = _trace_and_get_graph_from_model(model, args, training)
    335         state_dict = _unique_state_dict(model)
    336         params = list(state_dict.values())

/usr/local/lib/python3.6/dist-packages/torch/onnx/utils.py in _trace_and_get_graph_from_model(model, args, training)
    289     with set_training(model, training):
    290         trace_graph, torch_out, inputs_states = \
--> 291             torch.jit._get_trace_graph(model, args, _force_outplace=False, _return_inputs_states=True)
    292         warn_on_static_input_change(inputs_states)
    293 

/usr/local/lib/python3.6/dist-packages/torch/jit/__init__.py in _get_trace_graph(f, args, kwargs, _force_outplace, return_inputs, _return_inputs_states)
    276     if not isinstance(args, tuple):
    277         args = (args,)
--> 278     outs = ONNXTracedModule(f, _force_outplace, return_inputs, _return_inputs_states)(*args, **kwargs)
    279     return outs
    280 

/usr/local/lib/python3.6/dist-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
    548             result = self._slow_forward(*input, **kwargs)
    549         else:
--> 550             result = self.forward(*input, **kwargs)
    551         for hook in self._forward_hooks.values():
    552             hook_result = hook(self, input, result)

/usr/local/lib/python3.6/dist-packages/torch/jit/__init__.py in forward(self, *args)
    359             in_vars + module_state,
    360             _create_interpreter_name_lookup_fn(),
--> 361             self._force_outplace,
    362         )
    363 

/usr/local/lib/python3.6/dist-packages/torch/jit/__init__.py in wrapper(*args)
    346             if self._return_inputs_states:
    347                 inputs_states.append(_unflatten(args[:len(in_vars)], in_desc))
--> 348             outs.append(self.inner(*trace_inputs))
    349             if self._return_inputs_states:
    350                 inputs_states[0] = (inputs_states[0], trace_inputs)

/usr/local/lib/python3.6/dist-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
    546                 input = result
    547         if torch._C._get_tracing_state():
--> 548             result = self._slow_forward(*input, **kwargs)
    549         else:
    550             result = self.forward(*input, **kwargs)

/usr/local/lib/python3.6/dist-packages/torch/nn/modules/module.py in _slow_forward(self, *input, **kwargs)
    532                 recording_scopes = False
    533         try:
--> 534             result = self.forward(*input, **kwargs)
    535         finally:
    536             if recording_scopes:

/usr/local/lib/python3.6/dist-packages/torch/nn/modules/container.py in forward(self, input)
     98     def forward(self, input):
     99         for module in self:
--> 100             input = module(input)
    101         return input
    102 

/usr/local/lib/python3.6/dist-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
    546                 input = result
    547         if torch._C._get_tracing_state():
--> 548             result = self._slow_forward(*input, **kwargs)
    549         else:
    550             result = self.forward(*input, **kwargs)

/usr/local/lib/python3.6/dist-packages/torch/nn/modules/module.py in _slow_forward(self, *input, **kwargs)
    532                 recording_scopes = False
    533         try:
--> 534             result = self.forward(*input, **kwargs)
    535         finally:
    536             if recording_scopes:

/usr/local/lib/python3.6/dist-packages/fastai/text/learner.py in forward(self, input)
    259 
    260     def forward(self, input:LongTensor)->Tuple[List[Tensor],List[Tensor],Tensor]:
--> 261         bs,sl = input.size()
    262         self.reset()
    263         raw_outputs,outputs,masks = [],[],[]

ValueError: too many values to unpack (expected 2)

You may get a better response if you post the full stack trace for the error 🙂


Hello again,

I managed to export the model to ONNX. The ValueError came from the model’s forward pass, which unpacks bs,sl = input.size() (visible at the bottom of the trace) and therefore expects a 2-D LongTensor of token IDs with shape (batch_size, sequence_length). With the following changes the export went through:

import torch

# The model's forward expects a 2-D LongTensor of token IDs with shape
# (batch_size, sequence_length); random_(28) fills it with random indices in [0, 28).
dummy_input = torch.empty(1, 512, dtype=torch.long, requires_grad=False).random_(28).cuda()
torch.onnx.export(learn.model, dummy_input, "/content/model.onnx", verbose=True, export_params=True, operator_export_type=torch.onnx.OperatorExportTypes.ONNX_ATEN_FALLBACK)
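
If you want to sanity-check the export, one way (a quick sketch, assuming the onnx Python package is installed) is to load the file back and inspect which ops ended up in the graph:

import onnx

# Load the exported model and run the structural checker
model = onnx.load("/content/model.onnx")
onnx.checker.check_model(model)

# List the op types in the graph. With ONNX_ATEN_FALLBACK, anything the
# exporter couldn't map to standard ONNX shows up as an ATen op, which only
# runtimes with ATen support can execute.
print(sorted({node.op_type for node in model.graph.node}))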

Unfortunately, ONNX.js does not support the LSTM op yet, so the model cannot be loaded in the browser. There’s an open issue tracking this: https://github.com/microsoft/onnxjs/issues/167
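
For the TensorFlow.js route, the path I’d try next is going ONNX → TensorFlow with onnx-tf and then running tensorflowjs_converter on the result. This is an untested sketch (the ATen fallback ops above will most likely fail to convert, and the paths are placeholders):

import onnx
from onnx_tf.backend import prepare

# Convert the ONNX graph to a TensorFlow representation and export it to disk
onnx_model = onnx.load("/content/model.onnx")
tf_rep = prepare(onnx_model)
tf_rep.export_graph("/content/model_tf")

Then, depending on your onnx-tf and tensorflowjs versions, something like:

tensorflowjs_converter --input_format=tf_saved_model /content/model_tf /content/model_tfjs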