_unittests/ut_onnxrt/test_onnx_inference.py (2 changes: 1 addition & 1 deletion)
@@ -139,7 +139,7 @@ def test_onnx_inference_verbose_intermediate(self):
         model_def = to_onnx(clr, X_train.astype(numpy.float32))
         for runtime in ['python', 'python_compiled']:
             with self.subTest(runtime=runtime):
-                oinf = OnnxInference(model_def)
+                oinf = OnnxInference(model_def, inplace=False)
                 buf = BufferedPrint()
                 got = oinf.run({'X': X_test.astype(numpy.float32)},
                                verbose=15, fLOG=buf.fprint,
_unittests/ut_onnxrt/test_onnxrt_onnxruntime_runtime_.py (2 changes: 1 addition & 1 deletion)
@@ -42,7 +42,7 @@ def test_onnxt_runtime_add(self):
         self.assertEqual(list(sorted(got)), ['Y1'])
         self.assertEqualArray(idi + X, got['Y1'], decimal=6)

-        oinf = OnnxInference(model_def, runtime='onnxruntime1')
+        oinf = OnnxInference(model_def, runtime='onnxruntime1', inplace=False)
         got = oinf.run({'X': X}, intermediate=True)
         self.assertEqual(list(sorted(got)), ['Ad_Addcst', 'X', 'Y1'])
         self.assertEqualArray(idi + X, got['Y1'], decimal=6)
_unittests/ut_onnxrt/test_onnxrt_side_by_side.py (7 changes: 4 additions & 3 deletions)
@@ -84,7 +84,7 @@ def test_kernel_ker2_def(self):
             inputs=[('X', FloatTensorType([None, None]))],
             outputs=[('Y', FloatTensorType([None, None]))],
             target_opset=get_opset_number_from_onnx())
-        sess = OnnxInference(model_onnx.SerializeToString())
+        sess = OnnxInference(model_onnx.SerializeToString(), inplace=False)

         res = sess.run({'X': Xtest_.astype(numpy.float32)})
         m1 = res['Y']
@@ -182,8 +182,9 @@ def myprint(*args, **kwargs):
         self.assertEqual(sbs[1]['order[0]'], 0)

         sess3 = _capture_output(
-            lambda: OnnxInference(model_onnx.SerializeToString(),
-                                  runtime="onnxruntime2"), 'c')[0]
+            lambda: OnnxInference(
+                model_onnx.SerializeToString(), runtime="onnxruntime2",
+                inplace=False), 'c')[0]
         try:
             sbs = side_by_side_by_values(
                 [cpu, sess, sess3], inputs={'X': Xtest_.astype(numpy.float32)})
_unittests/ut_tools/test_onnx_manipulations.py (4 changes: 2 additions & 2 deletions)
@@ -361,8 +361,8 @@ def test_insert_results_into_onnx(self):
         # with open('debug.onnx', 'wb') as f:
         #     f.write(new_graph.SerializeToString())

-        oinf1 = OnnxInference(model_def)
-        oinf2 = OnnxInference(new_graph)
+        oinf1 = OnnxInference(model_def, inplace=False)
+        oinf2 = OnnxInference(new_graph, inplace=False)
         cst = numpy.array([[5.6, 7.8]])
         self.assertEqualArray(oinf1.run({'X': cst})['Z'],
                               oinf2.run({'X': cst})['Z'])
_unittests/ut_tools/test_zoo.py (2 changes: 1 addition & 1 deletion)
@@ -45,7 +45,7 @@ def test_verify_side_by_side(self):
             return
         oinf2 = OnnxInference(link, runtime="python", inplace=False)
         oinf2 = oinf2.build_intermediate('474')['474']
-        oinf1 = OnnxInference(link, runtime="onnxruntime1")
+        oinf1 = OnnxInference(link, runtime="onnxruntime1", inplace=False)
         oinf1 = oinf1.build_intermediate('474')['474']
         inputs = {'input': data['test_data_set_0']['in']['input_0']}
         rows = side_by_side_by_values([oinf1, oinf2], inputs=inputs)
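The side-by-side tests above build one OnnxInference instance per runtime, and both now pass inplace=False so that side_by_side_by_values can read every intermediate container. A minimal sketch of that pattern; model_def and X are placeholders for any ONNX model and a matching input, and the import path of side_by_side_by_values is an assumption, not confirmed by this diff:

    from mlprodict.onnxrt import OnnxInference
    # import path assumed from mlprodict's validate helpers
    from mlprodict.onnxrt.validate.side_by_side import side_by_side_by_values

    # model_def / X: placeholders, substitute any ONNX model and a matching input
    oinf_py = OnnxInference(model_def, runtime="python", inplace=False)
    oinf_ort = OnnxInference(model_def, runtime="onnxruntime1", inplace=False)
    rows = side_by_side_by_values([oinf_py, oinf_ort], inputs={'X': X})
    for row in rows:
        print(row)  # one entry per intermediate result, compared across runtimes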
mlprodict/onnx_tools/onnx2py_helper.py (16 changes: 15 additions & 1 deletion)
@@ -50,7 +50,21 @@ def from_array(value, name=None):
     :return: ONNX tensor
     """
     if isinstance(value, numpy.ndarray):
-        pb = onnx_from_array(value, name=name)
+        try:
+            pb = onnx_from_array(value, name=name)
+        except NotImplementedError as e:
+            if value.dtype == numpy.dtype('O'):
+                pb = TensorProto()
+                pb.data_type = TensorProto.STRING  # pylint: disable=E1101
+                if name is not None:
+                    pb.name = name
+                pb.dims.extend(value.shape)  # pylint: disable=E1101
+                pb.string_data.extend(  # pylint: disable=E1101
+                    list(map(lambda o: str(o).encode('utf-8'), value.ravel())))
+            else:
+                raise NotImplementedError(
+                    "Unable to convert type %r (dtype=%r) into an ONNX tensor "
+                    "due to %r." % (type(value), value.dtype, e)) from e
         return pb
     if isinstance(value, TensorProto):
         return value
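This change makes from_array fall back to building a STRING tensor by hand when onnx_from_array rejects an object-dtype array. A short runnable sketch of the new behaviour; the import path matches the file above and the expected outputs in the comments follow from the added branch:

    import numpy
    from onnx import TensorProto
    from mlprodict.onnx_tools.onnx2py_helper import from_array

    # an object-dtype array of strings triggers the new fallback branch
    values = numpy.array(['a', 'bb', 'ccc'], dtype=object)
    tensor = from_array(values, name='words')
    print(tensor.data_type == TensorProto.STRING)  # expected: True
    print(list(tensor.string_data))                # expected: [b'a', b'bb', b'ccc']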
mlprodict/onnxrt/onnx_inference.py (4 changes: 4 additions & 0 deletions)
@@ -693,6 +693,10 @@ def retype(col_array):
                 name, retype(numpy.expand_dims(inputs[name].values, axis=1)))
                 for name in inputs.columns)
         if intermediate:
+            if self.inplace:
+                raise RuntimeError(  # pragma: no cover
+                    "inplace must be False if intermediate is True, a container "
+                    "might be used by several nodes.")
             return self._run(inputs, clean_right_away=False,
                              intermediate=intermediate,
                              verbose=verbose, node_time=node_time,
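The new check ties the two options together: intermediate=True is only allowed when the instance was created with inplace=False, because an in-place container may be shared by several nodes. A small self-contained sketch using a throwaway Add graph; the graph is only illustrative, any model works:

    import numpy
    from onnx import TensorProto, helper
    from mlprodict.onnxrt import OnnxInference

    # a tiny graph computing Y = X + X, just to have something to run
    X_info = helper.make_tensor_value_info('X', TensorProto.FLOAT, [None, 2])
    Y_info = helper.make_tensor_value_info('Y', TensorProto.FLOAT, [None, 2])
    node = helper.make_node('Add', ['X', 'X'], ['Y'])
    model_def = helper.make_model(
        helper.make_graph([node], 'tiny', [X_info], [Y_info]))

    # inplace=False is now required for intermediate=True,
    # otherwise run raises RuntimeError
    oinf = OnnxInference(model_def, inplace=False)
    got = oinf.run({'X': numpy.ones((3, 2), dtype=numpy.float32)},
                   intermediate=True)
    print(list(got))  # names of every intermediate and final result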
mlprodict/onnxrt/ops_cpu/op_string_normalizer.py (6 changes: 5 additions & 1 deletion)
@@ -61,7 +61,11 @@ def _run_column(self, cin, cout):
             cout[:] = cin[:]

         for i in range(0, cin.shape[0]):
-            cout[i] = self.strip_accents_unicode(cout[i])
+            if isinstance(cout[i], float):
+                # nan
+                cout[i] = ''
+            else:
+                cout[i] = self.strip_accents_unicode(cout[i])

         if self.is_case_sensitive and len(stops) > 0:
             for i in range(0, cin.shape[0]):
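The StringNormalizer fix guards against missing values: a NaN in an object column is a float, so calling strip_accents_unicode on it would fail, and the new branch maps it to an empty string instead. A standalone sketch of that guard, assuming NaN is the only float such a column can contain; clean_cell and the str.lower stand-in are illustrative, not the runtime's API:

    import numpy

    def clean_cell(value, transform):
        # missing values in an object column surface as float('nan');
        # map them to '' instead of calling a string function on a float
        if isinstance(value, float):
            return ''
        return transform(value)

    column = numpy.array(['Café', float('nan'), 'déjà'], dtype=object)
    print([clean_cell(v, str.lower) for v in column])  # ['café', '', 'déjà']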