Coverage for mlprodict/onnxrt/ops_onnx/op_conv.py: 85%
26 statements
« prev ^ index » next coverage.py v7.1.0, created at 2023-02-04 02:28 +0100
1# pylint: disable=W0221
2"""
3@file
4@brief Extension for :epkg:`ReferenceEvaluator`.
5"""
6import numpy
7from ..ops_cpu.op_conv_ import ConvFloat, ConvDouble # pylint: disable=E0611,E0401
8from ._op import OpRunExtended
class Conv(OpRunExtended):
    """
    C++ implementation of operator Conv for :epkg:`ReferenceEvaluator`.
    See following example.

    .. runpython::
        :showcode:

        import numpy
        from numpy.testing import assert_allclose
        from onnx import TensorProto
        from onnx.checker import check_model
        from onnx.helper import (
            make_graph, make_model, make_node,
            make_opsetid, make_tensor_value_info)
        from onnx.reference import ReferenceEvaluator
        from mlprodict.plotting.text_plot import onnx_simple_text_plot
        from mlprodict.onnxrt.ops_onnx.op_conv import Conv
        from cpyquickhelper.numbers import measure_time

        # creating a model
        X = make_tensor_value_info("X", TensorProto.FLOAT, [
            None, None, None, None])
        Y = make_tensor_value_info("Y", TensorProto.FLOAT, [
            None, None, None, None])
        B = make_tensor_value_info("B", TensorProto.FLOAT, [
            None, None, None, None])
        W = make_tensor_value_info("W", TensorProto.FLOAT, [1, 1, 3, 3])
        node = make_node(
            "Conv", ["X", "W", "B"], ["Y"], pads=[1, 1, 1, 1],
            dilations=[1, 1], strides=[2, 2])
        graph = make_graph([node], "g", [X, W, B], [Y])
        onnx_model = make_model(graph, opset_imports=[make_opsetid("", 16)])
        check_model(onnx_model)

        # prints the model
        print(onnx_simple_text_plot(onnx_model))

        # comparing without and with C++ implementation
        sess1 = ReferenceEvaluator(onnx_model)
        sess2 = ReferenceEvaluator(onnx_model, new_ops=[Conv])

        sH, sW = 224, 224
        X = numpy.random.randn(1, 1, sH, sW).astype(numpy.float32)
        W = numpy.random.randn(1, 1, 3, 3).astype(numpy.float32)
        B = numpy.array([[[[0]]]], dtype=numpy.float32)

        expected = sess1.run(None, {"X": X, "W": W, "B": B})[0]
        got = sess2.run(None, {"X": X, "W": W, "B": B})[0]

        # checking it is the same
        assert_allclose(expected, got, atol=1e-5)

        # comparing the time
        t1 = measure_time(
            lambda: sess1.run(None, {"X": X, "W": W, "B": B}),
            repeat=5, number=5, div_by_number=True)
        print("No C++:", t1["average"])
        t2 = measure_time(
            lambda: sess2.run(None, {"X": X, "W": W, "B": B}),
            repeat=5, number=5, div_by_number=True)
        print("With C++:", t2["average"])
        print("speedup:", t1["average"] / t2["average"])
    """

    def get_impl(self, dtype=None, auto_pad=None, dilations=None, group=None,
                 kernel_shape=None, pads=None, strides=None):
        """
        Instantiates the C++ implementation and caches it.

        :param dtype: numpy dtype of the input, only *float32* and
            *float64* have a C++ implementation
        :param auto_pad: attribute *auto_pad* of operator Conv
        :param dilations: attribute *dilations* of operator Conv
        :param group: attribute *group* of operator Conv
        :param kernel_shape: attribute *kernel_shape* of operator Conv
        :param pads: attribute *pads* of operator Conv
        :param strides: attribute *strides* of operator Conv
        :return: an initialized instance of *ConvFloat* or *ConvDouble*
        :raises RuntimeError: if *dtype* is neither float32 nor float64
        """
        # one cached instance per unique (attributes, dtype) combination
        key = self.get_cache_key(
            auto_pad=auto_pad, dilations=dilations,
            group=group, kernel_shape=kernel_shape, pads=pads,
            strides=strides, dtype=dtype)
        if self.has_cache_key(key):
            return self.get_cache_impl(key)
        if dtype == numpy.float32:
            rt = ConvFloat()
        elif dtype == numpy.float64:
            rt = ConvDouble()
        else:
            raise RuntimeError(
                f"No C++ implementation for Conv is available for dtype={dtype}.")
        rt.init(auto_pad,
                numpy.array(dilations, dtype=numpy.int64),
                group,
                numpy.array(kernel_shape, dtype=numpy.int64),
                numpy.array(pads, dtype=numpy.int64),
                numpy.array(strides, dtype=numpy.int64))
        self.cache_impl(key, rt)
        return rt

    def _run(self, X, W, B=None, auto_pad=None, dilations=None, group=None,
             kernel_shape=None, pads=None, strides=None):
        """
        Computes the convolution of *X* with kernel *W* and optional bias *B*
        using the cached C++ implementation.

        :param X: input tensor, at least 3 dimensions (N x C x spatial dims)
        :param W: kernel tensor
        :param B: optional bias tensor
        :param auto_pad: attribute *auto_pad* of operator Conv
        :param dilations: attribute *dilations* of operator Conv
        :param group: attribute *group* of operator Conv
        :param kernel_shape: attribute *kernel_shape*, defaults to the last
            two dimensions of *W* when not specified
        :param pads: attribute *pads* of operator Conv
        :param strides: attribute *strides* of operator Conv
        :return: a tuple with the convolution result
        :raises ValueError: if *X* is None or has fewer than 3 dimensions
        :raises RuntimeError: if any input tensor is empty
        """
        # The None check must come before any access to X.shape, otherwise
        # a None input raises AttributeError instead of this explicit message.
        if X is None:
            raise ValueError(  # pragma: no cover
                "X cannot be None for operator %r, ONNX=%r" % (
                    type(self), self.onnx_node))
        if len(X.shape) < 3:
            raise ValueError(
                f"X must have at least 3 dimensions but its shape is {X.shape}.")
        if min(X.shape) == 0:
            raise RuntimeError(  # pragma: no cover
                f"Unable to run operator Conv on an empty matrix. X.shape={X.shape!r}.")
        if min(W.shape) == 0:
            raise RuntimeError(  # pragma: no cover
                f"Unable to run operator Conv on an empty matrix. W.shape={W.shape!r}.")
        if B is not None and min(B.shape) == 0:
            raise RuntimeError(  # pragma: no cover
                f"Unable to run operator Conv on an empty matrix. B.shape={B.shape!r}.")
        # kernel_shape defaults to the spatial dimensions of W (2D case)
        rt = self.get_impl(dtype=X.dtype, auto_pad=auto_pad,
                           dilations=dilations, group=group,
                           kernel_shape=kernel_shape or W.shape[-2:],
                           pads=pads, strides=strides)
        return (rt.compute(X, W, B), )