Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next Next commit
Extend to use inner API
  • Loading branch information
xadupre committed Nov 10, 2023
commit 8bd1a66d08d174ecaeb7a20409ddd80094cf4fd9
6 changes: 6 additions & 0 deletions _doc/api/light_api.rst
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,12 @@ EventType
.. autoclass:: onnx_array_api.light_api.translate.EventType
:members:

InnerEmitter
++++++++++++

.. autoclass:: onnx_array_api.light_api.inner_emitter.InnerEmitter
:members:

Translater
++++++++++

Expand Down
Binary file not shown.
236 changes: 236 additions & 0 deletions _unittests/ut_light_api/test_translate_classic.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,236 @@
import unittest
from textwrap import dedent
import numpy as np
from onnx import ModelProto, TensorProto
from onnx.defs import onnx_opset_version
from onnx.reference import ReferenceEvaluator
from onnx.helper import (
make_tensor_value_info,
make_node,
make_graph,
make_model,
make_opsetid,
)
from onnx.checker import check_model
from onnx_array_api.ext_test_case import ExtTestCase
from onnx_array_api.light_api import start, translate

OPSET_API = min(19, onnx_opset_version() - 1)


class TestTranslateClassic(ExtTestCase):
    """Tests for ``translate(..., api="onnx")``.

    Checks that a model built with the light API is translated into code
    based on the "inner" API from the onnx package (``make_node``,
    ``make_graph``, ``make_model``, ...).  Expected strings must match the
    emitter output byte for byte.
    """

    def test_check_code(self):
        # Baseline sanity check: hand-write, with onnx.helper, the kind of
        # code the translator is expected to emit (single Exp node) and
        # make sure it builds a model the checker accepts.
        opset_imports = [
            make_opsetid("", 19),
        ]
        inputs = []
        outputs = []
        nodes = []
        initializers = []
        sparse_initializers = []
        functions = []
        inputs.append(make_tensor_value_info("X", TensorProto.FLOAT, shape=[]))
        nodes.append(make_node("Exp", ["X"], ["Y"]))
        outputs.append(make_tensor_value_info("Y", TensorProto.FLOAT, shape=[]))
        graph = make_graph(
            nodes,
            "noname",
            inputs,
            outputs,
            initializers,
            sparse_initializer=sparse_initializers,
        )
        model = make_model(graph, functions=functions, opset_imports=opset_imports)
        check_model(model)

    def test_exp(self):
        # Build X -> Exp -> Y with the light API and verify it evaluates
        # correctly before checking the translated code.
        onx = start(opset=19).vin("X").Exp().rename("Y").vout().to_onnx()
        self.assertIsInstance(onx, ModelProto)
        self.assertIn("Exp", str(onx))
        ref = ReferenceEvaluator(onx)
        a = np.arange(10).astype(np.float32)
        got = ref.run(None, {"X": a})[0]
        self.assertEqualArray(np.exp(a), got)

        code = translate(onx, api="onnx")

        # Expected inner-API code emitted by InnerEmitter for this graph.
        expected = dedent(
            """
            opset_imports = [
            make_opsetid('', 19),
            ]
            inputs = []
            outputs = []
            nodes = []
            initializers = []
            sparse_initializers = []
            functions = []
            inputs.append(make_tensor_value_info('X', TensorProto.FLOAT, shape=[]))
            nodes.append(
            make_node(
            'Exp',
            ['X'],
            ['Y']
            )
            )
            outputs.append(make_tensor_value_info('Y', TensorProto.FLOAT, shape=[]))
            graph = make_graph(
            nodes,
            'noname',
            inputs,
            outputs,
            initializers,
            sparse_initializer=sparse_initializers,
            )
            model = make_model(
            graph,
            functions=functions,
            opset_imports=opset_imports
            )"""
        ).strip("\n")
        self.maxDiff = None
        self.assertEqual(expected, code)

        # Same model written with the explicit (verbose) light-API calls;
        # only checks evaluation, not translation.
        onx2 = (
            start(opset=19)
            .vin("X", elem_type=TensorProto.FLOAT)
            .bring("X")
            .Exp()
            .rename("Y")
            .bring("Y")
            .vout(elem_type=TensorProto.FLOAT)
            .to_onnx()
        )
        ref = ReferenceEvaluator(onx2)
        a = np.arange(10).astype(np.float32)
        got = ref.run(None, {"X": a})[0]
        self.assertEqualArray(np.exp(a), got)

    def test_transpose(self):
        # Reshape (with an implicit shape initializer 'r') followed by a
        # Transpose carrying a `perm` attribute.
        onx = (
            start(opset=19)
            .vin("X")
            .reshape((-1, 1))
            .Transpose(perm=[1, 0])
            .rename("Y")
            .vout()
            .to_onnx()
        )
        self.assertIsInstance(onx, ModelProto)
        self.assertIn("Transpose", str(onx))
        ref = ReferenceEvaluator(onx)
        a = np.arange(10).astype(np.float32)
        got = ref.run(None, {"X": a})[0]
        self.assertEqualArray(a.reshape((-1, 1)).T, got)

        code = translate(onx, api="onnx")
        # NOTE(review): the Reshape node consumes 'r' but no matching
        # initializers.append(...) line appears in the expectation —
        # presumably the emitter does not render initializers yet; confirm.
        expected = dedent(
            """
            opset_imports = [
            make_opsetid('', 19),
            ]
            inputs = []
            outputs = []
            nodes = []
            initializers = []
            sparse_initializers = []
            functions = []
            inputs.append(make_tensor_value_info('X', TensorProto.FLOAT, shape=[]))
            nodes.append(
            make_node(
            'Reshape',
            ['X', 'r'],
            ['r0_0']
            )
            )
            nodes.append(
            make_node(
            'Transpose',
            ['r0_0'],
            ['Y'],
            perm=[1, 0]
            )
            )
            outputs.append(make_tensor_value_info('Y', TensorProto.FLOAT, shape=[]))
            graph = make_graph(
            nodes,
            'noname',
            inputs,
            outputs,
            initializers,
            sparse_initializer=sparse_initializers,
            )
            model = make_model(
            graph,
            functions=functions,
            opset_imports=opset_imports
            )"""
        ).strip("\n")
        self.maxDiff = None
        self.assertEqual(expected, code)

    def test_topk_reverse(self):
        # Two inputs (X, K), one node with attributes, two outputs.
        onx = (
            start(opset=19)
            .vin("X", np.float32)
            .vin("K", np.int64)
            .bring("X", "K")
            .TopK(largest=0)
            .rename("Values", "Indices")
            .vout()
            .to_onnx()
        )
        self.assertIsInstance(onx, ModelProto)
        ref = ReferenceEvaluator(onx)
        x = np.array([[0, 1, 2, 3], [9, 8, 7, 6]], dtype=np.float32)
        k = np.array([2], dtype=np.int64)
        got = ref.run(None, {"X": x, "K": k})
        # largest=0 -> the k smallest values per row.
        self.assertEqualArray(np.array([[0, 1], [6, 7]], dtype=np.float32), got[0])
        self.assertEqualArray(np.array([[0, 1], [3, 2]], dtype=np.int64), got[1])

        code = translate(onx, api="onnx")
        # NOTE(review): the attribute lines 'axis=-1' / 'largest=0' /
        # 'sorted=1' below have no separating commas, and 'Indices' is
        # declared TensorProto.FLOAT although TopK indices are INT64.
        # These look like emitter defects the expectation mirrors — confirm
        # against inner_emitter before "fixing" the expected string.
        expected = dedent(
            """
            opset_imports = [
            make_opsetid('', 19),
            ]
            inputs = []
            outputs = []
            nodes = []
            initializers = []
            sparse_initializers = []
            functions = []
            inputs.append(make_tensor_value_info('X', TensorProto.FLOAT, shape=[]))
            inputs.append(make_tensor_value_info('K', TensorProto.INT64, shape=[]))
            nodes.append(
            make_node(
            'TopK',
            ['X', 'K'],
            ['Values', 'Indices'],
            axis=-1
            largest=0
            sorted=1
            )
            )
            outputs.append(make_tensor_value_info('Values', TensorProto.FLOAT, shape=[]))
            outputs.append(make_tensor_value_info('Indices', TensorProto.FLOAT, shape=[]))
            graph = make_graph(
            nodes,
            'noname',
            inputs,
            outputs,
            initializers,
            sparse_initializer=sparse_initializers,
            )
            model = make_model(
            graph,
            functions=functions,
            opset_imports=opset_imports
            )"""
        ).strip("\n")
        self.assertEqual(expected, code)


if __name__ == "__main__":
    # Run this test module directly with verbose output.
    unittest.main(verbosity=2)
39 changes: 33 additions & 6 deletions onnx_array_api/light_api/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from .model import OnnxGraph
from .translate import Translater
from .var import Var, Vars
from .inner_emitter import InnerEmitter


def start(
Expand Down Expand Up @@ -50,13 +51,18 @@ def start(
return OnnxGraph(opset=opset, opsets=opsets, is_function=is_function)


def translate(proto: ModelProto, single_line=False) -> str:
def translate(proto: ModelProto, single_line: bool = False, api: str = "light") -> str:
"""
Translates an ONNX proto into code using :ref:`l-light-api`
to describe the ONNX graph.

:param proto: model to translate
:param single_line: as a single line or not
:param api: API to export into,
default is `"light"` and this is handled by the class
:class:`onnx_array_api.light_api.translate.Emitter`,
another value is `"onnx"` which is the inner API implemented
in onnx package.
:return: code

.. runpython::
Expand All @@ -75,9 +81,30 @@ def translate(proto: ModelProto, single_line=False) -> str:
)
code = translate(onx)
print(code)

The inner API from the onnx package is also available.

.. runpython::
:showcode:

from onnx_array_api.light_api import start, translate

onx = (
start()
.vin("X")
.reshape((-1, 1))
.Transpose(perm=[1, 0])
.rename("Y")
.vout()
.to_onnx()
)
code = translate(onx, api="onnx")
print(code)
"""
tr = Translater(proto)
rows = tr.export()
if single_line:
return ".".join(rows)
return "".join(["(\n ", "\n .".join(rows), "\n)"])
if api == "light":
tr = Translater(proto)
elif api == "onnx":
tr = Translater(proto, emitter=InnerEmitter())
else:
raise ValueError(f"Unexpected value {api!r} for api.")
return tr.export(single_line=single_line, as_str=True)
Loading