Skip to content

Commit

Permalink
[ONNX] Bump onnxscript in CI (pytorch#137497)
Browse files Browse the repository at this point in the history
To 0.1.0.dev20241008
Pull Request resolved: pytorch#137497
Approved by: https://github.com/titaiwangms
  • Loading branch information
justinchuby authored and pytorchmergebot committed Oct 8, 2024
1 parent 76ab1ab commit 81c8a8a
Show file tree
Hide file tree
Showing 2 changed files with 1 addition and 71 deletions.
2 changes: 1 addition & 1 deletion .ci/docker/common/install_onnx.sh
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ pip_install coloredlogs packaging

pip_install onnxruntime==1.18.1
pip_install onnx==1.16.2
pip_install onnxscript==0.1.0.dev20240831 --no-deps
pip_install onnxscript==0.1.0.dev20241008 --no-deps
# required by onnxscript
pip_install ml_dtypes

Expand Down
70 changes: 0 additions & 70 deletions test/onnx/test_fx_to_onnx.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@

import logging
import tempfile
from typing import Mapping, Tuple

import onnx
import onnx.inliner
Expand Down Expand Up @@ -111,75 +110,6 @@ def forward(self, x):

_ = dynamo_export(TopKModel(), x, export_options=self.export_options)

def test_symbolic_shape_of_values_inside_function_is_exported_as_graph_value_info(
    self,
):
    """Export with dynamic shapes and check that intermediate values inside
    ONNX local functions get `value_info` entries in the model proto.

    The model is a trivial `Module -> SubModule` nesting so the exporter
    emits at least one local function whose internal outputs should be
    covered by `graph.value_info` under namespaced names.
    """

    class SubModule(torch.nn.Module):
        def forward(self, x, y, bias):
            output = x @ y
            return output + bias

    class Module(torch.nn.Module):
        def __init__(self) -> None:
            super().__init__()
            self.submodule = SubModule()

        def forward(self, x, y, bias):
            return self.submodule(x, y, bias)

    x = torch.randn(2, 3)
    y = torch.randn(3, 4)
    bias = torch.randn(4)
    # dynamic_shapes=True so the exporter records symbolic shape info.
    onnx_program = torch.onnx.dynamo_export(
        Module(),
        x,
        y,
        bias,
        export_options=torch.onnx.ExportOptions(dynamic_shapes=True),
    )
    model_proto = onnx_program.model_proto

    # Assert value_info for values inside local function can be retrieved
    def _assert_node_outputs_has_value_info(
        node: onnx.NodeProto,
        value_infos: Mapping[str, onnx.ValueInfoProto],
        local_functions: Mapping[Tuple[str, str], onnx.FunctionProto],
        exclude_names_in_value_info,
        function_id: str = "",
    ):
        # Outputs produced inside a local function are looked up under
        # "<domain>::<function name>/<output>"; top-level outputs use the
        # plain output name (function_id is "" at the top level).
        for output in node.output:
            name = f"{function_id}/{output}" if function_id else output
            if name not in exclude_names_in_value_info:
                self.assertIn(name, value_infos)
        if node.domain.startswith("pkg.onnxscript.torch_lib"):
            # No shape info available for values inside torchlib functions.
            return
        if (
            function := local_functions.get((node.domain, node.op_type))
        ) is not None:
            # This node calls a local function: recurse into its body.
            # NOTE: `node` and `function_id` are intentionally rebound here
            # for the recursive calls below.
            for node in function.node:
                function_id = f"{function.domain}::{function.name}"
                _assert_node_outputs_has_value_info(
                    node,
                    value_infos,
                    local_functions,
                    exclude_names_in_value_info,
                    function_id,
                )

    # Index value_info entries by name and local functions by (domain, name).
    type_infos = {vi.name: vi for vi in model_proto.graph.value_info}
    functions = {(f.domain, f.name): f for f in model_proto.functions}
    # NOTE: inputs, outputs, and initializers are not included in value_info spec
    exclude_names_in_value_info = (
        [input.name for input in model_proto.graph.input]
        + [output.name for output in model_proto.graph.output]
        + [init.name for init in model_proto.graph.initializer]
    )
    for node in model_proto.graph.node:
        _assert_node_outputs_has_value_info(
            node, type_infos, functions, exclude_names_in_value_info
        )

def test_dynamo_export_retains_readable_parameter_and_buffer_names(self):
class SubModule(torch.nn.Module):
def __init__(self) -> None:
Expand Down

0 comments on commit 81c8a8a

Please sign in to comment.