Add dynamo_onnx_aot_inline to bench (#110183)
Summary:
Adds an option that applies onnx.inliner to the model after export.
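
In isolation, the post-export inlining step added here looks roughly like the following sketch (model, inputs, and output path are placeholders; onnx >= 1.15 is needed for onnx.inliner):

    import torch
    import onnx
    import onnx.inliner

    def export_and_inline(model, example_args, output_path):
        # Export with the Dynamo/FX-based exporter.
        export_output = torch.onnx.dynamo_export(
            model,
            *example_args,
            export_options=torch.onnx.ExportOptions(dynamic_shapes=False),
        )
        # Inline model-local functions ahead of time, then save the inlined proto.
        model_proto = onnx.inliner.inline_local_functions(export_output.model_proto)
        onnx.save_model(model_proto, output_path, save_as_external_data=True)
        return export_output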

X-link: pytorch/pytorch#110183
Approved by: https://github.com/thiagocrepaldi

Reviewed By: jeanschmidt

Differential Revision: D50413979

fbshipit-source-id: 2a7ce6d8af7791c68daed958287473ec0a8c96f6
BowenBao authored and facebook-github-bot committed Oct 19, 2023
1 parent ff41192 commit 5248206
Showing 1 changed file with 44 additions and 1 deletion.

userbenchmark/dynamo/dynamobench/common.py: 44 additions, 1 deletion
@@ -1455,9 +1455,11 @@ def run(self, pt_inputs):
 class OnnxModelFromDynamo(OnnxModelFromTorchScript):
     """Dynamo and Fx based export. `torch.onnx.dynamo_export`."""
 
+    _EXPORTED_MODEL_FOLDER_NAME = "bench_dynamo_onnx_model"
+
     def __init__(self, output_directory, model, example_inputs, dynamic_shapes: bool):
         self.model_path = self._generate_onnx_model_path(
-            output_directory, "bench_dynamo_onnx_model"
+            output_directory, self._EXPORTED_MODEL_FOLDER_NAME
         )
         self._dynamic_shapes = dynamic_shapes
         self._export_output = self._export(model, example_inputs, self.model_path)
@@ -1483,6 +1485,29 @@ def format_pt_outputs(self, pt_outputs):
         return self._export_output.adapt_torch_outputs_to_onnx(pt_outputs)
 
 
+class OnnxModelFromDynamoAotInline(OnnxModelFromDynamo):
+    """Dynamo and Fx based export, with AOT inline post export. `torch.onnx.dynamo_export`."""
+
+    _EXPORTED_MODEL_FOLDER_NAME = "bench_dynamo_onnx_aot_inline_model"
+
+    def _export(
+        self, model, example_inputs, output_path: str
+    ) -> torch.onnx.ExportOutput:
+        example_args, example_kwargs = _normalize_bench_inputs(example_inputs)
+        options = torch.onnx.ExportOptions(dynamic_shapes=self._dynamic_shapes)
+        export_output = torch.onnx.dynamo_export(
+            model, *example_args, **example_kwargs, export_options=options
+        )
+        # Apply AOT inline post export.
+        # Requires onnx >= 1.15
+        import onnx
+        import onnx.inliner
+
+        model_proto = onnx.inliner.inline_local_functions(export_output.model_proto)
+        onnx.save_model(model_proto, output_path, save_as_external_data=True)
+        return export_output
+
+
 class _OnnxPatch:
     @classmethod
     def patch_non_tensor_outputs(cls, correct_result, new_result, fp64_outputs):
@@ -3104,6 +3129,12 @@ def get_example_inputs(self):
         action="store_true",
         help="Measure speedup with Dynamo ONNX, i.e. `torch.onnx.dynamo_export`",
     )
+    group.add_argument(
+        "--dynamo-onnx-aot-inline",
+        "--dynamo_onnx_aot_inline",
+        action="store_true",
+        help="Measure speedup with Dynamo ONNX AOT Inline, i.e. `torch.onnx.dynamo_export`",
+    )
     group.add_argument(
         "--backend",
         choices=torch._dynamo.list_backends(exclude_tags=None),
@@ -3450,6 +3481,18 @@ def run(runner, args, original_dir=None):
         experiment = functools.partial(speedup_experiment_onnx, OnnxModelFromDynamo)
         output_filename = "dynamo_onnx.csv"
         current_onnx_compiler = "dynamo"
+    elif args.dynamo_onnx_aot_inline:
+        optimize_ctx = functools.partial(
+            optimize_onnx_ctx,
+            args.output_directory or ".",
+            OnnxModelFromDynamoAotInline,
+            dynamic_shapes=args.dynamic_shapes,
+        )
+        experiment = functools.partial(
+            speedup_experiment_onnx, OnnxModelFromDynamoAotInline
+        )
+        output_filename = "dynamo_onnx_aot_inline.csv"
+        current_onnx_compiler = "dynamo"
     elif args.speedup_dynamo_ts:
         optimize_ctx = torch._dynamo.optimize("ts", nopython=args.nopython)
         experiment = speedup_experiment
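
For context, the harness consumes these wrappers roughly as sketched below (based only on the methods visible in this diff; model and example_inputs are placeholders):

    onnx_model = OnnxModelFromDynamoAotInline(
        output_directory=".",
        model=model,
        example_inputs=example_inputs,
        dynamic_shapes=False,
    )
    # run() is inherited from OnnxModelFromTorchScript and executes the exported model.
    onnx_outputs = onnx_model.run(example_inputs)

Selecting --dynamo-onnx-aot-inline on the command line routes the benchmark through this class and writes results to dynamo_onnx_aot_inline.csv.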
