Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Collect Named Data Store at construction #9370

Merged
merged 2 commits into from
Mar 20, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions exir/backend/test/test_backend_with_named_data_map.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ def forward(self, x):
return y - y

ep = to_edge(torch.export.export(M(), (torch.randn(1, 2),)))
ep.to_backend(BackendWithNDMPartitioner())
ep = ep.to_backend(BackendWithNDMPartitioner())

ndm_output = ep._named_data_store.get_named_data_store_output()
buffer_entries = ndm_output.buffers
Expand All @@ -71,7 +71,7 @@ def forward(self, x, y):
return z - z

ep = to_edge(torch.export.export(M(), (torch.randn(1, 2), torch.randn(1, 2))))
ep.to_backend(BackendWithNDMPartitioner())
ep = ep.to_backend(BackendWithNDMPartitioner())

ndm_output = ep._named_data_store.get_named_data_store_output()
buffer_entries = ndm_output.buffers
Expand Down
1 change: 1 addition & 0 deletions exir/lowered_backend_module.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,7 @@ def __deepcopy__(self, memo: Optional[Dict[int, Any]]) -> "LoweredBackendModule"
backend_id=self._backend_id,
processed_bytes=self._processed_bytes,
compile_specs=copy.deepcopy(self._compile_specs, memo),
named_data_store_output=self._named_data_store_output,
)
# pyre-fixme[16]: `LoweredBackendModule` has no attribute `meta`.
res.meta = copy.copy(getattr(self, "meta", {}))
Expand Down
53 changes: 32 additions & 21 deletions exir/program/_program.py
Original file line number Diff line number Diff line change
Expand Up @@ -1098,6 +1098,33 @@ def _gen_edge_manager_for_partitioners(
return edge_manager


def collect_named_data_store_from_exported_program(
    exported_program: ExportedProgram,
    named_data_store: NamedDataStore,
) -> None:
    """
    Gather every named-data-store output reachable from ``exported_program``
    and merge it into ``named_data_store``.

    Each delegated (lowered) module may carry its own named-data output;
    funnelling them all through a single store lets the store deduplicate
    shared entries.
    """

    def _visit(gm: torch.fx.GraphModule) -> None:
        # Merge the payload of every delegate call found in this graph.
        for fx_node in gm.graph.nodes:
            if fx_node.target != executorch_call_delegate:
                continue
            # args[0] is the get_attr node whose target names the lowered
            # module attribute on this graph module.
            lowered = getattr(gm, fx_node.args[0].target)
            assert is_lowered_module(lowered)
            payload = lowered.named_data_store_output
            if payload is not None:
                named_data_store.merge_named_data_store(payload)
        # Recurse into control-flow submodules, preserving traversal order.
        for _, child, _ in get_control_flow_submodules(gm):
            _visit(child)

    _visit(exported_program.graph_module)


@et_logger("to_edge_transform_and_lower")
def to_edge_transform_and_lower(
programs: Union[ExportedProgram, Dict[str, ExportedProgram]],
Expand Down Expand Up @@ -1307,7 +1334,6 @@ def __init__(
constant_methods: Optional[Dict[str, Any]] = None,
compile_config: Optional[EdgeCompileConfig] = None,
ops_set_to_not_decompose: Optional[List[torch._ops.OpOverload]] = None,
named_data_store: Optional[NamedDataStore] = None,
):
"""
Should not be called directly by users. User should use :func:'to_edge' instead.
Expand All @@ -1331,7 +1357,11 @@ def __init__(
self._edge_programs: Dict[str, ExportedProgram] = edge_programs
self._config_methods = constant_methods

self._named_data_store = named_data_store or NamedDataStore()
self._named_data_store = NamedDataStore()
for _, program in self._edge_programs.items():
collect_named_data_store_from_exported_program(
program, self._named_data_store
)

@property
def methods(self) -> Set[str]:
Expand Down Expand Up @@ -1441,30 +1471,11 @@ def to_backend(
for name, program in self._edge_programs.items():
new_edge_programs[name] = to_backend(program, partitioner)

# collected all the named data into the named data store for deduplication
def collect_named_data_store_outputs(
graph_module: torch.fx.GraphModule,
) -> None:
for node in graph_module.graph.nodes:
if node.target == executorch_call_delegate:
lbm = getattr(graph_module, node.args[0].name)
assert is_lowered_module(lbm)
data_store_output = lbm.named_data_store_output
if data_store_output is not None:
self._named_data_store.merge_named_data_store(data_store_output)

for _, submod, _ in get_control_flow_submodules(graph_module):
collect_named_data_store_outputs(submod)

for _, program in new_edge_programs.items():
collect_named_data_store_outputs(program.graph_module)

config = EdgeCompileConfig(_check_ir_validity=False)
return EdgeProgramManager(
new_edge_programs,
copy.deepcopy(self._config_methods),
config,
named_data_store=self._named_data_store,
)

@et_logger("to_executorch")
Expand Down
Loading