From def3ec774880d859fb69075da3a2f1ac8e0868d8 Mon Sep 17 00:00:00 2001
From: Sheng Fu
Date: Fri, 7 Mar 2025 09:32:16 -0800
Subject: [PATCH] Clean up dead code

Summary: Clean up dead code

Differential Revision: D70739655
---
 et_replay/tools/et_replay.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/et_replay/tools/et_replay.py b/et_replay/tools/et_replay.py
index bd157463..14ce0c3a 100644
--- a/et_replay/tools/et_replay.py
+++ b/et_replay/tools/et_replay.py
@@ -43,7 +43,6 @@
 from param_bench.train.compute.python.lib.init_helper import load_modules
 from param_bench.train.compute.python.workloads import pytorch as workloads_pytorch
 from torch._inductor.async_compile import AsyncCompile
-from torch._inductor.codecache import TritonFuture
 
 # grid and split_scan_grid are dynamically loaded
 from torch._inductor.runtime.triton_heuristics import grid, split_scan_grid  # noqa
@@ -1038,8 +1037,6 @@ def _generate_run_ops_str(override):
             func, output_count = self.funcs[node.id]
             if not func:
                 continue
-            if isinstance(func, TritonFuture):
-                func = func.result()
             func_str = f"funcs[{node.id}]"
             inputs_str = _generate_inputs_str(node)
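
Note (not part of the patch): the deleted branch unwrapped kernels that were still pending asynchronous compilation, calling .result() on a TritonFuture to obtain the real callable before the call string was generated. The commit only states this is dead code; presumably the AsyncCompile path no longer hands TritonFuture objects to this loop. Below is a minimal, generic sketch of that unwrap-a-future-before-calling pattern, using concurrent.futures rather than the real torch._inductor types; compile_kernel is a hypothetical stand-in, not an API from the repository.

    # Generic sketch only: concurrent.futures stands in for asynchronous
    # kernel compilation; compile_kernel is a hypothetical placeholder.
    from concurrent.futures import Future, ThreadPoolExecutor

    def compile_kernel():
        # Stand-in for an asynchronously compiled kernel.
        return lambda x: x * 2

    with ThreadPoolExecutor() as pool:
        func = pool.submit(compile_kernel)  # a Future, not yet a callable

        # The pattern the deleted lines implemented: resolve the future first.
        if isinstance(func, Future):
            func = func.result()

        print(func(21))  # prints 42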