Commit 373be8c

Remove legacy tests

Differential Revision: D78417828
Pull Request resolved: #12547
1 parent 0e433e8 commit 373be8c

1 file changed: +0 -39 lines

backends/cadence/aot/tests/test_fusion_ops_passes.py

Lines changed: 0 additions & 39 deletions
@@ -12,7 +12,6 @@
 
 import executorch.backends.cadence.aot.ops_registrations  # noqa
 import torch
-from executorch.backends.cadence.aot import compiler
 from executorch.backends.cadence.aot.fuse_ops import (
     FuseCascadedTransposeOrPermuteOps,
     FuseCascadedViewOps,
@@ -30,7 +29,6 @@
 from executorch.exir.dialects._ops import ops as exir_ops
 from executorch.exir.dialects.edge._ops import EdgeOpOverload
 from executorch.exir.pass_base import PassResult, ProxyValue
-from torch import nn
 
 
 class TestFusionPassesBase(unittest.TestCase):
@@ -178,43 +176,6 @@ def test_keep_mm_add_with_multiple_users(self) -> None:
         self.assertEqual(count_node(converted_graph, exir_ops.edge.aten.mm.default), 1)
         self.assertEqual(count_node(converted_graph, exir_ops.edge.aten.add.Tensor), 3)
 
-    # TODO(matthiascremon) -> None: enable that pass with new flow
-    @torch.no_grad()
-    @unittest.expectedFailure
-    def test_legacy_conv_bn_fusion(self) -> None:
-        class ModelConvBN(torch.nn.Module):
-            def __init__(
-                self, in_features: int, out_features: int, kernel_size: int
-            ) -> None:
-                super().__init__()
-                self.conv1d = nn.Conv1d(in_features, out_features, kernel_size)
-                self.bn = nn.BatchNorm1d(out_features)
-
-            def forward(self, x: torch.Tensor) -> torch.Tensor:
-                y = self.conv1d(x)
-                return self.bn(y)
-
-        model = ModelConvBN(64, 1, 2)
-        x = torch.randn(1, 64, 4)
-
-        graph_module = (
-            compiler.export_to_executorch_gen_etrecord(model.eval(), (x,))
-            .exported_program()
-            .graph_module
-        )
-        # Assert that after running the fusion passes, batchnorm was fused with conv1d
-        self.assertEqual(
-            count_node(graph_module, torch.ops.aten.linear.out)
-            + count_node(graph_module, torch.ops.cadence.convolution.out),
-            1,
-        )
-        self.assertEqual(
-            count_node(
-                graph_module, torch.ops.aten._native_batch_norm_legit_no_training.out
-            ),
-            0,
-        )
-
     def test_permute_transpose_fusion(self) -> None:
         builder = GraphBuilder()
         x = builder.placeholder("x", torch.randn(3, 1, 3, 1, 4, dtype=torch.float32))
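
For context, the deleted test_legacy_conv_bn_fusion exercised conv1d/batch-norm folding. The sketch below reproduces that folding algebra in plain PyTorch as a reference check; it is an illustrative example only, not code from this commit or from the Cadence fusion passes, and the shapes (64 input channels, 1 output channel, kernel size 2) simply mirror the deleted test. The names scale and fused are ad hoc.

import torch
from torch import nn

# Conv + BatchNorm folding (eval mode):
#   W' = W * gamma / sqrt(var + eps)
#   b' = (b - mean) * gamma / sqrt(var + eps) + beta
conv = nn.Conv1d(64, 1, 2)
bn = nn.BatchNorm1d(1)
conv.eval()
bn.eval()

with torch.no_grad():
    # Give the BN layer non-trivial statistics so the check is meaningful.
    bn.running_mean.uniform_(-1.0, 1.0)
    bn.running_var.uniform_(0.5, 2.0)
    bn.weight.uniform_(0.5, 2.0)
    bn.bias.uniform_(-1.0, 1.0)

    x = torch.randn(1, 64, 4)
    expected = bn(conv(x))

    # Fold the BN statistics into the conv weight and bias.
    scale = bn.weight / torch.sqrt(bn.running_var + bn.eps)
    fused = nn.Conv1d(64, 1, 2)
    fused.weight.copy_(conv.weight * scale.reshape(-1, 1, 1))
    fused.bias.copy_((conv.bias - bn.running_mean) * scale + bn.bias)

    # The folded convolution matches conv followed by batch norm.
    assert torch.allclose(expected, fused(x), atol=1e-5)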
