KernelBench/level3/18_SqueezeNet.py (2 changes: 1 addition & 1 deletion)
@@ -72,7 +72,7 @@ def forward(self, x):
         return torch.flatten(x, 1)

 # Test code
-batch_size = 64
+batch_size = 32
 input_channels = 3
 height = 512
 width = 512
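For scale, a rough estimate of how this change shrinks the input tensor alone, assuming float32 inputs of shape (batch_size, input_channels, height, width) as suggested by the constants above (the file's actual get_inputs() is outside this hunk):

def input_megabytes(batch_size, channels=3, height=512, width=512, bytes_per_elem=4):
    # Size of one float32 input batch in MiB (assumed shape, not taken from the hunk)
    return batch_size * channels * height * width * bytes_per_elem / 2**20

print(input_megabytes(64))  # 192.0 MiB before the change
print(input_megabytes(32))  # 96.0 MiB after the change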
KernelBench/level3/2_ShallowWideMLP.py (2 changes: 1 addition & 1 deletion)
@@ -31,7 +31,7 @@ def forward(self, x):
         return self.network(x)

 # Test code
-batch_size = 128
+batch_size = 64
 input_size = 16384
 hidden_layer_sizes = [32768, 32768]
 output_size = 16384
KernelBench/level3/35_LSTM.py (6 changes: 3 additions & 3 deletions)
@@ -41,10 +41,10 @@ def forward(self, x, h0=None, c0=None):
         return out

 # === Test configuration ===
-batch_size = 10
+batch_size = 3
 sequence_length = 512
-input_size = 128
-hidden_size = 256
+input_size = 64
+hidden_size = 128
 num_layers = 6
 output_size = 10
 dropout = 0.0
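As a rough illustration of what the reduced configuration implies for tensor shapes, here is a minimal sketch assuming a batch_first LSTM fed from these constants; the file's real get_inputs() and model construction are outside this hunk:

import torch

# Hypothetical sizes under the new test configuration (batch_first layout assumed)
batch_size, sequence_length, input_size = 3, 512, 64
hidden_size, num_layers = 128, 6

x = torch.randn(batch_size, sequence_length, input_size)
h0 = torch.randn(num_layers, batch_size, hidden_size)
c0 = torch.randn(num_layers, batch_size, hidden_size)

lstm = torch.nn.LSTM(input_size, hidden_size, num_layers, batch_first=True)
out, (hn, cn) = lstm(x, (h0, c0))
print(out.shape)  # torch.Size([3, 512, 128])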
KernelBench/level3/36_LSTMHn.py (6 changes: 3 additions & 3 deletions)
@@ -34,10 +34,10 @@ def forward(self, x,h0,c0):
         return state[0]

 # Test code
-batch_size = 10
+batch_size = 3
 sequence_length = 512
-input_size = 128
-hidden_size = 256
+input_size = 64
+hidden_size = 128
 num_layers = 6
 output_size = 10
 dropout = 0.0
KernelBench/level3/37_LSTMCn.py (6 changes: 3 additions & 3 deletions)
@@ -34,10 +34,10 @@ def forward(self, x, h0, c0):
         return state[1]

 # Test code
-batch_size = 10
+batch_size = 3
 sequence_length = 512
-input_size = 128
-hidden_size = 256
+input_size = 64
+hidden_size = 128
 num_layers = 6
 output_size = 10
 dropout = 0.0
KernelBench/level3/38_LSTMBidirectional.py (6 changes: 3 additions & 3 deletions)
@@ -33,10 +33,10 @@ def forward(self, x,h0,c0):
         return out

 # Test code
-batch_size = 10
+batch_size = 3
 sequence_length = 512
-input_size = 128
-hidden_size = 256
+input_size = 64
+hidden_size = 128
 num_layers = 6
 output_size = 10
 dropout = 0.0
KernelBench/level3/39_GRU.py (6 changes: 3 additions & 3 deletions)
@@ -27,10 +27,10 @@ def forward(self, x,h0):
         return output

 # Test code
-batch_size = 10
+batch_size = 3
 seq_len = 512
-input_size = 128
-hidden_size = 256
+input_size = 64
+hidden_size = 128
 num_layers = 6

 def get_inputs():
KernelBench/level3/40_GRUHidden.py (6 changes: 3 additions & 3 deletions)
@@ -27,10 +27,10 @@ def forward(self, x,h0):
         return h_n

 # Test code
-batch_size = 10
+batch_size = 3
 seq_len = 512
-input_size = 128
-hidden_size = 256
+input_size = 64
+hidden_size = 128
 num_layers = 6

 def get_inputs():
KernelBench/level3/41_GRUBidirectional.py (6 changes: 3 additions & 3 deletions)
@@ -28,10 +28,10 @@ def forward(self, x,h0):
         return output

 # Test code
-batch_size = 10
+batch_size = 3
 seq_len = 512
-input_size = 128
-hidden_size = 256
+input_size = 64
+hidden_size = 128
 num_layers = 6

 def get_inputs():
KernelBench/level3/42_GRUBidirectionalHidden.py (6 changes: 3 additions & 3 deletions)
@@ -27,10 +27,10 @@ def forward(self, x,h0):
         return h_n

 # Test code
-batch_size = 10
+batch_size = 3
 seq_len = 512
-input_size = 128
-hidden_size = 256
+input_size = 64
+hidden_size = 128
 num_layers = 6

 def get_inputs():
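For the bidirectional variants (41 and 42), the returned shapes double along the direction axis. A minimal sketch under the same assumptions (batch_first layout, constants as above; the models' actual construction is outside this hunk):

import torch

batch_size, seq_len, input_size, hidden_size, num_layers = 3, 512, 64, 128, 6

gru = torch.nn.GRU(input_size, hidden_size, num_layers,
                   batch_first=True, bidirectional=True)
x = torch.randn(batch_size, seq_len, input_size)
h0 = torch.randn(2 * num_layers, batch_size, hidden_size)  # 2x layers for the two directions
output, h_n = gru(x, h0)
print(output.shape)  # torch.Size([3, 512, 256]) -- last dim is 2 * hidden_size
print(h_n.shape)     # torch.Size([12, 3, 128])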
KernelBench/level3/44_MiniGPTBlock.py (2 changes: 1 addition & 1 deletion)
@@ -82,7 +82,7 @@ def forward(self, x):
         x = x + self.mlpf(self.ln_2(x))
         return x

-batch_size = 128
+batch_size = 32
 max_seqlen = 1024
 seq_len = 512
 n_embd = 768
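A back-of-the-envelope estimate for this change, assuming the block consumes a float32 tensor of shape (batch_size, seq_len, n_embd) as implied by the constants above (intermediate activations are not counted):

def block_input_megabytes(batch_size, seq_len=512, n_embd=768, bytes_per_elem=4):
    # Size of one float32 input batch to the transformer block, in MiB (assumed shape)
    return batch_size * seq_len * n_embd * bytes_per_elem / 2**20

print(block_input_megabytes(128))  # 192.0 MiB before the change
print(block_input_megabytes(32))   # 48.0 MiB after the change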
KernelBench/level3/5_AlexNet.py (2 changes: 1 addition & 1 deletion)
@@ -81,7 +81,7 @@ def forward(self, x):
         return x

 # Test code
-batch_size = 1024
+batch_size = 512
 num_classes = 1000

 def get_inputs():