Skip to content

Commit 15ac2d3

Browse files
committed
ensuring small memory footprint for parallel test execution on A30
Signed-off-by: Lucas Liebenwein <[email protected]>
1 parent 1f4a360 commit 15ac2d3

File tree

1 file changed

+20
-2
lines changed

1 file changed

+20
-2
lines changed

tests/unittest/_torch/auto_deploy/_utils_test/_model_test_utils.py

Lines changed: 20 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -435,15 +435,33 @@ def apply_rotary_pos_emb_ds(q, k, cos, sin, position_ids, unsqueeze_dim=1):
435435
"llm_models_subdir": "Qwen2.5-3B-Instruct",
436436
"model_kwargs": {
437437
"num_hidden_layers": 2,
438+
"hidden_size": 64,
439+
"intermediate_size": 128,
440+
"num_attention_heads": 4,
441+
"num_key_value_heads": 2,
438442
},
439443
},
440444
"mistralai/Mistral-Small-3.1-24B-Instruct-2503": {
441445
"llm_models_subdir": "Mistral-Small-3.1-24B-Instruct-2503",
442446
"model_factory": "AutoModelForImageTextToText",
443447
"compile_backend": "torch-simple",
444448
"model_kwargs": {
445-
"text_config": {"num_hidden_layers": 2},
446-
"vision_config": {"num_hidden_layers": 2},
449+
"text_config": {
450+
"num_hidden_layers": 2,
451+
"head_dim": 64,
452+
"hidden_size": 64,
453+
"intermediate_size": 128,
454+
"num_attention_heads": 4,
455+
"num_key_value_heads": 2,
456+
},
457+
"vision_config": {
458+
"num_hidden_layers": 1,
459+
"hidden_size": 64,
460+
"head_dim": 32,
461+
"image_size": 128,
462+
"intermediate_size": 128,
463+
"num_attention_heads": 2,
464+
},
447465
},
448466
},
449467
"ibm-ai-platform/Bamba-9B-v2": {

0 commit comments

Comments (0)