@@ -1215,8 +1215,12 @@ def getMakoArgsFromStageName(stageName, parseSysinfo=false) {
         // If stageName contains "-Triton-", add "backend=triton" to makoArgs
         // At this point, only tests with backend=triton or unspecified backend will be run
         makoArgs += ["backend=triton"]
+    } else if (stageName.contains("-FMHA-")) {
+        // If stageName contains "-FMHA-", add "backend=fmha" to makoArgs
+        // At this point, only tests with backend=fmha or unspecified backend will be run
+        makoArgs += ["backend=fmha"]
     } else {
-        // If stageName does not contain "-PyTorch-", "-TensorRT-", "-CPP-", or "-Triton-", do not add any backend
+        // If stageName does not contain "-PyTorch-", "-TensorRT-", "-CPP-", "-Triton-", or "-FMHA-", do not add any backend
         // At this point, all tests will be run
         // For cases where backend is not specified in makoArgs, we will match all types of backends and tests without specified backend
     }
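For reference, a minimal standalone sketch of the selection rule this hunk extends (an assumed simplification, not the pipeline's actual helper; only the Triton/FMHA branches shown above are reproduced, and the function name and assertions are illustrative):

// Standalone sketch; the real chain also handles PyTorch/TensorRT/CPP markers.
def backendArgsForStage(String stageName) {
    def makoArgs = []
    if (stageName.contains("-Triton-")) {
        makoArgs += ["backend=triton"]
    } else if (stageName.contains("-FMHA-")) {
        makoArgs += ["backend=fmha"]
    }
    // No recognized backend marker: makoArgs stays empty and every test matches.
    return makoArgs
}

assert backendArgsForStage("A100X-FMHA-Post-Merge-1") == ["backend=fmha"]
assert backendArgsForStage("A100X-Triton-Post-Merge-1") == ["backend=triton"]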
@@ -2000,6 +2004,7 @@ def launchTestJobs(pipeline, testFilter)
         "A10-PyTorch-Post-Merge-1": ["a10", "l0_a10", 1, 1],
         "A10-TensorRT-Post-Merge-1": ["a10", "l0_a10", 1, 2],
         "A10-TensorRT-Post-Merge-2": ["a10", "l0_a10", 2, 2],
+        "A10-FMHA-Post-Merge-1": ["a10", "l0_a10", 1, 1],
         "A30-TensorRT-Post-Merge-1": ["a30", "l0_a30", 1, 6],
         "A30-TensorRT-Post-Merge-2": ["a30", "l0_a30", 2, 6],
         "A30-TensorRT-Post-Merge-3": ["a30", "l0_a30", 3, 6],
@@ -2017,18 +2022,21 @@ def launchTestJobs(pipeline, testFilter)
         "A100X-TensorRT-Post-Merge-6": ["a100x", "l0_a100", 6, 6],
         "A100X-Triton-Post-Merge-1": ["a100x", "l0_a100", 1, 2],
         "A100X-Triton-Post-Merge-2": ["a100x", "l0_a100", 2, 2],
+        "A100X-FMHA-Post-Merge-1": ["a100x", "l0_a100", 1, 1],
         "L40S-TensorRT-Post-Merge-1": ["l40s", "l0_l40s", 1, 5],
         "L40S-TensorRT-Post-Merge-2": ["l40s", "l0_l40s", 2, 5],
         "L40S-TensorRT-Post-Merge-3": ["l40s", "l0_l40s", 3, 5],
         "L40S-TensorRT-Post-Merge-4": ["l40s", "l0_l40s", 4, 5],
         "L40S-TensorRT-Post-Merge-5": ["l40s", "l0_l40s", 5, 5],
+        "L40S-FMHA-Post-Merge-1": ["l40s", "l0_l40s", 1, 1],
         "H100_PCIe-PyTorch-Post-Merge-1": ["h100-cr", "l0_h100", 1, 1],
         "H100_PCIe-CPP-Post-Merge-1": ["h100-cr", "l0_h100", 1, 1],
         "H100_PCIe-TensorRT-Post-Merge-1": ["h100-cr", "l0_h100", 1, 5],
         "H100_PCIe-TensorRT-Post-Merge-2": ["h100-cr", "l0_h100", 2, 5],
         "H100_PCIe-TensorRT-Post-Merge-3": ["h100-cr", "l0_h100", 3, 5],
         "H100_PCIe-TensorRT-Post-Merge-4": ["h100-cr", "l0_h100", 4, 5],
         "H100_PCIe-TensorRT-Post-Merge-5": ["h100-cr", "l0_h100", 5, 5],
+        "H100_PCIe-FMHA-Post-Merge-1": ["h100-cr", "l0_h100", 1, 1],
         "B200_PCIe-Triton-Post-Merge-1": ["b100-ts2", "l0_b200", 1, 1],
         "B200_PCIe-PyTorch-Post-Merge-1": ["b100-ts2", "l0_b200", 1, 1],
         "B200_PCIe-TensorRT-Post-Merge-1": ["b100-ts2", "l0_b200", 1, 2],
@@ -2422,6 +2430,7 @@ def launchTestJobs(pipeline, testFilter)
         "pytorch": "-PyTorch-",
         "tensorrt": "-TensorRT-",
         "cpp": "-CPP-",
+        "fmha": "-FMHA-",
     ]
     def backendModeList = backendMode.collect { changeMap.get(it) }.flatten()
     def parallelJobsNoBackend = parallelJobsFiltered.findAll { key, _ ->
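A rough sketch of how a marker map like this can be used to keep only the stages for the requested backends (the variable names and sample stage list are assumptions, not the pipeline's actual code):

def changeMap = ["pytorch": "-PyTorch-", "tensorrt": "-TensorRT-", "cpp": "-CPP-", "fmha": "-FMHA-"]
def requestedBackends = ["fmha"]
def markers = requestedBackends.collect { changeMap.get(it) }
def stageNames = ["A10-FMHA-Post-Merge-1", "A10-TensorRT-Post-Merge-1", "A10-PyTorch-Post-Merge-1"]
def kept = stageNames.findAll { name -> markers.any { name.contains(it) } }
assert kept == ["A10-FMHA-Post-Merge-1"]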
@@ -2445,8 +2454,9 @@ def launchTestJobs(pipeline, testFilter)
     } else {
         echo "ONLY_ONE_GROUP_CHANGED mode is true. The group is: ${testFilter[(ONLY_ONE_GROUP_CHANGED)]}."
         def excludedBackends = new HashMap()
-        excludedBackends["PyTorch"] = ["-CPP-", "-TensorRT-", "-Triton-"]
-        excludedBackends["Triton"] = ["-PyTorch-", "-CPP-", "-TensorRT-"]
+        excludedBackends["PyTorch"] = ["-CPP-", "-TensorRT-", "-Triton-", "-FMHA-"]
+        excludedBackends["Triton"] = ["-PyTorch-", "-CPP-", "-TensorRT-", "-FMHA-"]
+        excludedBackends["FMHA"] = ["-PyTorch-", "-CPP-", "-TensorRT-", "-Triton-"]
         def group = testFilter[(ONLY_ONE_GROUP_CHANGED)]
         if (excludedBackends.containsKey(group)) {
            parallelJobsFiltered = parallelJobsFiltered.findAll { key, value ->