Mirror of https://github.com/NVIDIA/TensorRT-LLM.git (synced 2026-01-14 06:27:45 +08:00)
[Test] - Correctly waive the Slurm test stage (#4677)
Signed-off-by: Yanchao Lu <yanchaol@nvidia.com>
Commit d6e1b71388 (parent 268171bc66)
@@ -1475,9 +1475,8 @@ def launchTestJobs(pipeline, testFilter, dockerNode=null)
     }]]}
     fullSet = parallelJobs.keySet()

-    slurmX86Configs = [
-        // "RTXPro6000-PyTorch-[Post-Merge]-1": ["rtx-pro-6000", "l0_rtx_pro_6000", 1, 1],
-    ]
+    slurmX86Configs = [:]
+    // "RTXPro6000-PyTorch-[Post-Merge]-1": ["rtx-pro-6000", "l0_rtx_pro_6000", 1, 1],
     fullSet += slurmX86Configs.keySet()

     parallelSlurmJobs = slurmX86Configs.collectEntries{key, values -> [key, [createKubernetesPodConfig(LLM_DOCKER_IMAGE, "slurm", "amd64"), {
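A plausible reading of the fix, inferred from the diff: in Groovy, a bracketed literal whose entries are all commented out collapses to an empty List, whereas `[:]` is an empty Map, and the surrounding pipeline code calls Map-only methods (`keySet()`, `collectEntries`). The sketch below illustrates that distinction in plain Groovy outside the Jenkins pipeline; the `asList`/`asMap` names are illustrative and not taken from the Jenkinsfile.

    // Minimal sketch (plain Groovy) of why the literal matters once every
    // Slurm config entry is commented out.
    def asList = []   // "[ /* only comments inside */ ]" parses as an empty List
    def asMap  = [:]  // "[:]" is an empty Map

    assert asMap.keySet().isEmpty()                        // Map supports keySet()
    assert asMap.collectEntries { k, v -> [k, v] } == [:]  // yields no Slurm jobs

    try {
        asList.keySet()  // List has no keySet(); an empty-List config would throw here
        assert false
    } catch (MissingMethodException expected) {
        // the failure mode avoided by keeping slurmX86Configs a Map
    }

With `slurmX86Configs` kept as an empty Map, `fullSet += slurmX86Configs.keySet()` adds nothing and `collectEntries` produces no Slurm jobs, so the stage is effectively waived without breaking the pipeline.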