From d8862505b9c869cf71729e0d4a0efcf7c1f7f1fa Mon Sep 17 00:00:00 2001 From: xxi <95731198+xxi-nv@users.noreply.github.com> Date: Wed, 14 Jan 2026 18:28:08 +0800 Subject: [PATCH] [None][chore] enable EPLB for DEEPGEMM (#10617) Signed-off-by: xxi --- tensorrt_llm/_torch/modules/fused_moe/create_moe.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tensorrt_llm/_torch/modules/fused_moe/create_moe.py b/tensorrt_llm/_torch/modules/fused_moe/create_moe.py index f1559edfb6..530d903ad3 100644 --- a/tensorrt_llm/_torch/modules/fused_moe/create_moe.py +++ b/tensorrt_llm/_torch/modules/fused_moe/create_moe.py @@ -138,8 +138,9 @@ def create_moe_backend( moe_load_balancer = get_moe_load_balancer() if moe_load_balancer is not None: assert moe_cls in [ - WideEPMoE, CutlassFusedMoE, TRTLLMGenFusedMoE, CuteDslFusedMoE - ], "MoE Load Balance is only supported in WideEPMoE, CutlassFusedMoE, TRTLLMGenFusedMoE and CuteDslFusedMoE now." + WideEPMoE, CutlassFusedMoE, TRTLLMGenFusedMoE, CuteDslFusedMoE, + DeepGemmFusedMoE + ], "MoE Load Balance is only supported in WideEPMoE, CutlassFusedMoE, TRTLLMGenFusedMoE, CuteDslFusedMoE, and DeepGemmFusedMoE now." if bias: assert moe_cls in [CutlassFusedMoE, TritonFusedMoE, TRTLLMGenFusedMoE