mirror of
https://github.com/NVIDIA/TensorRT-LLM.git
synced 2026-01-14 06:27:45 +08:00
Waive unittest/trt/model/test_mamba.py::TestMamba::test_loaders_mamba_130m_hf_from_checkpoint. Will fix it later. (#3356)
Signed-off-by: Fanrong Li <23290157+lfr-0531@users.noreply.github.com>
This commit is contained in:
parent
31422e7e46
commit
62e0876e39
@ -366,6 +366,11 @@ class TestMamba(unittest.TestCase):
|
||||
pytest.skip(f'Skipping since the path {hf_path} does not exist.')
|
||||
dtype = 'float16'
|
||||
|
||||
if path == 'mamba-130m-hf' and load_mode == 'from_checkpoint':
|
||||
pytest.skip(
|
||||
f'Skipping since it is a known issue. Will be fixed in the near future.'
|
||||
)
|
||||
|
||||
# get hf mamba
|
||||
hf_mamba = AutoModelForCausalLM.from_pretrained(
|
||||
hf_path, device_map='cpu', torch_dtype=str_dtype_to_torch(dtype))
|
||||
|
||||
Loading…
Reference in New Issue
Block a user