TensorRT-LLMs/tests/hlapi/test_llm_download.py

from tensorrt_llm.hlapi import LLM, ModelConfig
from tensorrt_llm.hlapi.utils import download_hf_model

prompts = ["A B C"]


def test_download_hf_model():
    # Download the HF checkpoint and check that the local directory exists.
    model_dir = download_hf_model("TinyLlama/TinyLlama-1.1B-Chat-v1.0")
    assert model_dir.exists()
    print(f"Downloaded model to {model_dir}")


def test_llm_with_model_downloaded():
    # Run generation through the high-level API, referring to the model
    # by its Hugging Face id.
    config = ModelConfig(model="TinyLlama/TinyLlama-1.1B-Chat-v1.0")
    llm = LLM(config)
    for output in llm.generate(prompts):
        print(output)
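
To run these tests ad hoc outside a CI setup, one option is to invoke pytest on this file directly. A minimal sketch, assuming pytest is installed and that the file path matches the repository layout shown above:

# Minimal runner sketch (not part of the test file itself).
# "-s" disables output capture so the print() calls above are visible.
import pytest

if __name__ == "__main__":
    raise SystemExit(
        pytest.main(["-s", "tests/hlapi/test_llm_download.py"]))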