TensorRT-LLM/examples/auto_deploy/.vscode/launch.json
Lucas Liebenwein 39eb120b96
[#7308] [feat] AutoDeploy: graph-less transformers mode for HF (#7635)
Signed-off-by: h-guo18 <67671475+h-guo18@users.noreply.github.com>
Signed-off-by: Lucas Liebenwein <11156568+lucaslie@users.noreply.github.com>
Co-authored-by: h-guo18 <67671475+h-guo18@users.noreply.github.com>
2025-09-18 10:44:24 +08:00

42 lines
1.5 KiB
JSON

{
  // VS Code debug configurations for the AutoDeploy example.
  // NOTE: this is JSONC — VS Code's launch.json parser accepts // comments
  // and trailing commas, so do not lint this file as strict RFC 8259 JSON.
  "version": "0.2.0",
  "configurations": [
    {
      // Debug the AutoDeploy example entry point with a fixed set of CLI args.
      "name": "build_and_run_ad.py",
      "type": "debugpy",
      "request": "launch",
      // Relative path — resolved against the "cwd" set at the bottom of this config.
      "program": "build_and_run_ad.py",
      "args": [
        "--model=meta-llama/Meta-Llama-3.1-8B-Instruct",
        "--args.world-size=2",
        "--args.runtime=demollm",
        "--args.compile-backend=torch-simple",
        "--args.attn-page-size=16",
        "--args.attn-backend=flashinfer",
        "--args.model-factory=AutoModelForCausalLM",
        "--benchmark.enabled=false",
        "--prompt.batch-size=2",
        // model-kwargs overrides — presumably shrink the model (3 hidden layers)
        // for fast debug iterations; confirm against build_and_run_ad.py's CLI.
        "--args.model-kwargs.num-hidden-layers=3",
        "--args.model-kwargs.num-attention-heads=32",
        "--prompt.sp-kwargs.max-tokens=128",
        // "--yaml-extra=config.yaml", // uncomment to load a custom extra yaml config file
        // "--dry-run", // uncomment to print the final config and return
      ],
      "console": "integratedTerminal",
      // Step into library code as well, not just workspace sources.
      "justMyCode": false,
      "cwd": "${workspaceFolder}/examples/auto_deploy"
    },
    {
      // Picked up by the Python extension when debugging tests
      // ("purpose": ["debug-test"]); "program": "${file}" also allows
      // launching the currently open file directly with F5.
      "name": "Python: Debug Tests",
      "type": "debugpy",
      "request": "launch",
      "program": "${file}",
      "purpose": [
        "debug-test",
      ],
      "console": "integratedTerminal",
      "justMyCode": false
    },
  ]
}