Mirror of https://github.com/NVIDIA/TensorRT-LLM.git, synced 2026-01-13 22:18:36 +08:00
[TRTLLM-9164][infra] Enable checking duplicate items in waives.txt in pre-commit (#9265)
Signed-off-by: qqiao <qqiao@nvidia.com>
parent a39e8c5567
commit b018b2698d
@@ -1460,6 +1460,11 @@ repos:
         entry: ./scripts/format_test_list.py
         language: script
         files: tests/integration/test_lists/.*\.txt$
+      - id: waive list check
+        name: Checks for duplicated test items in waives.txt
+        entry: ./scripts/check_test_list.py --check-duplicate-waives
+        language: script
+        pass_filenames: false
       - id: DCO check
         name: Checks the commit message for a developer certificate of origin signature
         entry: ./scripts/dco_check.py
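The new hook wires the duplicate check into every commit: pre-commit runs the configured entry command and blocks the commit whenever it exits nonzero, and `pass_filenames: false` means no staged file paths are appended to the command. A minimal sketch of that contract, assuming the script is invoked from the repo root (this emulation is illustrative only; pre-commit itself handles the invocation):

    #!/usr/bin/env python3
    # Sketch: emulate what pre-commit does for the "waive list check" hook.
    # pre-commit runs the entry and fails the commit on a nonzero exit code;
    # the script must be executable (this commit flips the file mode).
    import subprocess
    import sys

    result = subprocess.run(
        ["./scripts/check_test_list.py", "--check-duplicate-waives"])
    if result.returncode != 0:
        print("waive list check failed: fix duplicates in waives.txt")
    sys.exit(result.returncode)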
scripts/check_test_list.py  88  (Normal file → Executable file)
@@ -1,3 +1,4 @@
+#!/usr/bin/env python3
 """
 This script is used to verify test lists for L0, QA, and waives file.

@@ -110,14 +111,14 @@ def verify_qa_test_lists(llm_src):
             f.write(f"{cleaned_line}\n")


-def verify_waive_list(llm_src, args):
+def check_waive_duplicates(llm_src):
+    """Check for duplicate entries in waives.txt and write report."""
     waives_list_path = f"{llm_src}/tests/integration/test_lists/waives.txt"
     dup_cases_record = f"{llm_src}/dup_cases.txt"
-    non_existent_cases_record = f"{llm_src}/nonexits_cases.json"
-    # Remove prefix and markers in wavies.txt
-    dedup_lines = {
-    }  # Track all occurrences: processed_line -> [(line_no, original_line), ...]
-    processed_lines = set()
+
+    # Track all occurrences: processed_line -> [(line_no, original_line), ...]
+    dedup_lines = {}

     with open(waives_list_path, "r") as f:
         lines = f.readlines()
@@ -125,6 +126,43 @@ def verify_waive_list(llm_src, args):
         original_line = line.strip()
         line = line.strip()

         if not line:
             continue

+        # Check for SKIP marker in waives.txt and split by the first occurrence
+        line = line.split(" SKIP", 1)[0].strip()
+
+        # Track all occurrences of each processed line
+        if line in dedup_lines:
+            dedup_lines[line].append((line_no, original_line))
+        else:
+            dedup_lines[line] = [(line_no, original_line)]
+
+    # Write duplicate report after processing all lines
+    for processed_line, occurrences in dedup_lines.items():
+        if len(occurrences) > 1:
+            with open(dup_cases_record, "a") as f:
+                f.write(
+                    f"Duplicate waive records found for '{processed_line}' ({len(occurrences)} occurrences):\n"
+                )
+                for i, (line_no, original_line) in enumerate(occurrences, 1):
+                    f.write(
+                        f"    Occurrence {i} at line {line_no}: '{original_line}'\n"
+                    )
+                f.write(f"\n")
+
+
+def verify_waive_list(llm_src, args):
+    waives_list_path = f"{llm_src}/tests/integration/test_lists/waives.txt"
+    non_existent_cases_record = f"{llm_src}/nonexits_cases.json"
+
+    processed_lines = set()
+    with open(waives_list_path, "r") as f:
+        lines = f.readlines()
+
+    for line in lines:
+        line = line.strip()
+
+        if not line:
+            continue
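The extracted `check_waive_duplicates` keys each waive entry on the text before the first " SKIP" marker, so two waives for the same test collide even when their bug links differ. A self-contained sketch of that core logic, using hypothetical sample lines (and `setdefault` in place of the if/else above):

    # Sketch of the duplicate-detection core with hypothetical sample data.
    # Keys drop everything from the first " SKIP" onward, so entries that
    # differ only in their bug URL still count as duplicates.
    sample = [
        "foo/test_a.py::test_x SKIP (https://nvbugs/111)",
        "foo/test_b.py::test_y SKIP (https://nvbugs/222)",
        "foo/test_a.py::test_x SKIP (https://nvbugs/333)",  # duplicate of line 1
    ]

    dedup_lines = {}  # processed_line -> [(line_no, original_line), ...]
    for line_no, line in enumerate(sample, 1):
        original_line = line.strip()
        key = original_line.split(" SKIP", 1)[0].strip()
        dedup_lines.setdefault(key, []).append((line_no, original_line))

    for key, occurrences in dedup_lines.items():
        if len(occurrences) > 1:
            print(f"Duplicate waive records for '{key}' ({len(occurrences)} occurrences)")
    # -> Duplicate waive records for 'foo/test_a.py::test_x' (2 occurrences)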
@@ -135,12 +173,6 @@ def verify_waive_list(llm_src, args):
         # Check for SKIP marker in waives.txt and split by the first occurrence
         line = line.split(" SKIP", 1)[0].strip()

-        # Track all occurrences of each processed line
-        if line in dedup_lines:
-            dedup_lines[line].append((line_no, original_line))
-        else:
-            dedup_lines[line] = [(line_no, original_line)]
-
         # If the line starts with 'full:', process it
         if line.startswith("full:"):
             line = line.split("/", 1)[1].lstrip("/")
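The `full:` branch that remains in `verify_waive_list` strips a platform-qualified prefix before the test id is compared. Assuming a waives.txt entry of the form `full:<platform>/<test-id>` (the platform segment below is a hypothetical example), the transformation works like this:

    # Hypothetical worked example of the 'full:' normalization above.
    line = "full:GH200/accuracy/test_llm_api.py::test_case SKIP (https://nvbugs/000)"
    line = line.split(" SKIP", 1)[0].strip()   # drop the SKIP marker and reason
    if line.startswith("full:"):
        line = line.split("/", 1)[1].lstrip("/")
    print(line)  # -> accuracy/test_llm_api.py::test_case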
@@ -173,19 +205,6 @@ def verify_waive_list(llm_src, args):
         processed_lines.add(line)

-    # Write duplicate report after processing all lines
-    for processed_line, occurrences in dedup_lines.items():
-        if len(occurrences) > 1:
-            with open(dup_cases_record, "a") as f:
-                f.write(
-                    f"Duplicate waive records found for '{processed_line}' ({len(occurrences)} occurrences):\n"
-                )
-                for i, (line_no, original_line) in enumerate(occurrences, 1):
-                    f.write(
-                        f"    Occurrence {i} at line {line_no}: '{original_line}'\n"
-                    )
-                f.write(f"\n")
-
     # Write the processed lines to a tmp file
     tmp_waives_file = f"{llm_src}/processed_waive_list.txt"
     with open(tmp_waives_file, "w") as f:
@@ -210,11 +229,19 @@ def main():
     parser.add_argument("--waive",
                         action="store_true",
                         help="Enable test list verification for waive file.")
+    parser.add_argument(
+        "--check-duplicate-waives",
+        action="store_true",
+        help="Enable duplicate check in waives.txt (fails if duplicates found)."
+    )
     args = parser.parse_args()
     script_dir = os.path.dirname(os.path.realpath(__file__))
     llm_src = os.path.abspath(os.path.join(script_dir, "../"))

-    install_python_dependencies(llm_src)
+    # Only skip installing dependencies if ONLY --check-duplicates is used
+    if args.l0 or args.qa or args.waive:
+        install_python_dependencies(llm_src)

     pass_flag = True
     # Verify L0 test lists
     if args.l0:
@@ -243,6 +270,12 @@ def main():
         print("-----------Skipping waive list verification.-----------",
               flush=True)

+    # Check for duplicates in waives.txt if requested
+    if args.check_duplicate_waives:
+        print("-----------Checking for duplicates in waives.txt...-----------",
+              flush=True)
+        check_waive_duplicates(llm_src)
+
     invalid_json_file = os.path.join(llm_src, "invalid_tests.json")
     if os.path.isfile(invalid_json_file) and os.path.getsize(
             invalid_json_file) > 0:
@@ -261,7 +294,8 @@ def main():
         print(
             "Duplicate test names found in waives.txt, please delete one or combine them first!!!\n"
        )
-        # pass_flag = False
+        if args.check_duplicate_waives:
+            pass_flag = False

     non_existent_cases_file = os.path.join(llm_src, "nonexits_cases.json")
     if os.path.isfile(non_existent_cases_file) and os.path.getsize(
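The report file doubles as the failure signal: the duplicate check previously only warned (the commented-out `pass_flag = False`), while `--check-duplicate-waives` now turns a non-empty dup_cases.txt into a failing run. A condensed sketch of that flow, assuming `main()` ultimately converts `pass_flag` into the process exit code (the tail of `main()` is not shown in this diff, and the path below is a placeholder):

    # Sketch: how a non-empty dup_cases.txt fails the pre-commit hook,
    # assuming pass_flag drives the exit code (not shown in the hunk above).
    import os
    import sys

    def exit_status(llm_src, check_duplicate_waives):
        pass_flag = True
        dup_cases_file = os.path.join(llm_src, "dup_cases.txt")
        if os.path.isfile(dup_cases_file) and os.path.getsize(dup_cases_file) > 0:
            # Warn-only for plain --waive runs; hard failure in hook mode.
            if check_duplicate_waives:
                pass_flag = False
        return 0 if pass_flag else 1

    sys.exit(exit_status("/path/to/TensorRT-LLM", check_duplicate_waives=True))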
tests/integration/test_lists/waives.txt
@@ -379,8 +379,6 @@ accuracy/test_disaggregated_serving.py::TestQwen3_8B::test_auto_dtype[True] SKIP
 disaggregated/test_disaggregated.py::test_disaggregated_deepseek_v3_lite_bf16_empty_batch[DeepSeek-V3-Lite-bf16] SKIP (https://nvbugs/5601682)
 accuracy/test_disaggregated_serving.py::TestLlama3_1_8BInstruct::test_eagle3[eagle3_one_model=False-overlap_scheduler=False] SKIP (https://nvbugs/5655584)
 accuracy/test_disaggregated_serving.py::TestQwen3_8B::test_chunked_prefill SKIP (https://nvbugs/5608930)
-accuracy/test_disaggregated_serving.py::TestQwen3_8B::test_auto_dtype[False] SKIP (https://nvbugspro.nvidia.com/bug/5651854)
-test_e2e.py::test_ptp_quickstart_multimodal_chunked_prefill[phi4-multimodal-instruct-fp4-multimodals/Phi-4-multimodal-instruct-FP4-0.8-image] SKIP (https://nvbugs/5568836)
 test_e2e.py::test_ptp_quickstart_multimodal_chunked_prefill[phi4-multimodal-instruct-fp4-multimodals/Phi-4-multimodal-instruct-FP4-0.8-image] SKIP (https://nvbugs/5568836)
 test_e2e.py::test_ptp_quickstart_multimodal_kv_cache_reuse[phi4-multimodal-instruct-fp4-multimodals/Phi-4-multimodal-instruct-FP4-0.8-image] SKIP (https://nvbugs/5568836)
 test_e2e.py::test_ptp_quickstart_multimodal_multiturn[phi4-multimodal-instruct-fp4-multimodals/Phi-4-multimodal-instruct-FP4] SKIP (https://nvbugs/5568836)