[None][feat] Upgrade NIXL to v0.8.0 (#9707)

Signed-off-by: Yoray Zack <62789610+zackyoray@users.noreply.github.com>
Signed-off-by: zackyoray 
Signed-off-by: Bo Deng 
Co-authored-by: Bo Deng
This commit is contained in:
zackyoray 2025-12-12 14:21:10 +02:00 committed by GitHub
parent e767fc649a
commit d5b9ad91c9
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 9 additions and 6 deletions

View File

@@ -4,7 +4,7 @@ set -ex
GITHUB_URL="https://github.com" GITHUB_URL="https://github.com"
UCX_INSTALL_PATH="/usr/local/ucx/" UCX_INSTALL_PATH="/usr/local/ucx/"
CUDA_PATH="/usr/local/cuda" CUDA_PATH="/usr/local/cuda"
NIXL_VERSION="0.7.1" NIXL_VERSION="0.8.0"
NIXL_REPO="https://github.com/ai-dynamo/nixl.git" NIXL_REPO="https://github.com/ai-dynamo/nixl.git"
OLD_LD_LIBRARY_PATH=$LD_LIBRARY_PATH OLD_LD_LIBRARY_PATH=$LD_LIBRARY_PATH
@@ -18,11 +18,14 @@ fi
if [ -n "${GITHUB_MIRROR}" ]; then if [ -n "${GITHUB_MIRROR}" ]; then
export PIP_INDEX_URL="https://urm.nvidia.com/artifactory/api/pypi/pypi-remote/simple" export PIP_INDEX_URL="https://urm.nvidia.com/artifactory/api/pypi/pypi-remote/simple"
fi fi
pip3 install --no-cache-dir meson ninja pybind11 pip3 install --no-cache-dir meson ninja pybind11 setuptools
git clone --depth 1 -b ${NIXL_VERSION} ${NIXL_REPO} git clone --depth 1 -b ${NIXL_VERSION} ${NIXL_REPO}
cd nixl cd nixl
# Remove POSIX backend compilation from meson.build
sed -i "/^subdir('posix')/d" src/plugins/meson.build
CUDA_SO_PATH=$(find "/usr/local" -name "libcuda.so.1" 2>/dev/null | head -n1) CUDA_SO_PATH=$(find "/usr/local" -name "libcuda.so.1" 2>/dev/null | head -n1)
if [[ -z "$CUDA_SO_PATH" ]]; then if [[ -z "$CUDA_SO_PATH" ]]; then

View File

@@ -13,7 +13,7 @@
# images are adopted from PostMerge pipelines, the abbreviated commit hash is used instead. # images are adopted from PostMerge pipelines, the abbreviated commit hash is used instead.
IMAGE_NAME=urm.nvidia.com/sw-tensorrt-docker/tensorrt-llm IMAGE_NAME=urm.nvidia.com/sw-tensorrt-docker/tensorrt-llm
LLM_DOCKER_IMAGE=urm.nvidia.com/sw-tensorrt-docker/tensorrt-llm:pytorch-25.10-py3-x86_64-ubuntu24.04-trt10.13.3.9-skip-tritondevel-202512091705-9823 LLM_DOCKER_IMAGE=urm.nvidia.com/sw-tensorrt-docker/tensorrt-llm:pytorch-25.10-py3-x86_64-ubuntu24.04-trt10.13.3.9-skip-tritondevel-202512121105-9707
LLM_SBSA_DOCKER_IMAGE=urm.nvidia.com/sw-tensorrt-docker/tensorrt-llm:pytorch-25.10-py3-aarch64-ubuntu24.04-trt10.13.3.9-skip-tritondevel-202512091705-9823 LLM_SBSA_DOCKER_IMAGE=urm.nvidia.com/sw-tensorrt-docker/tensorrt-llm:pytorch-25.10-py3-aarch64-ubuntu24.04-trt10.13.3.9-skip-tritondevel-202512121105-9707
LLM_ROCKYLINUX8_PY310_DOCKER_IMAGE=urm.nvidia.com/sw-tensorrt-docker/tensorrt-llm:cuda-13.0.2-devel-rocky8-x86_64-rocky8-py310-trt10.13.3.9-skip-tritondevel-202512091705-9823 LLM_ROCKYLINUX8_PY310_DOCKER_IMAGE=urm.nvidia.com/sw-tensorrt-docker/tensorrt-llm:cuda-13.0.2-devel-rocky8-x86_64-rocky8-py310-trt10.13.3.9-skip-tritondevel-202512121105-9707
LLM_ROCKYLINUX8_PY312_DOCKER_IMAGE=urm.nvidia.com/sw-tensorrt-docker/tensorrt-llm:cuda-13.0.2-devel-rocky8-x86_64-rocky8-py312-trt10.13.3.9-skip-tritondevel-202512091705-9823 LLM_ROCKYLINUX8_PY312_DOCKER_IMAGE=urm.nvidia.com/sw-tensorrt-docker/tensorrt-llm:cuda-13.0.2-devel-rocky8-x86_64-rocky8-py312-trt10.13.3.9-skip-tritondevel-202512121105-9707