chore(ci): use latest jetpack image for l4t (#7926)

This image is for hardware prior to JetPack 7. JetPack 7 broke compatibility with
older devices that are still in use, such as the AGX Orin and other Jetson boards.

While we do have l4t-cuda-13 images with SBSA support for newer NVIDIA
devices (Thor, DGX, etc.), for older hardware we are forced to keep the old
images around, as Ubuntu 24.04 does not seem to be supported there.
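
For reference, this is a condensed view of one of the updated l4t matrix entries (the llama-cpp image); all values are taken from the workflow diff below, with unrelated fields omitted:

    - build-type: 'cublas'
      cuda-major-version: "12"
      cuda-minor-version: "0"
      platforms: 'linux/arm64'
      tag-suffix: '-nvidia-l4t-arm64-llama-cpp'
      base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
      runs-on: 'ubuntu-24.04-arm'
      backend: "llama-cpp"
      dockerfile: "./backend/Dockerfile.llama-cpp"
      ubuntu-version: '2204'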

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
Author: Ettore Di Giacinto
Committed by: GitHub
Date: 2026-01-08 18:30:59 +01:00
Parent: 09bc2e4a00
Commit: b736db4bbe
2 changed files with 26 additions and 26 deletions


@@ -41,17 +41,17 @@ jobs:
include:
- build-type: 'l4t'
cuda-major-version: "12"
cuda-minor-version: "9"
cuda-minor-version: "0"
platforms: 'linux/arm64'
tag-latest: 'auto'
tag-suffix: '-nvidia-l4t-diffusers'
runs-on: 'ubuntu-24.04-arm'
base-image: "ubuntu:24.04"
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
skip-drivers: 'true'
backend: "diffusers"
dockerfile: "./backend/Dockerfile.python"
context: "./"
-ubuntu-version: '2404'
+ubuntu-version: '2204'
- build-type: ''
cuda-major-version: ""
cuda-minor-version: ""
@@ -766,12 +766,12 @@ jobs:
tag-latest: 'auto'
tag-suffix: '-nvidia-l4t-vibevoice'
runs-on: 'ubuntu-24.04-arm'
base-image: "ubuntu:24.04"
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
skip-drivers: 'true'
backend: "vibevoice"
dockerfile: "./backend/Dockerfile.python"
context: "./"
-ubuntu-version: '2404'
+ubuntu-version: '2204'
- build-type: 'l4t'
cuda-major-version: "12"
cuda-minor-version: "0"
@@ -779,12 +779,12 @@ jobs:
tag-latest: 'auto'
tag-suffix: '-nvidia-l4t-kokoro'
runs-on: 'ubuntu-24.04-arm'
base-image: "ubuntu:24.04"
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
skip-drivers: 'true'
backend: "kokoro"
dockerfile: "./backend/Dockerfile.python"
context: "./"
-ubuntu-version: '2404'
+ubuntu-version: '2204'
# SYCL additional backends
- build-type: 'intel'
cuda-major-version: ""
@@ -894,17 +894,17 @@ jobs:
ubuntu-version: '2404'
- build-type: 'cublas'
cuda-major-version: "12"
cuda-minor-version: "9"
cuda-minor-version: "0"
platforms: 'linux/arm64'
skip-drivers: 'false'
tag-latest: 'auto'
tag-suffix: '-nvidia-l4t-arm64-llama-cpp'
base-image: "ubuntu:24.04"
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
runs-on: 'ubuntu-24.04-arm'
backend: "llama-cpp"
dockerfile: "./backend/Dockerfile.llama-cpp"
context: "./"
-ubuntu-version: '2404'
+ubuntu-version: '2204'
- build-type: 'vulkan'
cuda-major-version: ""
cuda-minor-version: ""
@@ -973,17 +973,17 @@ jobs:
ubuntu-version: '2404'
- build-type: 'cublas'
cuda-major-version: "12"
cuda-minor-version: "9"
cuda-minor-version: "0"
platforms: 'linux/arm64'
skip-drivers: 'false'
tag-latest: 'auto'
tag-suffix: '-nvidia-l4t-arm64-stablediffusion-ggml'
base-image: "ubuntu:24.04"
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
runs-on: 'ubuntu-24.04-arm'
backend: "stablediffusion-ggml"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
-ubuntu-version: '2404'
+ubuntu-version: '2204'
# whisper
- build-type: ''
cuda-major-version: ""
@@ -1039,17 +1039,17 @@ jobs:
ubuntu-version: '2404'
- build-type: 'cublas'
cuda-major-version: "12"
cuda-minor-version: "9"
cuda-minor-version: "0"
platforms: 'linux/arm64'
skip-drivers: 'false'
tag-latest: 'auto'
tag-suffix: '-nvidia-l4t-arm64-whisper'
base-image: "ubuntu:24.04"
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
runs-on: 'ubuntu-24.04-arm'
backend: "whisper"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
-ubuntu-version: '2404'
+ubuntu-version: '2204'
- build-type: 'hipblas'
cuda-major-version: ""
cuda-minor-version: ""
@@ -1139,12 +1139,12 @@ jobs:
skip-drivers: 'true'
tag-latest: 'auto'
tag-suffix: '-nvidia-l4t-arm64-rfdetr'
base-image: "ubuntu:24.04"
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
runs-on: 'ubuntu-24.04-arm'
backend: "rfdetr"
dockerfile: "./backend/Dockerfile.python"
context: "./"
-ubuntu-version: '2404'
+ubuntu-version: '2204'
# exllama2
- build-type: ''
cuda-major-version: ""
@@ -1192,12 +1192,12 @@ jobs:
skip-drivers: 'true'
tag-latest: 'auto'
tag-suffix: '-nvidia-l4t-arm64-chatterbox'
base-image: "ubuntu:24.04"
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
runs-on: 'ubuntu-24.04-arm'
backend: "chatterbox"
dockerfile: "./backend/Dockerfile.python"
context: "./"
-ubuntu-version: '2404'
+ubuntu-version: '2204'
# runs out of space on the runner
# - build-type: 'hipblas'
# cuda-major-version: ""
@@ -1259,12 +1259,12 @@ jobs:
skip-drivers: 'true'
tag-latest: 'auto'
tag-suffix: '-nvidia-l4t-arm64-neutts'
base-image: "ubuntu:24.04"
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
runs-on: 'ubuntu-24.04-arm'
backend: "neutts"
dockerfile: "./backend/Dockerfile.python"
context: "./"
-ubuntu-version: '2404'
+ubuntu-version: '2204'
- build-type: ''
cuda-major-version: ""
cuda-minor-version: ""


@@ -162,16 +162,16 @@
include:
- build-type: 'cublas'
cuda-major-version: "12"
cuda-minor-version: "9"
cuda-minor-version: "0"
platforms: 'linux/arm64'
tag-latest: 'auto'
tag-suffix: '-nvidia-l4t-arm64'
base-image: "ubuntu:24.04"
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
runs-on: 'ubuntu-24.04-arm'
makeflags: "--jobs=4 --output-sync=target"
skip-drivers: 'true'
ubuntu-version: "2404"
ubuntu-codename: 'noble'
ubuntu-version: "2204"
ubuntu-codename: 'jammy'
- build-type: 'cublas'
cuda-major-version: "13"
cuda-minor-version: "0"