Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 14 additions & 0 deletions CHANGELOG
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,20 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [3.3.0] - 2026-02-17

### Added
- Added `miniconda` support
- Added CUDA 12.6.3 and cuDNN 9.5.1.17 support for Ubuntu 24.04

### Fixed
- Fixed the command that was listing the available cuDNN support
- Fixed the `cuda_devel.txt` to actually support Ubuntu 24.04 onwards by moving `libtinfo5` and `libncursesw5` out of it and adding them to the python generation function
- Fixed `is_cuda_version_supported()` not using 'ubuntu_version' directly, since it was already the 'flat' version

### Changed
- Changed the base image of NVIDIA builds. Now we just use ubuntu images as base images and not `nvidia/opengl`

## [3.2.0] - 2025-05-09

### Added
Expand Down
13 changes: 7 additions & 6 deletions examples/noetic_nvidia_custom.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,13 @@ cudnn_version: 8.9.6.50

# Define here extra packages to be installed.
# Supported are:
# tmux Supports also custom version: Check version tags at https://github.com/tmux/tmux.git
# llvm Supports also custom version: Check version tags at https://apt.llvm.org/
# vscode Version is whatever latest is available
# conan Version is whatever latest is available
# cpplint Version is whatever latest is available
# meld Version is whatever latest is available
# tmux Supports also custom version: Check version tags at https://github.com/tmux/tmux.git
# llvm Supports also custom version: Check version tags at https://apt.llvm.org/
# vscode Version is whatever latest is available
# conan Version is whatever latest is available
# cpplint Version is whatever latest is available
# meld Version is whatever latest is available
# miniconda Version is whatever latest is available
#
# Packages that are anyway installed but support a custom version:
# cmake Check https://github.com/Kitware/CMake.git
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "turludock"
version = "3.2.0"
version = "3.3.0"
description = "Builds ROS docker images that support GUI with either X11 or Wayland."
authors = [
"Athanasios <dev@tasoglou.net>",
Expand Down
20 changes: 20 additions & 0 deletions turludock/assets/config_files/nvidia_cuda.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,23 @@
# https://gitlab.com/nvidia/container-images/cuda/-/tree/master/dist/12.6.3
'12.6.3':
ubuntu2404:
cudnn_version: '9.5.1.17'
compat: 12-6
cudart: 12-6=12.6.77-1
nvidia_require_cuda: "cuda>=12.6 brand=unknown,driver>=470,driver<471 brand=grid,driver>=470,driver<471 brand=tesla,driver>=470,driver<471 brand=nvidia,driver>=470,driver<471 brand=quadro,driver>=470,driver<471 brand=quadrortx,driver>=470,driver<471 brand=nvidiartx,driver>=470,driver<471 brand=vapps,driver>=470,driver<471 brand=vpc,driver>=470,driver<471 brand=vcs,driver>=470,driver<471 brand=vws,driver>=470,driver<471 brand=cloudgaming,driver>=470,driver<471 brand=unknown,driver>=535,driver<536 brand=grid,driver>=535,driver<536 brand=tesla,driver>=535,driver<536 brand=nvidia,driver>=535,driver<536 brand=quadro,driver>=535,driver<536 brand=quadrortx,driver>=535,driver<536 brand=nvidiartx,driver>=535,driver<536 brand=vapps,driver>=535,driver<536 brand=vpc,driver>=535,driver<536 brand=vcs,driver>=535,driver<536 brand=vws,driver>=535,driver<536 brand=cloudgaming,driver>=535,driver<536 brand=unknown,driver>=550,driver<551 brand=grid,driver>=550,driver<551 brand=tesla,driver>=550,driver<551 brand=nvidia,driver>=550,driver<551 brand=quadro,driver>=550,driver<551 brand=quadrortx,driver>=550,driver<551 brand=nvidiartx,driver>=550,driver<551 brand=vapps,driver>=550,driver<551 brand=vpc,driver>=550,driver<551 brand=vcs,driver>=550,driver<551 brand=vws,driver>=550,driver<551 brand=cloudgaming,driver>=550,driver<551"
cuda_lib: 12-6=12.6.3-1
nvml: 12-6=12.6.77-1
nvprof: 12-6=12.6.80-1
libnpp: 12-6=12.3.1.54-1
libcusparse: 12-6=12.5.4.2-1
libcublas: 12-6=12.6.4.1-1
libnccl: 2.23.4-1+cuda12.6
nsight_compute: 12-6=12.6.3-1
nvtx: 12-6=12.6.77-1
nvidia_base_url: https://gitlab.com/nvidia/container-images/cuda/-/blob/master/dist/12.6.3/ubuntu2404/base/Dockerfile
nvidia_devel_url: https://gitlab.com/nvidia/container-images/cuda/-/blob/master/dist/12.6.3/ubuntu2404/devel/Dockerfile
nvidia_runtime_url: https://gitlab.com/nvidia/container-images/cuda/-/blob/master/dist/12.6.3/ubuntu2404/runtime/Dockerfile

# https://gitlab.com/nvidia/container-images/cuda/-/tree/master/dist/12.4.1
'12.4.1':
ubuntu2004:
Expand Down
13 changes: 12 additions & 1 deletion turludock/assets/config_files/nvidia_cudnn.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,4 +16,15 @@
libcudnn_version: 8.9.6.50
libcudnn_revision: -1+cuda11.8
nvidia_devel_url: https://gitlab.com/nvidia/container-images/cuda/-/blob/master/dist/11.8.0/ubuntu2204/devel/cudnn8/Dockerfile
nvidia_runtime_url: https://gitlab.com/nvidia/container-images/cuda/-/blob/master/dist/11.8.0/ubuntu2204/runtime/cudnn8/Dockerfile
nvidia_runtime_url: https://gitlab.com/nvidia/container-images/cuda/-/blob/master/dist/11.8.0/ubuntu2204/runtime/cudnn8/Dockerfile

'9.5.1.17':
ubuntu2404:
cuda_version:
- '12.6.3'
libcudnn_package: libcudnn9-cuda-12
libcudnn_dev_package: libcudnn9-dev-cuda-12
libcudnn_version: 9.5.1.17
libcudnn_revision: '-1'
nvidia_devel_url: https://gitlab.com/nvidia/container-images/cuda/-/blob/master/dist/12.6.3/ubuntu2404/devel/cudnn/Dockerfile
nvidia_runtime_url: https://gitlab.com/nvidia/container-images/cuda/-/blob/master/dist/12.6.3/ubuntu2404/runtime/cudnn/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# Select ROS version.
ros_version: jazzy

# Select which GPU driver your host-machine uses for its GUIs.
gpu_driver: nvidia

# CUDA and CUDNN configuration (if applicable)
cuda_version: 12.6.3

# Define here extra packages to be installed.
extra_packages:
- cmake: v3.29.3
- tmux: 3.4
- llvm: 18
- meld
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# Select ROS version.
ros_version: jazzy

# Select which GPU driver your host-machine uses for its GUIs.
gpu_driver: nvidia

# CUDA and CUDNN configuration (if applicable)
cuda_version: 12.6.3
cudnn_version: 9.5.1.17

# Define here extra packages to be installed.
extra_packages:
- cmake: v3.29.3
- tmux: 3.4
- llvm: 18
- meld
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
# Install packages without prompting the user to answer any questions
ENV DEBIAN_FRONTEND noninteractive
ENV DEBIAN_FRONTEND=noninteractive
2 changes: 1 addition & 1 deletion turludock/assets/dockerfile_templates/header_info.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# Common maintainer and meta-data info
MAINTAINER Athanasios Tasoglou <dev@tasoglou.net>
LABEL maintainer="Athanasios Tasoglou <dev@tasoglou.net>"
LABEL Description="$docker_label_description" Vendor="TurluCode"
LABEL com.turlucode.ros.version="$ros_version_short"
6 changes: 3 additions & 3 deletions turludock/assets/dockerfile_templates/locale.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
# Update locale
RUN locale-gen en_US.UTF-8
ENV LANG en_US.UTF-8
ENV LANGUAGE en_US:en
ENV LC_ALL en_US.UTF-8
ENV LANG=en_US.UTF-8
ENV LANGUAGE=en_US:en
ENV LC_ALL=en_US.UTF-8
13 changes: 13 additions & 0 deletions turludock/assets/dockerfile_templates/miniconda.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Install latest stable Miniconda (Python 3.x)
ENV CONDA_DIR=/opt/conda
ENV PATH=$CONDA_DIR/bin:$PATH

RUN apt-get update && apt-get install -y curl bzip2 && \
curl -fsSL https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -o /tmp/miniconda.sh && \
bash /tmp/miniconda.sh -b -p $CONDA_DIR && \
rm /tmp/miniconda.sh && \
$CONDA_DIR/bin/conda clean -afy && \
ln -s $CONDA_DIR/etc/profile.d/conda.sh /etc/profile.d/conda.sh && \
echo ". $CONDA_DIR/etc/profile.d/conda.sh" >> /root/.bashrc && \
echo ". $CONDA_DIR/etc/profile.d/conda.sh" >> /root/.zshrc && \
apt-get clean && rm -rf /var/lib/apt/lists/*
14 changes: 7 additions & 7 deletions turludock/assets/dockerfile_templates/nvidia/cuda_base.txt
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
# CUDA base
# Setup environmental variables
ENV CUDA_VERSION $cuda_version
ENV NVARCH x86_64
ENV NVIDIA_REQUIRE_CUDA "$nvidia_require_cuda"
ENV CUDA_VERSION=$cuda_version
ENV NVARCH=x86_64
ENV NVIDIA_REQUIRE_CUDA="$nvidia_require_cuda"

RUN apt-get update && apt-get install -y --no-install-recommends \
gnupg2 ca-certificates && \
Expand All @@ -20,12 +20,12 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
RUN echo "/usr/local/nvidia/lib" >> /etc/ld.so.conf.d/nvidia.conf \
&& echo "/usr/local/nvidia/lib64" >> /etc/ld.so.conf.d/nvidia.conf

ENV PATH /usr/local/nvidia/bin:/usr/local/cuda/bin:$${PATH}
ENV LD_LIBRARY_PATH /usr/local/nvidia/lib:/usr/local/nvidia/lib64
ENV PATH=/usr/local/nvidia/bin:/usr/local/cuda/bin:$${PATH}
ENV LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64

# TODO: Needed?
# COPY NGC-DL-CONTAINER-LICENSE /

# nvidia-container-runtime
ENV NVIDIA_VISIBLE_DEVICES all
ENV NVIDIA_DRIVER_CAPABILITIES compute,utility
ENV NVIDIA_VISIBLE_DEVICES=all
ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility
3 changes: 1 addition & 2 deletions turludock/assets/dockerfile_templates/nvidia/cuda_devel.txt
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
# CUDA devel
RUN apt-get update && apt-get install -y --no-install-recommends \
libtinfo5 libncursesw5 \
cuda-cudart-dev-$cuda_cudart_version \
cuda-command-line-tools-$cuda_lib_version \
cuda-minimal-build-$cuda_lib_version \
Expand All @@ -17,4 +16,4 @@ RUN apt-get update && apt-get install -y --no-install-recommends \

# Keep apt from auto upgrading the cublas and nccl packages. See https://gitlab.com/nvidia/container-images/cuda/-/issues/88
RUN apt-mark hold libcublas-dev-$cuda_libcublas_version libnccl-dev
ENV LIBRARY_PATH /usr/local/cuda/lib64/stubs
ENV LIBRARY_PATH=/usr/local/cuda/lib64/stubs
14 changes: 7 additions & 7 deletions turludock/config_sanity.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,7 @@ def check_extra_packages(config: Dict[str, Any]) -> None:
logger.debug("No extra_packages have been configured in the .yaml file.")
return
supported_by_default = ["cmake"]
supported_values = ["tmux", "llvm", "vscode", "conan", "meld", "cpplint"]
supported_values = ["tmux", "llvm", "vscode", "conan", "meld", "cpplint", "miniconda"]
supported_values += supported_by_default
check_against_known_list(config, dict_key, supported_values)

Expand All @@ -168,15 +168,15 @@ def is_cuda_version_supported(cuda_version: str, ubuntu_version: str) -> bool:
else:
logger.error(f"Did not find supported ubuntu version '{ubuntu_version}' for cuda-{cuda_version}")
supported_cuda_versions = list()
for cuda_version in cuda_config:
if ubuntu_version["flat"] in cuda_config[cuda_version]:
supported_cuda_versions.append(cuda_version)
for candidate_version in cuda_config:
if ubuntu_version in cuda_config[candidate_version]:
supported_cuda_versions.append(candidate_version)
if len(supported_cuda_versions) == 0:
logger.error(f"'cuda_version: {cuda_config['cuda_version']}' not supported at all!")
logger.error(f"'cuda_version: {cuda_version}' not supported at all!")
else:
logger.error(
f"cuda_version: {cuda_config['cuda_version']}' not supported. Supported are "
+ f"{supported_cuda_versions} for Ubuntu {ubuntu_version['semantic']}"
f"'cuda_version: {cuda_version}' not supported. Supported are "
+ f"{supported_cuda_versions} for Ubuntu {ubuntu_version}"
)
return False
else:
Expand Down
3 changes: 3 additions & 0 deletions turludock/generate_dockerfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
generate_locale,
generate_meld,
generate_mesa,
generate_miniconda,
generate_ohmyzsh,
generate_python,
generate_terminator,
Expand Down Expand Up @@ -238,6 +239,8 @@ def generate_dockerfile(yaml_config: Dict[str, Any]) -> str:
dockerfile += generate_conan()
if package_name == "vscode":
dockerfile += generate_vscode()
if package_name == "miniconda":
dockerfile += generate_miniconda()

extra_packages_label_list.append(package_name)
else:
Expand Down
9 changes: 9 additions & 0 deletions turludock/generate_non_templated_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,3 +141,12 @@ def generate_mesa(use_latest: bool = True) -> str:
else:
filename = "mesa.txt"
return get_non_templated_file(filename)


def generate_miniconda() -> str:
    """Get miniconda.txt as a string.

    Returns:
        str: The miniconda Dockerfile instructions as a string
    """
    return get_non_templated_file("miniconda.txt")
8 changes: 8 additions & 0 deletions turludock/generate_nvidia_templated_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,14 @@ def generate_cuda_devel(cuda_version: str, ubuntu_version: str) -> str:
src = Template(f.read())
str_output = src.substitute(mapping)
str_output += "\n\n"

# Only for Ubuntu 20.04 we need to install extra "libtinfo5" and "libncursesw5"
if ubuntu_version == "ubuntu2004":
str_output = str_output.replace(
"RUN apt-get update && apt-get install -y --no-install-recommends \\",
"RUN apt-get update && apt-get install -y --no-install-recommends \\\n libtinfo5 libncursesw5 \\",
)

return str_output


Expand Down
35 changes: 1 addition & 34 deletions turludock/generate_templated_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
get_ros_major_version,
get_ubuntu_version,
is_version_greater,
is_version_lower,
)


Expand Down Expand Up @@ -42,26 +41,6 @@ def populate_templated_file(mapping: Dict[str, str], templated_file: str) -> str
print(f"An unexpected error occurred at function `populate_templated_file`: {e}")


def _get_ubuntu_base_image(version: str, nvidia: bool) -> str:
"""Get the base image for the Docker build. It is used in "FROM <base_image>".

Args:
version (str): The semantic version of Ubuntu
nvidia (bool): Whether the NVIDIA GPU driver is being used

Returns:
str: The Docker image name to use as the base image.
"""
if nvidia:
if is_version_lower(version, "16.04"):
raise ValueError(f"Ubuntu version lower than 16.04 is not supported. You provided: {version}")

# TODO(ATA): Not sure nvidia/opengl is really needed. Can we not just use 'ubuntu' with nvidia-docker-v2?
return f"nvidia/opengl:1.2-glvnd-runtime-ubuntu{version}"
else:
return f"ubuntu:{version}"


def _get_base_image(yaml_config: Dict[str, Any]) -> str:
"""Return the supported base image name.

Expand All @@ -74,19 +53,7 @@ def _get_base_image(yaml_config: Dict[str, Any]) -> str:
str: The base image name.
"""
ubuntu_version = get_ubuntu_version(yaml_config["ros_version"])
if yaml_config["gpu_driver"] == "nvidia":
uses_nvidia = True
# Temp fix until nvidia releases nvidia/opengl for Ubuntu 24.04
if is_version_greater(ubuntu_version["semantic"], "23.04"):
uses_nvidia = False
logger.warning(
"'nvidia/opengl:1.2-glvnd-runtime' does not exist yet for Ubuntu 24.04. "
+ "Using 'ubuntu:20.04' as base image instead. This is experimental. "
+ "Please report any issues faced."
)
else:
uses_nvidia = False
return _get_ubuntu_base_image(ubuntu_version["semantic"], uses_nvidia)
return f'ubuntu:{ubuntu_version["semantic"]}'


def generate_from(yaml_config: Dict[str, Any]) -> str:
Expand Down
2 changes: 1 addition & 1 deletion turludock/which_command.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@ def list_cuda_support(ros_codename: str) -> None:
found_supported = True
for cudnn_version in cudnn_config:
if ubuntu_version["flat"] in cudnn_config[cudnn_version]:
if cudnn_config[cudnn_version][ubuntu_version["flat"]]:
if cuda_version in cudnn_config[cudnn_version][ubuntu_version["flat"]]["cuda_version"]:
logger.info(f"*CUDA: {cuda_version} | cuDNN: {cudnn_version}")
if not found_supported:
logger.warning(f"No supported CUDA/cuDNN version for ROS {ros_codename.capitalize()} at this point.")
Expand Down