diff --git a/.dockerignore b/.dockerignore index 7e844cbd7..131eb0d90 100644 --- a/.dockerignore +++ b/.dockerignore @@ -17,4 +17,4 @@ BUILD.info fastsurfer.egg-info .codespellignore .gitignore -uv.lock \ No newline at end of file +uv.lock diff --git a/CerebNet/config/checkpoint_paths.yaml b/CerebNet/config/checkpoint_paths.yaml index 05db43bfb..f226f191c 100644 --- a/CerebNet/config/checkpoint_paths.yaml +++ b/CerebNet/config/checkpoint_paths.yaml @@ -1,6 +1,6 @@ url: -- "https://zenodo.org/records/10390742/files" - "https://b2share.fz-juelich.de/api/files/c6cf7bc6-2ae5-4d0e-814d-2a3cf0e1a8c5" +- "https://zenodo.org/records/10390742/files" checkpoint: axial: "checkpoints/CerebNet_axial_v1.0.0.pkl" diff --git a/CorpusCallosum/config/checkpoint_paths.yaml b/CorpusCallosum/config/checkpoint_paths.yaml index ca78b7da2..aa1675b12 100644 --- a/CorpusCallosum/config/checkpoint_paths.yaml +++ b/CorpusCallosum/config/checkpoint_paths.yaml @@ -1,6 +1,6 @@ url: -- "https://zenodo.org/records/17141933/files" - "https://b2share.fz-juelich.de/api/files/e4eb699c-ba68-4470-9f3d-89ceeee1a334" +- "https://zenodo.org/records/17141933/files" checkpoint: segmentation: "checkpoints/FastSurferCC_segmentation_v1.0.0.pkl" diff --git a/CorpusCallosum/localization/inference.py b/CorpusCallosum/localization/inference.py index cf99682a1..595e0d8f0 100644 --- a/CorpusCallosum/localization/inference.py +++ b/CorpusCallosum/localization/inference.py @@ -21,11 +21,11 @@ from monai.networks.nets import DenseNet from CorpusCallosum.transforms.localization import CropAroundACPCFixedSize -from CorpusCallosum.utils.checkpoint import YAML_DEFAULT as CC_YAML from CorpusCallosum.utils.types import Points2dType from FastSurferCNN.download_checkpoints import load_checkpoint_config_defaults from FastSurferCNN.download_checkpoints import main as download_checkpoints from FastSurferCNN.utils import Image3d, Vector2d, Vector3d +from FastSurferCNN.utils.checkpoint import get_config_file from FastSurferCNN.utils.parser_defaults import FASTSURFER_ROOT PATCH_SIZE = (64, 64) @@ -60,10 +60,8 @@ def load_model(device: torch.device) -> DenseNet: ) download_checkpoints(cc=True) - cc_config = load_checkpoint_config_defaults( - "checkpoint", - filename=CC_YAML, - ) + config_file = get_config_file("CorpusCallosum") + cc_config = load_checkpoint_config_defaults("checkpoint", filename=config_file) checkpoint_path = FASTSURFER_ROOT / cc_config['localization'] # Load state dict diff --git a/CorpusCallosum/segmentation/inference.py b/CorpusCallosum/segmentation/inference.py index 9704b3b4b..2e4a384e4 100644 --- a/CorpusCallosum/segmentation/inference.py +++ b/CorpusCallosum/segmentation/inference.py @@ -23,11 +23,11 @@ from CorpusCallosum.data import constants from CorpusCallosum.transforms.segmentation import CropAroundACPC -from CorpusCallosum.utils.checkpoint import YAML_DEFAULT as CC_YAML from FastSurferCNN.download_checkpoints import load_checkpoint_config_defaults from FastSurferCNN.download_checkpoints import main as download_checkpoints from FastSurferCNN.models.networks import FastSurferVINN from FastSurferCNN.utils import Image3d, Image4d, Shape2d, Shape3d, Shape4d, Vector2d, nibabelImage +from FastSurferCNN.utils.checkpoint import get_config_file from FastSurferCNN.utils.parallel import thread_executor @@ -70,10 +70,8 @@ def load_model(device: torch.device | None = None) -> FastSurferVINN: model = FastSurferVINN(params) download_checkpoints(cc=True) - cc_config: dict[str, Path] = load_checkpoint_config_defaults( - "checkpoint", - 
filename=CC_YAML, - ) + config_file = get_config_file("CorpusCallosum") + cc_config: dict[str, Path] = load_checkpoint_config_defaults("checkpoint", filename=config_file) checkpoint_path = constants.FASTSURFER_ROOT / cc_config['segmentation'] weights = torch.load(checkpoint_path, weights_only=True, map_location=device) diff --git a/CorpusCallosum/shape/mesh.py b/CorpusCallosum/shape/mesh.py index d3672a8b9..8dcdfa53f 100644 --- a/CorpusCallosum/shape/mesh.py +++ b/CorpusCallosum/shape/mesh.py @@ -538,13 +538,23 @@ def snap_cc_picture( 3. Cleans up temporary files after use. """ try: + # Dummy import of OpenGL to ensure it's available for whippersnappy + import OpenGL.GL # noqa: F401 from whippersnappy.core import snap1 - except ImportError: + except ImportError as e: # whippersnappy not installed - raise RuntimeError( - "The snap_cc_picture method of CCMesh requires whippersnappy, but whippersnappy was not found. " - "Please install whippersnappy!" + raise ImportError( + f"The snap_cc_picture method of CCMesh requires {e.name}, but {e.name} was not found. " + f"Please install {e.name}!", + name=e.name, path=e.path ) from None + except Exception as e: + # Catch all other types of errors + raise RuntimeError( + "Could not import OpenGL or whippersnappy. The snap_cc_picture method of CCMesh requires OpenGL and " + "whippersnappy to render the QC thickness image. On headless servers, this also requires a virtual " + "framebuffer like xvfb.", + ) from e self.__make_parent_folder(output_path) # Skip snapshot if there are no faces if len(self.t) == 0: diff --git a/CorpusCallosum/shape/postprocessing.py b/CorpusCallosum/shape/postprocessing.py index 5262bbffa..72de5d68f 100644 --- a/CorpusCallosum/shape/postprocessing.py +++ b/CorpusCallosum/shape/postprocessing.py @@ -266,6 +266,7 @@ def _gen_slice2slab_vox2vox(_slice_idx: int) -> AffineMatrix4x4: # Mesh is fsavg_midplane (RAS); we need to transform to voxel coordinates # fsavg ras is also on the midslice, so this is fine and we multiply in the IA and SP offsets cc_mesh = cc_mesh.to_vox_coordinates(mesh_ras2vox=np.linalg.inv(fsavg_vox2ras @ orig2fsavg_vox2vox)) + cc_surf_generated = False if wants_output("cc_thickness_image"): # this will also write overlay and surface thickness_image_path = output_path("cc_thickness_image") @@ -276,8 +277,22 @@ def _gen_slice2slab_vox2vox(_slice_idx: int) -> AffineMatrix4x4: if wants_output("cc_thickness_overlay") else None, "ref_image": upright_img, } - cc_mesh.snap_cc_picture(thickness_image_path, **kwargs) - elif wants_output("cc_surf"): + try: + cc_mesh.snap_cc_picture(thickness_image_path, **kwargs) + cc_surf_generated = True + except (ImportError, ModuleNotFoundError) as e: + logger.error( + "The thickness image was not generated because whippersnappy, glfw or OpenGL are not installed." + ) + logger.exception(e) + except Exception as e: + logger.error( + "The thickness image was not generated (see below). On headless Linux systems or if the " + "x-server cannot/should not be accessed due to other reasons, xvfb-run may be used to provide " + "a virtual framebuffer for offscreen rendering."
+ ) + logger.exception(e) + if not cc_surf_generated and wants_output("cc_surf"): surf_file_path = output_path("cc_surf") logger.info(f"Saving surf file to {surf_file_path}") io_futures.append(run(cc_mesh.write_fssurf, str(surf_file_path), image=upright_img)) diff --git a/CorpusCallosum/shape/subsegment_contour.py b/CorpusCallosum/shape/subsegment_contour.py index 6ee52a878..2434081df 100644 --- a/CorpusCallosum/shape/subsegment_contour.py +++ b/CorpusCallosum/shape/subsegment_contour.py @@ -827,7 +827,7 @@ def subdivide_contour_vertical( plt.show() - + # add original contour as the final element (Full CC) split_contours.append(contour) diff --git a/FastSurferCNN/config/checkpoint_paths.yaml b/FastSurferCNN/config/checkpoint_paths.yaml index b003c1023..05db20a52 100644 --- a/FastSurferCNN/config/checkpoint_paths.yaml +++ b/FastSurferCNN/config/checkpoint_paths.yaml @@ -1,6 +1,6 @@ url: -- "https://zenodo.org/records/10390573/files" - "https://b2share.fz-juelich.de/api/files/a423a576-220d-47b0-9e0c-b5b32d45fc59" +- "https://zenodo.org/records/10390573/files" checkpoint: axial: "checkpoints/aparc_vinn_axial_v2.0.0.pkl" diff --git a/FastSurferCNN/data_loader/conform.py b/FastSurferCNN/data_loader/conform.py index e49924f99..5e56e9d87 100644 --- a/FastSurferCNN/data_loader/conform.py +++ b/FastSurferCNN/data_loader/conform.py @@ -957,7 +957,7 @@ def is_conform( if "check_dtype" in kwargs: LOGGER.warning("check_dtype is deprecated, replaced by dtype=None and will be removed.") if kwargs["check_dtype"] is False: - dtype = None + dtype: npt.DTypeLike | None = None _vox_size, _img_size = conformed_vox_img_size(img, vox_size, img_size, threshold_1mm=threshold_1mm, vox_eps=vox_eps) @@ -1006,7 +1006,7 @@ def is_conform( checks["Dtype None"] = "IGNORED", dtype_text else: _dtype: npt.DTypeLike = to_dtype(dtype) - _dtype_name = _dtype.name if hasattr(_dtype, "name") else str(getattr(np.dtype(_dtype), "name", dtype)) + _dtype_name = np.dtype(_dtype).name if isinstance(_dtype, (str, np.dtype)) else _dtype.__name__ checks[f"Dtype {_dtype_name}"] = np.issubdtype(img.get_data_dtype(), _dtype), dtype_text _is_conform = all(map(lambda x: x[0], checks.values())) diff --git a/FastSurferCNN/download_checkpoints.py b/FastSurferCNN/download_checkpoints.py index 35492d79e..031a07dab 100644 --- a/FastSurferCNN/download_checkpoints.py +++ b/FastSurferCNN/download_checkpoints.py @@ -1,5 +1,4 @@ #!/usr/bin/env python3 - # Copyright 2022 Image Analysis Lab, German Center for Neurodegenerative Diseases (DZNE), Bonn # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,37 +13,33 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from CerebNet.utils.checkpoint import ( - YAML_DEFAULT as CEREBNET_YAML, -) -from CorpusCallosum.utils.checkpoint import YAML_DEFAULT as CC_YAML -from FastSurferCNN.utils import PLANES -from FastSurferCNN.utils.checkpoint import ( - YAML_DEFAULT as VINN_YAML, -) +from functools import lru_cache +from itertools import chain +from pathlib import Path + from FastSurferCNN.utils.checkpoint import ( check_and_download_ckpts, get_checkpoints, + get_config_file, load_checkpoint_config_defaults, ) -from HypVINN.utils.checkpoint import YAML_DEFAULT as HYPVINN_YAML +from FastSurferCNN.utils.parallel import thread_executor class ConfigCache: - def vinn_url(self): - return load_checkpoint_config_defaults("url", filename=VINN_YAML) - - def cerebnet_url(self): - return load_checkpoint_config_defaults("url", filename=CEREBNET_YAML) + @classmethod + @lru_cache + def url(cls, module: str) -> list[str]: + return load_checkpoint_config_defaults("url", get_config_file(module)) - def hypvinn_url(self): - return load_checkpoint_config_defaults("url", filename=HYPVINN_YAML) - - def cc_url(self): - return load_checkpoint_config_defaults("url", filename=CC_YAML) + @classmethod + @lru_cache + def checkpoint(cls, module: str) -> dict[str, Path]: + return load_checkpoint_config_defaults("checkpoint", get_config_file(module)) - def all_urls(self): - return self.vinn_url() + self.cerebnet_url() + self.hypvinn_url() + self.cc_url() + @classmethod + def all_urls(cls) -> list[str]: + return list(chain(*(cls.url(mod) for mod in ("FastSurferCNN", "CorpusCallosum", "CerebNet", "HypVINN")))) defaults = ConfigCache() @@ -93,9 +88,8 @@ def make_parser(): type=str, default=None, help=f"Specify you own base URL. This is applied to all models. \n" - f"Default for VINN: {defaults.vinn_url()} \n" - f"Default for CerebNet: {defaults.cerebnet_url()} \n" - f"Default for HypVINN: {defaults.hypvinn_url()}", + f"Default for VINN: {defaults.url('FastSurferCNN')} \n" + \ + "\n".join(f"Default for {mod}: {defaults.url(mod)}" for mod in ("CerebNet", "CorpusCallosum", "HypVINN")), ) parser.add_argument( "files", @@ -116,50 +110,16 @@ def main( url: str | None = None, ) -> int | str: if not vinn and not files and not cerebnet and not hypvinn and not cc and not all: - return ("Specify either files to download or --vinn, --cerebnet, " - "--hypvinn, or --all, see help -h.") + return "Specify either files to download or --vinn, --cerebnet, --cc, --hypvinn, or --all, see help -h." 
+ futures = [] + all_errors = [] try: - # FastSurferVINN checkpoints - if vinn or all: - vinn_config = load_checkpoint_config_defaults( - "checkpoint", - filename=VINN_YAML, - ) - get_checkpoints( - *(vinn_config[plane] for plane in PLANES), - urls=defaults.vinn_url() if url is None else [url] - ) - # CerebNet checkpoints - if cerebnet or all: - cerebnet_config = load_checkpoint_config_defaults( - "checkpoint", - filename=CEREBNET_YAML, - ) - get_checkpoints( - *(cerebnet_config[plane] for plane in PLANES), - urls=defaults.cerebnet_url() if url is None else [url], - ) - # HypVINN checkpoints - if hypvinn or all: - hypvinn_config = load_checkpoint_config_defaults( - "checkpoint", - filename=HYPVINN_YAML, - ) - get_checkpoints( - *(hypvinn_config[plane] for plane in PLANES), - urls=defaults.hypvinn_url() if url is None else [url], - ) - # Corpus Callosum checkpoints - if cc or all: - cc_config = load_checkpoint_config_defaults( - "checkpoint", - filename=CC_YAML, - ) - get_checkpoints( - *(cc_config[model] for model in cc_config.keys()), - urls=defaults.cc_url() if url is None else [url], - ) + for mod, sel in (("FastSurferCNN", vinn), ("CerebNet", cerebnet), ("HypVINN", hypvinn), ("CorpusCallosum", cc)): + if sel or all: + urls = defaults.url(mod) if url is None else [url] + futures.extend(thread_executor().submit(get_checkpoints, file, urls=urls) + for key, file in defaults.checkpoint(mod).items()) for fname in files: check_and_download_ckpts( fname, @@ -168,8 +128,13 @@ def main( except Exception as e: from traceback import print_exception print_exception(e) - return e.args[0] - return 0 + all_errors = [e.args[0]] + for f in futures: + if e := f.exception(): + from traceback import print_exception + print_exception(e) + all_errors.append(f.exception().args[0]) + return "\n".join(all_errors) or 0 if __name__ == "__main__": diff --git a/FastSurferCNN/run_prediction.py b/FastSurferCNN/run_prediction.py index 371048e2f..b5a6b30fa 100644 --- a/FastSurferCNN/run_prediction.py +++ b/FastSurferCNN/run_prediction.py @@ -26,6 +26,7 @@ # IMPORTS import argparse import sys +import warnings from collections.abc import Iterator, Sequence from concurrent.futures import Executor, Future, ThreadPoolExecutor from pathlib import Path @@ -44,14 +45,13 @@ from FastSurferCNN.utils import PLANES, Plane, logging, nibabelImage, parser_defaults from FastSurferCNN.utils.arg_types import OrientationType, VoxSizeOption from FastSurferCNN.utils.arg_types import vox_size as _vox_size -from FastSurferCNN.utils.checkpoint import get_checkpoints, load_checkpoint_config_defaults +from FastSurferCNN.utils.checkpoint import get_checkpoints, get_config_file, load_checkpoint_config_defaults from FastSurferCNN.utils.common import SubjectDirectory, SubjectList, find_device, handle_cuda_memory_exception from FastSurferCNN.utils.load_config import load_config from FastSurferCNN.utils.parallel import SerialExecutor, pipeline -from FastSurferCNN.utils.parser_defaults import FASTSURFER_ROOT, SubjectDirectoryConfig +from FastSurferCNN.utils.parser_defaults import SubjectDirectoryConfig LOGGER = logging.getLogger(__name__) -CHECKPOINT_PATHS_FILE = FASTSURFER_ROOT / "FastSurferCNN/config/checkpoint_paths.yaml" ## @@ -223,6 +223,16 @@ def __init__( if self.device.type == "cpu" and viewagg_device in ("auto", "cpu"): self.viewagg_device = self.device else: + if self.device.type == "cuda" and not torch.cuda.is_initialized(): + with warnings.catch_warnings(): + warnings.simplefilter("error") + try: + torch.cuda.init() + except RuntimeError 
as err: + LOGGER.critical("Failed to initialize cuda device, maybe incompatible CUDA version?") + LOGGER.exception(err) + raise err + # check, if GPU is big enough to run view agg on it (this currently takes the memory of the passed device) self.viewagg_device = find_device( viewagg_device, @@ -551,11 +561,12 @@ def _add_sd_help(action: argparse.Action) -> None: parser_defaults.modify_argument(parser, "--sd", _add_sd_help) # 3. Checkpoint to load + config_file = get_config_file("FastSurferCNN") files: dict[Plane, str | Path] = {k: "default" for k in PLANES} - parser = parser_defaults.add_plane_flags(parser, "checkpoint", files, CHECKPOINT_PATHS_FILE) + parser = parser_defaults.add_plane_flags(parser, "checkpoint", files, config_file) # 4. CFG-file with default options for network - parser = parser_defaults.add_plane_flags(parser, "config", files, CHECKPOINT_PATHS_FILE) + parser = parser_defaults.add_plane_flags(parser, "config", files, config_file) # 5. technical parameters image_flags = ["vox_size", "conform_to_1mm_threshold", "orientation", "image_size", "device"] @@ -613,8 +624,10 @@ def main( # Download checkpoints if they do not exist # see utils/checkpoint.py for default paths LOGGER.info("Checking or downloading default checkpoints ...") - - urls = load_checkpoint_config_defaults("url", filename=CHECKPOINT_PATHS_FILE) + + config_file = get_config_file("FastSurferCNN") + + urls = load_checkpoint_config_defaults("url", filename=config_file) get_checkpoints(ckpt_ax, ckpt_cor, ckpt_sag, urls=urls) diff --git a/FastSurferCNN/segstats.py b/FastSurferCNN/segstats.py index 8939df62a..1955c8d8b 100644 --- a/FastSurferCNN/segstats.py +++ b/FastSurferCNN/segstats.py @@ -2279,7 +2279,8 @@ def pv_calc_patch( for p, gc in zip(slicer_patch, global_crop, strict=False)) label_lookup = np.unique(seg[slicer_small_patch]) - maxlabels = label_lookup[-1] + 1 + # make sure to promote label_lookup to int64 to avoid overflow (numpy2) + maxlabels = int(label_lookup[-1]) + 1 if maxlabels > 100_000: raise RuntimeError("Maximum number of labels above 100000!") # create a view for the current patch border diff --git a/FastSurferCNN/utils/__init__.py b/FastSurferCNN/utils/__init__.py index 8ba481e4f..223b208a9 100644 --- a/FastSurferCNN/utils/__init__.py +++ b/FastSurferCNN/utils/__init__.py @@ -52,9 +52,25 @@ # there are very few cases, when we do not need nibabel in any "full script" so always # including nibabel does not overly drag down performance -from nibabel.analyze import SpatialHeader as nibabelHeader -from nibabel.analyze import SpatialImage as nibabelImage -from numpy import bool_, dtype, float64, ndarray, number +try: + from nibabel.analyze import SpatialHeader as nibabelHeader + from nibabel.analyze import SpatialImage as nibabelImage +# Some scripts like the build script do not require the full FastSurfer environment. This makes sure, this typing +# module is still functional in such cases. +except (ImportError, ModuleNotFoundError): + nibabelImage = None + nibabelHeader = None +try: + from numpy import bool_, dtype, float64, ndarray, number +# Some scripts like the build script do not require the full FastSurfer environment. This makes sure, this typing +# module is still functional in such cases. +except (ImportError, ModuleNotFoundError): + float64 = float + bool_ = bool + # by typing this with tuple, ndarray[...] and dtype [...] 
will still be valid syntax + ndarray = tuple + dtype = tuple + from numbers import Number as number AffineMatrix4x4 = ndarray[tuple[Literal[4], Literal[4]], dtype[float64]] PlaneAxial = Literal["axial"] diff --git a/FastSurferCNN/utils/checkpoint.py b/FastSurferCNN/utils/checkpoint.py index 324184d2b..2c4e47fe8 100644 --- a/FastSurferCNN/utils/checkpoint.py +++ b/FastSurferCNN/utils/checkpoint.py @@ -14,6 +14,7 @@ # IMPORTS import os +import sys from collections.abc import MutableSequence from functools import lru_cache from pathlib import Path @@ -35,10 +36,6 @@ class Scheduler: LOGGER = logging.getLogger(__name__) -# Defaults -YAML_DEFAULT = FASTSURFER_ROOT / "FastSurferCNN/config/checkpoint_paths.yaml" - - class CheckpointConfigDict(TypedDict, total=False): url: list[str] checkpoint: dict[Plane, Path] @@ -49,7 +46,7 @@ class CheckpointConfigDict(TypedDict, total=False): @lru_cache -def load_checkpoint_config(filename: Path | str = YAML_DEFAULT) -> CheckpointConfigDict: +def load_checkpoint_config(filename: Path | str) -> CheckpointConfigDict: """ Load the plane dictionary from the yaml file. @@ -88,21 +85,21 @@ def load_checkpoint_config(filename: Path | str = YAML_DEFAULT) -> CheckpointCon @overload def load_checkpoint_config_defaults( - filetype: Literal["checkpoint", "config"], - filename: str | Path = YAML_DEFAULT, + configtype: Literal["checkpoint", "config"], + filename: str | Path, ) -> dict[Plane, Path]: ... @overload def load_checkpoint_config_defaults( configtype: Literal["url"], - filename: str | Path = YAML_DEFAULT, + filename: str | Path, ) -> list[str]: ... @lru_cache def load_checkpoint_config_defaults( configtype: CheckpointConfigFields, - filename: str | Path = YAML_DEFAULT, + filename: str | Path, ) -> dict[Plane, Path] | list[str]: """ Get the default value for a specific plane or the url. @@ -174,6 +171,23 @@ def get_checkpoint(ckpt_dir: str, epoch: int) -> str: return checkpoint_dir +def get_config_file(module: str) -> Path: + """ + Return the path to the checkpoint_paths.yaml file of `module`. + + Parameters + ---------- + module : str + The FastSurfer module name. + + Returns + ------- + Path + The path to the checkpoint_paths.yaml file of `module`. + """ + return FASTSURFER_ROOT / module / "config/checkpoint_paths.yaml" + + def get_checkpoint_path( log_dir: Path | str, resume_experiment: str | int | None = None ) -> MutableSequence[Path]: @@ -348,34 +362,46 @@ def download_checkpoint( urls : list[str] List of URLs of checkpoint hosting sites. """ - response = None + responses = [] for url in urls: try: LOGGER.info(f"Downloading checkpoint {checkpoint_name} from {url}") - response = requests.get( + responses.append(requests.get( url + "/" + checkpoint_name, verify=True, timeout=(5, None), # (connect timeout: 5 sec, read timeout: None) - ) + )) # Raise error if file does not exist: - response.raise_for_status() - break + if responses[-1].ok: + break except requests.exceptions.RequestException as e: LOGGER.warning(f"Server {url} not reachable ({type(e).__name__}): {e}") - if isinstance(e, requests.exceptions.HTTPError): - LOGGER.warning(f"Response code: {e.response.status_code}") - - if response is None: - links = ', '.join(u.removeprefix('https://')[:22] + "..." for u in urls) - raise requests.exceptions.RequestException( - f"Failed downloading the checkpoint {checkpoint_name} from {links}."
- ) + if isinstance(e.response, requests.Response): + responses.append(e.response) + + # if no request was successful, raise an error with all responses + if not any(_response.ok for _response in responses): + import textwrap + message = f"Could not download checkpoint {checkpoint_name} from any server." + exceptions = [] + for _response in responses: + message += f"\n\nResponse code from {_response.url}: {_response.status_code}" + message += f"\nResponse text:\n{textwrap.indent(_response.text, ' ')}" + if sys.version_info >= (3, 11): + try: + _ = _response.raise_for_status() + except Exception as e: + exceptions.append(e) + # ExceptionGroup is introduced in Python 3.11 + if sys.version_info >= (3, 11): + raise ExceptionGroup(message, exceptions) # noqa: F821 + else: + raise RuntimeError(message, responses) else: - response.raise_for_status() # Raise error if no server is reachable - - with open(checkpoint_path, "wb") as f: - f.write(response.content) + response = next(r for r in responses if r.ok) + with open(checkpoint_path, "wb") as f: + f.write(response.content) def check_and_download_ckpts(checkpoint_path: Path | str, urls: list[str]) -> None: @@ -386,7 +412,7 @@ def check_and_download_ckpts(checkpoint_path: Path | str, urls: list[str]) -> No ---------- checkpoint_path : Path, str Path of the file in which the checkpoint will be saved. - urls : list[str] + urls : list of str URLs of checkpoint hosting site. """ if not isinstance(checkpoint_path, Path): @@ -406,7 +432,7 @@ def get_checkpoints(*checkpoints: Path | str, urls: list[str]) -> None: ---------- *checkpoints : Path, str Paths of the files in which the checkpoint will be saved. - urls : Path, str + urls : list of str URLs of checkpoint hosting sites. """ try: diff --git a/FastSurferCNN/utils/parser_defaults.py b/FastSurferCNN/utils/parser_defaults.py index 9dd483ad4..633ef76d7 100644 --- a/FastSurferCNN/utils/parser_defaults.py +++ b/FastSurferCNN/utils/parser_defaults.py @@ -420,6 +420,7 @@ def add_plane_flags( argparse.ArgumentParser The parser object. """ + configtype: Literal["checkpoint", "config"] if configtype not in PLANE_SHORT: raise ValueError("type must be either config or checkpoint.") diff --git a/FastSurferCNN/version.py b/FastSurferCNN/version.py index 990c26379..f3646338a 100644 --- a/FastSurferCNN/version.py +++ b/FastSurferCNN/version.py @@ -7,6 +7,7 @@ import subprocess from collections.abc import Sequence from concurrent.futures import Future, ThreadPoolExecutor +from os import PathLike from pathlib import Path from typing import Any, Literal, TextIO, TypedDict, cast, get_args @@ -233,7 +234,7 @@ def main( python packages: ========== Package Version Location [Installer] - + ...] ``` @@ -291,7 +292,7 @@ def main( if sections != "": futures["git_branch"] = Popen(["git", "branch", "--show-current"], **kw_root).as_future(pool) if "+git" in sections: - futures["git_status"] = pool.submit(filter_git_status, Popen(["git", "status", "-s", "-b"], **kw_root)) + futures["git_status"] = pool.submit(filter_git_status, Popen(["git", "status", "-sb"], **kw_root)) else: # we go not have git, try loading the build cache build_cache_required = True @@ -334,9 +335,7 @@ def __future_or_cache(key: VersionDictKeys, futures: dict[str, Future[Any]], cac if isinstance(returnmsg, str): return returnmsg elif returnmsg.retcode != 0: - raise RuntimeError( - f"The calculation/determination of {key} has failed." 
- ) + raise RuntimeError(f"The calculation/determination of {key} has failed.") return returnmsg.out_str("utf-8").strip() elif key in cache: # fill from cache @@ -506,7 +505,7 @@ def filter_git_status(git_process) -> str: ) -def read_and_close_version(project_file: TextIO | None = None) -> str: +def read_and_close_version(project_file: TextIO | PathLike | None = None) -> str: """ Read and close the version from the pyproject file. Also fill default. @@ -514,7 +513,7 @@ def read_and_close_version(project_file: TextIO | None = None) -> str: Parameters ---------- - project_file : TextIO, optional + project_file : TextIO, PathLike, optional Project file. Returns @@ -526,7 +525,8 @@ def read_and_close_version(project_file: TextIO | None = None) -> str: ----- See also FastSurferCNN.version.read_version_from_project_file """ - project_file = open(project_file or DEFAULTS.PROJECT_TOML) + if not hasattr(project_file, "readline"): + project_file = open(project_file or DEFAULTS.PROJECT_TOML) try: version = read_version_from_project_file(project_file) finally: diff --git a/HypVINN/config/checkpoint_paths.yaml b/HypVINN/config/checkpoint_paths.yaml index 7071534b6..01aee3844 100644 --- a/HypVINN/config/checkpoint_paths.yaml +++ b/HypVINN/config/checkpoint_paths.yaml @@ -1,6 +1,6 @@ url: -- "https://zenodo.org/records/11184216/files" - "https://b2share.fz-juelich.de/api/files/d9e37247-5455-4c83-853d-21e31fb5bea5" +- "https://zenodo.org/records/11184216/files" checkpoint: axial: "checkpoints/HypVINN_axial_v1.1.0.pkl" diff --git a/HypVINN/run_prediction.py b/HypVINN/run_prediction.py index 2ae0cd52a..f789a6ffc 100644 --- a/HypVINN/run_prediction.py +++ b/HypVINN/run_prediction.py @@ -28,6 +28,7 @@ from FastSurferCNN.utils import PLANES, Plane, logging, parser_defaults from FastSurferCNN.utils.checkpoint import ( get_checkpoints, + get_config_file, load_checkpoint_config_defaults, ) from FastSurferCNN.utils.common import update_docstring @@ -36,7 +37,6 @@ from HypVINN.data_loader.data_utils import hypo_map_label2subseg, rescale_image from HypVINN.inference import Inference from HypVINN.utils import ModalityDict, ModalityMode, ViewOperationDefinition, ViewOperations -from HypVINN.utils.checkpoint import YAML_DEFAULT as CHECKPOINT_PATHS_FILE from HypVINN.utils.img_processing_utils import save_segmentation from HypVINN.utils.load_config import load_config from HypVINN.utils.misc import create_expand_output_directory @@ -130,12 +130,14 @@ def option_parse() -> argparse.ArgumentParser: advanced = parser.add_argument_group(title="Advanced options") parser_defaults.add_arguments(advanced, ["device", "viewagg_device", "threads", "batch_size", "async_io"]) + checkpoints_config = get_config_file("HypVINN") + files: dict[Plane, str | Path] = {k: "default" for k in PLANES} # 5. Checkpoint to load - parser_defaults.add_plane_flags(advanced, "checkpoint", files, CHECKPOINT_PATHS_FILE) + parser_defaults.add_plane_flags(advanced, "checkpoint", files, checkpoints_config) config_files = {plane: Path(f"HypVINN/config/HypVINN_{plane}_v1.1.0.yaml") for plane in PLANES} - parser_defaults.add_plane_flags(advanced, "config", config_files, CHECKPOINT_PATHS_FILE) + parser_defaults.add_plane_flags(advanced, "config", config_files, checkpoints_config) return parser @@ -395,10 +397,8 @@ def prepare_checkpoints(ckpt_ax, ckpt_cor, ckpt_sag): The path to the sagittal checkpoint file. 
""" logger.info("Checking or downloading default checkpoints ...") - urls = load_checkpoint_config_defaults( - "url", - filename=CHECKPOINT_PATHS_FILE, - ) + config_file = get_config_file("HypVINN") + urls = load_checkpoint_config_defaults("url", filename=config_file) get_checkpoints(ckpt_ax, ckpt_cor, ckpt_sag, urls=urls) diff --git a/Tutorial/Complete_FastSurfer_Tutorial.ipynb b/Tutorial/Complete_FastSurfer_Tutorial.ipynb index ee3230266..873a6a761 100644 --- a/Tutorial/Complete_FastSurfer_Tutorial.ipynb +++ b/Tutorial/Complete_FastSurfer_Tutorial.ipynb @@ -2354,7 +2354,7 @@ " device (no memory check will be done).\n", " --batch Batch size for inference. Default: 1\n", " --py Command for python, used in both pipelines.\n", - " Default: python3.10\n", + " Default: python3\n", "\n", " Dev Flags:\n", " --ignore_fs_version Switch on to avoid check for FreeSurfer version.\n", diff --git a/Tutorial/README.md b/Tutorial/README.md index 2487860d6..d76370e65 100644 --- a/Tutorial/README.md +++ b/Tutorial/README.md @@ -67,6 +67,7 @@ or pip: ```bash sudo apt install python3-setuptools ``` +or ```bash pip install setuptools ``` @@ -79,18 +80,13 @@ install: pip install scikit-sparse=0.4.4 ``` -It is normally recommended to run your setups in separate virtual environments (like conda, +It is normally recommended to run your setups in separate virtual environments (like uv, conda, [pipenv](https://pypi.org/project/pipenv/) or [virtualenv](https://pypi.org/project/virtualenv/)). -#### 2. Anaconda -You can install anaconda via curl with the following command: +#### 2. uv +FastSurfer previously recommended conda to install Python. We now recommend uv instead of conda. + +You can install uv via wget with the following command: ```bash -# The version of Anaconda may be different depending on when you are installing -curl -O https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -sh Miniconda3-latest-Linux-x86_64.sh -# and follow the prompts. The defaults are generally good. +wget -qO- https://astral.sh/uv/install.sh | sh ``` -You may have to open a new terminal or re-source your ~/.bashrc to get access to the conda command. See also the -documentation for [conda](https://conda.io/projects/conda/en/latest/user-guide/getting-started.html) as well as the -section about how to manage -[conda environments](https://conda.io/projects/conda/en/latest/user-guide/getting-started.html#managing-environments). diff --git a/brun_fastsurfer.sh b/brun_fastsurfer.sh index e0bbe053c..f7fe6e647 100755 --- a/brun_fastsurfer.sh +++ b/brun_fastsurfer.sh @@ -29,7 +29,7 @@ parallel_pipelines="1" num_parallel_surf="1" num_parallel_seg="1" statusfile="" -python="python3.10 -s" +python="python3 -s" function usage() { diff --git a/doc/overview/INSTALL.md b/doc/overview/INSTALL.md index 16959bee0..c464c502d 100644 --- a/doc/overview/INSTALL.md +++ b/doc/overview/INSTALL.md @@ -61,7 +61,7 @@ sudo apt install -y g++-11 You also need to have bash-3.2 or higher (check with `bash --version`). -You also need a working version of python3.10 (we do not support other versions). These packages should be sufficient to install python dependencies and then run the FastSurfer neural network segmentation. If you want to run the full pipeline, you also need a [working installation of FreeSurfer](https://surfer.nmr.mgh.harvard.edu/fswiki/rel7downloads) (including its dependencies and a license file). +You also need a working version of python3 (we do not support other versions). 
These packages should be sufficient to install python dependencies and then run the FastSurfer neural network segmentation. If you want to run the full pipeline, you also need a [working installation of FreeSurfer](https://surfer.nmr.mgh.harvard.edu/fswiki/rel7downloads) (including its dependencies and a license file). If you are using pip, make sure pip is updated as older versions will fail. diff --git a/doc/scripts/RUN_FASTSURFER.md b/doc/scripts/RUN_FASTSURFER.md index d99c1f27d..62108e322 100644 --- a/doc/scripts/RUN_FASTSURFER.md +++ b/doc/scripts/RUN_FASTSURFER.md @@ -59,7 +59,7 @@ Optional arguments If the minimal voxel size is bigger than 0.98mm, the image is conformed to 1mm isotropic. If the minimal voxel size is smaller or equal to 0.98mm, the T1w image will be conformed to isotropic voxels of that voxel size. The voxel size (whether set manually or derived) determines whether the surfaces are processed with highres options (below 1mm) or not. -* `--py`: Command for python, used in both pipelines. Default: python3.10 +* `--py`: Command for python, used in both pipelines. Default: python3 * `--conformed_name`: Name of the file in which the conformed input image will be saved. Default location: \$SUBJECTS_DIR/\$sid/mri/orig.mgz * `-h`, `--help`: Prints help text diff --git a/env/fastsurfer.yml b/env/fastsurfer.yml deleted file mode 100644 index d8bc1323e..000000000 --- a/env/fastsurfer.yml +++ /dev/null @@ -1,35 +0,0 @@ -name: fastsurfer - -channels: - - conda-forge - -dependencies: -- h5py==3.12.1 -- lapy==1.5.0 -- matplotlib==3.10.1 -- monai==1.4.0 -- nibabel==5.3.2 -- numpy==1.26.4 -- pandas==2.2.3 -- pillow==11.1.0 -- pip==25.0 -- python=3.10 -- python-dateutil==2.9.0.post0 -- pyyaml==6.0.2 -- requests==2.32.3 -- scikit-image==0.25.1 -- scikit-learn==1.6.1 -- scipy==1.15.1 -- setuptools==75.8.0 -- tensorboard==2.18.0 -- tqdm==4.67.1 -- yacs==0.1.8 -- pip: - - --extra-index-url https://download.pytorch.org/whl/cu126 - - simpleitk==2.4.1 - - torch==2.6.0+cu126 - - torchio==0.20.4 - - torchvision==0.21.0+cu126 - - meshpy>=2025.1.1 - - pyrr>=0.10.3 - - whippersnappy>=1.3.1 diff --git a/env/fastsurfer_reconsurf.yml b/env/fastsurfer_reconsurf.yml deleted file mode 100644 index 3b28a99a5..000000000 --- a/env/fastsurfer_reconsurf.yml +++ /dev/null @@ -1,21 +0,0 @@ - -name: fastsurfer_reconsurf - -channels: - - conda-forge - -dependencies: - - lapy=1.0.1 - - nibabel=5.1.0 - - numpy=1.25.0 - - pandas=1.5.3 - - pillow=10.0.1 - - pip=23.1.2 - - python=3.10 - - python-dateutil=2.8.2 - - pyyaml=6.0 - - scikit-image=0.19.3 - - scikit-learn=1.2.2 - - scipy=1.10.1 - - pip: - - simpleitk==2.2.1 diff --git a/long_fastsurfer.sh b/long_fastsurfer.sh index df0bab5ea..b106bdffd 100755 --- a/long_fastsurfer.sh +++ b/long_fastsurfer.sh @@ -57,7 +57,7 @@ t1s=() parallel=0 LF="" brun_flags=() -python="python3.10 -s" # avoid user-directory package inclusion +python="python3 -s" # avoid user-directory package inclusion function usage() diff --git a/pyproject.toml b/pyproject.toml index 52b1576cf..5b5bed332 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,17 +27,23 @@ classifiers = [ 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', 'Programming Language :: Python :: 3.12', + 'Programming Language :: Python :: 3.13', 'Natural Language :: English', 'License :: OSI Approved :: Apache Software License', 'Intended Audience :: Science/Research', ] dependencies = [ 'h5py>=3.7', - "lapy>=1.5.0", + 'lapy>=1.5.0', 'matplotlib>=3.7.1', + 'meshpy>=2025.1.1', # needed for 
FastSurfer-CC + 'monai>=1.4.0', # needed for FastSurfer-CC 'nibabel>=5.1.0', - 'numpy>=1.25,<2', + 'numpy>=1.25', + 'packaging', 'pandas>=1.5.3', + 'pip>=25.0', + 'pyrr>=0.10.3', # needed for FastSurfer-CC 'pyyaml>=6.0', 'requests>=2.31.0', 'scikit-image>=0.19.3', @@ -45,21 +51,20 @@ dependencies = [ 'scipy>=1.10.1,!=1.13.0', 'simpleitk>=2.2.1', 'tensorboard>=2.12.1', - 'torch>=2.0.1', + 'torch==2.7.*', # pinned to 2.7, so compatibility to CUDA 11 is ensured for same torch version 'torchio>=0.18.83', - 'torchvision>=0.15.2', + 'torchvision>=0.22.1,<0.23', 'tqdm>=4.65', 'yacs>=0.1.8', - 'monai>=1.4.0', - 'meshpy>=2025.1.1', - 'pyrr>=0.10.3', - 'pip>=25.0', ] [project.optional-dependencies] +qc = [ + 'whippersnappy>=1.3.1', +] doc = [ + 'fastsurfer[qc]', 'furo!=2023.8.17', - 'whippersnappy>=1.3.1', 'memory-profiler', 'myst-parser', 'numpydoc', @@ -90,6 +95,7 @@ quicktest = [ all = [ 'fastsurfer[doc]', 'fastsurfer[style]', + 'fastsurfer[qc]', 'fastsurfer[quicktest]', ] full = [ diff --git a/recon_surf/README.md b/recon_surf/README.md index e034a3ba5..264eb8e1d 100644 --- a/recon_surf/README.md +++ b/recon_surf/README.md @@ -108,7 +108,7 @@ source $FREESURFER_HOME/SetUpFreeSurfer.sh # Run recon-surf ./recon-surf.sh --sid subjectX \ --sd $HOME/my_fastsurfer_analysis \ - --py python3.10 \ + --py python3 \ --3T \ --t1 $HOME/my_fastsurfer_analysis/subjectX/mri/orig.mgz \ --asegdkt_segfile $HOME/my_fastsurfer_analysis/subjectX/mri/aparc.DKTatlas+aseg.deep.mgz diff --git a/recon_surf/long_prepare_template.sh b/recon_surf/long_prepare_template.sh index a56943d76..40ffa9b1b 100755 --- a/recon_surf/long_prepare_template.sh +++ b/recon_surf/long_prepare_template.sh @@ -66,7 +66,7 @@ interpol="cubic" # for the final interpolation of all time points in median i robust_template_avg_arg=1 # median for template creation (if more than 1 time point) # default arguments -python="python3.10 -s" # avoid user-directory package inclusion +python="python3 -s" # avoid user-directory package inclusion sd="$SUBJECTS_DIR" # init variables that need to be passed diff --git a/recon_surf/recon-surf.sh b/recon_surf/recon-surf.sh index 5727c8505..8e2e8b8ae 100755 --- a/recon_surf/recon-surf.sh +++ b/recon_surf/recon-surf.sh @@ -26,7 +26,7 @@ fstess=0 # run mri_tesselate (FS way), if 0 = run mri_mc fsqsphere=0 # run inflate1 and qsphere (FSway), if 0 run spectral projection fsaparc=0 # run FS aparc (and cortical ribbon), if 0 map aparc from asegdkt_segfile fssurfreg=1 # run FS surface registration to fsaverage, if 0 omit this step -python="python3.10" # python version +python="python3 -s" # python version DoParallel=0 # if 1, run hemispheres in parallel DoParallelFlag=0 # 1, if --parallel passed threads="1" # number of threads to use for running FastSurfer diff --git a/recon_surf/recon-surfreg.sh b/recon_surf/recon-surfreg.sh index ffc01ea10..239c3e894 100755 --- a/recon_surf/recon-surfreg.sh +++ b/recon_surf/recon-surfreg.sh @@ -19,7 +19,7 @@ FS_VERSION_SUPPORT="7.4.1" # Regular flags default subject=""; # Subject name -python="python3.10" # python version +python="python3 -s" # python version DoParallel=0 # if 1, run hemispheres in parallel threads="1" # number of threads to use for running FastSurfer diff --git a/run_fastsurfer.sh b/run_fastsurfer.sh index 881bab4ca..8c9c6e161 100755 --- a/run_fastsurfer.sh +++ b/run_fastsurfer.sh @@ -75,8 +75,8 @@ run_hypvinn_module="1" run_cc_module="1" threads_seg="1" threads_surf="1" -# python3.10 -s excludes user-directory package inclusion -python="python3.10 -s" +# python3 -s 
excludes user-directory package inclusion +python="python3 -s" allow_root=() version_and_quit="" warn_seg_only=() @@ -738,6 +738,33 @@ then fi fi +maybe_xvfb=() +# check if we are running on a headless system (CC QC needs a (virtual) display that supports OpenGL) +if [[ "$run_seg_pipeline" == "1" ]] && [[ "$run_cc_module" == "1" ]] && [[ "${cc_flags[*]}" =~ --thickness_image ]] +then + # if we have xvfb-run, we can use it to provide a virtual display + if [[ -n "$(which xvfb-run)" ]] ; then maybe_xvfb=("xvfb-run" "-a") ; fi + + # try loading opengl, if this is successful, we are fine + py_opengltest="import sys ; import glfw ; import whippersnappy.core ; sys.exit(1-glfw.init())" + opengl_error_message="$("${maybe_xvfb[@]}" $python -c "$py_opengltest" 2>&1 > /dev/null)" + if [[ "$?" != "0" ]] + then + # if we cannot import OpenGL or whippersnappy, it's an environment installation issue + if [[ "$opengl_error_message" =~ "ModuleNotFoundError" ]] || [[ "$opengl_error_message" =~ "ImportError" ]] + then + echo "WARNING: The --qc_snap option of the corpus callosum module requires the Python packages PyOpenGL, glfw and" + echo " whippersnappy to be installed, but python could not import those three. Please install them and their" + echo " dependencies via 'pip install pyopengl glfw whippersnappy'." + else + echo "WARNING: The --qc_snap option of the corpus callosum module requires OpenGL support, but we could not" + echo " create OpenGL handles. For Linux headless systems, you may install xvfb-run to provide a virtual display." + fi + echo " FastSurfer will not fail due to the unavailability of OpenGL, but some QC snapshots (rendered thickness" + echo " image) will not be created." + fi +fi + if [[ "$run_surf_pipeline" == "1" ]] && [[ "$native_image" != "false" ]] then echo "ERROR: The surface pipeline is not compatible with the options --native_image or " @@ -1119,9 +1146,10 @@ then # note: callosum manedit currently only affects inpainting and not internal FastSurferCC processing (surfaces etc) callosum_seg_manedit="$(add_file_suffix "$callosum_seg" "manedit")" # generate callosum segmentation, mesh, shape and downstream measure files - cmd=($python "$CorpusCallosumDir/fastsurfer_cc.py" --sd "$sd" --sid "$subject" --threads "$threads_seg" - "--conformed_name" "$conformed_name" "--aseg_name" "$asegdkt_segfile" "--segmentation_in_orig" "$callosum_seg" - "${cc_flags[@]}") + cmd=("${maybe_xvfb[@]}" $python "$CorpusCallosumDir/fastsurfer_cc.py" --sd "$sd" --sid "$subject" + "--threads" "$threads_seg" "--conformed_name" "$conformed_name" "--aseg_name" "$asegdkt_segfile" + "--segmentation_in_orig" "$callosum_seg" "${cc_flags[@]}") + # if we are trying to create the thickness image in a headless setting, wrap call in xvfb-run { echo_quoted "${cmd[@]}" "${cmd[@]}" diff --git a/tools/Docker/Dockerfile b/tools/Docker/Dockerfile index 999f73ac6..06e1db131 100644 --- a/tools/Docker/Dockerfile +++ b/tools/Docker/Dockerfile @@ -14,24 +14,54 @@ # DOCUMENTATION FOR BUILD ARGS (use '--build-arg DEVICE=<device>'): # - BUILD_BASE_IMAGE: -# The base image to build the conda and freesurfer build images from +# The base image to build the venv and freesurfer build images from # - default: ubuntu:22.04 # - RUNTIME_BASE_IMAGE: # The base image to build the runtime image on. # - default: ubuntu:22.04 # - FREESURFER_BUILD_IMAGE: -# Image to use to install freesurfer binaries from, the freesurfer binaries -# should be located in /opt/freesurfer in the image.
+# Image to use to install freesurfer binaries from; the freesurfer binaries should be located in /opt/freesurfer in +# the image. # - default: build_freesurfer -# - CONDA_BUILD_IMAGE: -# Image to use to install the python environment from, the python environment -# should be in /venv/ in the image. +# - VENV_BUILD_IMAGE: +# Image to use to install the python environment from; the python environment should be in /venv/ in the image. # - default: build_cuda -# - MAMBA_VERSION: -# Which miniforge file to download to install mamba -# from https://github.com/conda-forge/miniforge/releases/download/ -# ${FORGE_VERSION}/Miniforge3-${FORGE_VERSION}-Linux-x86_64.sh -# - default: Miniforge3-24.11.2-1-Linux-x86_64.sh +# - BUILDKIT_SBOM_SCAN_CONTEXT: +# Enables buildkit to provide and scan build images. This is active by default to provide proper SBOM manifests; +# however, it may also include parts that are not part of the distributed image (specifically build image parts +# installed in the build image, but not transferred to the runtime image, such as git, wget, the miniconda installer, +# etc.) +# - default: true +# - UV_VERSION: +# The version of uv to use to build the python environment. +# - default: 0.9.22 +# - PYTHON_VERSION: +# The python version to use for the virtual environment. +# - default: 3.12 +# - FREESURFER_VERSION: +# The freesurfer version used in the image. +# - default: 7.4.1 +# - FREESURFER_URL: +# The url to download the freesurfer install archive from; if "default", the URL is derived from FREESURFER_VERSION. +# - default: default +# - FASTSURFER_VERSION: +# The fastsurfer version to include in image labels. +# - default: dev +# - GIT_HASH: +# The git hash of the fastsurfer version to include in image labels. +# - default: "" +# - REPOSITORY_URL: +# The repository URL of the fastsurfer source code to include in image labels. +# - default: "" +# - DOC_URL: +# The documentation URL of fastsurfer to include in image labels. +# - default: "https://fastsurfer.org/fastsurfer/dev" +# - VENDOR: +# The vendor string to include in image labels. +# - default: "Image Analysis Lab, DZNE https://deep-mi.org/" +# - AUTHOR: +# The author string to include in image labels. +# - default: "David Kügler " # DOCUMENTATION FOR TARGETS (use '--target <target>'): # To select which image will be tagged with '-t' @@ -41,122 +71,202 @@ # Build the freesurfer build image only. # - build_common: # Build the basic image with the python environment (hardware/driver-agnostic) -# - build_conda: +# - build_venv: # Build the python environment image with cuda/rocm/cpu support -ARG FREESURFER_BUILD_IMAGE=build_freesurfer -ARG CONDA_BUILD_IMAGE=build_conda -ARG RUNTIME_BASE_IMAGE=ubuntu:22.04 -ARG BUILD_BASE_IMAGE=ubuntu:22.04 -# BUILDKIT_SBOM:SCAN_CONTEXT enables buildkit to provide and scan build images -# this is active by default to provide proper SBOM manifests, however, it may also -# include parts that are not part of the distributed image (specifically build image -# parts installed in the build image, but not transferred to the runtime image such as -# git, wget, the miniconda installer, etc.)
-ARG BUILDKIT_SBOM_SCAN_CONTEXT=true - -## Start with ubuntu base to build the conda base stage +ARG BUILD_BASE_IMAGE="ubuntu:24.04" +ARG RUNTIME_BASE_IMAGE="ubuntu:24.04" +ARG VENV_BUILD_IMAGE="build_venv" +ARG FREESURFER_BUILD_IMAGE="build_freesurfer" +ARG UV_VERSION="0.9.22" +ARG PYTHON_VERSION="3.12" +ARG FREESURFER_VERSION="7.4.1" +ARG FREESURFER_URL="not set" +ARG FASTSURFER_VERSION="dev" +ARG GIT_HASH="" +ARG REPOSITORY_URL="" +ARG DOC_URL="https://fastsurfer.org/fastsurfer/dev" +ARG VENDOR="Image Analysis Lab, DZNE https://deep-mi.org/" +ARG AUTHOR="David Kügler " +ARG BUILDKIT_SBOM_SCAN_CONTEXT="true" +ARG DEVICE="cu128" +ARG DEBUG="false" + +FROM ghcr.io/astral-sh/uv:$UV_VERSION AS selected_uv_install_image + +## Start with ubuntu base to build the venv base stage FROM $BUILD_BASE_IMAGE AS build_base -ENV LANG=C.UTF-8 -ENV DEBIAN_FRONTEND=noninteractive +ARG PYTHON_VERSION + +ENV LANG=C.UTF-8 \ + DEBIAN_FRONTEND=noninteractive + +SHELL ["/bin/bash", "-e", "-c"] # Install packages needed for build -RUN apt-get update && apt-get install -y --no-install-recommends \ - aria2 \ - ca-certificates \ - file \ - git \ - upx && \ - apt clean && \ - rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* - -ARG FORGE_VERSION=25.3.1-0 - -# Install conda -RUN aria2c -x 8 -s 8 -c --check-certificate=false -o ~/miniforge.sh \ - https://github.com/conda-forge/miniforge/releases/download/${FORGE_VERSION}/Miniforge3-${FORGE_VERSION}-Linux-x86_64.sh && \ - chmod +x ~/miniforge.sh && \ - ~/miniforge.sh -b -p /opt/miniforge && \ - rm ~/miniforge.sh - -ENV PATH=/opt/miniforge/bin:$PATH +RUN < /install/build_conda.args ; \ - if [ "${DEBUG}" != "true" ]; then mamba env remove -qy -n "fastsurfer" && rm -R /install ; fi +FROM build_base AS build_venv + +ARG PYTHON_VERSION +ARG DEVICE +ARG AUTHOR +ARG REPOSITORY_URL +ARG DOC_URL +ARG FASTSURFER_VERSION +ARG GIT_HASH +ARG VENDOR +ARG BUILD_BASE_IMAGE + +SHELL ["/bin/bash", "--login", "-e", "-c"] + +# Install the project's dependencies using the lockfile and settings +RUN --mount=type=cache,target=/root/.cache/uv \ + --mount=type=bind,source=pyproject.toml,target=/install/pyproject.toml < does not accept variables as part of -# the image/stage name +# This is needed because COPY --from= does not accept variables as part of the image/stage name # selected_freesurfer_build_image -> $FREESURFER_BUILD_IMAGE FROM $FREESURFER_BUILD_IMAGE AS selected_freesurfer_build_image -# selected_conda_build_image -> $CONDA_BUILD_IMAGE -FROM $CONDA_BUILD_IMAGE AS selected_conda_build_image - +# selected_venv_build_image -> $VENV_BUILD_IMAGE +FROM $VENV_BUILD_IMAGE AS selected_venv_build_image # ========================================= # Here, we create the smaller runtime image # ========================================= FROM $RUNTIME_BASE_IMAGE AS runtime +ARG AUTHOR +ARG REPOSITORY_URL +ARG DOC_URL +ARG FASTSURFER_VERSION +ARG GIT_HASH +ARG VENDOR +ARG DEVICE +ARG RUNTIME_BASE_IMAGE +ARG PYTHON_VERSION + + +LABEL org.opencontainers.image.authors="$AUTHOR" \ + org.opencontainers.image.url="$REPOSITORY_URL" \ + org.opencontainers.image.documentation="$DOC_URL/overview/docker.html" \ + org.opencontainers.image.source="${REPOSITORY_URL/tree/blob}/tools/Docker/Dockerfile" \ + org.opencontainers.image.version="$FASTSURFER_VERSION" \ + org.opencontainers.image.revision="$GIT_HASH" \ + org.opencontainers.image.vendor="$VENDOR" \ + org.opencontainers.image.licenses="FreeSurfer License" \ + org.opencontainers.image.ref.name="deepmi/fastsurfer:${DEVICE}_v${FASTSURFER_VERSION}" \ + 
org.opencontainers.image.title="FastSurfer $FASTSURFER_VERSION minimal installation for $DEVICE" \ + org.opencontainers.image.description="This image contains the python virtual environment for the official FastSurfer docker image. The corresponding virtual environment is placed at /venv in the image." \ + org.opencontainers.image.base.name="$RUNTIME_BASE_IMAGE" + ENV LANG=C.UTF-8 \ - LC_NUMERIC=en_US.UTF-8 - -# Install required packages for freesurfer to dry_run -RUN apt-get update && apt-get install -y --no-install-recommends \ - bc \ - gawk \ - libgomp1 \ - libquadmath0 \ - time \ - tcsh && \ - apt clean && \ - rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + LC_NUMERIC=en_US.UTF-8 \ + MPLCONFIGDIR=/tmp/matplotlib-config + +SHELL ["/bin/bash", "-e", "-c"] + +RUN <> /etc/bash.bashrc +EOF # Add FreeSurfer and python Environment variables # DO_NOT_SEARCH_FS_LICENSE_IN_FREESURFER_HOME=true deactivates the search for FS_LICENSE in FREESURFER_HOME @@ -170,25 +280,16 @@ ENV OS=Linux \ MPLCONFIGDIR=/tmp \ XDG_CACHE_HOME=/tmp/xdgcache \ PATH=/venv/bin:/opt/freesurfer/bin:$PATH \ - MPLCONFIGDIR=/tmp/matplotlib-config \ DO_NOT_SEARCH_FS_LICENSE_IN_FREESURFER_HOME="true" -# create matplotlib config dir; make sure we use bash and activate conda env -# (in case someone starts this interactively) -RUN mkdir -m 777 $MPLCONFIGDIR && \ - echo "source /venv/bin/activate" >> /etc/bash.bashrc -SHELL ["/bin/bash", "--login", "-c"] - # Copy fastsurfer venv and pruned freesurfer from build images -# Note, since COPY does not support variables in the --from parameter, so we point to a -# reference here, and the +# Note, since COPY does not support variables in the --from parameter, so we point to a reference here, and the # seletced__build_image is a only a reference to $_BUILD_IMAGE COPY --from=selected_freesurfer_build_image /opt/freesurfer /opt/freesurfer -COPY --from=selected_conda_build_image /venv /venv - -# Fix for cuda11.8+cudnn8.7 bug+warning: https://github.com/pytorch/pytorch/issues/97041 -RUN if [[ "$DEVICE" == "cu118" ]] ; then cd /venv/python3.10/site-packages/torch/lib && ln -s libnvrtc-*.so.11.2 libnvrtc.so ; fi +COPY --from=selected_venv_build_image /venv /venv +# also copy the python installation to /opt/python from venv image +COPY --from=selected_venv_build_image /opt/python /opt/python # Copy fastsurfer over from the build context and add PYTHONPATH COPY . /fastsurfer/ @@ -196,29 +297,35 @@ ENV PYTHONPATH=/fastsurfer:/opt/freesurfer/python/packages \ FASTSURFER_HOME=/fastsurfer \ PATH=/fastsurfer:$PATH -# Download all remote network checkpoints already, compile all FastSurfer scripts into -# bytecode and update the build file with checkpoints md5sums and pip packages. -RUN cd /fastsurfer ; python3 FastSurferCNN/download_checkpoints.py --all && \ - python3 -m compileall * && \ - python3 FastSurferCNN/version.py --sections +git+checkpoints+pip \ - --build_cache tools/Docker/BUILD.info -o BUILD.info - -# TODO: SBOM info of FastSurfer and FreeSurfer are missing, it is unclear how to add -# those at the moment, as the buildscanner syft does not find simple package.json -# or pyproject.toml files right now. The easiest option seems to be to "setup" -# fastsurfer and freesurfer via pip install. 
-#ENV BUILDKIT_SCAN_SOURCE_EXTRAS="/fastsurfer" -#ARG BUILDKIT_SCAN_SOURCE_EXTRAS="/fastsurfer" -#RUN < /fastsurfer/package.json -#{ -# "name": "fastsurfer", -# "version": "$(python3 FastSurferCNN/version.py)", -# "author": "David Kügler " -#} -#EOF +RUN < /etc/containerd/config.toml`, in this config file edit the `"root"`-entry (default value is `/var/lib/containerd`). 4. Finally, you can now build the FastSurfer image with `python tools/Docker/build.py ... --attest`. This will add the additional flags to the docker build command. -Setting the ssl_verify parameter of mamba ------------------------------------------ -The `build.py` script supports the `--ssl_verify` flag, which can be passed `"False"` or the path to an alternative root certificate. - -```bash -python tools/Docker/build.py --device cpu --tag my_fastsurfer:cpu --ssl_verify /path/to/custom-cert.srt -``` - Building for release -------------------- Make sure, you are building on a machine that has [containerd-storage and Buildkit](#build-docker-image-with-attestation-and-provenance). @@ -231,9 +223,9 @@ img=deepmi/fastsurfer # the version can be identified with: $build_dir/run_fastsurfer.sh --version version=2.5.0 # the cuda and rocm version can be identified with: python $build_dir/tools/Docker/build.py --help | grep -E ^[[:space:]]+--device -cuda=126 -cudas=("cuda118" "cuda124" "cuda$cuda") -rocm=6.2.4 +cuda=128 +cudas=("cuda118" "cuda126" "cuda$cuda") +rocm=6.3 rocms=("rocm$rocm") # end of config @@ -242,7 +234,7 @@ git clone --branch stable --single-branch github.com/Deep-MI/FastSurfer $build_d cd $build_dir all_tags=("latest" "gpu-latest" "cuda-v$version" "rocm-v$version" "cpu-latest") # build all distinct images -for dev in cpu "${rocms[@]}" "${cudas[@]}" +for dev in cpu xpu "${rocms[@]}" "${cudas[@]}" do python3 tools/Docker/build.py --tag $img:$dev-v$version --freesurfer_build_image $img-build:freesurfer741 --attest --device $dev all_tags+=("$dev-v$version") diff --git a/tools/Docker/build.py b/tools/Docker/build.py index 94247bed4..0c19572a6 100755 --- a/tools/Docker/build.py +++ b/tools/Docker/build.py @@ -1,4 +1,4 @@ -#!/bin/python +#!/usr/bin/env python # Copyright 2022 Image Analysis Lab, German Center for Neurodegenerative Diseases(DZNE), Bonn # @@ -39,11 +39,10 @@ logger = logging.getLogger(__name__) -Target = Literal['runtime', 'build_common', 'build_conda', 'build_freesurfer', - 'build_base', 'runtime_cuda'] +Target = Literal["runtime", "build_common", "build_venv", "build_freesurfer", "build_base", "runtime_cuda"] CacheType = Literal["inline", "registry", "local", "gha", "s3", "azblob"] -AllDeviceType = Literal["cpu", "cuda", "cu118", "cu124", "cu126", "rocm", "rocm6.2.4"] -DeviceType = Literal["cpu", "cu118", "cu124", "cu126", "rocm6.2.4"] +AllDeviceType = Literal["cpu", "cuda", "cu118", "cu126", "cu128", "rocm", "rocm6.3", "xpu"] +DeviceType = Literal["cpu", "cu118", "cu126", "cu128", "rocm6.3"] CREATE_BUILDER = "Create builder with 'docker buildx create --name fastsurfer'." 
CONTAINERD_MESSAGE = ( @@ -72,18 +71,19 @@ class DEFAULTS: # torch 2.0.1 comes compiled with cu117, cu118, and rocm5.4.2 # torch 2.4 comes compiled with cu118, cu121, cu124 and rocm6.1 # torch 2.6 comes compiled with cu118, cu124, cu126 and rocm6.2.4 - CUDA="cu126" - CUDA_VERSION="12.6" - ROCM="rocm6.2.4" + # torch 2.7.1 comes compiled with cu118, cu126, cu128, rocm6.3, and xpu (intel) + CUDA="cu128" + CUDA_VERSION="12.8" + ROCM="rocm6.3" MapDeviceType: dict[AllDeviceType, DeviceType] = dict( ((d, d) for d in get_args(DeviceType)), rocm=ROCM, cuda=CUDA, ) - BUILD_BASE_IMAGE = "ubuntu:22.04" - RUNTIME_BASE_IMAGE = "ubuntu:22.04" + BUILD_BASE_IMAGE = "ubuntu:24.04" + RUNTIME_BASE_IMAGE = "ubuntu:24.04" FREESURFER_BUILD_IMAGE = "build_freesurfer" - CONDA_BUILD_IMAGE = "build_conda" + VENV_BUILD_IMAGE = "build_venv" def docker_image(arg) -> str: @@ -189,15 +189,6 @@ def format_cache_to(self) -> str: __repr__ = format_cache_from -def _validate_ssl_verify(value) -> Path | bool: - """Validate the SSL certificate value from false/none/true/path to certificate.""" - if value.lower() in ("false", "", "none"): - return False - elif value.lower() in ("true", ""): - return True - return Path(value) - - def make_parser() -> argparse.ArgumentParser: try: from FastSurferCNN.segstats import HelpFormatter @@ -215,7 +206,8 @@ def make_parser() -> argparse.ArgumentParser: help=f"""selection of internal build stages to build for a specific platform.
- cuda: defaults to {DEFAULTS.CUDA}, cuda {DEFAULTS.CUDA_VERSION}
- cpu: only cpu support
- - rocm: defaults to {DEFAULTS.ROCM} (experimental)""", + - rocm: defaults to {DEFAULTS.ROCM} (experimental)
+ - xpu: intel xpu (VERY experimental)""", ) parser.add_argument( "--tag", @@ -231,7 +223,7 @@ def make_parser() -> argparse.ArgumentParser: choices=get_args(Target), metavar="target", help="""target to build (from list of targets below, defaults to runtime):
- - build_conda: "finished" conda build image
+ - build_venv: "finished" venv build image (this was previously called 'build_conda')
- build_freesurfer: "finished" freesurfer build image
- runtime: final fastsurfer runtime image""", ) @@ -310,43 +302,34 @@ def make_parser() -> argparse.ArgumentParser: the Dockerfile (either by building it or from cache, see --cache).""", ) expert.add_argument( + "--venv_build_image", "--conda_build_image", type=docker_image, metavar="image[:tag]", help="""explicitly specifies an image to copy the python environment from. The environment is expected to be in /venv in the image, like the - runtime image. By default, uses the "build_conda" stage in the + runtime image. By default, uses the "build_venv" stage in the Dockerfile (either by building it or from cache, see --cache).""", ) expert.add_argument( "--runtime_base_image", type=docker_image, metavar="image[:tag]", - help="explicitly specifies the base image to build the runtime image from (default: ubuntu:22.04).", + help=f"explicitly specifies the base image to build the runtime image from " + f"(default: {DEFAULTS.RUNTIME_BASE_IMAGE}).", ) expert.add_argument( "--build_base_image", type=docker_image, metavar="image[:tag]", - help="explicitly specifies the base image to build the build images from (default: ubuntu:22.04).", + help=f"explicitly specifies the base image to build the build images from " + f"(default: {DEFAULTS.BUILD_BASE_IMAGE}).", ) expert.add_argument( "--debug", action="store_true", help="enables the DEBUG build flag.", ) - - def _default_ssl_verify(x): - return os.environ.get("MAMBA_SSL_VERIFY", os.environ.get("CONDA_SSL_VERIFY", x)) - expert.add_argument( - "--ssl_verify", - type=_validate_ssl_verify, - default=_default_ssl_verify(True), - metavar="{True,False,}", - help="ssl certificate to use for condaforge, from None/False (ignore), True (default system certificate), or a " - "certificate file path (defaults to the value of the MAMBA_SSL_VERIFY (or CONDA_SSL_VERIFY) environment " - f"variable, here: {_default_ssl_verify('True (neither set)')}).", - ) return parser @@ -355,7 +338,6 @@ def red(skk): def get_builder( - Popen, builder_type: str, require_builder_type: bool = False, ) -> tuple[bool, str]: @@ -363,6 +345,8 @@ def get_builder( from re import compile from subprocess import PIPE + from FastSurferCNN.utils.run_tools import Popen + buildx_binfo = Popen(["docker", "buildx", "ls"], stdout=PIPE, stderr=PIPE).finish() header, *lines = buildx_binfo.out_str("utf-8").strip().split("\n") header_pattern = compile("\\S+\\s*") @@ -413,7 +397,8 @@ def docker_build_image( attestation: bool = False, action: Literal["load", "push"] = "load", image_path: Path | str | None = None, - **kwargs) -> None: + **kwargs, +) -> None: """ Build a docker image. @@ -424,28 +409,24 @@ def docker_build_image( dockerfile : Path, str Path to the Dockerfile. working_directory : Path, str, optional - Path o the working directory to perform the build operation (default: inherit). - context : Path, str, optional - Base path to the context folder to build the docker image from (default: '.'). - dry_run : bool, optional - Whether to actually trigger the build, or just print the command to the console - (default: False => actually build). + Path to the working directory to perform the build operation (None: inherit). + context : Path, str, default='.' + Base path to the context folder to build the docker image from. + dry_run : bool, default=False + Whether to actually trigger the build, or just print the command to the console (False: actually build). cache_to : str, optional - Forces usage of buildx over build, use docker build caching as in the --cache-to - argument to docker buildx build. 
+ Forces usage of buildx over build; docker build caching is used via the --cache-to argument to docker buildx build. attestation : bool, default=False Whether to create sbom and provenance attestation. action : "load", "push", default="load" - The operation to perform after the image is built (only if a docker-container - builder is detected). + The operation to perform after the image is built (only if a docker-container builder is detected). image_path : Path, str, optional - A path to save the image to (experimental; currently cannot be imported into a - legacy docker storage driver). + A path to save the image to (experimental; currently cannot be imported into a legacy docker storage driver). - Additional kwargs add additional build flags to the build command in the following - manner: "_" is replaced by "-" in the keyword name and each sequence entry is passed - with its own flag, e.g. `docker_build_image(..., build_arg=["TEST=1", "VAL=2"])` is - translated to `docker [buildx] build ... --build-arg TEST=1 --build-arg VAL=2`. + Additional kwargs add additional build flags to the build command in the following manner: "_" is replaced by "-" in + the keyword name and each sequence entry is passed with its own flag, e.g. + `docker_build_image(..., build_arg=["TEST=1", "VAL=2"])` is translated to + `docker [buildx] build ... --build-arg TEST=1 --build-arg VAL=2`. """ from itertools import chain, repeat from shutil import which @@ -486,11 +467,11 @@ def is_inline_cache(cache_kw): require_container = (attestation or any(is_inline_cache(f"cache_{c}") for c in ("to", "from"))) import_after_args = [] - if dest := image_path or "": - logger.warning("Images exported with image_path cannot be imported into legacy " - "storage drivers. This feature is currently experimental. Also " - "note, that exporting to a file is incompatible with the load " - f"and push actions. Deactivating {action}-action!") + if dest := (image_path or ""): + logger.warning( + "Images exported with image_path cannot be imported into legacy storage drivers. This feature is currently " + "experimental. Also note that exporting to a file is incompatible with the load and push actions. " + f"Deactivating {action}-action!") dest = f",dest={dest}" action = "export" if not has_buildx: @@ -498,13 +479,12 @@ def is_inline_cache(cache_kw): if require_container: # not supported with builder != docker-container raise RuntimeError( - "Using --cache_{from,to} or attestation requires docker buildx and a " - f"docker-container builder.\n{INSTALL_BUILDX}\n{CREATE_BUILDER}" + "Using --cache_{from,to} or attestation requires docker buildx and a docker-container builder.\n" + f"{INSTALL_BUILDX}\n{CREATE_BUILDER}" ) if action != "load": raise RuntimeError( - "The legacy docker builder does not support pushing or exporting the " - "image." + "The legacy docker builder does not support pushing or exporting the image." ) args = ["build"] kwargs_to_exclude = [f"cache_{c}" for c in ("to", "from")] @@ -513,7 +493,6 @@ def is_inline_cache(cache_kw): args = ["buildx", "build"] # raises RuntimeError, if a docker-container builder is required, but not found default_builder_is_container, alternative_builder = get_builder( - Popen, "docker-container", require_container, ) @@ -538,8 +517,8 @@ def is_inline_cache(cache_kw): print(f"mkdir -p {Path(image_path).parent} && ", sep="") else: Path(image_path).parent.mkdir(exist_ok=True) - # importing after (bock docker image import as well as docker image load - # are not supported for images exported by buildkit.
+ # importing after (both docker image import and docker image load are not supported for images + # exported by buildkit). # import_after_args = ["image", "import", image_path, image_name] elif attestation: # also implicitly action == load @@ -649,18 +628,18 @@ def main( dry_run: bool = False, tag_dev: bool = True, fastsurfer_home: Path | None = None, - ssl_verify: Path | bool = True, **keywords, ) -> int | str: - from FastSurferCNN.version import has_git + from FastSurferCNN.version import has_git, parse_build_file from FastSurferCNN.version import main as version kwargs: dict[str, str | list[str]] = {} if cache is not None: if not isinstance(cache, CacheSpec): cache = CacheSpec(cache) - logger.info(f"cache: {cache}") + if not dry_run: + logger.info(f"cache: {cache}") kwargs["cache_from"] = cache.format_cache_from() - kwargs["cache_to"] = cache.format_cache_from() + kwargs["cache_to"] = cache.format_cache_to() fastsurfer_home = Path(fastsurfer_home) if fastsurfer_home else default_home() # read the freesurfer download url from pyproject.toml @@ -679,7 +658,8 @@ def main( kwargs["target"] = target kwargs["build_arg"] = [ f"DEVICE={DEFAULTS.MapDeviceType.get(device, 'cpu')}", - f"FREESURFER_URL={pyproject_freesurfer['urls']['linux'].format(version=pyproject_freesurfer['version'])}" + f"FREESURFER_URL={pyproject_freesurfer['urls']['linux'].format(version=pyproject_freesurfer['version'])}", + f"FREESURFER_VERSION={pyproject_freesurfer['version']}", ] if debug: kwargs["build_arg"].append("DEBUG=true") @@ -687,24 +667,13 @@ def main( "build_base_image", "runtime_base_image", "freesurfer_build_image", - "conda_build_image", + "venv_build_image", ] for key in build_arg_list: upper_key = key.upper() value = keywords.get(key) or getattr(DEFAULTS, upper_key) kwargs["build_arg"].append(f"{upper_key}={value}") - # kwargs["build_arg"] = " ".join(kwargs["build_arg"]) - if ssl_verify is not True: - if ssl_verify is False: - kwargs["build_arg"].append("MAMBA_SSL_VERIFY=") - else: - _ssl_cert = "tools/Docker/custom-ssl.crt" - if (fastsurfer_home / _ssl_cert).exists(): - (fastsurfer_home / _ssl_cert).unlink() - from shutil import copy2 - copy2(ssl_verify, fastsurfer_home / _ssl_cert) - kwargs["build_arg"].append(f"MAMBA_SSL_CERTIFICATE={_ssl_cert}") - kwargs["build_arg"].append(f"MAMBA_SSL_VERIFY=/install/{Path(_ssl_cert).name}") + build_filename = fastsurfer_home / "tools" / "Docker" / "BUILD.info" if has_git(): version_sections = "+git" @@ -728,9 +697,17 @@ def main( return f"Creating the version file failed with message: {ret_version}" with open(build_filename) as build_file: - from FastSurferCNN.version import parse_build_file build_info = parse_build_file(build_file) + if has_git(): + repository_url = get_repository_url(build_info["git_status"], build_info["git_branch"]) + kwargs["build_arg"].extend([ + f"REPOSITORY_URL={repository_url}", + f"GIT_HASH={build_info['git_hash']}", + ]) + if "github.com" in repository_url and repository_url.endswith("/tree/stable"): + kwargs["build_arg"].append("DOC_URL=https://deep-mi.org/fastsurfer/stable") + kwargs["build_arg"].append(f"FASTSURFER_VERSION={build_info['version_tag']}") version_tag = build_info["version_tag"] image_prefix = "" if device != "cuda": @@ -773,6 +750,23 @@ def main( return 0 +def get_repository_url(git_status_text: str, branch: str) -> str: + """Get the repository URL of the current git repository.""" + from FastSurferCNN.utils.run_tools import Popen + + remote = git_status_text.removeprefix(f"## {branch}...").split("/")[0] + repository_process = Popen(["git",
"remote", "get-url", remote], stdout=subprocess.PIPE).finish() + if repository_process.retcode != 0: + logger.error(repository_process.err_str()) + raise RuntimeError("Could not get the repository URL from git.") + repository_url = repository_process.out_str().strip() + if repository_url.endswith(".git"): + repository_url = repository_url[:-4] + if repository_url.startswith("git@"): + repository_url = "https://" + repository_url[4:].replace(":/", "/") + return repository_url + "/tree/" + branch + + def default_home() -> Path: """ Find the fastsurfer path. @@ -782,18 +776,19 @@ def default_home() -> Path: Path The FastSurfer root path belonging to this build.py file. """ - return Path(__file__).parents[2] + return Path(__file__).resolve().parents[2] if __name__ == "__main__": import sys logging.basicConfig(stream=sys.stdout) - arguments = make_parser().parse_args() # make sure the code can run without FastSurfer being in PYTHONPATH fastsurfer_home = default_home() if str(fastsurfer_home) not in sys.path: sys.path.append(str(fastsurfer_home)) + arguments = make_parser().parse_args() + logger.setLevel(logging.WARN if arguments.dry_run else logging.INFO) sys.exit(main(**vars(arguments), fastsurfer_home=fastsurfer_home)) diff --git a/tools/Docker/conda_pack.sh b/tools/Docker/conda_pack.sh deleted file mode 100755 index a82732362..000000000 --- a/tools/Docker/conda_pack.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash -# usage: -# conda_pack.sh -# -# packs the environment into /venv - -# set script to stop after first fail -set -e - -# Install conda-pack -mamba install -c conda-forge conda-pack -# make sure setuptools is <81 for conda-pack 0.8.1 https://github.com/conda/conda-pack/issues/391 -setuptools_major=$(mamba list setuptools -e | sed '/^#/d' | grep -oE '=[^.]+\.') -if [[ "${setuptools_major:1:-1}" -lt 81 ]] ; then mamba install -c conda-forge "setuptools<81" ; fi -# Use conda-pack to create a standalone environment in /venv -conda-pack -n "$1" -o /tmp/env.tar -mkdir /venv -cd /venv -tar xf /tmp/env.tar -rm /tmp/env.tar -# Finally, when venv in a new location, fix up paths -/venv/bin/conda-unpack diff --git a/tools/Docker/entrypoint.sh b/tools/Docker/entrypoint.sh index db4f312c0..c26140ba5 100755 --- a/tools/Docker/entrypoint.sh +++ b/tools/Docker/entrypoint.sh @@ -6,9 +6,8 @@ #set -euo pipefail # ... Run whatever commands ... 
-# Temporarily disable strict mode and activate conda: +# Temporarily disable strict mode and activate venv: set +euo pipefail -#conda activate myenv source /venv/bin/activate # Re-enable strict mode: diff --git a/tools/Docker/install_env.py b/tools/Docker/install_env.py deleted file mode 100644 index 4cc87fe2e..000000000 --- a/tools/Docker/install_env.py +++ /dev/null @@ -1,157 +0,0 @@ -#!/bin/python - -# helper script to install environment files - -import argparse -import logging -import os.path -import re - -logger = logging.getLogger(__name__) - - -arg_pattern = re.compile('^(\\s*-\\s*)(--[a-zA-Z0-9\\-]+)(\\s+\\S+)?(\\s*(#.*)?)$') -package_pattern = re.compile('^(\\s*-\\s*)([a-zA-Z0-9\\.\\_\\-]+|pip:)(\\s*[<=>~]{1,2}\\s*\\S+)?(\\s*(#.*)?\\s*)$') -dependencies_pattern = re.compile('^\\s*dependencies:\\s*$') - - -def mode(arg: str) -> str: - if arg in ["base", "cpu"] or \ - re.match("^cu\\d+$", arg) or \ - re.match("^rocm\\d+\\.\\d+(\\.\\d+)?$", arg): - return arg - else: - raise argparse.ArgumentTypeError(f"The mode was '{arg}', but should be " - f"'base', 'cpu', 'cuXXX', or 'rocmX.X[.X]', " - f"where X are digits.") - - -def make_parser() -> argparse.ArgumentParser: - parser = argparse.ArgumentParser( - description='Filter the yaml files for torch components and return modified files' - ) - - parser.add_argument('-m', - '--mode', - required=True, - type=mode, - help="""targeted return: - - base: conda environment create without pytorch. - - cpu: pytorch install without cuda (only cpu support, only linux) - - cu117: standard pytorch install (with cuda 11.7 on linux) - - cu118: standard pytorch install (with cuda 11.8 on linux) - - rocm5.4.2: rocm pytorch install (with rocm 5.4.2 on linux) - """ - ) - parser.add_argument('-i', - '--yaml_env', - dest="yaml_in", - required=True, - help="Path to the input yaml environment" - ) - parser.add_argument("-o", - dest='yaml_out', - default=None, - help="Path to the output yaml environment (default: print to stdout)" - ) - return parser - - -def main(args): - """Function to split a conda env file for pytorch cuda and cpu versions.""" - - mode = args.mode - if mode is None: - return "ERROR: No mode set." - - yaml_in = getattr(args, 'yaml_in', None) - if yaml_in is None or not os.path.exists(yaml_in): - return f"ERROR: yaml environment file {yaml_in} is not valid!" 
- with open(yaml_in) as f_yaml: - lines = f_yaml.readlines() - - out_file = args.yaml_out - out_file_pointer = open(out_file, "w") if out_file else None - # filter yaml file for pip content - kwargs = {"sep": "", "end": "", "file": out_file_pointer} - - packages_with_device_tag = ["pytorch", "torch", "torchvision", "torchaudio"] - packages_without_device_tag_but_need_torch = ["torchio"] - packages_that_only_work_with_cuda = [] - packages_requiring_torch = packages_with_device_tag + packages_without_device_tag_but_need_torch - all_special_packages = packages_requiring_torch + packages_that_only_work_with_cuda - - in_dep = False - buffer = "" - has_package = False - pip_indent = -1 - has_pip = False - - for line in lines: - line_stripped = line.lstrip() - in_dep = in_dep and line_stripped.startswith("-") - indent_count = len(line) - len(line_stripped) - - # there is something in the buffer, we are changing indents (but not after pip - # subsection) and there are packages in the buffer, flush the buffer - logger.debug(f"maybe print buffer: {has_package} {pip_indent} {indent_count}") - if buffer != "" and has_package and pip_indent in (-1, indent_count): - has_pip = has_pip or re.search('-\\s*pip', buffer) is not None - print(buffer, **kwargs) - buffer = "" - has_package = False - pip_indent = -1 - - # handle line not part of dependencies - hits_package = package_pattern.search(line) - hits_args = arg_pattern.search(line) - if not in_dep: - print(line, **kwargs) - in_dep = dependencies_pattern.search(line) is not None - # handle lines part of dependencies AND package specs - elif hits_package is not None: # no hit - indent, package_pip, version, comment, _ = hits_package.groups("") - - logger.debug(f"potential package: {mode} - {package_pip} " + - f"base {package_pip not in all_special_packages} " + - f"not base {package_pip in packages_requiring_torch} " + - f"cuda {package_pip in packages_that_only_work_with_cuda}") - if package_pip == "pip": - # pip is automatically added in front of the '- pip:' subsection - pass - elif package_pip == "pip:": - # this adds "- pip" and "- pip:" to buffer - buffer = ("" if has_pip else indent + "pip\n") + line - pip_indent = indent_count - elif mode == "base" and package_pip not in all_special_packages or \ - mode != "base" and package_pip in packages_requiring_torch or \ - mode.startswith("cu") and package_pip in packages_that_only_work_with_cuda: - if mode != "base" and package_pip in packages_with_device_tag: - if "+" in version: - version, _ = version.split("+", 1) - version += "+" + mode - buffer += indent + package_pip + version + comment - has_package = True - # handle lines part of dependencies AND argument to pip - elif hits_args is not None: - # this is an argument line, should only be in pip section - indent, arg, value, comment, _ = hits_args.groups("") - if arg in ("--index-url", "--extra-index-url") and "download.pytorch.org" in value: - value_cpu = re.sub("/whl/[^/]+/?$", f"/whl/{mode}", value) - buffer += indent + arg + value_cpu + comment - else: - buffer += line - else: - raise ValueError(f"Invalid line in environment file, could not interpret `{line}`") - logger.debug("buffer" + buffer) - if buffer != "" and has_package: - print(buffer, **kwargs) - return 0 - - -if __name__ == "__main__": - import sys - logging.basicConfig(stream=sys.stderr) - #logger.setLevel(logging.DEBUG) - - sys.exit(main(make_parser().parse_args())) diff --git a/tools/build/fspython b/tools/build/fspython new file mode 100755 index 000000000..84d322f7f --- /dev/null 
+++ b/tools/build/fspython @@ -0,0 +1,33 @@ +#!/bin/bash + +# This script is a wrapper to forward commands from fspython to the fastsurfer python virtual environment +# usage: fspython +# It will activate the fastsurfer virtual environment and forward all arguments to the python interpreter + +# add freesurfer packages to PYTHONPATH +if [[ -n "$FREESURFER_HOME" ]] +then + PYTHONPATH="$PYTHONPATH:$FREESURFER_HOME/python/packages" +else + THIS_SCRIPT="$(realpath "$0")" + PYTHONPATH="$PYTHONPATH:$(dirname "$(dirname "$THIS_SCRIPT")")/python/packages" +fi +export PYTHONPATH + +# if we are not already in the fastsurfer venv, but there is a fastsurfer-docker style venv, activate it +if [[ "$VIRTUAL_ENV_PROMPT" != "fastsurfer" ]] && [[ -f "/venv/bin/activate" ]] +then + # activate fastsurfer venv + source /venv/bin/activate +fi + +if [[ "$VIRTUAL_ENV_PROMPT" == "fastsurfer" ]] +then + # already in fastsurfer venv + python "$@" + exit $? +else + echo "ERROR: fastsurfer virtual environment not found. Source an environment with prompt" + echo " (\$VIRTUAL_ENV_PROMPT 'fastsurfer') first." + exit 1 +fi diff --git a/tools/build/install_fs_pruned.sh b/tools/build/install_fs_pruned.sh index c89617860..acc162998 100755 --- a/tools/build/install_fs_pruned.sh +++ b/tools/build/install_fs_pruned.sh @@ -1,5 +1,4 @@ -#!/bin/bash --login -# --login to read bashrc for conda inside docker +#!/bin/bash # This file downloads the FreeSurfer tar ball and extracts from it only what is needed to run # FastSurfer @@ -79,57 +78,38 @@ echo echo "$fslink" echo - -function run_parallel () -{ - # param 1 num_parallel_processes - # param 2 command (printf string) - # param 3 how many entries to consume from $@ per "run" - # param ... parameters to format, ie. we are executing $(printf $command $@...) - i=0 - pids=() - num_parallel_processes=$1 - command=$2 - num=$3 - shift - shift - shift - args=("$@") - j=0 - while [[ "$j" -lt "${#args}" ]] - do - # shellcheck disable=SC2059 - cmd=$(printf "$command" "${args[@]:$j:$num}") - j=$((j + num)) - $cmd & - pids=("${pids[@]}" "$!") - i=$((i + 1)) - if [[ "$i" -ge "$num_parallel_processes" ]] - then - wait "${pids[0]}" - pids=("${pids[@]:1}") - fi - done - for pid in "${pids[@]}" - do - wait "$pid" - done -} - - # get FreeSurfer and unpack (some of it) echo "Downloading FS and unpacking portions ..." # temp freesurfer dl filename (to save the dl) -freesurfer_dl="freesurfer_$(date +%s).tar.gz" +if [[ -d /install ]] ; then + freesurfer_dl="/install/download/$(basename "$fslink")" + if [[ ! -d /install/download/ ]] ; then + mkdir -p /install/download/ + delete_freesurfer_dl="true" + else + delete_freesurfer_dl="false" + fi +else + freesurfer_dl="freesurfer_$(date +%s).tar.gz" + delete_freesurfer_dl="true" +fi + +if [[ -f "$freesurfer_dl" ]] ; then + echo "Found cached download $freesurfer_dl, using that ..." 
+else + # dl aria2c if that exists, else wget or curl + if [[ -n "$(which aria2c)" ]] ; then dl=(aria2c -cx 16 -s 16 --check-certificate=false -o "$freesurfer_dl" "$fslink") + elif [[ -n "$(which wget)" ]] ; then dl=(wget --no-check-certificate -qO- "$fslink" -O "$freesurfer_dl") + else dl=(curl -L --insecure "$fslink" -o "$freesurfer_dl") + fi -# dl aria2c if that exists, else wget or curl -if [[ -n "$(which aria2c)" ]] ; then dl=(aria2c -x 16 -s 16 -c --check-certificate=false -o "$freesurfer_dl" "$fslink" ) -elif [[ -n "$(which wget)" ]] ; then dl=(wget --no-check-certificate -qO- "$fslink" -O "$freesurfer_dl") -else dl=(curl -L --insecure "$fslink" -o "$freesurfer_dl") + echo "Downloading FreeSurfer from $fslink with ${dl[0]}..." + "${dl[@]}" fi -if ! "${dl[@]}" ; then + +if [[ ! -f "$freesurfer_dl" ]] ; then echo "ERROR: Downloading FreeSurfer failed! This is not recoverable, see message above and retry!" exit 1 fi @@ -186,7 +166,10 @@ tar zxv --no-same-owner -C "$where" \ --exclude='freesurfer/trctrain' \ -f "$freesurfer_dl" -rm "$freesurfer_dl" +if [[ "$delete_freesurfer_dl" == "true" ]] ; then + echo "Deleting temporary download $freesurfer_dl ..." + rm "$freesurfer_dl" +fi # rename download to tmp mv "$where/freesurfer" "$fss" @@ -202,231 +185,231 @@ mkdir -p "$fsd/subjects/fsaverage/label" mkdir -p "$fsd/subjects/fsaverage/surf" # We need these -copy_files=" - ASegStatsLUT.txt - build-stamp.txt - DefectLUT.txt - FreeSurferColorLUT.txt - FreeSurferEnv.sh - SegmentNoLUT.txt - SetUpFreeSurfer.sh - Simple_surface_labels2009.txt - sources.csh - SubCorticalMassLUT.txt - WMParcStatsLUT.txt - average/3T18yoSchwartzReactN32_as_orig.4dfp.hdr - average/3T18yoSchwartzReactN32_as_orig.4dfp.ifh - average/3T18yoSchwartzReactN32_as_orig.4dfp.img - average/3T18yoSchwartzReactN32_as_orig.4dfp.img.rec - average/3T18yoSchwartzReactN32_as_orig.4dfp.mat - average/3T18yoSchwartzReactN32_as_orig.lst - average/711-2B_as_mni_average_305_mask.4dfp.hdr - average/711-2B_as_mni_average_305_mask.4dfp.ifh - average/711-2B_as_mni_average_305_mask.4dfp.img - average/711-2B_as_mni_average_305_mask.4dfp.img.rec - average/711-2C_as_mni_average_305.4dfp.hdr - average/711-2C_as_mni_average_305.4dfp.ifh - average/711-2C_as_mni_average_305.4dfp.img - average/711-2C_as_mni_average_305.4dfp.img.rec - average/711-2C_as_mni_average_305.4dfp.mat - average/colortable_BA.txt - average/colortable_desikan_killiany.txt - average/colortable_vpnl.txt - average/lh.CDaparc.atlas.acfb40.noaparc.i12.2016-08-02.gcs - average/lh.DKaparc.atlas.acfb40.noaparc.i12.2016-08-02.gcs - average/lh.DKTaparc.atlas.acfb40.noaparc.i12.2016-08-02.gcs - average/lh.folding.atlas.acfb40.noaparc.i12.2016-08-02.tif - average/mni305.cor.mgz - average/mni305.mask.cor.mgz - average/rh.CDaparc.atlas.acfb40.noaparc.i12.2016-08-02.gcs - average/rh.DKaparc.atlas.acfb40.noaparc.i12.2016-08-02.gcs - average/rh.DKTaparc.atlas.acfb40.noaparc.i12.2016-08-02.gcs - average/rh.folding.atlas.acfb40.noaparc.i12.2016-08-02.tif - bin/analyzeto4dfp - bin/AntsDenoiseImageFs - bin/asegstats2table - bin/aparcstats2table - bin/avi2talxfm - bin/compute_vox2vox - bin/defect2seg - bin/fname2stem - bin/fspython - bin/fs_temp_dir - bin/fs_temp_file - bin/fs-check-version - bin/fsr-getxopts - bin/gauss_4dfp - bin/ifh2hdr - bin/imgreg_4dfp - bin/isanalyze - bin/isnifti - bin/lta_convert - bin/make_upright - bin/mpr2mni305 - bin/mri_add_xform_to_header - bin/mri_annotation2label - bin/mri_binarize - bin/mri_brainvol_stats - bin/mri_cc - bin/mri_concat - bin/mri_concatenate_lta - 
bin/mri_convert - bin/mri_coreg - bin/mri_diff - bin/mri_edit_wm_with_aseg - bin/mri_fill - bin/mri_fuse_segmentations - bin/mri_glmfit - bin/mri_info - bin/mri_label2label - bin/mri_label2vol - bin/mri_mask - bin/mri_matrix_multiply - bin/mri_mc - bin/mri_normalize - bin/mri_pretess - bin/mri_relabel_hypointensities - bin/mri_robust_register - bin/mri_robust_template - bin/mri_segment - bin/mri_segstats - bin/mri_surf2surf - bin/mri_surf2volseg - bin/mri_tessellate - bin/mri_vol2surf - bin/mri_vol2vol - bin/mris_anatomical_stats - bin/mris_autodet_gwstats - bin/mris_ca_label - bin/mris_calc - bin/mris_convert - bin/mris_curvature - bin/mris_curvature_stats - bin/mris_defects_pointset - bin/mris_diff - bin/mris_euler_number - bin/mris_extract_main_component - bin/mris_fix_topology - bin/mris_inflate - bin/mris_info - bin/mris_jacobian - bin/mris_label2annot - bin/mris_place_surface - bin/mris_preproc - bin/mris_register - bin/mris_remesh - bin/mris_remove_intersection - bin/mris_sample_parc - bin/mris_smooth - bin/mris_sphere - bin/mris_topo_fixer - bin/mris_volmask - bin/mrisp_paint - bin/pctsurfcon - bin/rca-config - bin/rca-config2csh - bin/recon-all - bin/talairach_avi - bin/UpdateNeeded - bin/vertexvol - etc/recon-config.yaml - lib/bem/ic4.tri - lib/bem/ic7.tri - python/packages/fsbindings/legacy.py - python/scripts/asegstats2table - python/scripts/aparcstats2table - python/scripts/rca-config - python/scripts/rca-config2csh - subjects/fsaverage/label/lh.aparc.annot - subjects/fsaverage/label/lh.BA1_exvivo.label - subjects/fsaverage/label/lh.BA1_exvivo.thresh.label - subjects/fsaverage/label/lh.BA2_exvivo.label - subjects/fsaverage/label/lh.BA2_exvivo.thresh.label - subjects/fsaverage/label/lh.BA3a_exvivo.label - subjects/fsaverage/label/lh.BA3a_exvivo.thresh.label - subjects/fsaverage/label/lh.BA3b_exvivo.label - subjects/fsaverage/label/lh.BA3b_exvivo.thresh.label - subjects/fsaverage/label/lh.BA44_exvivo.label - subjects/fsaverage/label/lh.BA44_exvivo.thresh.label - subjects/fsaverage/label/lh.BA45_exvivo.label - subjects/fsaverage/label/lh.BA45_exvivo.thresh.label - subjects/fsaverage/label/lh.BA4a_exvivo.label - subjects/fsaverage/label/lh.BA4a_exvivo.thresh.label - subjects/fsaverage/label/lh.BA4p_exvivo.label - subjects/fsaverage/label/lh.BA4p_exvivo.thresh.label - subjects/fsaverage/label/lh.BA6_exvivo.label - subjects/fsaverage/label/lh.BA6_exvivo.thresh.label - subjects/fsaverage/label/lh.cortex.label - subjects/fsaverage/label/lh.entorhinal_exvivo.label - subjects/fsaverage/label/lh.entorhinal_exvivo.thresh.label - subjects/fsaverage/label/lh.FG1.mpm.vpnl.label - subjects/fsaverage/label/lh.FG2.mpm.vpnl.label - subjects/fsaverage/label/lh.FG3.mpm.vpnl.label - subjects/fsaverage/label/lh.FG4.mpm.vpnl.label - subjects/fsaverage/label/lh.hOc1.mpm.vpnl.label - subjects/fsaverage/label/lh.hOc2.mpm.vpnl.label - subjects/fsaverage/label/lh.hOc3v.mpm.vpnl.label - subjects/fsaverage/label/lh.hOc4v.mpm.vpnl.label - subjects/fsaverage/label/lh.MT_exvivo.label - subjects/fsaverage/label/lh.MT_exvivo.thresh.label - subjects/fsaverage/label/lh.perirhinal_exvivo.label - subjects/fsaverage/label/lh.perirhinal_exvivo.thresh.label - subjects/fsaverage/label/lh.V1_exvivo.label - subjects/fsaverage/label/lh.V1_exvivo.thresh.label - subjects/fsaverage/label/lh.V2_exvivo.label - subjects/fsaverage/label/lh.V2_exvivo.thresh.label - subjects/fsaverage/label/rh.aparc.annot - subjects/fsaverage/label/rh.BA1_exvivo.label - subjects/fsaverage/label/rh.BA1_exvivo.thresh.label - 
subjects/fsaverage/label/rh.BA2_exvivo.label - subjects/fsaverage/label/rh.BA2_exvivo.thresh.label - subjects/fsaverage/label/rh.BA3a_exvivo.label - subjects/fsaverage/label/rh.BA3a_exvivo.thresh.label - subjects/fsaverage/label/rh.BA3b_exvivo.label - subjects/fsaverage/label/rh.BA3b_exvivo.thresh.label - subjects/fsaverage/label/rh.BA44_exvivo.label - subjects/fsaverage/label/rh.BA44_exvivo.thresh.label - subjects/fsaverage/label/rh.BA45_exvivo.label - subjects/fsaverage/label/rh.BA45_exvivo.thresh.label - subjects/fsaverage/label/rh.BA4a_exvivo.label - subjects/fsaverage/label/rh.BA4a_exvivo.thresh.label - subjects/fsaverage/label/rh.BA4p_exvivo.label - subjects/fsaverage/label/rh.BA4p_exvivo.thresh.label - subjects/fsaverage/label/rh.BA6_exvivo.label - subjects/fsaverage/label/rh.BA6_exvivo.thresh.label - subjects/fsaverage/label/rh.cortex.label - subjects/fsaverage/label/rh.entorhinal_exvivo.label - subjects/fsaverage/label/rh.entorhinal_exvivo.thresh.label - subjects/fsaverage/label/rh.FG1.mpm.vpnl.label - subjects/fsaverage/label/rh.FG2.mpm.vpnl.label - subjects/fsaverage/label/rh.FG3.mpm.vpnl.label - subjects/fsaverage/label/rh.FG4.mpm.vpnl.label - subjects/fsaverage/label/rh.hOc1.mpm.vpnl.label - subjects/fsaverage/label/rh.hOc2.mpm.vpnl.label - subjects/fsaverage/label/rh.hOc3v.mpm.vpnl.label - subjects/fsaverage/label/rh.hOc4v.mpm.vpnl.label - subjects/fsaverage/label/rh.MT_exvivo.label - subjects/fsaverage/label/rh.MT_exvivo.thresh.label - subjects/fsaverage/label/rh.perirhinal_exvivo.label - subjects/fsaverage/label/rh.perirhinal_exvivo.thresh.label - subjects/fsaverage/label/rh.V1_exvivo.label - subjects/fsaverage/label/rh.V1_exvivo.thresh.label - subjects/fsaverage/label/rh.V2_exvivo.label - subjects/fsaverage/label/rh.V2_exvivo.thresh.label - subjects/fsaverage/surf/lh.curv - subjects/fsaverage/surf/lh.pial - subjects/fsaverage/surf/lh.pial_semi_inflated - subjects/fsaverage/surf/lh.sphere - subjects/fsaverage/surf/lh.sphere.reg - subjects/fsaverage/surf/lh.white - subjects/fsaverage/surf/rh.curv - subjects/fsaverage/surf/rh.pial - subjects/fsaverage/surf/rh.pial_semi_inflated - subjects/fsaverage/surf/rh.sphere - subjects/fsaverage/surf/rh.sphere.reg - subjects/fsaverage/surf/rh.white" +copy_files=( + "ASegStatsLUT.txt" + "build-stamp.txt" + "DefectLUT.txt" + "FreeSurferColorLUT.txt" + "FreeSurferEnv.sh" + "SegmentNoLUT.txt" + "SetUpFreeSurfer.sh" + "Simple_surface_labels2009.txt" + "sources.csh" + "SubCorticalMassLUT.txt" + "WMParcStatsLUT.txt" + "average/3T18yoSchwartzReactN32_as_orig.4dfp.hdr" + "average/3T18yoSchwartzReactN32_as_orig.4dfp.ifh" + "average/3T18yoSchwartzReactN32_as_orig.4dfp.img" + "average/3T18yoSchwartzReactN32_as_orig.4dfp.img.rec" + "average/3T18yoSchwartzReactN32_as_orig.4dfp.mat" + "average/3T18yoSchwartzReactN32_as_orig.lst" + "average/711-2B_as_mni_average_305_mask.4dfp.hdr" + "average/711-2B_as_mni_average_305_mask.4dfp.ifh" + "average/711-2B_as_mni_average_305_mask.4dfp.img" + "average/711-2B_as_mni_average_305_mask.4dfp.img.rec" + "average/711-2C_as_mni_average_305.4dfp.hdr" + "average/711-2C_as_mni_average_305.4dfp.ifh" + "average/711-2C_as_mni_average_305.4dfp.img" + "average/711-2C_as_mni_average_305.4dfp.img.rec" + "average/711-2C_as_mni_average_305.4dfp.mat" + "average/colortable_BA.txt" + "average/colortable_desikan_killiany.txt" + "average/colortable_vpnl.txt" + "average/lh.CDaparc.atlas.acfb40.noaparc.i12.2016-08-02.gcs" + "average/lh.DKaparc.atlas.acfb40.noaparc.i12.2016-08-02.gcs" + 
"average/lh.DKTaparc.atlas.acfb40.noaparc.i12.2016-08-02.gcs" + "average/lh.folding.atlas.acfb40.noaparc.i12.2016-08-02.tif" + "average/mni305.cor.mgz" + "average/mni305.mask.cor.mgz" + "average/rh.CDaparc.atlas.acfb40.noaparc.i12.2016-08-02.gcs" + "average/rh.DKaparc.atlas.acfb40.noaparc.i12.2016-08-02.gcs" + "average/rh.DKTaparc.atlas.acfb40.noaparc.i12.2016-08-02.gcs" + "average/rh.folding.atlas.acfb40.noaparc.i12.2016-08-02.tif" + "bin/analyzeto4dfp" + "bin/AntsDenoiseImageFs" + "bin/asegstats2table" + "bin/aparcstats2table" + "bin/avi2talxfm" + "bin/compute_vox2vox" + "bin/defect2seg" + "bin/fname2stem" + "bin/fspython" + "bin/fs_temp_dir" + "bin/fs_temp_file" + "bin/fs-check-version" + "bin/fsr-getxopts" + "bin/gauss_4dfp" + "bin/ifh2hdr" + "bin/imgreg_4dfp" + "bin/isanalyze" + "bin/isnifti" + "bin/lta_convert" + "bin/make_upright" + "bin/mpr2mni305" + "bin/mri_add_xform_to_header" + "bin/mri_annotation2label" + "bin/mri_binarize" + "bin/mri_brainvol_stats" + "bin/mri_cc" + "bin/mri_concat" + "bin/mri_concatenate_lta" + "bin/mri_convert" + "bin/mri_coreg" + "bin/mri_diff" + "bin/mri_edit_wm_with_aseg" + "bin/mri_fill" + "bin/mri_fuse_segmentations" + "bin/mri_glmfit" + "bin/mri_info" + "bin/mri_label2label" + "bin/mri_label2vol" + "bin/mri_mask" + "bin/mri_matrix_multiply" + "bin/mri_mc" + "bin/mri_normalize" + "bin/mri_pretess" + "bin/mri_relabel_hypointensities" + "bin/mri_robust_register" + "bin/mri_robust_template" + "bin/mri_segment" + "bin/mri_segstats" + "bin/mri_surf2surf" + "bin/mri_surf2volseg" + "bin/mri_tessellate" + "bin/mri_vol2surf" + "bin/mri_vol2vol" + "bin/mris_anatomical_stats" + "bin/mris_autodet_gwstats" + "bin/mris_ca_label" + "bin/mris_calc" + "bin/mris_convert" + "bin/mris_curvature" + "bin/mris_curvature_stats" + "bin/mris_defects_pointset" + "bin/mris_diff" + "bin/mris_euler_number" + "bin/mris_extract_main_component" + "bin/mris_fix_topology" + "bin/mris_inflate" + "bin/mris_info" + "bin/mris_jacobian" + "bin/mris_label2annot" + "bin/mris_place_surface" + "bin/mris_preproc" + "bin/mris_register" + "bin/mris_remesh" + "bin/mris_remove_intersection" + "bin/mris_sample_parc" + "bin/mris_smooth" + "bin/mris_sphere" + "bin/mris_topo_fixer" + "bin/mris_volmask" + "bin/mrisp_paint" + "bin/pctsurfcon" + "bin/rca-config" + "bin/rca-config2csh" + "bin/recon-all" + "bin/talairach_avi" + "bin/UpdateNeeded" + "bin/vertexvol" + "etc/recon-config.yaml" + "lib/bem/ic4.tri" + "lib/bem/ic7.tri" + "python/packages/fsbindings/legacy.py" + "python/scripts/asegstats2table" + "python/scripts/aparcstats2table" + "python/scripts/rca-config" + "python/scripts/rca-config2csh" + "subjects/fsaverage/label/lh.aparc.annot" + "subjects/fsaverage/label/lh.BA1_exvivo.label" + "subjects/fsaverage/label/lh.BA1_exvivo.thresh.label" + "subjects/fsaverage/label/lh.BA2_exvivo.label" + "subjects/fsaverage/label/lh.BA2_exvivo.thresh.label" + "subjects/fsaverage/label/lh.BA3a_exvivo.label" + "subjects/fsaverage/label/lh.BA3a_exvivo.thresh.label" + "subjects/fsaverage/label/lh.BA3b_exvivo.label" + "subjects/fsaverage/label/lh.BA3b_exvivo.thresh.label" + "subjects/fsaverage/label/lh.BA44_exvivo.label" + "subjects/fsaverage/label/lh.BA44_exvivo.thresh.label" + "subjects/fsaverage/label/lh.BA45_exvivo.label" + "subjects/fsaverage/label/lh.BA45_exvivo.thresh.label" + "subjects/fsaverage/label/lh.BA4a_exvivo.label" + "subjects/fsaverage/label/lh.BA4a_exvivo.thresh.label" + "subjects/fsaverage/label/lh.BA4p_exvivo.label" + "subjects/fsaverage/label/lh.BA4p_exvivo.thresh.label" + 
"subjects/fsaverage/label/lh.BA6_exvivo.label" + "subjects/fsaverage/label/lh.BA6_exvivo.thresh.label" + "subjects/fsaverage/label/lh.cortex.label" + "subjects/fsaverage/label/lh.entorhinal_exvivo.label" + "subjects/fsaverage/label/lh.entorhinal_exvivo.thresh.label" + "subjects/fsaverage/label/lh.FG1.mpm.vpnl.label" + "subjects/fsaverage/label/lh.FG2.mpm.vpnl.label" + "subjects/fsaverage/label/lh.FG3.mpm.vpnl.label" + "subjects/fsaverage/label/lh.FG4.mpm.vpnl.label" + "subjects/fsaverage/label/lh.hOc1.mpm.vpnl.label" + "subjects/fsaverage/label/lh.hOc2.mpm.vpnl.label" + "subjects/fsaverage/label/lh.hOc3v.mpm.vpnl.label" + "subjects/fsaverage/label/lh.hOc4v.mpm.vpnl.label" + "subjects/fsaverage/label/lh.MT_exvivo.label" + "subjects/fsaverage/label/lh.MT_exvivo.thresh.label" + "subjects/fsaverage/label/lh.perirhinal_exvivo.label" + "subjects/fsaverage/label/lh.perirhinal_exvivo.thresh.label" + "subjects/fsaverage/label/lh.V1_exvivo.label" + "subjects/fsaverage/label/lh.V1_exvivo.thresh.label" + "subjects/fsaverage/label/lh.V2_exvivo.label" + "subjects/fsaverage/label/lh.V2_exvivo.thresh.label" + "subjects/fsaverage/label/rh.aparc.annot" + "subjects/fsaverage/label/rh.BA1_exvivo.label" + "subjects/fsaverage/label/rh.BA1_exvivo.thresh.label" + "subjects/fsaverage/label/rh.BA2_exvivo.label" + "subjects/fsaverage/label/rh.BA2_exvivo.thresh.label" + "subjects/fsaverage/label/rh.BA3a_exvivo.label" + "subjects/fsaverage/label/rh.BA3a_exvivo.thresh.label" + "subjects/fsaverage/label/rh.BA3b_exvivo.label" + "subjects/fsaverage/label/rh.BA3b_exvivo.thresh.label" + "subjects/fsaverage/label/rh.BA44_exvivo.label" + "subjects/fsaverage/label/rh.BA44_exvivo.thresh.label" + "subjects/fsaverage/label/rh.BA45_exvivo.label" + "subjects/fsaverage/label/rh.BA45_exvivo.thresh.label" + "subjects/fsaverage/label/rh.BA4a_exvivo.label" + "subjects/fsaverage/label/rh.BA4a_exvivo.thresh.label" + "subjects/fsaverage/label/rh.BA4p_exvivo.label" + "subjects/fsaverage/label/rh.BA4p_exvivo.thresh.label" + "subjects/fsaverage/label/rh.BA6_exvivo.label" + "subjects/fsaverage/label/rh.BA6_exvivo.thresh.label" + "subjects/fsaverage/label/rh.cortex.label" + "subjects/fsaverage/label/rh.entorhinal_exvivo.label" + "subjects/fsaverage/label/rh.entorhinal_exvivo.thresh.label" + "subjects/fsaverage/label/rh.FG1.mpm.vpnl.label" + "subjects/fsaverage/label/rh.FG2.mpm.vpnl.label" + "subjects/fsaverage/label/rh.FG3.mpm.vpnl.label" + "subjects/fsaverage/label/rh.FG4.mpm.vpnl.label" + "subjects/fsaverage/label/rh.hOc1.mpm.vpnl.label" + "subjects/fsaverage/label/rh.hOc2.mpm.vpnl.label" + "subjects/fsaverage/label/rh.hOc3v.mpm.vpnl.label" + "subjects/fsaverage/label/rh.hOc4v.mpm.vpnl.label" + "subjects/fsaverage/label/rh.MT_exvivo.label" + "subjects/fsaverage/label/rh.MT_exvivo.thresh.label" + "subjects/fsaverage/label/rh.perirhinal_exvivo.label" + "subjects/fsaverage/label/rh.perirhinal_exvivo.thresh.label" + "subjects/fsaverage/label/rh.V1_exvivo.label" + "subjects/fsaverage/label/rh.V1_exvivo.thresh.label" + "subjects/fsaverage/label/rh.V2_exvivo.label" + "subjects/fsaverage/label/rh.V2_exvivo.thresh.label" + "subjects/fsaverage/surf/lh.curv" + "subjects/fsaverage/surf/lh.pial" + "subjects/fsaverage/surf/lh.pial_semi_inflated" + "subjects/fsaverage/surf/lh.sphere" + "subjects/fsaverage/surf/lh.sphere.reg" + "subjects/fsaverage/surf/lh.white" + "subjects/fsaverage/surf/rh.curv" + "subjects/fsaverage/surf/rh.pial" + "subjects/fsaverage/surf/rh.pial_semi_inflated" + "subjects/fsaverage/surf/rh.sphere" + 
"subjects/fsaverage/surf/rh.sphere.reg" + "subjects/fsaverage/surf/rh.white") echo -for file in $copy_files +for file in "${copy_files[@]}" do echo "copying $file" cp -r "$fss/$file" "$fsd/$file" @@ -435,18 +418,18 @@ done # pack if desired with upx (do this before adding all the links if [[ "$upx" == "true" ]] ; then echo "finding executables in $fsd/bin/..." - exe=$(find "$fsd/bin" -exec file {} \; | grep ELF | cut -d: -f1) + exe=($(find "$fsd/bin" -exec file {} \; | grep ELF | cut -d: -f1)) echo "packing $fsd/bin/ executables (this can take a while) ..." - run_parallel 8 "upx -9 %s %s %s %s" 4 "$exe" + upx -9 "${exe[@]}" fi # Modify fsbindings Python package to allow calling scripts like asegstats2table directly: echo "from . import legacy" > "$fsd/python/packages/fsbindings/__init__.py" # FS looks for them, but does not call them -touch_files="/average/RB_all_2020-01-02.gca" +touch_files=("/average/RB_all_2020-01-02.gca") echo -for file in $touch_files +for file in "${touch_files[@]}" do echo "touching $file" touch "$fsd/$file" diff --git a/tools/build/link_fs.sh b/tools/build/link_fs.sh index 48a757cfe..17682c5b3 100755 --- a/tools/build/link_fs.sh +++ b/tools/build/link_fs.sh @@ -1,17 +1,13 @@ #!/usr/bin/env bash -# usage: link_fs.sh [ []] +# usage: link_fs.sh [] if [[ "$#" -gt 0 ]] && { [[ "${*/-h/}" != "$*" ]] || [[ "${*/--help/}" != "$*" ]] ; } ; then - echo "usage: $0 [ []]" + echo "usage: $0 []" exit 0 -elif [[ "$#" == 1 ]] || [[ "$#" == 2 ]] +elif [[ "$#" == 1 ]] then - if [[ ! -e "$1" ]] ; then echo "ERROR: $1 does not exist!" ; exit 1 ; fi - PYTHON="$1" - if [[ "$#" == 2 ]] ; then FREESURFER_HOME="$2" ; fi -else - PYTHON=$(which python3) + FREESURFER_HOME="$1" fi if [[ -z "$FREESURFER_HOME" ]] || [[ ! -d "$FREESURFER_HOME" ]] then @@ -21,38 +17,39 @@ fi # FS calls these for version info, but we don't need them # so we link them to not_here.sh (created below) to save space. -link_files=" - bin/mri_and - bin/mri_aparc2aseg - bin/mri_ca_label - bin/mri_ca_normalize - bin/mri_ca_register - bin/mri_compute_overlap - bin/mri_compute_seg_overlap - bin/mri_em_register - bin/mri_fwhm - bin/mri_gcut - bin/mri_log_likelihood - bin/mri_motion_correct.fsl - bin/mri_normalize_tp2 - bin/mri_or - bin/mri_relabel_nonwm_hypos - bin/mri_remove_neck - bin/mri_stats2seg - bin/mri_surf2vol - bin/mri_surfcluster - bin/mri_voldiff - bin/mri_watershed - bin/mris_divide_parcellation - bin/mris_left_right_register - bin/mris_surface_stats - bin/mris_thickness - bin/mris_thickness_diff - bin/nu_correct - bin/tkregister2_cmdl" +link_files=( + "bin/mri_and" + "bin/mri_aparc2aseg" + "bin/mri_ca_label" + "bin/mri_ca_normalize" + "bin/mri_ca_register" + "bin/mri_compute_overlap" + "bin/mri_compute_seg_overlap" + "bin/mri_em_register" + "bin/mri_fwhm" + "bin/mri_gcut" + "bin/mri_log_likelihood" + "bin/mri_motion_correct.fsl" + "bin/mri_normalize_tp2" + "bin/mri_or" + "bin/mri_relabel_nonwm_hypos" + "bin/mri_remove_neck" + "bin/mri_stats2seg" + "bin/mri_surf2vol" + "bin/mri_surfcluster" + "bin/mri_voldiff" + "bin/mri_watershed" + "bin/mris_divide_parcellation" + "bin/mris_left_right_register" + "bin/mris_surface_stats" + "bin/mris_thickness" + "bin/mris_thickness_diff" + "bin/nu_correct" + "bin/tkregister2_cmdl") # create target for link with ERROR message if called ltrg=$FREESURFER_HOME/bin/not-here.sh +# shellcheck disable=SC2016 echo '#!/bin/bash if [ "$1" == "-all-info" ]; then echo "$0 not included ..." 
@@ -65,11 +62,8 @@ exit 1 ' > $ltrg chmod a+x $ltrg echo -for file in $link_files +for file in "${link_files[@]}" do echo "linking $file" ln -s "$ltrg" "$FREESURFER_HOME/$file" done - -# use our python (not really needed in recon-all anyway) -ln -sf "$PYTHON" "$FREESURFER_HOME/bin/fspython"
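Taken together, a sketch of how the trimmed-down link_fs.sh and the new fspython wrapper are used once FreeSurfer has been pruned (the install prefix /opt/freesurfer is illustrative):

```bash
# replace the version-only FreeSurfer binaries with links to the not-here.sh stub
tools/build/link_fs.sh /opt/freesurfer
# fspython appends $FREESURFER_HOME/python/packages to PYTHONPATH, activates the
# /venv environment if present, and forwards its arguments to python
FREESURFER_HOME=/opt/freesurfer tools/build/fspython --version
```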