From 57cf19e23d90bc12d26800434b687f1e6c22b8a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BCrg=20Billeter?= Date: Fri, 25 Jul 2025 15:33:05 +0200 Subject: [PATCH 01/10] element.py: Use sandbox environment for logging and shell Elements can configure additional environment variables in `configure_sandbox()`, which will be used by `BuildElement` to support `digest-environment`. Use the complete environment for logging and `bst shell`. --- src/buildstream/element.py | 12 ++++++------ src/buildstream/sandbox/sandbox.py | 10 ++++++++++ 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/src/buildstream/element.py b/src/buildstream/element.py index cb680ad38..f267ce7e3 100644 --- a/src/buildstream/element.py +++ b/src/buildstream/element.py @@ -1663,11 +1663,6 @@ def _assemble(self): # Assert call ordering assert not self._cached_success() - # Print the environment at the beginning of the log file. - env_dump = _yaml.roundtrip_dump_string(self.get_environment()) - - self.log("Build environment for element {}".format(self.name), detail=env_dump) - context = self._get_context() with self._output_file() as output_file: @@ -1693,6 +1688,11 @@ def _assemble(self): # Step 1 - Configure self.__configure_sandbox(sandbox) + + # Print the environment at the beginning of the log file. + env_dump = _yaml.roundtrip_dump_string(sandbox._get_configured_environment() or self.get_environment()) + self.log("Build environment for element {}".format(self.name), detail=env_dump) + # Step 2 - Stage self.__stage(sandbox) try: @@ -2037,7 +2037,7 @@ def _push(self): def _shell(self, scope=None, *, mounts=None, isolate=False, prompt=None, command=None, usebuildtree=False): with self._prepare_sandbox(scope, shell=True, usebuildtree=usebuildtree) as sandbox: - environment = self.get_environment() + environment = sandbox._get_configured_environment() or self.get_environment() environment = copy.copy(environment) flags = _SandboxFlags.INTERACTIVE | _SandboxFlags.ROOT_READ_ONLY diff --git a/src/buildstream/sandbox/sandbox.py b/src/buildstream/sandbox/sandbox.py index a503ac9f7..3d3164f67 100644 --- a/src/buildstream/sandbox/sandbox.py +++ b/src/buildstream/sandbox/sandbox.py @@ -438,6 +438,16 @@ def _get_environment(self, *, cwd=None, env=None): return env + # _get_configured_environment() + # + # Return the environment exactly as configured with `set_environment()`, + # or `None` if `set_environment()` has not been called. + # + # Returns + # (Dict[str, str]): The configured environment + def _get_configured_environment(self): + return self.__env + # _get_work_directory() # # Fetches the working directory for running commands From d48d2608482a34e6985b785b1dc08bf4b92dfd4f Mon Sep 17 00:00:00 2001 From: Abderrahim Kitouni Date: Wed, 9 Jul 2025 10:11:45 +0100 Subject: [PATCH 02/10] sandbox.py: Add method to create a sub-sandbox This allows an element to use a secondary sandbox for manipulating artifacts that don't affect the build. 
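As a rough usage sketch (not part of this patch: the plugin-facing `Element.subsandbox()` wrapper and the `BuildElement` integration arrive later in this series, and the dependency and variable names here are purely illustrative), a plugin could stage a dependency into an initially empty secondary sandbox and expose its CAS digest to the build roughly like this:

    def configure_sandbox(self, sandbox):
        env = self.get_environment()
        # Stage one dependency into an empty subsandbox and record the
        # digest of the resulting directory tree, without touching the
        # main build root.
        with self.subsandbox(sandbox) as subsandbox:
            self.stage_dependency_artifacts(subsandbox, [some_dependency])
            digest = subsandbox.get_virtual_directory()._get_digest()
            env["SOME_DIGEST"] = "{}/{}".format(digest.hash, digest.size_bytes)
        sandbox.set_environment(env)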
--- src/buildstream/sandbox/sandbox.py | 29 ++++++++++++++++++++++++++++- 1 file changed, 28 insertions(+), 1 deletion(-) diff --git a/src/buildstream/sandbox/sandbox.py b/src/buildstream/sandbox/sandbox.py index 3d3164f67..e266a95c3 100644 --- a/src/buildstream/sandbox/sandbox.py +++ b/src/buildstream/sandbox/sandbox.py @@ -85,13 +85,18 @@ def __init__(self, context: "Context", project: "Project", **kwargs): self.__env = None # type: Optional[Dict[str, str]] self.__mount_sources = {} # type: Dict[str, str] self.__allow_run = True + self.__subsandboxes = [] # type: List[Sandbox] # Plugin element full name for logging plugin = kwargs.get("plugin", None) if plugin: self.__element_name = plugin._get_full_name() else: - self.__element_name = None + parent = kwargs.get("parent", None) + if parent: + self.__element_name = parent._get_element_name() + else: + self.__element_name = None # Configuration from kwargs common to all subclasses self.__config = kwargs["config"] @@ -568,6 +573,28 @@ def _get_element_name(self): def _disable_run(self): self.__allow_run = False + # _create_subsandbox() + # + # Create an empty sandbox + # + # This allows an element to use a secondary sandbox for manipulating artifacts + # that does not affect the build sandbox. + # + def _create_subsandbox(self, **kwargs): + sub = Sandbox( + self.__context, + self.__project, + parent=self, + stdout=self.__stdout, + stderr=self.__stderr, + config=self.__config, + ) + self.__subsandboxes.append(sub) + return sub + + def _get_subsandboxes(self): + return self.__subsandboxes + # SandboxFlags() # From 30c7168cedaee9f4abc492536214cdc3692f286b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BCrg=20Billeter?= Date: Fri, 18 Jul 2025 11:57:24 +0200 Subject: [PATCH 03/10] element.py: Support per-sandbox overlap collector With the introduction of subsandboxes, a single overlap collector is no longer sufficient. --- src/buildstream/_elementproxy.py | 5 +++-- src/buildstream/element.py | 33 ++++++++++++++++++-------------- 2 files changed, 22 insertions(+), 16 deletions(-) diff --git a/src/buildstream/_elementproxy.py b/src/buildstream/_elementproxy.py index 861bc9a15..3425b6cc3 100644 --- a/src/buildstream/_elementproxy.py +++ b/src/buildstream/_elementproxy.py @@ -104,9 +104,10 @@ def stage_artifact( owner = cast("Element", self._owner) element = cast("Element", self._plugin) - assert owner._overlap_collector is not None, "Attempted to stage artifacts outside of Element.stage()" + overlap_collector = owner._overlap_collectors.get(sandbox) + assert overlap_collector is not None, "Attempted to stage artifacts outside of Element.stage()" - with owner._overlap_collector.session(action, path): + with overlap_collector.session(action, path): result = element._stage_artifact( sandbox, path=path, action=action, include=include, exclude=exclude, orphans=orphans, owner=owner ) diff --git a/src/buildstream/element.py b/src/buildstream/element.py index f267ce7e3..ecc4de77a 100644 --- a/src/buildstream/element.py +++ b/src/buildstream/element.py @@ -254,7 +254,7 @@ def __init__( # Internal instance properties # self._depth = None # Depth of Element in its current dependency graph - self._overlap_collector = None # type: Optional[OverlapCollector] + self._overlap_collectors: Dict[Sandbox, OverlapCollector] = {} # Active overlap collector per sandbox self._description = load_element.description or "" # type: str # @@ -642,7 +642,8 @@ def stage_artifact( :func:`Element.stage_dependency_artifacts() ` instead. 
""" - assert self._overlap_collector is not None, "Attempted to stage artifacts outside of Element.stage()" + overlap_collector = self._overlap_collectors.get(sandbox) + assert overlap_collector is not None, "Attempted to stage artifacts outside of Element.stage()" # # The public API can only be called on the implementing plugin itself. @@ -650,7 +651,7 @@ def stage_artifact( # ElementProxy calls to stage_artifact() are routed directly to _stage_artifact(), # and the ElementProxy takes care of starting and ending the OverlapCollector session. # - with self._overlap_collector.session(action, path): + with overlap_collector.session(action, path): result = self._stage_artifact( sandbox, path=path, action=action, include=include, exclude=exclude, orphans=orphans ) @@ -692,9 +693,10 @@ def stage_dependency_artifacts( Raises: (:class:`.ElementError`): if forbidden overlaps occur. """ - assert self._overlap_collector is not None, "Attempted to stage artifacts outside of Element.stage()" + overlap_collector = self._overlap_collectors.get(sandbox) + assert overlap_collector is not None, "Attempted to stage artifacts outside of Element.stage()" - with self._overlap_collector.session(action, path): + with overlap_collector.session(action, path): for dep in self.dependencies(selection): dep._stage_artifact(sandbox, path=path, include=include, exclude=exclude, orphans=orphans, owner=self) @@ -962,7 +964,8 @@ def _stage_artifact( ) -> FileListResult: owner = owner or self - assert owner._overlap_collector is not None, "Attempted to stage artifacts outside of Element.stage()" + overlap_collector = owner._overlap_collectors.get(sandbox) + assert overlap_collector is not None, "Attempted to stage artifacts outside of Element.stage()" if not self._cached(): detail = ( @@ -989,7 +992,7 @@ def _stage_artifact( result = vstagedir._import_files_internal(files_vdir, filter_callback=split_filter) assert result is not None - owner._overlap_collector.collect_stage_result(self, result) + overlap_collector.collect_stage_result(self, result) return result @@ -1013,7 +1016,7 @@ def _stage_artifact( # occur. # def _stage_dependency_artifacts(self, sandbox, scope, *, path=None, include=None, exclude=None, orphans=True): - with self._overlap_collector.session(OverlapAction.WARNING, path): + with self._overlap_collectors[sandbox].session(OverlapAction.WARNING, path): for dep in self._dependencies(scope): dep._stage_artifact(sandbox, path=path, include=include, exclude=exclude, orphans=orphans, owner=self) @@ -1435,7 +1438,7 @@ def _prepare_sandbox(self, scope, shell=False, integrate=True, usebuildtree=Fals self.__stage(sandbox) else: # Stage deps in the sandbox root - with self.timed_activity("Staging dependencies", silent_nested=True), self.__collect_overlaps(): + with self.timed_activity("Staging dependencies", silent_nested=True), self.__collect_overlaps(sandbox): self._stage_dependency_artifacts(sandbox, scope) # Run any integration commands provided by the dependencies @@ -2703,7 +2706,7 @@ def __configure_sandbox(self, sandbox): def __stage(self, sandbox): # Enable the overlap collector during the staging process - with self.__collect_overlaps(): + with self.__collect_overlaps(sandbox): self.stage(sandbox) # __preflight(): @@ -2806,10 +2809,12 @@ def __get_tainted(self, recalculate=False): # this context manager. 
# @contextmanager - def __collect_overlaps(self): - self._overlap_collector = OverlapCollector(self) - yield - self._overlap_collector = None + def __collect_overlaps(self, sandbox): + self._overlap_collectors[sandbox] = OverlapCollector(self) + try: + yield + finally: + del self._overlap_collectors[sandbox] # __sandbox(): # From a291951a0d1d97a5f48b7086ea3d961f80931f35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BCrg=20Billeter?= Date: Fri, 18 Jul 2025 15:16:06 +0200 Subject: [PATCH 04/10] element.py: Add `subsandbox()` method --- src/buildstream/element.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/src/buildstream/element.py b/src/buildstream/element.py index ecc4de77a..a9d12fbde 100644 --- a/src/buildstream/element.py +++ b/src/buildstream/element.py @@ -828,6 +828,21 @@ def run_cleanup_commands(self, sandbox: "Sandbox") -> None: sandbox._clean_directory(build_root) + @contextmanager + def subsandbox(self, sandbox: "Sandbox") -> Iterator["Sandbox"]: + """A context manager for a subsandbox. + + Args: + sandbox: The main build sandbox + + This allows an element to use a secondary sandbox for manipulating + artifacts without affecting the main build sandbox. The subsandbox + is initially empty. + """ + subsandbox = sandbox._create_subsandbox() + with self.__collect_overlaps(subsandbox): + yield subsandbox + ############################################################# # Private Methods used in BuildStream # ############################################################# From e468aea1f47cf6d9d8b6981294f65374171471b2 Mon Sep 17 00:00:00 2001 From: Abderrahim Kitouni Date: Wed, 9 Jul 2025 10:16:41 +0100 Subject: [PATCH 05/10] _sandboxremote.py: Upload blobs for subsandbox roots The subsandboxes can be used to extract a CAS digest that could be used for nested remote execution, and thus need to be available in the remote execution CAS. 
--- src/buildstream/sandbox/_sandboxremote.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/buildstream/sandbox/_sandboxremote.py b/src/buildstream/sandbox/_sandboxremote.py index 5b4bd0414..8072938bf 100644 --- a/src/buildstream/sandbox/_sandboxremote.py +++ b/src/buildstream/sandbox/_sandboxremote.py @@ -174,9 +174,17 @@ def _execute_action(self, action, flags): "Uploading input root", element_name=self._get_element_name() ): # Determine blobs missing on remote + root_digests = [action.input_root_digest] + + # Add virtual directories for subsandboxes + for subsandbox in self._get_subsandboxes(): + vdir = subsandbox.get_virtual_directory() + root_digests.append(vdir._get_digest()) + + missing_blobs = [] try: - input_root_digest = action.input_root_digest - missing_blobs = list(cascache.missing_blobs_for_directory(input_root_digest, remote=casremote)) + for root_digest in root_digests: + missing_blobs.extend(cascache.missing_blobs_for_directory(root_digest, remote=casremote)) except grpc.RpcError as e: raise SandboxError("Failed to determine missing blobs: {}".format(e)) from e From 50c5273aa50638a99d790b58de0387d1e71aa3f3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BCrg=20Billeter?= Date: Fri, 15 Aug 2025 15:08:41 +0200 Subject: [PATCH 06/10] artifact.proto: Add a field for the build sandbox state --- .../_protos/buildstream/v2/artifact.proto | 7 +++++++ .../_protos/buildstream/v2/artifact_pb2.py | 14 ++++++++------ .../_protos/buildstream/v2/artifact_pb2.pyi | 15 +++++++++++++-- 3 files changed, 28 insertions(+), 8 deletions(-) diff --git a/src/buildstream/_protos/buildstream/v2/artifact.proto b/src/buildstream/_protos/buildstream/v2/artifact.proto index 86ac452c8..1024ce860 100644 --- a/src/buildstream/_protos/buildstream/v2/artifact.proto +++ b/src/buildstream/_protos/buildstream/v2/artifact.proto @@ -85,4 +85,11 @@ message Artifact { // digest of a directory build.bazel.remote.execution.v2.Digest buildroot = 17; // optional + + message SandboxState { + repeated build.bazel.remote.execution.v2.Command.EnvironmentVariable environment = 1; + string working_directory = 2; + repeated build.bazel.remote.execution.v2.Digest subsandbox_digests = 3; + }; + SandboxState buildsandbox = 18; // optional } diff --git a/src/buildstream/_protos/buildstream/v2/artifact_pb2.py b/src/buildstream/_protos/buildstream/v2/artifact_pb2.py index 21cc67f4f..7d86ea3b8 100644 --- a/src/buildstream/_protos/buildstream/v2/artifact_pb2.py +++ b/src/buildstream/_protos/buildstream/v2/artifact_pb2.py @@ -26,7 +26,7 @@ from buildstream._protos.google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1d\x62uildstream/v2/artifact.proto\x12\x0e\x62uildstream.v2\x1a\x36\x62uild/bazel/remote/execution/v2/remote_execution.proto\x1a\x1cgoogle/api/annotations.proto\"\x89\x07\n\x08\x41rtifact\x12\x0f\n\x07version\x18\x01 \x01(\x05\x12\x15\n\rbuild_success\x18\x02 \x01(\x08\x12\x13\n\x0b\x62uild_error\x18\x03 \x01(\t\x12\x1b\n\x13\x62uild_error_details\x18\x04 \x01(\t\x12\x12\n\nstrong_key\x18\x05 \x01(\t\x12\x10\n\x08weak_key\x18\x06 \x01(\t\x12\x16\n\x0ewas_workspaced\x18\x07 \x01(\x08\x12\x36\n\x05\x66iles\x18\x08 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x37\n\nbuild_deps\x18\t \x03(\x0b\x32#.buildstream.v2.Artifact.Dependency\x12<\n\x0bpublic_data\x18\n \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12.\n\x04logs\x18\x0b \x03(\x0b\x32 
.buildstream.v2.Artifact.LogFile\x12:\n\tbuildtree\x18\x0c \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x38\n\x07sources\x18\r \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x43\n\x12low_diversity_meta\x18\x0e \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x44\n\x13high_diversity_meta\x18\x0f \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x12\n\nstrict_key\x18\x10 \x01(\t\x12:\n\tbuildroot\x18\x11 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x1a\x63\n\nDependency\x12\x14\n\x0cproject_name\x18\x01 \x01(\t\x12\x14\n\x0c\x65lement_name\x18\x02 \x01(\t\x12\x11\n\tcache_key\x18\x03 \x01(\t\x12\x16\n\x0ewas_workspaced\x18\x04 \x01(\x08\x1aP\n\x07LogFile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digestb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1d\x62uildstream/v2/artifact.proto\x12\x0e\x62uildstream.v2\x1a\x36\x62uild/bazel/remote/execution/v2/remote_execution.proto\x1a\x1cgoogle/api/annotations.proto\"\x8a\t\n\x08\x41rtifact\x12\x0f\n\x07version\x18\x01 \x01(\x05\x12\x15\n\rbuild_success\x18\x02 \x01(\x08\x12\x13\n\x0b\x62uild_error\x18\x03 \x01(\t\x12\x1b\n\x13\x62uild_error_details\x18\x04 \x01(\t\x12\x12\n\nstrong_key\x18\x05 \x01(\t\x12\x10\n\x08weak_key\x18\x06 \x01(\t\x12\x16\n\x0ewas_workspaced\x18\x07 \x01(\x08\x12\x36\n\x05\x66iles\x18\x08 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x37\n\nbuild_deps\x18\t \x03(\x0b\x32#.buildstream.v2.Artifact.Dependency\x12<\n\x0bpublic_data\x18\n \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12.\n\x04logs\x18\x0b \x03(\x0b\x32 .buildstream.v2.Artifact.LogFile\x12:\n\tbuildtree\x18\x0c \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x38\n\x07sources\x18\r \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x43\n\x12low_diversity_meta\x18\x0e \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x44\n\x13high_diversity_meta\x18\x0f \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x12\n\nstrict_key\x18\x10 \x01(\t\x12:\n\tbuildroot\x18\x11 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12;\n\x0c\x62uildsandbox\x18\x12 \x01(\x0b\x32%.buildstream.v2.Artifact.SandboxState\x1a\x63\n\nDependency\x12\x14\n\x0cproject_name\x18\x01 \x01(\t\x12\x14\n\x0c\x65lement_name\x18\x02 \x01(\t\x12\x11\n\tcache_key\x18\x03 \x01(\t\x12\x16\n\x0ewas_workspaced\x18\x04 \x01(\x08\x1aP\n\x07LogFile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x1a\xc1\x01\n\x0cSandboxState\x12Q\n\x0b\x65nvironment\x18\x01 \x03(\x0b\x32<.build.bazel.remote.execution.v2.Command.EnvironmentVariable\x12\x19\n\x11working_directory\x18\x02 \x01(\t\x12\x43\n\x12subsandbox_digests\x18\x03 \x03(\x0b\x32\'.build.bazel.remote.execution.v2.Digestb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -34,9 +34,11 @@ if not _descriptor._USE_C_DESCRIPTORS: DESCRIPTOR._loaded_options = None _globals['_ARTIFACT']._serialized_start=136 - _globals['_ARTIFACT']._serialized_end=1041 - _globals['_ARTIFACT_DEPENDENCY']._serialized_start=860 - _globals['_ARTIFACT_DEPENDENCY']._serialized_end=959 - _globals['_ARTIFACT_LOGFILE']._serialized_start=961 - _globals['_ARTIFACT_LOGFILE']._serialized_end=1041 + _globals['_ARTIFACT']._serialized_end=1298 + _globals['_ARTIFACT_DEPENDENCY']._serialized_start=921 + _globals['_ARTIFACT_DEPENDENCY']._serialized_end=1020 + 
_globals['_ARTIFACT_LOGFILE']._serialized_start=1022 + _globals['_ARTIFACT_LOGFILE']._serialized_end=1102 + _globals['_ARTIFACT_SANDBOXSTATE']._serialized_start=1105 + _globals['_ARTIFACT_SANDBOXSTATE']._serialized_end=1298 # @@protoc_insertion_point(module_scope) diff --git a/src/buildstream/_protos/buildstream/v2/artifact_pb2.pyi b/src/buildstream/_protos/buildstream/v2/artifact_pb2.pyi index 3f80ffa4b..08c6960c1 100644 --- a/src/buildstream/_protos/buildstream/v2/artifact_pb2.pyi +++ b/src/buildstream/_protos/buildstream/v2/artifact_pb2.pyi @@ -8,7 +8,7 @@ from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Map DESCRIPTOR: _descriptor.FileDescriptor class Artifact(_message.Message): - __slots__ = ("version", "build_success", "build_error", "build_error_details", "strong_key", "weak_key", "was_workspaced", "files", "build_deps", "public_data", "logs", "buildtree", "sources", "low_diversity_meta", "high_diversity_meta", "strict_key", "buildroot") + __slots__ = ("version", "build_success", "build_error", "build_error_details", "strong_key", "weak_key", "was_workspaced", "files", "build_deps", "public_data", "logs", "buildtree", "sources", "low_diversity_meta", "high_diversity_meta", "strict_key", "buildroot", "buildsandbox") class Dependency(_message.Message): __slots__ = ("project_name", "element_name", "cache_key", "was_workspaced") PROJECT_NAME_FIELD_NUMBER: _ClassVar[int] @@ -27,6 +27,15 @@ class Artifact(_message.Message): name: str digest: _remote_execution_pb2.Digest def __init__(self, name: _Optional[str] = ..., digest: _Optional[_Union[_remote_execution_pb2.Digest, _Mapping]] = ...) -> None: ... + class SandboxState(_message.Message): + __slots__ = ("environment", "working_directory", "subsandbox_digests") + ENVIRONMENT_FIELD_NUMBER: _ClassVar[int] + WORKING_DIRECTORY_FIELD_NUMBER: _ClassVar[int] + SUBSANDBOX_DIGESTS_FIELD_NUMBER: _ClassVar[int] + environment: _containers.RepeatedCompositeFieldContainer[_remote_execution_pb2.Command.EnvironmentVariable] + working_directory: str + subsandbox_digests: _containers.RepeatedCompositeFieldContainer[_remote_execution_pb2.Digest] + def __init__(self, environment: _Optional[_Iterable[_Union[_remote_execution_pb2.Command.EnvironmentVariable, _Mapping]]] = ..., working_directory: _Optional[str] = ..., subsandbox_digests: _Optional[_Iterable[_Union[_remote_execution_pb2.Digest, _Mapping]]] = ...) -> None: ... 
VERSION_FIELD_NUMBER: _ClassVar[int] BUILD_SUCCESS_FIELD_NUMBER: _ClassVar[int] BUILD_ERROR_FIELD_NUMBER: _ClassVar[int] @@ -44,6 +53,7 @@ class Artifact(_message.Message): HIGH_DIVERSITY_META_FIELD_NUMBER: _ClassVar[int] STRICT_KEY_FIELD_NUMBER: _ClassVar[int] BUILDROOT_FIELD_NUMBER: _ClassVar[int] + BUILDSANDBOX_FIELD_NUMBER: _ClassVar[int] version: int build_success: bool build_error: str @@ -61,4 +71,5 @@ class Artifact(_message.Message): high_diversity_meta: _remote_execution_pb2.Digest strict_key: str buildroot: _remote_execution_pb2.Digest - def __init__(self, version: _Optional[int] = ..., build_success: bool = ..., build_error: _Optional[str] = ..., build_error_details: _Optional[str] = ..., strong_key: _Optional[str] = ..., weak_key: _Optional[str] = ..., was_workspaced: bool = ..., files: _Optional[_Union[_remote_execution_pb2.Digest, _Mapping]] = ..., build_deps: _Optional[_Iterable[_Union[Artifact.Dependency, _Mapping]]] = ..., public_data: _Optional[_Union[_remote_execution_pb2.Digest, _Mapping]] = ..., logs: _Optional[_Iterable[_Union[Artifact.LogFile, _Mapping]]] = ..., buildtree: _Optional[_Union[_remote_execution_pb2.Digest, _Mapping]] = ..., sources: _Optional[_Union[_remote_execution_pb2.Digest, _Mapping]] = ..., low_diversity_meta: _Optional[_Union[_remote_execution_pb2.Digest, _Mapping]] = ..., high_diversity_meta: _Optional[_Union[_remote_execution_pb2.Digest, _Mapping]] = ..., strict_key: _Optional[str] = ..., buildroot: _Optional[_Union[_remote_execution_pb2.Digest, _Mapping]] = ...) -> None: ... + buildsandbox: Artifact.SandboxState + def __init__(self, version: _Optional[int] = ..., build_success: bool = ..., build_error: _Optional[str] = ..., build_error_details: _Optional[str] = ..., strong_key: _Optional[str] = ..., weak_key: _Optional[str] = ..., was_workspaced: bool = ..., files: _Optional[_Union[_remote_execution_pb2.Digest, _Mapping]] = ..., build_deps: _Optional[_Iterable[_Union[Artifact.Dependency, _Mapping]]] = ..., public_data: _Optional[_Union[_remote_execution_pb2.Digest, _Mapping]] = ..., logs: _Optional[_Iterable[_Union[Artifact.LogFile, _Mapping]]] = ..., buildtree: _Optional[_Union[_remote_execution_pb2.Digest, _Mapping]] = ..., sources: _Optional[_Union[_remote_execution_pb2.Digest, _Mapping]] = ..., low_diversity_meta: _Optional[_Union[_remote_execution_pb2.Digest, _Mapping]] = ..., high_diversity_meta: _Optional[_Union[_remote_execution_pb2.Digest, _Mapping]] = ..., strict_key: _Optional[str] = ..., buildroot: _Optional[_Union[_remote_execution_pb2.Digest, _Mapping]] = ..., buildsandbox: _Optional[_Union[Artifact.SandboxState, _Mapping]] = ...) -> None: ... From 7e884747f313bbe77aa7fc33444e9c3540af29bd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BCrg=20Billeter?= Date: Fri, 8 Aug 2025 14:04:32 +0200 Subject: [PATCH 07/10] Store sandbox state in Artifact proto This is stored only when also storing the build tree. It is used to configure the sandbox for `bst shell --use-buildtree`. This is required for elements that create subsandboxes as part of `configure_sandbox()`. 
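In practice (mirroring the integration tests added at the end of this series, with `element.bst` and `SOME_DIGEST` as placeholder names), a build cached with `bst --cache-buildtrees always build element.bst` can later be inspected with `bst shell --build --use-buildtree element.bst -- sh -c 'echo $SOME_DIGEST'` and still see the environment that was configured for the original build.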
--- src/buildstream/_artifact.py | 30 ++++++++++++++++++++++++++++++ src/buildstream/_artifactcache.py | 10 ++++++++++ src/buildstream/element.py | 14 +++++++++++--- 3 files changed, 51 insertions(+), 3 deletions(-) diff --git a/src/buildstream/_artifact.py b/src/buildstream/_artifact.py index 7ccc61c6a..c06603559 100644 --- a/src/buildstream/_artifact.py +++ b/src/buildstream/_artifact.py @@ -200,6 +200,7 @@ def get_extract_key(self): # variables (Variables): The element's Variables # environment (dict): dict of the element's environment variables # sandboxconfig (SandboxConfig): The element's SandboxConfig + # buildsandbox (Sandbox): The element's configured build sandbox # def cache( self, @@ -213,6 +214,7 @@ def cache( variables, environment, sandboxconfig, + buildsandbox, ): context = self._context @@ -317,6 +319,19 @@ def cache( rootvdir._import_files_internal(buildrootvdir, properties=properties, collect_result=False) artifact.buildroot.CopyFrom(rootvdir._get_digest()) + if buildsandbox is not None: + sandbox_env = buildsandbox._get_configured_environment() + if sandbox_env: + for key, value in sorted(sandbox_env.items()): + artifact.buildsandbox.environment.add(name=key, value=value) + + artifact.buildsandbox.working_directory = buildsandbox._get_work_directory() + + for subsandbox in buildsandbox._get_subsandboxes(): + vdir = subsandbox.get_virtual_directory() + digest = artifact.buildsandbox.subsandbox_digests.add() + digest.CopyFrom(vdir._get_digest()) + os.makedirs(os.path.dirname(os.path.join(self._artifactdir, element.get_artifact_name())), exist_ok=True) keys = utils._deduplicate([self._cache_key, self._weak_cache_key]) for key in keys: @@ -681,6 +696,21 @@ def pull(self, *, pull_buildtrees): return True + def configure_sandbox(self, sandbox): + artifact = self._get_proto() + + if artifact.buildsandbox and artifact.buildsandbox.environment: + env = {} + for env_var in artifact.buildsandbox.environment: + env[env_var.name] = env_var.value + else: + env = self.load_environment() + + sandbox.set_environment(env) + + if artifact.buildsandbox and artifact.buildsandbox.working_directory: + sandbox.set_work_directory(artifact.buildsandbox.working_directory) + # load_proto() # # Returns: diff --git a/src/buildstream/_artifactcache.py b/src/buildstream/_artifactcache.py index a32d0ee62..9f4062449 100644 --- a/src/buildstream/_artifactcache.py +++ b/src/buildstream/_artifactcache.py @@ -303,6 +303,10 @@ def _push_artifact_blobs(self, artifact, artifact_digest, remote): except FileNotFoundError: pass + if artifact_proto.buildsandbox: + for subsandbox_digest in artifact_proto.buildsandbox.subsandbox_digests: + self.cas._send_directory(remote, subsandbox_digest) + digests = [artifact_digest, artifact_proto.low_diversity_meta, artifact_proto.high_diversity_meta] if str(artifact_proto.public_data): @@ -361,6 +365,9 @@ def _push_artifact_proto(self, element, artifact, artifact_digest, remote): referenced_directories.append(artifact_proto.sources) if artifact_proto.buildroot: referenced_directories.append(artifact_proto.buildroot) + if artifact_proto.buildsandbox: + for subsandbox_digest in artifact_proto.buildsandbox.subsandbox_digests: + referenced_directories.append(subsandbox_digest) referenced_blobs = [artifact_proto.low_diversity_meta, artifact_proto.high_diversity_meta] + [ log_file.digest for log_file in artifact_proto.logs @@ -419,6 +426,9 @@ def _pull_artifact_storage(self, element, key, artifact_digest, remote, pull_bui self.cas.fetch_directory(remote, artifact.buildtree) if 
str(artifact.buildroot): self.cas.fetch_directory(remote, artifact.buildroot) + if artifact.buildsandbox: + for subsandbox_digest in artifact.buildsandbox.subsandbox_digests: + self.cas.fetch_directory(remote, subsandbox_digest) digests = [artifact.low_diversity_meta, artifact.high_diversity_meta] if str(artifact.public_data): diff --git a/src/buildstream/element.py b/src/buildstream/element.py index a9d12fbde..fa79c2b44 100644 --- a/src/buildstream/element.py +++ b/src/buildstream/element.py @@ -1440,18 +1440,25 @@ def _prepare_sandbox(self, scope, shell=False, integrate=True, usebuildtree=Fals # pylint: disable-next=contextmanager-generator-missing-cleanup with self.__sandbox(config=self.__sandbox_config, allow_remote=False) as sandbox: - # Configure always comes first, and we need it. - self.__configure_sandbox(sandbox) - if usebuildtree: + # Configure the sandbox from artifact metadata + self.__artifact.configure_sandbox(sandbox) + # Use the cached buildroot directly buildrootvdir = self.__artifact.get_buildroot() sandbox_vroot = sandbox.get_virtual_directory() sandbox_vroot._import_files_internal(buildrootvdir, collect_result=False) elif shell and scope == _Scope.BUILD: + self.__configure_sandbox(sandbox) # Stage what we need self.__stage(sandbox) else: + # Runtime shell or `bst artifact checkout` + + # Don't call `configure_sandbox()` as that may attempt to construct subsandboxes + # with build dependencies and we're not setting up a build sandbox. + sandbox.set_environment(self.get_environment()) + # Stage deps in the sandbox root with self.timed_activity("Staging dependencies", silent_nested=True), self.__collect_overlaps(sandbox): self._stage_dependency_artifacts(sandbox, scope) @@ -1787,6 +1794,7 @@ def _cache_artifact(self, sandbox, collect): variables=self.__variables, environment=self.__environment, sandboxconfig=self.__sandbox_config, + buildsandbox=sandbox if buildrootvdir else None, ) if collect is not None and collectvdir is None: From 425c6531583acac14b8f698730fcace99ce4fbc7 Mon Sep 17 00:00:00 2001 From: Abderrahim Kitouni Date: Tue, 17 Jun 2025 17:02:10 +0100 Subject: [PATCH 08/10] buildelement.py: Add the digest-environment config property This allows setting an environment variable inside the sandbox to the CAS digest of one or more dependencies. 
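The variable holds the directory digest serialized as "<hash>/<size_bytes>". A hypothetical consumer inside the sandbox (the variable name is borrowed from the documentation example added later in this series) could split it back apart like this:

    import os

    # The digest-environment variable holds "<hash>/<size_bytes>" for the
    # directory containing the configured dependencies.
    digest_hash, digest_size = os.environ["GCC_DIGEST"].split("/")
    print(digest_hash, int(digest_size))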
Co-authored by: Adrien Plazas --- src/buildstream/buildelement.py | 39 ++++++++++++++++++++++++++++++--- 1 file changed, 36 insertions(+), 3 deletions(-) diff --git a/src/buildstream/buildelement.py b/src/buildstream/buildelement.py index 40355f5cf..d829c7cf4 100644 --- a/src/buildstream/buildelement.py +++ b/src/buildstream/buildelement.py @@ -219,6 +219,7 @@ def configure(self, node): def configure_dependencies(self, dependencies): self.__layout = {} # pylint: disable=attribute-defined-outside-init + self.__digest_environment = {} # pylint: disable=attribute-defined-outside-init # FIXME: Currently this forcefully validates configurations # for all BuildElement subclasses so they are unable to @@ -227,9 +228,18 @@ def configure_dependencies(self, dependencies): for dep in dependencies: # Determine the location to stage each element, default is "/" location = "/" + if dep.config: - dep.config.validate_keys(["location"]) - location = dep.config.get_str("location") + dep.config.validate_keys(["digest-environment", "location"]) + + location = dep.config.get_str("location", "/") + + digest_var_name = dep.config.get_str("digest-environment", None) + + if digest_var_name is not None: + element_list = self.__digest_environment.setdefault(digest_var_name, []) + element_list.append((dep.element, dep.path)) + try: element_list = self.__layout[location] except KeyError: @@ -268,6 +278,16 @@ def get_unique_key(self): } dictionary["layout"] = layout_key + # Specify the layout in the key, if buildstream is to generate an environment + # variable with the digest + # + if self.__digest_environment: + sorted_envs = sorted(self.__digest_environment) + digest_key = { + env: [dependency_path for _, dependency_path in self.__digest_environment[env]] for env in sorted_envs + } + dictionary["digest-enviornment"] = digest_key + return dictionary def configure_sandbox(self, sandbox): @@ -286,7 +306,20 @@ def configure_sandbox(self, sandbox): sandbox.set_work_directory(command_dir) # Setup environment - sandbox.set_environment(self.get_environment()) + env = self.get_environment() + + # Add "CAS digest" environment variables + sorted_envs = sorted(self.__digest_environment) + for digest_variable in sorted_envs: + element_list = [element for element, _ in self.__digest_environment[digest_variable]] + with self.timed_activity( + f"Staging dependencies for '{digest_variable}' in subsandbox", silent_nested=True + ), self.subsandbox(sandbox) as subsandbox: + self.stage_dependency_artifacts(subsandbox, element_list) + digest = subsandbox.get_virtual_directory()._get_digest() + env[digest_variable] = "{}/{}".format(digest.hash, digest.size_bytes) + + sandbox.set_environment(env) def stage(self, sandbox): From 95884258279aa700d611db5643c77c2a7c316df6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BCrg=20Billeter?= Date: Fri, 15 Aug 2025 16:08:11 +0200 Subject: [PATCH 09/10] buildelement.py: Document `digest-environment` --- src/buildstream/buildelement.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/src/buildstream/buildelement.py b/src/buildstream/buildelement.py index d829c7cf4..7596df02e 100644 --- a/src/buildstream/buildelement.py +++ b/src/buildstream/buildelement.py @@ -76,6 +76,31 @@ directories before subdirectories. 
+`digest-environment` for dependencies +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The BuildElement supports the ``digest-environment`` :term:`dependency configuration `, +which sets the specified environment variable in the build sandbox to the CAS digest +corresponding to a directory that contains all dependencies that are configured +with the same ``digest-environment``. + +This is useful for REAPI clients in the sandbox such as `recc `_, +see ``remote-apis-socket`` in the :ref:`sandbox configuration `. + +**Example:** + +Here is an example of how to set the environment variable `GCC_DIGEST` to the +CAS digest of a directory that contains ``gcc.bst`` and its runtime dependencies. +The ``libpony.bst`` dependency will not be included in that CAS directory. + +.. code:: yaml + + build-depends: + - baseproject.bst:gcc.bst + config: + digest-environment: GCC_DIGEST + - libpony.bst + + Location for running commands ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The ``command-subdir`` variable sets where commands will be executed, From a2f076f9870c0a55cd276193f33f63dc65578a62 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BCrg=20Billeter?= Date: Fri, 25 Jul 2025 16:18:59 +0200 Subject: [PATCH 10/10] tests/integration: Add tests for `digest-environment` --- tests/integration/digest-environment.py | 158 ++++++++++++++++++ .../digest-environment/base-buildtree.bst | 12 ++ .../base-plus-extra-dep.bst | 14 ++ .../elements/digest-environment/base.bst | 12 ++ .../elements/digest-environment/merge.bst | 16 ++ .../elements/digest-environment/two.bst | 17 ++ 6 files changed, 229 insertions(+) create mode 100644 tests/integration/digest-environment.py create mode 100644 tests/integration/project/elements/digest-environment/base-buildtree.bst create mode 100644 tests/integration/project/elements/digest-environment/base-plus-extra-dep.bst create mode 100644 tests/integration/project/elements/digest-environment/base.bst create mode 100644 tests/integration/project/elements/digest-environment/merge.bst create mode 100644 tests/integration/project/elements/digest-environment/two.bst diff --git a/tests/integration/digest-environment.py b/tests/integration/digest-environment.py new file mode 100644 index 000000000..8dcfa1bf4 --- /dev/null +++ b/tests/integration/digest-environment.py @@ -0,0 +1,158 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +# Pylint doesn't play well with fixtures and dependency injection from pytest +# pylint: disable=redefined-outer-name + +import os +import shutil + +import pytest + +from buildstream._testing import cli_integration as cli # pylint: disable=unused-import +from buildstream._testing._utils.site import HAVE_SANDBOX + +from tests.testutils import create_artifact_share + +pytestmark = pytest.mark.integration + + +DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project") + + +# Test that the digest environment variable is set correctly during a build +@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox") +@pytest.mark.datafiles(DATA_DIR) +def test_build_checkout_base(cli, datafiles): + project = str(datafiles) + element_name = "digest-environment/base.bst" + + result = cli.run(project=project, args=["build", element_name]) + assert result.exit_code == 0 + + result = cli.run(project=project, args=["artifact", "checkout", element_name]) + assert result.exit_code == 0 + + +# Test that the digest environment variable is not affected by unrelated build dependencies +@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox") +@pytest.mark.datafiles(DATA_DIR) +def test_build_base_plus_extra_dep(cli, datafiles): + project = str(datafiles) + element_name = "digest-environment/base-plus-extra-dep.bst" + + result = cli.run(project=project, args=["build", element_name]) + assert result.exit_code == 0 + + +# Test that multiple dependencies can be merged into a single digest environment variable +@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox") +@pytest.mark.datafiles(DATA_DIR) +def test_build_merge(cli, datafiles): + project = str(datafiles) + element_name = "digest-environment/merge.bst" + + result = cli.run(project=project, args=["build", element_name]) + assert result.exit_code == 0 + + +# Test that multiple digest environment variables can be configured in a single element +@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox") +@pytest.mark.datafiles(DATA_DIR) +def test_build_two(cli, datafiles): + project = str(datafiles) + element_name = "digest-environment/two.bst" + + result = cli.run(project=project, args=["build", element_name]) + assert result.exit_code == 0 + + +# Test that the digest environment variable is also set in a build shell +@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox") +@pytest.mark.datafiles(DATA_DIR) +def test_build_shell(cli, datafiles): + project = str(datafiles) + element_name = "digest-environment/base.bst" + + # Ensure artifacts of build dependencies are available for build shell + result = cli.run(project=project, args=["build", "--deps", "build", element_name]) + assert result.exit_code == 0 + + result = cli.run(project=project, args=["shell", "--build", element_name, "--", "sh", "-c", "echo $BASE_DIGEST"]) + assert result.exit_code == 0 + assert result.output.strip() == "63450d93eab71f525d08378fe50960aff92b0ec8f1b0be72b2ac4b8259d09833/1227" + + +# Test that the digest environment variable is also set in a build shell staged from a buildtree +@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox") +@pytest.mark.datafiles(DATA_DIR) +def test_build_shell_buildtree(cli, datafiles): + project = str(datafiles) + element_name = "digest-environment/base-buildtree.bst" + + # Generate buildtree + result = cli.run(project=project, 
args=["--cache-buildtrees", "always", "build", element_name]) + assert result.exit_code == 0 + + result = cli.run( + project=project, + args=["shell", "--build", "--use-buildtree", element_name, "--", "sh", "-c", "echo $BASE_DIGEST"], + ) + assert result.exit_code == 0 + assert result.output.strip() == "63450d93eab71f525d08378fe50960aff92b0ec8f1b0be72b2ac4b8259d09833/1227" + + +# Test that buildtree push works for elements with a digest environment variable +@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox") +@pytest.mark.datafiles(DATA_DIR) +def test_pushed_buildtree(cli, tmpdir, datafiles): + project = str(datafiles) + element_name = "digest-environment/merge.bst" + + with create_artifact_share(os.path.join(str(tmpdir), "share")) as share: + cli.configure( + { + "artifacts": {"servers": [{"url": share.repo, "push": True}]}, + "cachedir": str(tmpdir), + "cache": {"cache-buildtrees": "always"}, + } + ) + + # Generate buildtree + result = cli.run(project=project, args=["build", element_name]) + assert result.exit_code == 0 + + assert cli.get_element_state(project, element_name) == "cached" + assert share.get_artifact(cli.get_artifact_name(project, "test", element_name)) + + # Clear the local cache to make sure everything can and will be pulled from the remote + shutil.rmtree(os.path.join(str(tmpdir), "cas")) + shutil.rmtree(os.path.join(str(tmpdir), "artifacts")) + + result = cli.run( + project=project, + args=[ + "--pull-buildtrees", + "shell", + "--build", + "--use-buildtree", + element_name, + "--", + "sh", + "-c", + "echo $MERGED_DIGEST", + ], + ) + assert result.exit_code == 0 + assert result.output.strip() == "469369597f4faa56c4b8338d6a948c8c1d4f29e6ea8f4d4d261cac4182bcef48/1389" diff --git a/tests/integration/project/elements/digest-environment/base-buildtree.bst b/tests/integration/project/elements/digest-environment/base-buildtree.bst new file mode 100644 index 000000000..19161e806 --- /dev/null +++ b/tests/integration/project/elements/digest-environment/base-buildtree.bst @@ -0,0 +1,12 @@ +kind: manual + +depends: + - filename: base.bst + type: build + config: + digest-environment: BASE_DIGEST + +config: + build-commands: + - env + - test "$BASE_DIGEST" = "63450d93eab71f525d08378fe50960aff92b0ec8f1b0be72b2ac4b8259d09833/1227" diff --git a/tests/integration/project/elements/digest-environment/base-plus-extra-dep.bst b/tests/integration/project/elements/digest-environment/base-plus-extra-dep.bst new file mode 100644 index 000000000..87bd0c61d --- /dev/null +++ b/tests/integration/project/elements/digest-environment/base-plus-extra-dep.bst @@ -0,0 +1,14 @@ +kind: manual + +depends: + - filename: base.bst + type: build + config: + digest-environment: BASE_DIGEST + - filename: manual/import-file.bst + type: build + +config: + build-commands: + - env + - test "$BASE_DIGEST" = "63450d93eab71f525d08378fe50960aff92b0ec8f1b0be72b2ac4b8259d09833/1227" diff --git a/tests/integration/project/elements/digest-environment/base.bst b/tests/integration/project/elements/digest-environment/base.bst new file mode 100644 index 000000000..19161e806 --- /dev/null +++ b/tests/integration/project/elements/digest-environment/base.bst @@ -0,0 +1,12 @@ +kind: manual + +depends: + - filename: base.bst + type: build + config: + digest-environment: BASE_DIGEST + +config: + build-commands: + - env + - test "$BASE_DIGEST" = "63450d93eab71f525d08378fe50960aff92b0ec8f1b0be72b2ac4b8259d09833/1227" diff --git a/tests/integration/project/elements/digest-environment/merge.bst 
b/tests/integration/project/elements/digest-environment/merge.bst new file mode 100644 index 000000000..a6bfdb5b6 --- /dev/null +++ b/tests/integration/project/elements/digest-environment/merge.bst @@ -0,0 +1,16 @@ +kind: manual + +depends: + - filename: base.bst + type: build + config: + digest-environment: MERGED_DIGEST + - filename: manual/import-file.bst + type: build + config: + digest-environment: MERGED_DIGEST + +config: + build-commands: + - env + - test "$MERGED_DIGEST" = "469369597f4faa56c4b8338d6a948c8c1d4f29e6ea8f4d4d261cac4182bcef48/1389" diff --git a/tests/integration/project/elements/digest-environment/two.bst b/tests/integration/project/elements/digest-environment/two.bst new file mode 100644 index 000000000..7619e6ac0 --- /dev/null +++ b/tests/integration/project/elements/digest-environment/two.bst @@ -0,0 +1,17 @@ +kind: manual + +depends: + - filename: base.bst + type: build + config: + digest-environment: BASE_DIGEST + - filename: manual/import-file.bst + type: build + config: + digest-environment: IMPORT_DIGEST + +config: + build-commands: + - env + - test "$BASE_DIGEST" = "63450d93eab71f525d08378fe50960aff92b0ec8f1b0be72b2ac4b8259d09833/1227" + - test "$IMPORT_DIGEST" = "eec5ed9053acb296a8e7a30ab7ee173abf4f7392b8228ba7644cd5b51a5cfdeb/162"