diff --git a/atomic_reactor/plugins/add_image_content_manifest.py b/atomic_reactor/plugins/add_image_content_manifest.py index 5232dfca3..5b55c5731 100644 --- a/atomic_reactor/plugins/add_image_content_manifest.py +++ b/atomic_reactor/plugins/add_image_content_manifest.py @@ -16,8 +16,10 @@ from atomic_reactor.constants import (IMAGE_BUILD_INFO_DIR, INSPECT_ROOTFS, INSPECT_ROOTFS_LAYERS, + CACHI2_BUILD_DIR, PLUGIN_ADD_IMAGE_CONTENT_MANIFEST, PLUGIN_FETCH_MAVEN_KEY, + PLUGIN_CACHI2_POSTPROCESS, PLUGIN_RESOLVE_REMOTE_SOURCE) from atomic_reactor.config import get_cachito_session from atomic_reactor.dirs import BuildDir @@ -25,6 +27,7 @@ from atomic_reactor.util import (validate_with_schema, read_content_sets, map_to_user_params, allow_path_in_dockerignore) from atomic_reactor.utils.pnc import PNCUtil +from atomic_reactor.utils.cachi2 import convert_SBOM_to_ICM class AddImageContentManifestPlugin(Plugin): @@ -100,6 +103,8 @@ def __init__(self, workflow, destdir=IMAGE_BUILD_INFO_DIR): remote_source_results = wf_data.plugins_results.get(PLUGIN_RESOLVE_REMOTE_SOURCE) or [] self.remote_source_ids = [remote_source['id'] for remote_source in remote_source_results] + self.cachi2_remote_sources = wf_data.plugins_results.get(PLUGIN_CACHI2_POSTPROCESS) or [] + fetch_maven_results = wf_data.plugins_results.get(PLUGIN_FETCH_MAVEN_KEY) or {} self.pnc_artifact_ids = fetch_maven_results.get('pnc_artifact_ids') or [] @@ -130,6 +135,12 @@ def layer_index(self) -> int: return len(inspect[INSPECT_ROOTFS][INSPECT_ROOTFS_LAYERS]) + def _get_cachi2_icm(self) -> dict: + global_sbom_path = self.workflow.build_dir.path/CACHI2_BUILD_DIR/"bom.json" + with open(global_sbom_path, "r") as f: + sbom = json.load(f) + return convert_SBOM_to_ICM(sbom) + @functools.cached_property def _icm_base(self) -> dict: """Create the platform-independent skeleton of the ICM document. 
@@ -140,6 +151,8 @@ def _icm_base(self) -> dict: if self.remote_source_ids: icm = self.cachito_session.get_image_content_manifest(self.remote_source_ids) + elif self.cachi2_remote_sources: # we don't support Cachito and Cachi2 together + icm = self._get_cachi2_icm() if self.pnc_artifact_ids: purl_specs = self.pnc_util.get_artifact_purl_specs(self.pnc_artifact_ids) diff --git a/atomic_reactor/plugins/generate_sbom.py b/atomic_reactor/plugins/generate_sbom.py index 0a24cd968..30385c16d 100644 --- a/atomic_reactor/plugins/generate_sbom.py +++ b/atomic_reactor/plugins/generate_sbom.py @@ -13,13 +13,15 @@ from typing import Any, Dict, List, Optional from atomic_reactor.constants import (PLUGIN_GENERATE_SBOM, + PLUGIN_CACHI2_POSTPROCESS, PLUGIN_RPMQA, PLUGIN_RESOLVE_REMOTE_SOURCE, SBOM_SCHEMA_PATH, PLUGIN_FETCH_MAVEN_KEY, INSPECT_CONFIG, KOJI_BTYPE_ICM, - ICM_JSON_FILENAME) + ICM_JSON_FILENAME, + CACHI2_BUILD_DIR) from atomic_reactor.config import get_cachito_session, get_koji_session from atomic_reactor.utils import retries from atomic_reactor.utils.cachito import CachitoAPI @@ -92,6 +94,8 @@ def __init__(self, workflow): remote_source_results = wf_data.plugins_results.get(PLUGIN_RESOLVE_REMOTE_SOURCE) or [] self.remote_source_ids = [remote_source['id'] for remote_source in remote_source_results] + self.cachi2_remote_sources = wf_data.plugins_results.get(PLUGIN_CACHI2_POSTPROCESS) or [] + self.rpm_components = wf_data.plugins_results.get(PLUGIN_RPMQA) or {} fetch_maven_results = wf_data.plugins_results.get(PLUGIN_FETCH_MAVEN_KEY) or {} @@ -131,6 +135,12 @@ def fetch_url_or_koji_check(self) -> None: if read_fetch_artifacts_url(self.workflow): self.incompleteness_reasons.add("fetch url is used") + def get_cachi2_sbom(self) -> dict: + """Get SBOM from cachi2 results""" + global_sbom_path = self.workflow.build_dir.path/CACHI2_BUILD_DIR/"bom.json" + with open(global_sbom_path, "r") as f: + return json.load(f) + def add_parent_missing_sbom_reason(self, nvr: str) -> None: 
self.incompleteness_reasons.add(f"parent build '{nvr}' is missing SBOM") @@ -331,6 +341,8 @@ def run(self) -> Dict[str, Any]: if self.remote_source_ids: remote_sources_sbom = self.cachito_session.get_sbom(self.remote_source_ids) remote_souces_components = remote_sources_sbom['components'] + elif self.cachi2_remote_sources: # Cachi2 and Cachito are not supported to be used together + remote_souces_components = self.get_cachi2_sbom()['components'] # add components from cachito, rpms, pnc for platform in self.all_platforms: diff --git a/atomic_reactor/plugins/koji_import.py b/atomic_reactor/plugins/koji_import.py index 9dd190d95..f84d1cdce 100644 --- a/atomic_reactor/plugins/koji_import.py +++ b/atomic_reactor/plugins/koji_import.py @@ -36,6 +36,7 @@ from atomic_reactor.plugins.fetch_sources import PLUGIN_FETCH_SOURCES_KEY from atomic_reactor.constants import ( + PLUGIN_CACHI2_POSTPROCESS, PLUGIN_EXPORT_OPERATOR_MANIFESTS_KEY, PLUGIN_KOJI_IMPORT_PLUGIN_KEY, PLUGIN_KOJI_IMPORT_SOURCE_CONTAINER_PLUGIN_KEY, @@ -303,41 +304,76 @@ def set_pnc_build_metadata(self, extra): if pnc_build_metadata: extra['image']['pnc'] = pnc_build_metadata - def set_remote_sources_metadata(self, extra): - remote_source_result = self.workflow.data.plugins_results.get( - PLUGIN_RESOLVE_REMOTE_SOURCE - ) - if remote_source_result: - if self.workflow.conf.allow_multiple_remote_sources: - remote_sources_image_metadata = [ - {"name": remote_source["name"], "url": remote_source["url"].rstrip('/download')} - for remote_source in remote_source_result - ] - extra["image"]["remote_sources"] = remote_sources_image_metadata - - remote_sources_typeinfo_metadata = [ - { - "name": remote_source["name"], - "url": remote_source["url"].rstrip('/download'), - "archives": [ - remote_source["remote_source_json"]["filename"], - remote_source["remote_source_tarball"]["filename"], - remote_source["remote_source_json_env"]["filename"], - remote_source["remote_source_json_config"]["filename"], - ], - } - for 
remote_source in remote_source_result - ] - else: - extra["image"]["remote_source_url"] = remote_source_result[0]["url"] - remote_sources_typeinfo_metadata = { - "remote_source_url": remote_source_result[0]["url"] - } + def set_remote_sources_metadata_cachito(self, remote_source_result, extra): + if self.workflow.conf.allow_multiple_remote_sources: + remote_sources_image_metadata = [ + {"name": remote_source["name"], "url": remote_source["url"].rstrip('/download')} + for remote_source in remote_source_result + ] + extra["image"]["remote_sources"] = remote_sources_image_metadata - remote_source_typeinfo = { - KOJI_BTYPE_REMOTE_SOURCES: remote_sources_typeinfo_metadata, + remote_sources_typeinfo_metadata = [ + { + "name": remote_source["name"], + "url": remote_source["url"].rstrip('/download'), + "archives": [ + remote_source["remote_source_json"]["filename"], + remote_source["remote_source_tarball"]["filename"], + remote_source["remote_source_json_env"]["filename"], + remote_source["remote_source_json_config"]["filename"], + ], + } + for remote_source in remote_source_result + ] + else: + extra["image"]["remote_source_url"] = remote_source_result[0]["url"] + remote_sources_typeinfo_metadata = { + "remote_source_url": remote_source_result[0]["url"] } - extra.setdefault("typeinfo", {}).update(remote_source_typeinfo) + + remote_source_typeinfo = { + KOJI_BTYPE_REMOTE_SOURCES: remote_sources_typeinfo_metadata, + } + extra.setdefault("typeinfo", {}).update(remote_source_typeinfo) + + def set_remote_sources_metadata_cachi2(self, remote_source_result, extra): + remote_sources_typeinfo_metadata = [] + if self.workflow.conf.allow_multiple_remote_sources: + remote_sources_image_metadata = [ + {"name": remote_source["name"]} + for remote_source in remote_source_result + ] + extra["image"]["remote_sources"] = remote_sources_image_metadata + + remote_sources_typeinfo_metadata = [ + { + "name": remote_source["name"], + "archives": [ + 
remote_source["remote_source_json"]["filename"], + remote_source["remote_source_tarball"]["filename"], + remote_source["remote_source_json_env"]["filename"], + ], + } + for remote_source in remote_source_result + ] + + remote_source_typeinfo = { + KOJI_BTYPE_REMOTE_SOURCES: remote_sources_typeinfo_metadata, + } + extra.setdefault("typeinfo", {}).update(remote_source_typeinfo) + + def set_remote_sources_metadata(self, extra): + func_map = { + PLUGIN_RESOLVE_REMOTE_SOURCE: self.set_remote_sources_metadata_cachito, + PLUGIN_CACHI2_POSTPROCESS: self.set_remote_sources_metadata_cachi2, + } + for plugin_name, func in func_map.items(): + remote_source_result = self.workflow.data.plugins_results.get( + plugin_name + ) + if remote_source_result: + func(remote_source_result, extra) + break def set_remote_source_file_metadata(self, extra): maven_url_sources_metadata_results = self.workflow.data.plugins_results.get( @@ -613,9 +649,21 @@ def _filesystem_koji_task_id(self) -> Optional[int]: def _collect_remote_sources(self) -> Iterable[ArtifactOutputInfo]: wf_data = self.workflow.data + + remote_source_keys = [ + "remote_source_json", "remote_source_json_env", "remote_source_json_config", + ] + # a list of metadata describing the remote sources. 
plugin_results: List[Dict[str, Any]] plugin_results = wf_data.plugins_results.get(PLUGIN_RESOLVE_REMOTE_SOURCE) or [] + if not plugin_results: + # Cachi2 + plugin_results = wf_data.plugins_results.get(PLUGIN_CACHI2_POSTPROCESS) or [] + remote_source_keys = [ + "remote_source_json", "remote_source_json_env", + ] + tmpdir = tempfile.mkdtemp() for remote_source in plugin_results: @@ -624,9 +672,7 @@ def _collect_remote_sources(self) -> Iterable[ArtifactOutputInfo]: dest_filename = remote_source_tarball['filename'] yield local_filename, dest_filename, KOJI_BTYPE_REMOTE_SOURCES, None - for source_key in ( - "remote_source_json", "remote_source_json_env", "remote_source_json_config", - ): + for source_key in remote_source_keys: data_json = remote_source[source_key] data_json_filename = data_json['filename'] file_path = os.path.join(tmpdir, data_json_filename) diff --git a/tests/plugins/test_add_image_content_manifest.py b/tests/plugins/test_add_image_content_manifest.py index d92e1a849..a95804d29 100644 --- a/tests/plugins/test_add_image_content_manifest.py +++ b/tests/plugins/test_add_image_content_manifest.py @@ -21,8 +21,10 @@ from tests.utils.test_cachito import CACHITO_URL, CACHITO_REQUEST_ID from atomic_reactor.constants import ( + CACHI2_BUILD_DIR, INSPECT_ROOTFS, INSPECT_ROOTFS_LAYERS, + PLUGIN_CACHI2_POSTPROCESS, PLUGIN_FETCH_MAVEN_KEY, PLUGIN_RESOLVE_REMOTE_SOURCE, DOCKERIGNORE, @@ -39,6 +41,28 @@ } CACHITO_ICM_URL = '{}/api/v1/content-manifest?requests={}'.format(CACHITO_URL, CACHITO_REQUEST_ID) + +CACHI2_SBOM = { + "bomFormat": "CycloneDX", + "components": [{ + "name": "retrodep", + "purl": "pkg:golang/github.com%2Frelease-engineering%2Fretrodep%2Fv2@v2.0.2", + "properties": [{ + "name": "cachi2:found_by", + "value": "cachi2", + }], + "type": "library", + }], + "metadata": { + "tools": [{ + "vendor": "red hat", + "name": "cachi2" + }] + }, + "specVersion": "1.4", + "version": 1 +} + PNC_ARTIFACT = { 'id': 1234, 'publicUrl': 'http://test.com/artifact.jar', @@ 
-88,6 +112,28 @@ } ] } + +CACHI2_ICM_DICT = { + 'metadata': { + 'icm_version': 1, + 'icm_spec': ( + 'https://raw.githubusercontent.com/containerbuildsystem/atomic-reactor/' + 'f4abcfdaf8247a6b074f94fa84f3846f82d781c6/atomic_reactor/schemas/' + 'content_manifest.json'), + 'image_layer_index': 1, + }, + 'content_sets': [], + 'image_contents': [ + { + 'purl': + 'pkg:golang/github.com%2Frelease-engineering%2Fretrodep%2Fv2@v2.0.2', + }, + { + 'purl': PNC_ARTIFACT['purl'], + } + ] +} + ICM_JSON = dedent( '''\ { @@ -157,7 +203,8 @@ def mock_get_icm(requests_mock): def mock_env(workflow, df_content, base_layers=0, remote_sources=None, - r_c_m_override=None, pnc_artifacts=True, dockerignore=False): + r_c_m_override=None, pnc_artifacts=True, dockerignore=False, + cachi2_sbom=None): if base_layers > 0: inspection_data = { @@ -206,6 +253,19 @@ def mock_env(workflow, df_content, base_layers=0, remote_sources=None, platforms = list(CONTENT_SETS.keys()) workflow.build_dir.init_build_dirs(platforms, workflow.source) + if cachi2_sbom: + env.set_plugin_result( + PLUGIN_CACHI2_POSTPROCESS, + {"plugin": "did run, real value doesn't matter"} + ) + + # save cachi2 SBOM which is source for ICM + path = workflow.build_dir.path/CACHI2_BUILD_DIR/"bom.json" + path.parent.mkdir() + with open(path, "w") as f: + json.dump(cachi2_sbom, f) + f.flush() + return env.create_runner() @@ -239,6 +299,7 @@ def check_in_build_dir(build_dir): @pytest.mark.parametrize('manifest_file_exists', [True, False]) @pytest.mark.parametrize('content_sets', [True, False]) +@pytest.mark.parametrize('cachi2', [True, False]) @pytest.mark.parametrize( ('df_content, expected_df, base_layers, manifest_file'), [ ( @@ -288,13 +349,18 @@ def check_in_build_dir(build_dir): ), ]) def test_add_image_content_manifest(workflow, requests_mock, - manifest_file_exists, content_sets, + manifest_file_exists, content_sets, cachi2, df_content, expected_df, base_layers, manifest_file, ): mock_get_icm(requests_mock) 
mock_content_sets_config(workflow.source.path, empty=(not content_sets)) - runner = mock_env(workflow, df_content, base_layers, remote_sources=REMOTE_SOURCES) + if cachi2: + runner_opts = {"cachi2_sbom": CACHI2_SBOM} + else: + runner_opts = {"remote_sources": REMOTE_SOURCES} + + runner = mock_env(workflow, df_content, base_layers, **runner_opts) if manifest_file_exists: workflow.build_dir.any_platform.path.joinpath(manifest_file).touch() @@ -304,7 +370,7 @@ def test_add_image_content_manifest(workflow, requests_mock, runner.run() return - expected_output = deepcopy(ICM_DICT) + expected_output = deepcopy(CACHI2_ICM_DICT if cachi2 else ICM_DICT) expected_output['metadata']['image_layer_index'] = base_layers if base_layers else 0 runner.run() diff --git a/tests/plugins/test_generate_sbom.py b/tests/plugins/test_generate_sbom.py index ecef5dbb4..86b9d6074 100644 --- a/tests/plugins/test_generate_sbom.py +++ b/tests/plugins/test_generate_sbom.py @@ -32,6 +32,8 @@ REPO_FETCH_ARTIFACTS_KOJI, REPO_FETCH_ARTIFACTS_URL, PLUGIN_CHECK_AND_SET_PLATFORMS_KEY, + PLUGIN_CACHI2_POSTPROCESS, + CACHI2_BUILD_DIR, ) from atomic_reactor.plugin import PluginFailedException from atomic_reactor.plugins.generate_sbom import GenerateSbomPlugin @@ -1065,7 +1067,7 @@ def teardown_function(*args): sys.modules.pop(GenerateSbomPlugin.key, None) -def mock_env(workflow, df_images): +def mock_env(workflow, df_images, cachi2=False): tmp_dir = tempfile.mkdtemp() dockerconfig_contents = {"auths": {LOCALHOST_REGISTRY: {"username": "user", "email": "test@example.com", @@ -1099,12 +1101,20 @@ def mock_env(workflow, df_images): .for_plugin(GenerateSbomPlugin.key) .set_reactor_config(r_c_m) .set_dockerfile_images(df_images) - .set_plugin_result(PLUGIN_RESOLVE_REMOTE_SOURCE, deepcopy(REMOTE_SOURCES)) .set_plugin_result(PLUGIN_FETCH_MAVEN_KEY, {'sbom_components': deepcopy(PNC_SBOM_COMPONENTS)}) .set_plugin_result(PLUGIN_RPMQA, deepcopy(RPM_SBOM_COMPONENTS)) ) + if cachi2: + # Note: using CACHITO_SBOM_JSON 
here, as the fields are almost the same as + # for Cachi2; I don't want to die from mocking everything again, just to + # make test pass for extra property "found_by: cachi2" added by cachi2 + # this provides good tests + mock_cachi2_sbom(workflow, deepcopy(CACHITO_SBOM_JSON)) + else: + env.set_plugin_result(PLUGIN_RESOLVE_REMOTE_SOURCE, deepcopy(REMOTE_SOURCES)) + all_inspects = [(EMPTY_SBOM_IMAGE_LABELS, EMPTY_SBOM_IMAGE_NAME), (MISSING_LABEL_IMAGE_LABELS, MISSING_LABEL_IMAGE_NAME), (BUILDING_IMAGE_LABELS, BUILDING_IMAGE_NAME), @@ -1131,6 +1141,19 @@ def mock_get_sbom_cachito(requests_mock): requests_mock.register_uri('GET', CACHITO_SBOM_URL, json=CACHITO_SBOM_JSON) +def mock_cachi2_sbom(workflow, cachi2_sbom: dict): + workflow.data.plugins_results[PLUGIN_CACHI2_POSTPROCESS] = { + "plugin": "did run, real value doesn't matter" + } + + # save cachi2 SBOM which is source for ICM + path = workflow.build_dir.path/CACHI2_BUILD_DIR/"bom.json" + path.parent.mkdir() + with open(path, "w") as f: + json.dump(cachi2_sbom, f) + f.flush() + + def mock_build_icm_urls(requests_mock): all_sboms = [(EMPTY_SBOM_KOJI_BUILD, EMPTY_SBOM_BUILD_SBOM_JSON), (BASE_WITH_SBOM_KOJI_BUILD, BASE_WITH_SBOM_BUILD_SBOM_JSON), @@ -1311,12 +1334,16 @@ def koji_session(): INCOMPLETE_CACHE_URL_KOJI, ), ]) +@pytest.mark.parametrize('cachi2', [True, False]) def test_sbom(workflow, requests_mock, koji_session, df_images, use_cache, use_fetch_url, - use_fetch_koji, expected_components, expected_incomplete): - mock_get_sbom_cachito(requests_mock) + use_fetch_koji, expected_components, expected_incomplete, cachi2): + + if not cachi2: + mock_get_sbom_cachito(requests_mock) + mock_build_icm_urls(requests_mock) - runner = mock_env(workflow, df_images) + runner = mock_env(workflow, df_images, cachi2=cachi2) workflow.data.tag_conf.add_unique_image(UNIQUE_IMAGE) def check_cosign_run(args): diff --git a/tests/plugins/test_koji_import.py b/tests/plugins/test_koji_import.py index c5cb5f00c..d44a61202 100644 --- 
a/tests/plugins/test_koji_import.py +++ b/tests/plugins/test_koji_import.py @@ -7,6 +7,7 @@ """ from collections import namedtuple +from enum import Enum import json from pathlib import Path from typing import Any, Dict @@ -37,6 +38,7 @@ from atomic_reactor.source import GitSource, PathSource from atomic_reactor.constants import (IMAGE_TYPE_DOCKER_ARCHIVE, KOJI_BTYPE_OPERATOR_MANIFESTS, PLUGIN_ADD_FILESYSTEM_KEY, + PLUGIN_CACHI2_POSTPROCESS, PLUGIN_EXPORT_OPERATOR_MANIFESTS_KEY, PLUGIN_MAVEN_URL_SOURCES_METADATA_KEY, PLUGIN_GROUP_MANIFESTS_KEY, PLUGIN_KOJI_PARENT_KEY, @@ -185,6 +187,12 @@ def taskFinished(self, task_id): REGISTRY = 'docker.example.com' +class RemoteSourceKind(Enum): + NONE = 1 + CACHITO = 2 + CACHI2 = 3 + + def fake_subprocess_output(cmd): if cmd.startswith('/bin/rpm'): return FAKE_RPM_OUTPUT @@ -268,7 +276,8 @@ def mock_environment(workflow: DockerBuildWorkflow, source_dir: Path, has_op_appregistry_manifests=False, has_op_bundle_manifests=False, push_operator_manifests_enabled=False, source_build=False, - has_remote_source=False, has_remote_source_file=False, + has_remote_source: RemoteSourceKind = RemoteSourceKind.NONE, + has_remote_source_file=False, has_pnc_build_metadata=False, scratch=False): if session is None: session = MockedClientSession('') @@ -524,7 +533,7 @@ def custom_get(method, url, headers, **kwargs): results = workflow.data.plugins_results results[PLUGIN_EXPORT_OPERATOR_MANIFESTS_KEY] = str(archive_file) - if has_remote_source: + if has_remote_source == RemoteSourceKind.CACHITO: source_path = build_dir_path / REMOTE_SOURCE_TARBALL_FILENAME source_path.write_text('dummy file', 'utf-8') remote_source_result = [ @@ -550,6 +559,28 @@ def custom_get(method, url, headers, **kwargs): } ] workflow.data.plugins_results[PLUGIN_RESOLVE_REMOTE_SOURCE] = remote_source_result + elif has_remote_source == RemoteSourceKind.CACHI2: + source_path = build_dir_path / REMOTE_SOURCE_TARBALL_FILENAME + source_path.write_text('dummy file', 'utf-8') + 
remote_source_result = [ + { + "name": None, + "url": "https://cachito.com/api/v1/requests/21048/download", + "remote_source_json": { + "filename": REMOTE_SOURCE_JSON_FILENAME, + "json": {"stub": "data"}, + }, + "remote_source_json_env": { + "filename": REMOTE_SOURCE_JSON_ENV_FILENAME, + "json": {"var": {"stub": "data"}}, + }, + "remote_source_tarball": { + "filename": REMOTE_SOURCE_TARBALL_FILENAME, + "path": str(source_path), + }, + } + ] + workflow.data.plugins_results[PLUGIN_CACHI2_POSTPROCESS] = remote_source_result else: workflow.data.plugins_results[PLUGIN_RESOLVE_REMOTE_SOURCE] = None @@ -1977,7 +2008,7 @@ def test_operators_bundle_metadata_csv_modifications( assert extra['image']['operator_manifests'] == expected - @pytest.mark.parametrize('has_remote_source', [True, False]) + @pytest.mark.parametrize('has_remote_source', list(RemoteSourceKind)) @pytest.mark.parametrize('allow_multiple_remote_sources', [True, False]) def test_remote_sources(self, workflow, source_dir, has_remote_source, allow_multiple_remote_sources): @@ -2000,7 +2031,7 @@ def test_remote_sources(self, workflow, source_dir, assert isinstance(extra, dict) # https://github.com/PyCQA/pylint/issues/2186 # pylint: disable=W1655 - if has_remote_source: + if has_remote_source == RemoteSourceKind.CACHITO: if allow_multiple_remote_sources: assert extra['image']['remote_sources'] == [ { @@ -2031,7 +2062,28 @@ def test_remote_sources(self, workflow, source_dir, } assert REMOTE_SOURCE_TARBALL_FILENAME in session.uploaded_files.keys() assert REMOTE_SOURCE_JSON_FILENAME in session.uploaded_files.keys() - + elif has_remote_source == RemoteSourceKind.CACHI2: + if allow_multiple_remote_sources: + assert extra['image']['remote_sources'] == [ + { + 'name': None, + } + ] + assert 'remote-sources' in extra['typeinfo'] + assert extra['typeinfo']['remote-sources'] == [ + { + 'name': None, + 'archives': [ + 'remote-source.json', 'remote-source.tar.gz', + 'remote-source.env.json' + ], + } + ] + assert 
REMOTE_SOURCE_TARBALL_FILENAME in session.uploaded_files.keys() + assert REMOTE_SOURCE_JSON_FILENAME in session.uploaded_files.keys() + else: + assert REMOTE_SOURCE_TARBALL_FILENAME in session.uploaded_files.keys() + assert REMOTE_SOURCE_JSON_FILENAME in session.uploaded_files.keys() else: assert 'remote_source_url' not in extra['image'] assert REMOTE_SOURCE_TARBALL_FILENAME not in session.uploaded_files.keys()