Cachi2: update postbuild plugins #2127

Merged
13 changes: 13 additions & 0 deletions atomic_reactor/plugins/add_image_content_manifest.py
@@ -16,15 +16,18 @@

from atomic_reactor.constants import (IMAGE_BUILD_INFO_DIR, INSPECT_ROOTFS,
INSPECT_ROOTFS_LAYERS,
CACHI2_BUILD_DIR,
PLUGIN_ADD_IMAGE_CONTENT_MANIFEST,
PLUGIN_FETCH_MAVEN_KEY,
PLUGIN_CACHI2_POSTPROCESS,
PLUGIN_RESOLVE_REMOTE_SOURCE)
from atomic_reactor.config import get_cachito_session
from atomic_reactor.dirs import BuildDir
from atomic_reactor.plugin import Plugin
from atomic_reactor.util import (validate_with_schema, read_content_sets, map_to_user_params,
allow_path_in_dockerignore)
from atomic_reactor.utils.pnc import PNCUtil
from atomic_reactor.utils.cachi2 import convert_SBOM_to_ICM


class AddImageContentManifestPlugin(Plugin):
@@ -100,6 +103,8 @@ def __init__(self, workflow, destdir=IMAGE_BUILD_INFO_DIR):
remote_source_results = wf_data.plugins_results.get(PLUGIN_RESOLVE_REMOTE_SOURCE) or []
self.remote_source_ids = [remote_source['id'] for remote_source in remote_source_results]

self.cachi2_remote_sources = wf_data.plugins_results.get(PLUGIN_CACHI2_POSTPROCESS) or []

fetch_maven_results = wf_data.plugins_results.get(PLUGIN_FETCH_MAVEN_KEY) or {}
self.pnc_artifact_ids = fetch_maven_results.get('pnc_artifact_ids') or []

@@ -130,6 +135,12 @@ def layer_index(self) -> int:

return len(inspect[INSPECT_ROOTFS][INSPECT_ROOTFS_LAYERS])

def _get_cachi2_icm(self) -> dict:
global_sbom_path = self.workflow.build_dir.path/CACHI2_BUILD_DIR/"bom.json"
with open(global_sbom_path, "r") as f:
sbom = json.load(f)
return convert_SBOM_to_ICM(sbom)

@functools.cached_property
def _icm_base(self) -> dict:
"""Create the platform-independent skeleton of the ICM document.
@@ -140,6 +151,8 @@ def _icm_base(self) -> dict:

if self.remote_source_ids:
icm = self.cachito_session.get_image_content_manifest(self.remote_source_ids)
        elif self.cachi2_remote_sources:  # Cachito and Cachi2 are not supported together
icm = self._get_cachi2_icm()

if self.pnc_artifact_ids:
purl_specs = self.pnc_util.get_artifact_purl_specs(self.pnc_artifact_ids)
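
The new _get_cachi2_icm helper reads the merged Cachi2 SBOM (CACHI2_BUILD_DIR/bom.json) and hands it to convert_SBOM_to_ICM. The conversion itself is not part of this diff; the sketch below only illustrates the mapping implied by the CACHI2_SBOM / CACHI2_ICM_DICT fixtures in the tests further down (keep each component's purl and wrap it in the ICM skeleton). The real implementation in atomic_reactor.utils.cachi2 may differ in detail.

    # Illustrative sketch only -- not the actual convert_SBOM_to_ICM code.
    from typing import Any, Dict

    ICM_SPEC = (
        'https://raw.githubusercontent.com/containerbuildsystem/atomic-reactor/'
        'f4abcfdaf8247a6b074f94fa84f3846f82d781c6/atomic_reactor/schemas/'
        'content_manifest.json')


    def convert_sbom_to_icm_sketch(sbom: Dict[str, Any]) -> Dict[str, Any]:
        """Map a CycloneDX SBOM produced by Cachi2 to an ICM document."""
        return {
            'metadata': {
                'icm_version': 1,
                'icm_spec': ICM_SPEC,
                'image_layer_index': 1,  # the plugin later overrides this per image
            },
            'content_sets': [],
            # only the purl of each component survives in the ICM
            'image_contents': [{'purl': c['purl']} for c in sbom.get('components', [])],
        }
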
14 changes: 13 additions & 1 deletion atomic_reactor/plugins/generate_sbom.py
@@ -13,13 +13,15 @@
from typing import Any, Dict, List, Optional

from atomic_reactor.constants import (PLUGIN_GENERATE_SBOM,
PLUGIN_CACHI2_POSTPROCESS,
PLUGIN_RPMQA,
PLUGIN_RESOLVE_REMOTE_SOURCE,
SBOM_SCHEMA_PATH,
PLUGIN_FETCH_MAVEN_KEY,
INSPECT_CONFIG,
KOJI_BTYPE_ICM,
ICM_JSON_FILENAME)
ICM_JSON_FILENAME,
CACHI2_BUILD_DIR)
from atomic_reactor.config import get_cachito_session, get_koji_session
from atomic_reactor.utils import retries
from atomic_reactor.utils.cachito import CachitoAPI
@@ -92,6 +94,8 @@ def __init__(self, workflow):
remote_source_results = wf_data.plugins_results.get(PLUGIN_RESOLVE_REMOTE_SOURCE) or []
self.remote_source_ids = [remote_source['id'] for remote_source in remote_source_results]

self.cachi2_remote_sources = wf_data.plugins_results.get(PLUGIN_CACHI2_POSTPROCESS) or []

self.rpm_components = wf_data.plugins_results.get(PLUGIN_RPMQA) or {}

fetch_maven_results = wf_data.plugins_results.get(PLUGIN_FETCH_MAVEN_KEY) or {}
@@ -131,6 +135,12 @@ def fetch_url_or_koji_check(self) -> None:
if read_fetch_artifacts_url(self.workflow):
self.incompleteness_reasons.add("fetch url is used")

def get_cachi2_sbom(self) -> dict:
"""Get SBOM from cachi2 results"""
global_sbom_path = self.workflow.build_dir.path/CACHI2_BUILD_DIR/"bom.json"
with open(global_sbom_path, "r") as f:
return json.load(f)

def add_parent_missing_sbom_reason(self, nvr: str) -> None:
self.incompleteness_reasons.add(f"parent build '{nvr}' is missing SBOM")

@@ -331,6 +341,8 @@ def run(self) -> Dict[str, Any]:
if self.remote_source_ids:
remote_sources_sbom = self.cachito_session.get_sbom(self.remote_source_ids)
remote_souces_components = remote_sources_sbom['components']
        elif self.cachi2_remote_sources:  # Cachi2 and Cachito cannot be used together
remote_souces_components = self.get_cachi2_sbom()['components']

# add components from cachito, rpms, pnc
for platform in self.all_platforms:
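
In run(), the plugin now has two mutually exclusive sources for remote-source components: the Cachito API (when request IDs are present) or the merged Cachi2 bom.json written into CACHI2_BUILD_DIR. A minimal sketch of that selection, under the assumption that the two results never coexist; CACHI2_BUILD_DIR_NAME and load_components below are placeholders, not atomic-reactor APIs.

    # Sketch of the source-selection logic added above; names are placeholders.
    import json
    from pathlib import Path
    from typing import Any, Dict, List, Optional

    CACHI2_BUILD_DIR_NAME = 'cachi2'  # stands in for constants.CACHI2_BUILD_DIR


    def load_components(build_dir: Path,
                        cachito_sbom: Optional[Dict[str, Any]],
                        cachi2_results: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Return remote-source components from Cachito if used, else from Cachi2."""
        if cachito_sbom:
            return cachito_sbom['components']
        if cachi2_results:  # Cachito and Cachi2 are never used together
            with open(build_dir / CACHI2_BUILD_DIR_NAME / 'bom.json') as f:
                return json.load(f)['components']
        return []
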
118 changes: 82 additions & 36 deletions atomic_reactor/plugins/koji_import.py
@@ -36,6 +36,7 @@
from atomic_reactor.plugins.fetch_sources import PLUGIN_FETCH_SOURCES_KEY

from atomic_reactor.constants import (
PLUGIN_CACHI2_POSTPROCESS,
PLUGIN_EXPORT_OPERATOR_MANIFESTS_KEY,
PLUGIN_KOJI_IMPORT_PLUGIN_KEY,
PLUGIN_KOJI_IMPORT_SOURCE_CONTAINER_PLUGIN_KEY,
@@ -303,41 +304,76 @@ def set_pnc_build_metadata(self, extra):
if pnc_build_metadata:
extra['image']['pnc'] = pnc_build_metadata

def set_remote_sources_metadata(self, extra):
remote_source_result = self.workflow.data.plugins_results.get(
PLUGIN_RESOLVE_REMOTE_SOURCE
)
if remote_source_result:
if self.workflow.conf.allow_multiple_remote_sources:
remote_sources_image_metadata = [
{"name": remote_source["name"], "url": remote_source["url"].rstrip('/download')}
for remote_source in remote_source_result
]
extra["image"]["remote_sources"] = remote_sources_image_metadata

remote_sources_typeinfo_metadata = [
{
"name": remote_source["name"],
"url": remote_source["url"].rstrip('/download'),
"archives": [
remote_source["remote_source_json"]["filename"],
remote_source["remote_source_tarball"]["filename"],
remote_source["remote_source_json_env"]["filename"],
remote_source["remote_source_json_config"]["filename"],
],
}
for remote_source in remote_source_result
]
else:
extra["image"]["remote_source_url"] = remote_source_result[0]["url"]
remote_sources_typeinfo_metadata = {
"remote_source_url": remote_source_result[0]["url"]
}
def set_remote_sources_metadata_cachito(self, remote_source_result, extra):
if self.workflow.conf.allow_multiple_remote_sources:
remote_sources_image_metadata = [
{"name": remote_source["name"], "url": remote_source["url"].rstrip('/download')}
for remote_source in remote_source_result
]
extra["image"]["remote_sources"] = remote_sources_image_metadata

remote_source_typeinfo = {
KOJI_BTYPE_REMOTE_SOURCES: remote_sources_typeinfo_metadata,
remote_sources_typeinfo_metadata = [
{
"name": remote_source["name"],
"url": remote_source["url"].rstrip('/download'),
"archives": [
remote_source["remote_source_json"]["filename"],
remote_source["remote_source_tarball"]["filename"],
remote_source["remote_source_json_env"]["filename"],
remote_source["remote_source_json_config"]["filename"],
],
}
for remote_source in remote_source_result
]
else:
extra["image"]["remote_source_url"] = remote_source_result[0]["url"]
remote_sources_typeinfo_metadata = {
"remote_source_url": remote_source_result[0]["url"]
}
extra.setdefault("typeinfo", {}).update(remote_source_typeinfo)

remote_source_typeinfo = {
KOJI_BTYPE_REMOTE_SOURCES: remote_sources_typeinfo_metadata,
}
extra.setdefault("typeinfo", {}).update(remote_source_typeinfo)

def set_remote_sources_metadata_cachi2(self, remote_source_result, extra):
remote_sources_typeinfo_metadata = []
if self.workflow.conf.allow_multiple_remote_sources:
remote_sources_image_metadata = [
{"name": remote_source["name"]}
for remote_source in remote_source_result
]
extra["image"]["remote_sources"] = remote_sources_image_metadata

remote_sources_typeinfo_metadata = [
{
"name": remote_source["name"],
"archives": [
remote_source["remote_source_json"]["filename"],
remote_source["remote_source_tarball"]["filename"],
remote_source["remote_source_json_env"]["filename"],
],
}
for remote_source in remote_source_result
]

remote_source_typeinfo = {
KOJI_BTYPE_REMOTE_SOURCES: remote_sources_typeinfo_metadata,
}
extra.setdefault("typeinfo", {}).update(remote_source_typeinfo)

def set_remote_sources_metadata(self, extra):
func_map = {
PLUGIN_RESOLVE_REMOTE_SOURCE: self.set_remote_sources_metadata_cachito,
PLUGIN_CACHI2_POSTPROCESS: self.set_remote_sources_metadata_cachi2,
}
for plugin_name, func in func_map.items():
remote_source_result = self.workflow.data.plugins_results.get(
plugin_name
)
if remote_source_result:
func(remote_source_result, extra)
break

def set_remote_source_file_metadata(self, extra):
maven_url_sources_metadata_results = self.workflow.data.plugins_results.get(
@@ -613,9 +649,21 @@ def _filesystem_koji_task_id(self) -> Optional[int]:

def _collect_remote_sources(self) -> Iterable[ArtifactOutputInfo]:
wf_data = self.workflow.data

remote_source_keys = [
"remote_source_json", "remote_source_json_env", "remote_source_json_config",
]

# a list of metadata describing the remote sources.
plugin_results: List[Dict[str, Any]]
plugin_results = wf_data.plugins_results.get(PLUGIN_RESOLVE_REMOTE_SOURCE) or []
if not plugin_results:
# Cachi2
plugin_results = wf_data.plugins_results.get(PLUGIN_CACHI2_POSTPROCESS) or []
remote_source_keys = [
"remote_source_json", "remote_source_json_env",
]

tmpdir = tempfile.mkdtemp()

for remote_source in plugin_results:
@@ -624,9 +672,7 @@ def _collect_remote_sources(self) -> Iterable[ArtifactOutputInfo]:
dest_filename = remote_source_tarball['filename']
yield local_filename, dest_filename, KOJI_BTYPE_REMOTE_SOURCES, None

for source_key in (
"remote_source_json", "remote_source_json_env", "remote_source_json_config",
):
for source_key in remote_source_keys:
data_json = remote_source[source_key]
data_json_filename = data_json['filename']
file_path = os.path.join(tmpdir, data_json_filename)
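
For Cachi2 builds, set_remote_sources_metadata_cachi2 records less than the Cachito variant: there is no request URL, so only the remote-source name goes into extra['image']['remote_sources'], and the typeinfo archives omit remote_source_json_config. An illustrative example of the resulting extra for a single remote source follows; the name and filenames are invented, and 'remote-sources' is assumed to be the value of KOJI_BTYPE_REMOTE_SOURCES.

    # Hypothetical result for one Cachi2 remote source named "gomod-deps" with
    # allow_multiple_remote_sources enabled; filenames are made up for the example.
    extra = {
        'image': {
            'remote_sources': [{'name': 'gomod-deps'}],  # no "url" in the Cachi2 case
        },
        'typeinfo': {
            'remote-sources': [{  # KOJI_BTYPE_REMOTE_SOURCES (assumed value)
                'name': 'gomod-deps',
                'archives': [
                    'remote-source-gomod-deps.json',      # remote_source_json
                    'remote-source-gomod-deps.tar.gz',    # remote_source_tarball
                    'remote-source-gomod-deps.env.json',  # remote_source_json_env
                ],
            }],
        },
    }
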
74 changes: 70 additions & 4 deletions tests/plugins/test_add_image_content_manifest.py
@@ -21,8 +21,10 @@
from tests.utils.test_cachito import CACHITO_URL, CACHITO_REQUEST_ID

from atomic_reactor.constants import (
CACHI2_BUILD_DIR,
INSPECT_ROOTFS,
INSPECT_ROOTFS_LAYERS,
PLUGIN_CACHI2_POSTPROCESS,
PLUGIN_FETCH_MAVEN_KEY,
PLUGIN_RESOLVE_REMOTE_SOURCE,
DOCKERIGNORE,
@@ -39,6 +41,28 @@
}
CACHITO_ICM_URL = '{}/api/v1/content-manifest?requests={}'.format(CACHITO_URL,
CACHITO_REQUEST_ID)

CACHI2_SBOM = {
"bomFormat": "CycloneDX",
"components": [{
"name": "retrodep",
"purl": "pkg:golang/github.com%2Frelease-engineering%2Fretrodep%[email protected]",
"properties": [{
"name": "cachi2:found_by",
"value": "cachi2",
}],
"type": "library",
}],
"metadata": {
"tools": [{
"vendor": "red hat",
"name": "cachi2"
}]
},
"specVersion": "1.4",
"version": 1
}

PNC_ARTIFACT = {
'id': 1234,
'publicUrl': 'http://test.com/artifact.jar',
@@ -88,6 +112,28 @@
}
]
}

CACHI2_ICM_DICT = {
'metadata': {
'icm_version': 1,
'icm_spec': (
'https://raw.githubusercontent.com/containerbuildsystem/atomic-reactor/'
'f4abcfdaf8247a6b074f94fa84f3846f82d781c6/atomic_reactor/schemas/'
'content_manifest.json'),
'image_layer_index': 1,
},
'content_sets': [],
'image_contents': [
{
'purl':
'pkg:golang/github.com%2Frelease-engineering%2Fretrodep%[email protected]',
},
{
'purl': PNC_ARTIFACT['purl'],
}
]
}

ICM_JSON = dedent(
'''\
{
@@ -157,7 +203,8 @@ def mock_get_icm(requests_mock):


def mock_env(workflow, df_content, base_layers=0, remote_sources=None,
r_c_m_override=None, pnc_artifacts=True, dockerignore=False):
r_c_m_override=None, pnc_artifacts=True, dockerignore=False,
cachi2_sbom=None):

if base_layers > 0:
inspection_data = {
@@ -206,6 +253,19 @@ def mock_env(workflow, df_content, base_layers=0, remote_sources=None,
platforms = list(CONTENT_SETS.keys())
workflow.build_dir.init_build_dirs(platforms, workflow.source)

if cachi2_sbom:
env.set_plugin_result(
PLUGIN_CACHI2_POSTPROCESS,
{"plugin": "did run, real value doesn't matter"}
)

# save the cachi2 SBOM, which is the source for the ICM
path = workflow.build_dir.path/CACHI2_BUILD_DIR/"bom.json"
path.parent.mkdir()
with open(path, "w") as f:
json.dump(cachi2_sbom, f)
f.flush()

return env.create_runner()


@@ -239,6 +299,7 @@ def check_in_build_dir(build_dir):

@pytest.mark.parametrize('manifest_file_exists', [True, False])
@pytest.mark.parametrize('content_sets', [True, False])
@pytest.mark.parametrize('cachi2', [True, False])
@pytest.mark.parametrize(
('df_content, expected_df, base_layers, manifest_file'), [
(
@@ -288,13 +349,18 @@ def check_in_build_dir(build_dir):
),
])
def test_add_image_content_manifest(workflow, requests_mock,
manifest_file_exists, content_sets,
manifest_file_exists, content_sets, cachi2,
df_content, expected_df, base_layers, manifest_file,
):
mock_get_icm(requests_mock)
mock_content_sets_config(workflow.source.path, empty=(not content_sets))

runner = mock_env(workflow, df_content, base_layers, remote_sources=REMOTE_SOURCES)
if cachi2:
runner_opts = {"cachi2_sbom": CACHI2_SBOM}
else:
runner_opts = {"remote_sources": REMOTE_SOURCES}

runner = mock_env(workflow, df_content, base_layers, **runner_opts)

if manifest_file_exists:
workflow.build_dir.any_platform.path.joinpath(manifest_file).touch()
@@ -304,7 +370,7 @@ def test_add_image_content_manifest(workflow, requests_mock,
runner.run()
return

expected_output = deepcopy(ICM_DICT)
expected_output = deepcopy(CACHI2_ICM_DICT if cachi2 else ICM_DICT)
expected_output['metadata']['image_layer_index'] = base_layers if base_layers else 0

runner.run()