From 948d8d19b33ee9539ffb043b4c97c8502777dccd Mon Sep 17 00:00:00 2001
From: Dan Luhring
Date: Tue, 28 Mar 2023 14:47:51 -0400
Subject: [PATCH] Implement provider for Chainguard Linux (#132)

* Implement provider for Chainguard Linux

Signed-off-by: Dan Luhring

* Fix tests

Signed-off-by: Dan Luhring

* wip: configs.yaml

Signed-off-by: Dan Luhring

* Fix failing test

Signed-off-by: Dan Luhring

* Use main branch of Grype for quality gate

Signed-off-by: Dan Luhring

* Use latest grype and grype-db

Signed-off-by: Dan Luhring

---------

Signed-off-by: Dan Luhring
---
 Makefile                                      |   4 +-
 src/vunnel/cli/config.py                      |   1 +
 src/vunnel/providers/__init__.py              |   2 +
 src/vunnel/providers/chainguard/__init__.py   |  64 ++
 src/vunnel/providers/wolfi/__init__.py        |  11 +-
 src/vunnel/providers/wolfi/parser.py          | 165 ++--
 tests/quality/config.yaml                     |  14 +-
 tests/quality/vulnerability-match-labels      |   2 +-
 tests/unit/cli/test-fixtures/full.yaml        |   3 +
 tests/unit/cli/test_cli.py                    |  12 +
 tests/unit/cli/test_config.py                 |   4 +
 tests/unit/providers/chainguard/__init__.py   |   0
 .../test-fixtures/input/secdb/security.json   | 783 ++++++++++++++++++
 .../providers/chainguard/test_chainguard.py   |  34 +
 .../input/secdb/{os => }/security.json        |   0
 tests/unit/providers/wolfi/test_wolfi.py      | 115 ++-
 16 files changed, 1060 insertions(+), 154 deletions(-)
 create mode 100644 src/vunnel/providers/chainguard/__init__.py
 create mode 100644 tests/unit/providers/chainguard/__init__.py
 create mode 100644 tests/unit/providers/chainguard/test-fixtures/input/secdb/security.json
 create mode 100644 tests/unit/providers/chainguard/test_chainguard.py
 rename tests/unit/providers/wolfi/test-fixtures/input/secdb/{os => }/security.json (100%)

diff --git a/Makefile b/Makefile
index 15f9e43b..507d1f70 100644
--- a/Makefile
+++ b/Makefile
@@ -79,11 +79,11 @@ dev: ## Get a development shell with locally editable grype, grype-db, and vunn
 	@DEV_VUNNEL_BIN_DIR=$(ABS_BIN_DIR) .github/scripts/dev-shell.sh $(provider) $(providers)
 
 .PHONY: build-grype
-build-grype: $(TEMP_DIR) ## Build grype for local development
+build-grype: $(BIN_DIR) ## Build grype for local development
 	@cd $(GRYPE_PATH) && go build -o $(ABS_BIN_DIR)/grype .
.PHONY: build-grype-db -build-grype-db: $(TEMP_DIR) ## Build grype-db for local development +build-grype-db: $(BIN_DIR) ## Build grype-db for local development @cd $(GRYPE_DB_PATH) && go build -o $(ABS_BIN_DIR)/grype-db ./cmd/grype-db .PHONY: update-db diff --git a/src/vunnel/cli/config.py b/src/vunnel/cli/config.py index 4d23dec7..0ef8cb6c 100644 --- a/src/vunnel/cli/config.py +++ b/src/vunnel/cli/config.py @@ -16,6 +16,7 @@ class Providers: alpine: providers.alpine.Config = field(default_factory=providers.alpine.Config) amazon: providers.amazon.Config = field(default_factory=providers.amazon.Config) centos: providers.centos.Config = field(default_factory=providers.centos.Config) + chainguard: providers.chainguard.Config = field(default_factory=providers.chainguard.Config) debian: providers.debian.Config = field(default_factory=providers.debian.Config) github: providers.github.Config = field(default_factory=providers.github.Config) nvd: providers.nvd.Config = field(default_factory=providers.nvd.Config) diff --git a/src/vunnel/providers/__init__.py b/src/vunnel/providers/__init__.py index d62fd4e5..d0f66d3a 100644 --- a/src/vunnel/providers/__init__.py +++ b/src/vunnel/providers/__init__.py @@ -8,6 +8,7 @@ alpine, amazon, centos, + chainguard, debian, github, nvd, @@ -38,6 +39,7 @@ sles.Provider.name(): sles.Provider, ubuntu.Provider.name(): ubuntu.Provider, wolfi.Provider.name(): wolfi.Provider, + chainguard.Provider.name(): chainguard.Provider, } diff --git a/src/vunnel/providers/chainguard/__init__.py b/src/vunnel/providers/chainguard/__init__.py new file mode 100644 index 00000000..8c3affb7 --- /dev/null +++ b/src/vunnel/providers/chainguard/__init__.py @@ -0,0 +1,64 @@ +from __future__ import annotations + +import os +from dataclasses import dataclass, field +from typing import TYPE_CHECKING + +from vunnel import provider, result, schema +from vunnel.providers.wolfi.parser import Parser + +if TYPE_CHECKING: + import datetime + + +@dataclass +class Config: + runtime: provider.RuntimeConfig = field( + default_factory=lambda: provider.RuntimeConfig( + result_store=result.StoreStrategy.SQLITE, + existing_results=provider.ResultStatePolicy.DELETE_BEFORE_WRITE, + ), + ) + request_timeout: int = 125 + + +class Provider(provider.Provider): + _url = "https://packages.cgr.dev/chainguard/security.json" + _namespace = "chainguard" + + def __init__(self, root: str, config: Config | None = None): + if not config: + config = Config() + super().__init__(root, runtime_cfg=config.runtime) + self.config = config + + self.logger.debug(f"config: {config}") + + self.schema = schema.OSSchema() + self.parser = Parser( + workspace=self.workspace, + url=self._url, + namespace=self._namespace, + download_timeout=self.config.request_timeout, + logger=self.logger, + ) + + # this provider requires the previous state from former runs + provider.disallow_existing_input_policy(config.runtime) + + @classmethod + def name(cls) -> str: + return "chainguard" + + def update(self, last_updated: datetime.datetime | None) -> tuple[list[str], int]: + with self.results_writer() as writer: + # TODO: tech debt: on subsequent runs, we should only write new vulns (this currently re-writes all) + for release, vuln_dict in self.parser.get(): + for vuln_id, record in vuln_dict.items(): + writer.write( + identifier=os.path.join(f"{self._namespace.lower()}:{release.lower()}", vuln_id), + schema=self.schema, + payload=record, + ) + + return [self._url], len(writer) diff --git a/src/vunnel/providers/wolfi/__init__.py 
b/src/vunnel/providers/wolfi/__init__.py index 68f03481..dae39709 100644 --- a/src/vunnel/providers/wolfi/__init__.py +++ b/src/vunnel/providers/wolfi/__init__.py @@ -6,7 +6,7 @@ from vunnel import provider, result, schema -from .parser import Parser, namespace +from .parser import Parser if TYPE_CHECKING: import datetime @@ -24,6 +24,9 @@ class Config: class Provider(provider.Provider): + _url = "https://packages.wolfi.dev/os/security.json" + _namespace = "wolfi" + def __init__(self, root: str, config: Config | None = None): if not config: config = Config() @@ -35,6 +38,8 @@ def __init__(self, root: str, config: Config | None = None): self.schema = schema.OSSchema() self.parser = Parser( workspace=self.workspace, + url=self._url, + namespace=self._namespace, download_timeout=self.config.request_timeout, logger=self.logger, ) @@ -52,9 +57,9 @@ def update(self, last_updated: datetime.datetime | None) -> tuple[list[str], int for release, vuln_dict in self.parser.get(): for vuln_id, record in vuln_dict.items(): writer.write( - identifier=os.path.join(f"{namespace.lower()}:{release.lower()}", vuln_id), + identifier=os.path.join(f"{self._namespace.lower()}:{release.lower()}", vuln_id), schema=self.schema, payload=record, ) - return self.parser.urls, len(writer) + return [self._url], len(writer) diff --git a/src/vunnel/providers/wolfi/parser.py b/src/vunnel/providers/wolfi/parser.py index 474d712c..5edc294a 100644 --- a/src/vunnel/providers/wolfi/parser.py +++ b/src/vunnel/providers/wolfi/parser.py @@ -1,35 +1,45 @@ from __future__ import annotations import copy -import glob import json import logging import os import re +from urllib.parse import urlparse import requests from vunnel import utils from vunnel.utils import vulnerability -namespace = "wolfi" - class Parser: - _url_ = "https://packages.wolfi.dev" + _release_ = "rolling" _secdb_dir_ = "secdb" - _db_types = ["os"] - def __init__(self, workspace, download_timeout=125, url=None, logger=None): + def __init__( # noqa: PLR0913 + self, + workspace, + url: str, + namespace: str, + download_timeout: int = 125, + logger: logging.Logger = None, # noqa: PLR0913 + ): self.download_timeout = download_timeout self.secdb_dir_path = os.path.join(workspace.input_path, self._secdb_dir_) self.metadata_url = url.strip("/") if url else Parser._url_ - self.urls = [] + self.url = url + self.namespace = namespace + self._db_filename = self._extract_filename_from_url(url) if not logger: logger = logging.getLogger(self.__class__.__name__) self.logger = logger + @staticmethod + def _extract_filename_from_url(url): + return os.path.basename(urlparse(url).path) + @utils.retry_with_backoff() def _download(self): """ @@ -39,55 +49,38 @@ def _download(self): if not os.path.exists(self.secdb_dir_path): os.makedirs(self.secdb_dir_path, exist_ok=True) - for t in self._db_types: - try: - rel_dir = os.path.join(self.secdb_dir_path, t) - os.makedirs(rel_dir, exist_ok=True) - - filename = "security.json" - download_url = f"{self.metadata_url}/{t}/{filename}" - - self.urls.append(download_url) - - self.logger.info(f"downloading Wolfi secdb {download_url}") - r = requests.get(download_url, stream=True, timeout=self.download_timeout) - if r.status_code == 200: - file_path = os.path.join(rel_dir, filename) - with open(file_path, "wb") as fp: - for chunk in r.iter_content(): - fp.write(chunk) - else: - r.raise_for_status() - except: # noqa - self.logger.exception(f"ignoring error processing secdb for {t}") + try: + self.logger.info(f"downloading {self.namespace} secdb 
{self.url}") + r = requests.get(self.url, stream=True, timeout=self.download_timeout) + if r.status_code == 200: + file_path = os.path.join(self.secdb_dir_path, self._db_filename) + with open(file_path, "wb") as fp: + for chunk in r.iter_content(): + fp.write(chunk) + else: + r.raise_for_status() + except: # noqa + self.logger.exception(f"ignoring error processing secdb for {self.url}") def _load(self): """ - Loads all db json an yield it + Loads all db json and yields it :return: """ dbtype_data_dict = {} # parse and transform the json try: - if os.path.exists(self.secdb_dir_path): - for s in glob.glob(f"{self.secdb_dir_path}/**/security.json", recursive=True): - dbtype = s.split("/")[-2] + with open(f"{self.secdb_dir_path}/{self._db_filename}") as fh: + dbtype_data_dict = json.load(fh) - if os.path.exists(s): - self.logger.debug(f"loading secdb data from: {s}") - with open(s, encoding="utf-8") as fh: - dbtype_data_dict[dbtype] = json.load(fh) - - yield "rolling", dbtype_data_dict - else: - raise Exception("Cannot find Wolfi sec db source ") + yield self._release_, dbtype_data_dict except Exception: - self.logger.exception("failed to load Wolfi sec db data") + self.logger.exception(f"failed to load {self.namespace} sec db data") raise # noqa - def _normalize(self, release, dbtype_data_dict): + def _normalize(self, release, data): """ Normalize all the sec db entries into vulnerability payload records :param release: @@ -97,52 +90,48 @@ def _normalize(self, release, dbtype_data_dict): vuln_dict = {} - for dbtype, data in dbtype_data_dict.items(): - self.logger.debug(f"normalizing {release}:{dbtype}") - - if not data["packages"]: - continue - - for el in data["packages"]: - pkg_el = el["pkg"] - - pkg = pkg_el["name"] - for pkg_version in pkg_el["secfixes"]: - vids = [] - if pkg_el["secfixes"][pkg_version]: - for rawvid in pkg_el["secfixes"][pkg_version]: - tmp = rawvid.split() - for newvid in tmp: - if newvid not in vids: - vids.append(newvid) - - for vid in vids: - if not re.match("^CVE-.*", vid): - # skip non-CVE records - continue - - if vid not in vuln_dict: - # create a new record - vuln_dict[vid] = copy.deepcopy(vulnerability.vulnerability_element) - vuln_record = vuln_dict[vid] - - # populate the static information about the new vuln record - vuln_record["Vulnerability"]["Name"] = str(vid) - vuln_record["Vulnerability"]["NamespaceName"] = namespace + ":" + str(release) - vuln_record["Vulnerability"]["Link"] = "http://cve.mitre.org/cgi-bin/cvename.cgi?name=" + str(vid) - vuln_record["Vulnerability"]["Severity"] = "Unknown" - else: - vuln_record = vuln_dict[vid] - - # SET UP fixedins - fixed_el = { - "Name": pkg, - "Version": pkg_version, - "VersionFormat": "apk", - "NamespaceName": namespace + ":" + str(release), - } - - vuln_record["Vulnerability"]["FixedIn"].append(fixed_el) + self.logger.debug("normalizing vulnerability data") + + for el in data["packages"]: + pkg_el = el["pkg"] + + pkg = pkg_el["name"] + for pkg_version in pkg_el["secfixes"]: + vids = [] + if pkg_el["secfixes"][pkg_version]: + for rawvid in pkg_el["secfixes"][pkg_version]: + tmp = rawvid.split() + for newvid in tmp: + if newvid not in vids: + vids.append(newvid) + + for vid in vids: + if not re.match("^CVE-.*", vid): + # skip non-CVE records + continue + + if vid not in vuln_dict: + # create a new record + vuln_dict[vid] = copy.deepcopy(vulnerability.vulnerability_element) + vuln_record = vuln_dict[vid] + + # populate the static information about the new vuln record + vuln_record["Vulnerability"]["Name"] = 
str(vid) + vuln_record["Vulnerability"]["NamespaceName"] = self.namespace + ":" + str(release) + vuln_record["Vulnerability"]["Link"] = "http://cve.mitre.org/cgi-bin/cvename.cgi?name=" + str(vid) + vuln_record["Vulnerability"]["Severity"] = "Unknown" + else: + vuln_record = vuln_dict[vid] + + # SET UP fixedins + fixed_el = { + "Name": pkg, + "Version": pkg_version, + "VersionFormat": "apk", + "NamespaceName": self.namespace + ":" + str(release), + } + + vuln_record["Vulnerability"]["FixedIn"].append(fixed_el) return vuln_dict diff --git a/tests/quality/config.yaml b/tests/quality/config.yaml index 5e0d4a27..752507b6 100644 --- a/tests/quality/config.yaml +++ b/tests/quality/config.yaml @@ -14,7 +14,7 @@ yardstick: # - "latest" to use the latest released grype # - a released version name (e.g. "v0.52.1") # - a branch name (e.g. "dev-fix-foo") - # - a repo reference and optional "@branch" (e.g. "my-user-fork/grype@dev-fix-foo") + # - a repo reference and optional "@branch" (e.g. "github.com/my-user-fork/grype@dev-fix-foo") # Note: # - ALWAYS leave the "import-db" annotation as-is # - this version should ALWAYS match that of the other "grype" tool below @@ -26,7 +26,7 @@ yardstick: # - "latest" to use the latest released grype # - a released version name (e.g. "v0.52.1") # - a branch name (e.g. "dev-fix-foo") - # - a repo reference and optional "@branch" (e.g. "my-user-fork/grype@dev-fix-foo") + # - a repo reference and optional "@branch" (e.g. "github.com/my-user-fork/grype@dev-fix-foo") # Note: # - this version should ALWAYS match that of the other "grype" tool above version: latest @@ -64,6 +64,16 @@ tests: # images: # - docker.io/centos:6@sha256:3688aa867eb84332460e172b9250c9c198fdfd8d987605fd53f246f498c60bcf + - provider: chainguard + additional_providers: + - name: nvd + use_cache: true + additional-trigger-globs: + # this provider imports and uses the wolfi provider code + - src/vunnel/providers/wolfi/** + images: + - ghcr.io/chainguard-images/scanner-test:latest@sha256:59bddc101fba0c45d5c093575c6bc5bfee7f0e46ff127e6bb4e5acaaafb525f9 + - provider: debian # ideally we would not use cache, however, the in order to test if we are properly keeping the processing # of legacy information that is in the debian data cache (for debian 7, 8, and 9) we must test with diff --git a/tests/quality/vulnerability-match-labels b/tests/quality/vulnerability-match-labels index 7ec8bd3a..8eccd319 160000 --- a/tests/quality/vulnerability-match-labels +++ b/tests/quality/vulnerability-match-labels @@ -1 +1 @@ -Subproject commit 7ec8bd3a64d0a6cc79f33ef01797abd66c9cf43e +Subproject commit 8eccd319cf21fa0f467e8bce728de5d9b4a790a8 diff --git a/tests/unit/cli/test-fixtures/full.yaml b/tests/unit/cli/test-fixtures/full.yaml index e5177eb6..bd9f44dd 100644 --- a/tests/unit/cli/test-fixtures/full.yaml +++ b/tests/unit/cli/test-fixtures/full.yaml @@ -26,6 +26,9 @@ providers: request_timeout: 20 skip_namespaces: - centos:8 + chainguard: + runtime: *runtime + request_timeout: 20 debian: runtime: *runtime request_timeout: 20 diff --git a/tests/unit/cli/test_cli.py b/tests/unit/cli/test_cli.py index 596ea17e..14302e1d 100644 --- a/tests/unit/cli/test_cli.py +++ b/tests/unit/cli/test_cli.py @@ -163,6 +163,18 @@ def test_config(monkeypatch) -> None: skip_namespaces: - centos:3 - centos:4 + chainguard: + request_timeout: 125 + runtime: + existing_input: keep + existing_results: delete-before-write + on_error: + action: fail + input: keep + results: keep + retry_count: 3 + retry_delay: 5 + result_store: sqlite debian: 
releases: bookworm: '12' diff --git a/tests/unit/cli/test_config.py b/tests/unit/cli/test_config.py index 86ea64cb..de1b8e12 100644 --- a/tests/unit/cli/test_config.py +++ b/tests/unit/cli/test_config.py @@ -50,6 +50,10 @@ def test_full_config(helpers): skip_namespaces=["centos:8"], request_timeout=20, ), + chainguard=providers.chainguard.Config( + runtime=runtime_cfg, + request_timeout=20, + ), debian=providers.debian.Config( releases={ "trixie": "13", diff --git a/tests/unit/providers/chainguard/__init__.py b/tests/unit/providers/chainguard/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/providers/chainguard/test-fixtures/input/secdb/security.json b/tests/unit/providers/chainguard/test-fixtures/input/secdb/security.json new file mode 100644 index 00000000..a720b5f3 --- /dev/null +++ b/tests/unit/providers/chainguard/test-fixtures/input/secdb/security.json @@ -0,0 +1,783 @@ +{ + "apkurl": "{{urlprefix}}/{{reponame}}/{{arch}}/{{pkg.name}}-{{pkg.ver}}.apk", + "archs": [ + "aarch64", + "x86_64" + ], + "reponame": "chainguard", + "urlprefix": "https://packages.cgr.dev", + "packages": [ + { + "pkg": { + "name": "grafana", + "secfixes": { + "7.5.19-r0": [ + "CVE-2021-36156", + "CVE-2022-31130", + "CVE-2022-31107", + "CVE-2022-39201", + "CVE-2022-31123" + ] + } + } + }, + { + "pkg": { + "name": "bind", + "secfixes": { + "9.18.10-r0": [ + "CVE-2022-2795", + "CVE-2022-2881", + "CVE-2022-2906", + "CVE-2022-3080", + "CVE-2022-38177", + "CVE-2022-38178", + "CVE-2022-0396", + "CVE-2021-25220", + "CVE-2021-25219", + "CVE-2021-25218", + "CVE-2021-25214", + "CVE-2021-25215", + "CVE-2021-25216", + "CVE-2020-8625", + "CVE-2020-8620", + "CVE-2020-8621", + "CVE-2020-8622", + "CVE-2020-8623", + "CVE-2020-8624", + "CVE-2020-8618", + "CVE-2020-8619", + "CVE-2020-8616", + "CVE-2020-8617", + "CVE-2019-6477", + "CVE-2019-6475", + "CVE-2019-6476", + "CVE-2019-6471", + "CVE-2019-6467", + "CVE-2018-5743", + "CVE-2019-6465", + "CVE-2018-5745", + "CVE-2018-5744", + "CVE-2018-5740", + "CVE-2018-5738", + "CVE-2018-5737", + "CVE-2018-5736", + "CVE-2017-3145", + "CVE-2017-3136", + "CVE-2017-3137", + "CVE-2017-3138", + "CVE-2016-9131", + "CVE-2016-9147", + "CVE-2016-9444", + "CVE-2019-6470" + ], + "9.18.11-r0": [ + "CVE-2022-3094", + "CVE-2022-3736", + "CVE-2022-3924" + ] + } + } + }, + { + "pkg": { + "name": "binutils", + "secfixes": { + "2.39-r1": [ + "CVE-2022-38126" + ], + "2.39-r2": [ + "CVE-2022-38533" + ], + "2.39-r3": [ + "CVE-2022-38128" + ] + } + } + }, + { + "pkg": { + "name": "brotli", + "secfixes": { + "1.0.9-r0": [ + "CVE-2020-8927" + ] + } + } + }, + { + "pkg": { + "name": "busybox", + "secfixes": { + "1.35.0-r3": [ + "CVE-2022-28391", + "CVE-2022-30065" + ] + } + } + }, + { + "pkg": { + "name": "coreutils", + "secfixes": { + "0": [ + "CVE-2016-2781" + ] + } + } + }, + { + "pkg": { + "name": "cups", + "secfixes": { + "2.4.2-r0": [ + "CVE-2022-26691" + ] + } + } + }, + { + "pkg": { + "name": "curl", + "secfixes": { + "7.86.0-r0": [ + "CVE-2022-42916", + "CVE-2022-32221" + ], + "7.87.0-r0": [ + "CVE-2022-43551", + "CVE-2022-43552" + ] + } + } + }, + { + "pkg": { + "name": "dbus", + "secfixes": { + "1.14.4-r0": [ + "CVE-2022-42010", + "CVE-2022-42011", + "CVE-2022-42012" + ] + } + } + }, + { + "pkg": { + "name": "deno", + "secfixes": { + "1.30.0-r0": [ + "CVE-2023-22499" + ] + } + } + }, + { + "pkg": { + "name": "expat", + "secfixes": { + "2.4.9-r0": [ + "CVE-2022-40674" + ], + "2.5.0-r0": [ + "CVE-2022-43680" + ] + } + } + }, + { + "pkg": { + "name": "flex", + "secfixes": { + 
"0": [ + "CVE-2019-6293" + ] + } + } + }, + { + "pkg": { + "name": "freetype", + "secfixes": { + "2.12.1-r0": [ + "CVE-2022-27404", + "CVE-2022-27405", + "CVE-2022-27406" + ] + } + } + }, + { + "pkg": { + "name": "giflib", + "secfixes": { + "5.2.1-r0": [ + "CVE-2022-28506" + ] + } + } + }, + { + "pkg": { + "name": "git", + "secfixes": { + "0": [ + "CVE-2023-22743" + ], + "2.38.1-r0": [ + "CVE-2022-39253", + "CVE-2022-39260" + ], + "2.39.1-r0": [ + "CVE-2022-23521", + "CVE-2022-41903" + ], + "2.39.2-r0": [ + "CVE-2023-22490", + "CVE-2023-23946" + ] + } + } + }, + { + "pkg": { + "name": "glibc", + "secfixes": { + "0": [ + "CVE-2019-1010022", + "CVE-2019-1010023", + "CVE-2019-1010024", + "CVE-2019-1010025", + "CVE-2010-4756" + ], + "2.36-r1": [ + "CVE-2022-39046" + ], + "2.37-r1": [ + "CVE-2023-25139" + ] + } + } + }, + { + "pkg": { + "name": "gmp", + "secfixes": { + "6.2.1-r4": [ + "CVE-2021-43618" + ] + } + } + }, + { + "pkg": { + "name": "gnupg", + "secfixes": { + "2.2.41-r0": [ + "CVE-2022-34903", + "CVE-2020-25125", + "CVE-2019-14855", + "CVE-2018-12020" + ] + } + } + }, + { + "pkg": { + "name": "gnutls", + "secfixes": { + "3.7.8-r0": [ + "CVE-2022-2509", + "CVE-2021-20231", + "CVE-2021-20232", + "CVE-2020-24659", + "CVE-2020-13777", + "CVE-2020-11501", + "CVE-2019-3836", + "CVE-2019-3829", + "CVE-2017-7507" + ] + } + } + }, + { + "pkg": { + "name": "go-1.19", + "secfixes": { + "0": [ + "CVE-2020-29509" + ], + "1.19.3-r0": [ + "CVE-2022-41716" + ], + "1.19.4-r0": [ + "CVE-2022-41717", + "CVE-2022-41720" + ], + "1.19.6-r1": [ + "CVE-2022-41723" + ], + "1.19.7-r0": [ + "CVE-2023-24532" + ] + } + } + }, + { + "pkg": { + "name": "go-1.20", + "secfixes": { + "0": [ + "CVE-2020-29509", + "CVE-2020-29511" + ], + "1.20.1-r1": [ + "CVE-2022-41723" + ], + "1.20.2-r0": [ + "CVE-2023-24532" + ] + } + } + }, + { + "pkg": { + "name": "haproxy", + "secfixes": { + "0": [ + "CVE-2016-2102" + ], + "2.6.9-r0": [ + "CVE-2023-25725" + ] + } + } + }, + { + "pkg": { + "name": "heimdal", + "secfixes": { + "7.8.0-r1": [ + "CVE-2022-45142" + ] + } + } + }, + { + "pkg": { + "name": "helm", + "secfixes": { + "3.11.1-r0": [ + "CVE-2023-25165" + ] + } + } + }, + { + "pkg": { + "name": "jenkins", + "secfixes": { + "2.394-r0": [ + "CVE-2023-27898" + ] + } + } + }, + { + "pkg": { + "name": "libarchive", + "secfixes": { + "3.6.1-r2": [ + "CVE-2022-36227" + ] + } + } + }, + { + "pkg": { + "name": "libidn2", + "secfixes": { + "2.3.4-r0": [ + "CVE-2019-12290", + "CVE-2019-18224" + ] + } + } + }, + { + "pkg": { + "name": "libksba", + "secfixes": { + "1.6.3-r0": [ + "CVE-2022-47629", + "CVE-2022-3515" + ] + } + } + }, + { + "pkg": { + "name": "libtasn1", + "secfixes": { + "4.19.0-r0": [ + "CVE-2021-46848" + ] + } + } + }, + { + "pkg": { + "name": "libxml2", + "secfixes": { + "2.10.3-r0": [ + "CVE-2022-40303", + "CVE-2022-40304" + ] + } + } + }, + { + "pkg": { + "name": "libxpm", + "secfixes": { + "3.5.15-r0": [ + "CVE-2022-44617" + ] + } + } + }, + { + "pkg": { + "name": "lua5.4", + "secfixes": { + "5.4.4-r0": [ + "CVE-2022-28805", + "CVE-2019-6706" + ] + } + } + }, + { + "pkg": { + "name": "mariadb-10.11", + "secfixes": { + "10.6.12-r1": [ + "CVE-2022-47015" + ] + } + } + }, + { + "pkg": { + "name": "mariadb-10.6", + "secfixes": { + "10.6.12-r1": [ + "CVE-2022-47015" + ] + } + } + }, + { + "pkg": { + "name": "ncurses", + "secfixes": { + "6.3-r0": [ + "CVE-2022-29458" + ] + } + } + }, + { + "pkg": { + "name": "nettle", + "secfixes": { + "3.8.1-r0": [ + "CVE-2021-3580", + "CVE-2021-20305" + ] + } + } + }, + { + "pkg": { + 
"name": "openssh", + "secfixes": { + "9.2_p1-r0": [ + "CVE-2023-25136" + ] + } + } + }, + { + "pkg": { + "name": "openssl", + "secfixes": { + "3.0.7-r0": [ + "CVE-2022-3358", + "CVE-2022-3602", + "CVE-2022-3786" + ], + "3.0.7-r1": [ + "CVE-2022-3996" + ], + "3.0.8-r0": [ + "CVE-2023-0286", + "CVE-2022-4304", + "CVE-2022-4203", + "CVE-2023-0215", + "CVE-2022-4450", + "CVE-2023-0216", + "CVE-2023-0217", + "CVE-2023-0401" + ], + "3.1.0-r1": [ + "CVE-2023-0464" + ] + } + } + }, + { + "pkg": { + "name": "patch", + "secfixes": { + "2.7.6-r3": [ + "CVE-2019-20633", + "CVE-2018-1000156", + "CVE-2019-13638", + "CVE-2018-20969", + "CVE-2019-13636", + "CVE-2018-6951", + "CVE-2018-6952" + ] + } + } + }, + { + "pkg": { + "name": "pcre2", + "secfixes": { + "10.40-r0": [ + "CVE-2022-1586", + "CVE-2022-1587" + ] + } + } + }, + { + "pkg": { + "name": "php", + "secfixes": { + "0": [ + "CVE-2007-2728", + "CVE-2007-3205", + "CVE-2007-4596" + ], + "8.1.13-r0": [ + "CVE-2022-31630" + ] + } + } + }, + { + "pkg": { + "name": "pkgconf", + "secfixes": { + "1.9.4-r0": [ + "CVE-2023-24056" + ] + } + } + }, + { + "pkg": { + "name": "postgresql-15", + "secfixes": { + "0": [ + "CVE-2017-8806" + ], + "15.2-r0": [ + "CVE-2022-41862" + ] + } + } + }, + { + "pkg": { + "name": "protobuf-c", + "secfixes": { + "1.4.1-r0": [ + "CVE-2022-33070", + "CVE-2021-3121" + ] + } + } + }, + { + "pkg": { + "name": "python-3.10", + "secfixes": { + "0": [ + "CVE-2007-4559" + ], + "3.10.9-r0": [ + "CVE-2020-10735" + ] + } + } + }, + { + "pkg": { + "name": "python-3.11", + "secfixes": { + "0": [ + "CVE-2007-4559" + ], + "3.0.7-r0": [ + "CVE-2020-10735" + ] + } + } + }, + { + "pkg": { + "name": "python-3.12", + "secfixes": { + "0": [ + "CVE-2007-4559" + ], + "3.0.7-r0": [ + "CVE-2020-10735" + ] + } + } + }, + { + "pkg": { + "name": "redis", + "secfixes": { + "7.0.7-r0": [ + "CVE-2022-0543", + "CVE-2022-3734", + "CVE-2022-3647" + ], + "7.0.8-r0": [ + "CVE-2022-35977", + "CVE-2023-22458" + ], + "7.0.9-r0": [ + "CVE-2022-36021", + "CVE-2023-25155" + ] + } + } + }, + { + "pkg": { + "name": "ruby-3.0", + "secfixes": { + "3.0.5-r0": [ + "CVE-2021-33621" + ] + } + } + }, + { + "pkg": { + "name": "ruby3.2-fluentd14", + "secfixes": { + "1.14.6-r3": [ + "CVE-2022-39379" + ] + } + } + }, + { + "pkg": { + "name": "samurai", + "secfixes": { + "1.2-r0": [ + "CVE-2021-30218", + "CVE-2021-30219" + ] + } + } + }, + { + "pkg": { + "name": "sqlite", + "secfixes": { + "3.40.0-r1": [ + "CVE-2022-46908" + ] + } + } + }, + { + "pkg": { + "name": "traefik", + "secfixes": { + "2.9.6-r0": [ + "CVE-2022-23469", + "CVE-2022-46153" + ], + "2.9.8-r1": [ + "CVE-2021-41803", + "CVE-2022-40716" + ] + } + } + }, + { + "pkg": { + "name": "vault", + "secfixes": { + "1.12.4-r0": [ + "CVE-2023-24999" + ] + } + } + }, + { + "pkg": { + "name": "vim", + "secfixes": { + "9.0.1378-r0": [ + "CVE-2023-1127", + "CVE-2023-1175" + ], + "9.0.1392-r0": [ + "CVE-2023-1264" + ], + "9.0.1402-r0": [ + "CVE-2023-1355" + ] + } + } + }, + { + "pkg": { + "name": "wasmtime", + "secfixes": { + "6.0.1-r0": [ + "CVE-2023-26489", + "CVE-2023-27477" + ] + } + } + }, + { + "pkg": { + "name": "zlib", + "secfixes": { + "1.2.12-r0": [ + "CVE-2018-25032" + ], + "1.2.12-r3": [ + "CVE-2022-37434" + ] + } + } + } + ] +} diff --git a/tests/unit/providers/chainguard/test_chainguard.py b/tests/unit/providers/chainguard/test_chainguard.py new file mode 100644 index 00000000..04d43d00 --- /dev/null +++ b/tests/unit/providers/chainguard/test_chainguard.py @@ -0,0 +1,34 @@ +from __future__ import annotations + +import 
json +import os +import shutil + +import pytest +from vunnel import result, workspace +from vunnel.providers.chainguard import Config, Provider +from vunnel.providers.wolfi import parser + + +@pytest.fixture() +def disable_get_requests(monkeypatch): + def disabled(*args, **kwargs): + raise RuntimeError("requests disabled but HTTP GET attempted") + + monkeypatch.setattr(parser.requests, "get", disabled) + + +def test_provider_schema(helpers, disable_get_requests): + workspace = helpers.provider_workspace_helper(name=Provider.name()) + + c = Config() + c.runtime.result_store = result.StoreStrategy.FLAT_FILE + p = Provider(root=workspace.root, config=c) + + mock_data_path = helpers.local_dir("test-fixtures/input") + shutil.copytree(mock_data_path, workspace.input_dir, dirs_exist_ok=True) + + p.update(None) + + assert workspace.num_result_entries() == 189 + assert workspace.result_schemas_valid(require_entries=True) diff --git a/tests/unit/providers/wolfi/test-fixtures/input/secdb/os/security.json b/tests/unit/providers/wolfi/test-fixtures/input/secdb/security.json similarity index 100% rename from tests/unit/providers/wolfi/test-fixtures/input/secdb/os/security.json rename to tests/unit/providers/wolfi/test-fixtures/input/secdb/security.json diff --git a/tests/unit/providers/wolfi/test_wolfi.py b/tests/unit/providers/wolfi/test_wolfi.py index b3364a32..00b028fe 100644 --- a/tests/unit/providers/wolfi/test_wolfi.py +++ b/tests/unit/providers/wolfi/test_wolfi.py @@ -74,89 +74,88 @@ def mock_parsed_data(self): """ release = "rolling" dbtype_data_dict = { - "os": { - "apkurl": "{{urlprefix}}/{{reponame}}/{{arch}}/{{pkg.name}}-{{pkg.ver}}.apk", - "archs": ["x86_64"], - "reponame": "os", - "urlprefix": "https://packages.wolfi.dev", - "packages": [ - { - "pkg": { - "name": "binutils", - "secfixes": { - "2.39-r1": ["CVE-2022-38126"], - "2.39-r2": ["CVE-2022-38533"], - "2.39-r3": ["CVE-2022-38128"], - }, + "apkurl": "{{urlprefix}}/{{reponame}}/{{arch}}/{{pkg.name}}-{{pkg.ver}}.apk", + "archs": ["x86_64"], + "reponame": "os", + "urlprefix": "https://packages.wolfi.dev", + "packages": [ + { + "pkg": { + "name": "binutils", + "secfixes": { + "2.39-r1": ["CVE-2022-38126"], + "2.39-r2": ["CVE-2022-38533"], + "2.39-r3": ["CVE-2022-38128"], }, }, - { - "pkg": { - "name": "brotli", - "secfixes": {"1.0.9-r0": ["CVE-2020-8927"]}, - }, + }, + { + "pkg": { + "name": "brotli", + "secfixes": {"1.0.9-r0": ["CVE-2020-8927"]}, }, - { - "pkg": { - "name": "busybox", - "secfixes": {"1.35.0-r3": ["CVE-2022-28391", "CVE-2022-30065"]}, - }, + }, + { + "pkg": { + "name": "busybox", + "secfixes": {"1.35.0-r3": ["CVE-2022-28391", "CVE-2022-30065"]}, }, - { - "pkg": { - "name": "coreutils", - "secfixes": {"0": ["CVE-2016-2781"]}, - }, + }, + { + "pkg": { + "name": "coreutils", + "secfixes": {"0": ["CVE-2016-2781"]}, }, - { - "pkg": { - "name": "cups", - "secfixes": {"2.4.2-r0": ["CVE-2022-26691"]}, - }, + }, + { + "pkg": { + "name": "cups", + "secfixes": {"2.4.2-r0": ["CVE-2022-26691"]}, }, - { - "pkg": { - "name": "dbus", - "secfixes": { - "1.14.4-r0": [ - "CVE-2022-42010", - "CVE-2022-42011", - "CVE-2022-42012", - ], - }, + }, + { + "pkg": { + "name": "dbus", + "secfixes": { + "1.14.4-r0": [ + "CVE-2022-42010", + "CVE-2022-42011", + "CVE-2022-42012", + ], }, }, - ], - }, + }, + ], } return release, dbtype_data_dict def test_load(self, mock_raw_data, tmpdir): - p = Parser(workspace=workspace.Workspace(tmpdir, "test", create=True)) + p = Parser( + workspace=workspace.Workspace(tmpdir, "test", create=True), + 
url="https://packages.wolfi.dev/os/security.json", + namespace="wolfi", + ) - a = os.path.join(p.secdb_dir_path, "rolling/os") - os.makedirs(a, exist_ok=True) - b = os.path.join(a, "security.json") + os.makedirs(p.secdb_dir_path, exist_ok=True) + b = os.path.join(p.secdb_dir_path, "security.json") with open(b, "w") as fp: fp.write(mock_raw_data) counter = 0 for release, dbtype_data_dict in p._load(): counter += 1 - # print( - # "got secdb data for release {}, db types: {}".format( - # release, list(dbtype_data_dict.keys()) - # ) - # ) assert release == "rolling" assert isinstance(dbtype_data_dict, dict) - assert list(dbtype_data_dict.keys()) == ["os"] - assert all("packages" in x for x in dbtype_data_dict.values()) + assert "packages" in dbtype_data_dict assert counter == 1 def test_normalize(self, mock_parsed_data, tmpdir): - p = Parser(workspace=workspace.Workspace(tmpdir, "test", create=True)) + p = Parser( + workspace=workspace.Workspace(tmpdir, "test", create=True), + url="https://packages.wolfi.dev/os/security.json", + namespace="wolfi", + ) release = mock_parsed_data[0] dbtype_data_dict = mock_parsed_data[1]