From 7cbb194b83cc2f4fcddc446ebce9d8012ecb6199 Mon Sep 17 00:00:00 2001 From: Maja Massarini Date: Wed, 16 Oct 2024 14:37:05 +0200 Subject: [PATCH] Add handler for OpenScanHub task finish --- packit_service/constants.py | 10 + packit_service/worker/allowlist.py | 2 + packit_service/worker/events/event.py | 32 ++- packit_service/worker/events/openscanhub.py | 33 ++- packit_service/worker/handlers/__init__.py | 4 + packit_service/worker/handlers/abstract.py | 1 + packit_service/worker/handlers/copr.py | 211 +-------------- .../worker/handlers/open_scan_hub.py | 125 +++++++++ packit_service/worker/helpers/scan.py | 240 ++++++++++++++++++ packit_service/worker/tasks.py | 14 + tests/unit/events/test_open_scan_hub.py | 63 ++++- tests/unit/test_scan.py | 185 +++++++++++++- 12 files changed, 697 insertions(+), 223 deletions(-) create mode 100644 packit_service/worker/handlers/open_scan_hub.py create mode 100644 packit_service/worker/helpers/scan.py diff --git a/packit_service/constants.py b/packit_service/constants.py index 67698def9..a8e68dc8e 100644 --- a/packit_service/constants.py +++ b/packit_service/constants.py @@ -302,3 +302,13 @@ def from_number(number: int): USAGE_PAST_YEAR_DATE_STR = (datetime.now() - timedelta(days=365)).strftime("%Y-%m-%d") USAGE_DATE_IN_THE_PAST = USAGE_CURRENT_DATE.replace(year=USAGE_CURRENT_DATE.year - 100) USAGE_DATE_IN_THE_PAST_STR = USAGE_DATE_IN_THE_PAST.strftime("%Y-%m-%d") + +OPEN_SCAN_HUB_FEATURE_DESCRIPTION = ( + "This is an experimental feature. Once the scan finishes, you can see the " + "new findings in the `added.html` in `Logs`. \n\n" + ":warning: You can see the list of known issues and also provide your feedback" + " [here](https://github.com/packit/packit/discussions/2371). \n\n" + "You can disable the scanning in your configuration by " + "setting `osh_diff_scan_after_copr_build` to `false`. For more information, " + f"see [docs]({DOCS_URL}/configuration#osh_diff_scan_after_copr_build)." +) diff --git a/packit_service/worker/allowlist.py b/packit_service/worker/allowlist.py index ed489bd4b..06e142d22 100644 --- a/packit_service/worker/allowlist.py +++ b/packit_service/worker/allowlist.py @@ -41,6 +41,7 @@ ReleaseEvent, TestingFarmResultsEvent, CheckRerunEvent, + OpenScanHubTaskFinishEvent, ) from packit_service.worker.events.gitlab import ReleaseGitlabEvent from packit_service.worker.events.koji import KojiBuildEvent, KojiBuildTagEvent @@ -485,6 +486,7 @@ def check_and_report( KojiBuildTagEvent, CheckRerunEvent, NewHotnessUpdateEvent, + OpenScanHubTaskFinishEvent, ): self._check_unchecked_event, ( ReleaseEvent, diff --git a/packit_service/worker/events/event.py b/packit_service/worker/events/event.py index 08c97af45..747000c5a 100644 --- a/packit_service/worker/events/event.py +++ b/packit_service/worker/events/event.py @@ -141,6 +141,24 @@ def from_event_dict(cls, event: dict): branches_override=branches_override, ) + def to_event(self) -> "Event": + """ + Create an instance of Event class from the data in this class. 
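+
+        The concrete class is looked up in `packit_service.worker.events`
+        by the stored `event_type` name. The event type/id, the
+        task-accepted timestamp and the targets/branches overrides are
+        dropped from the kwargs because the Event instance reconstructs
+        them when needed; the stored `_pr_id` is passed as `pr_id`.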
+ """ + mod = __import__("packit_service.worker.events", fromlist=[self.event_type]) + kls = getattr(mod, self.event_type) + kwargs = copy.copy(self.event_dict) + # The following data should be reconstructed by the Event instance (when needed) + kwargs.pop("event_type", None) + kwargs.pop("event_id", None) + kwargs.pop("task_accepted_time", None) + kwargs.pop("build_targets_override", None) + kwargs.pop("tests_targets_override", None) + kwargs.pop("branches_override", None) + pr_id = kwargs.pop("_pr_id", None) + kwargs["pr_id"] = pr_id + return kls(**kwargs) + @property def project(self): if not self._project: @@ -367,12 +385,17 @@ def store_packages_config(self): self.db_project_event.set_packages_config(package_config_dict) def get_non_serializable_attributes(self): + """List here both non serializable attributes and attributes that + we want to skip from the dict because are not needed to re-create + the event. + """ return [ "_db_project_object", "_db_project_event", "_project", "_base_project", "_package_config", + "_package_config_searched", ] def get_dict(self, default_dict: Optional[Dict] = None) -> dict: @@ -618,10 +641,11 @@ def get_all_build_targets_by_status( statuses_to_filter_with=statuses_to_filter_with, ) - def get_dict(self, default_dict: Optional[Dict] = None) -> dict: - result = super().get_dict() - result.pop("_pull_request_object") - return result + def get_non_serializable_attributes(self): + return super().get_non_serializable_attributes() + [ + "fail_when_config_file_missing", + "_pull_request_object", + ] class AbstractResultEvent(AbstractForgeIndependentEvent): diff --git a/packit_service/worker/events/openscanhub.py b/packit_service/worker/events/openscanhub.py index 09e52596c..25238cc85 100644 --- a/packit_service/worker/events/openscanhub.py +++ b/packit_service/worker/events/openscanhub.py @@ -1,23 +1,50 @@ # Copyright Contributors to the Packit project. 
# SPDX-License-Identifier: MIT +from typing import Optional from logging import getLogger -from packit_service.worker.events import Event + +from ogr.abstract import GitProject +from packit_service.config import ServiceConfig +from packit_service.worker.events.event import AbstractResultEvent +from packit_service.models import ( + AbstractProjectObjectDbType, + ProjectEventModel, + ScanModel, +) logger = getLogger(__name__) -class OpenScanHubTaskFinishEvent(Event): +class OpenScanHubTaskFinishEvent(AbstractResultEvent): def __init__( self, task_id: int, issues_added_url: str, issues_fixed_url: str, scan_results_url: str, + **kwargs, ): - super().__init__() + super().__init__(**kwargs) self.task_id = task_id self.issues_added_url = issues_added_url self.issues_fixed_url = issues_fixed_url self.scan_results_url = scan_results_url + + self.scan = ScanModel.get_by_task_id(task_id) + self.build = self.scan.copr_build_target + + def get_db_project_object(self) -> Optional[AbstractProjectObjectDbType]: + return self.build.get_project_event_object() + + def get_db_project_event(self) -> Optional[ProjectEventModel]: + return self.build.get_project_event_model() + + def get_project(self) -> GitProject: + return ServiceConfig.get_service_config().get_project( + self.db_project_object.project.project_url + ) + + def get_non_serializable_attributes(self): + return super().get_non_serializable_attributes() + ["build", "scan"] diff --git a/packit_service/worker/handlers/__init__.py b/packit_service/worker/handlers/__init__.py index eb1a15cd2..b25aa223d 100644 --- a/packit_service/worker/handlers/__init__.py +++ b/packit_service/worker/handlers/__init__.py @@ -38,6 +38,9 @@ VMImageBuildHandler, VMImageBuildResultHandler, ) +from packit_service.worker.handlers.open_scan_hub import ( + OpenScanHubTaskFinishHandler, +) __all__ = [ Handler.__name__, @@ -55,4 +58,5 @@ GithubFasVerificationHandler.__name__, VMImageBuildHandler.__name__, VMImageBuildResultHandler.__name__, + OpenScanHubTaskFinishHandler.__name__, ] diff --git a/packit_service/worker/handlers/abstract.py b/packit_service/worker/handlers/abstract.py index 126bb4300..3e9f1d277 100644 --- a/packit_service/worker/handlers/abstract.py +++ b/packit_service/worker/handlers/abstract.py @@ -191,6 +191,7 @@ class TaskName(str, enum.Enum): check_onboarded_projects = "task.check_onboarded_projects" koji_build_tag = "task.koji_build_tag" tag_into_sidetag = "task.tag_into_sidetag" + openscanhub_task_finish = "task.openscanhub_task_finish" class Handler(PackitAPIProtocol, Config): diff --git a/packit_service/worker/handlers/copr.py b/packit_service/worker/handlers/copr.py index 02e95fd59..bd6182daf 100644 --- a/packit_service/worker/handlers/copr.py +++ b/packit_service/worker/handlers/copr.py @@ -1,14 +1,8 @@ # Copyright Contributors to the Packit project. 
# SPDX-License-Identifier: MIT -import json import logging -import re -import tempfile from datetime import datetime, timezone -from os import getenv -from os.path import basename -from pathlib import Path from typing import Tuple, Type, Optional from celery import signature, Task @@ -25,13 +19,11 @@ from packit_service.constants import ( COPR_API_SUCC_STATE, COPR_SRPM_CHROOT, - DOCS_URL, ) from packit_service.models import ( CoprBuildTargetModel, BuildStatus, ProjectEventModelType, - SRPMBuildModel, ) from packit_service.service.urls import get_copr_build_info_url, get_srpm_build_info_url from packit_service.utils import ( @@ -39,7 +31,6 @@ dump_package_config, elapsed_seconds, pr_labels_match_configuration, - download_file, ) from packit_service.worker.checker.abstract import Checker from packit_service.worker.checker.copr import ( @@ -79,7 +70,7 @@ GetCoprBuildJobHelperMixin, ConfigFromEventMixin, ) -from packit_service.worker.helpers.build import CoprBuildJobHelper +from packit_service.worker.helpers.scan import ScanHelper from packit_service.worker.mixin import PackitAPIWithDownstreamMixin from packit_service.worker.reporting import BaseCommitStatus, DuplicateCheckMode from packit_service.worker.result import TaskResults @@ -509,203 +500,3 @@ def handle_testing_farm(self): "build_id": self.build.id, }, ).apply_async() - - -class ScanHelper: - def __init__( - self, copr_build_helper: CoprBuildJobHelper, build: CoprBuildTargetModel - ): - self.build = build - self.copr_build_helper = copr_build_helper - - @staticmethod - def osh_disabled() -> bool: - disabled = getenv("DISABLE_OPENSCANHUB", "False").lower() in ( - "true", - "t", - "yes", - "y", - "1", - ) - if disabled: - logger.info("OpenScanHub disabled via env var.") - return disabled - - def handle_scan(self): - """ - Try to find a job that can provide the base SRPM, - download both SRPM and base SRPM and trigger the scan in OpenScanHub. - """ - if not (base_build_job := self.find_base_build_job()): - logger.debug("No base build job needed for diff scan found in the config.") - return - - logger.info("Preparing to trigger scan in OpenScanHub...") - - if not (base_srpm_model := self.get_base_srpm_model(base_build_job)): - return - - srpm_model = self.build.get_srpm_build() - - with tempfile.TemporaryDirectory() as directory: - if not ( - paths := self.download_srpms(directory, base_srpm_model, srpm_model) - ): - return - - build_dashboard_url = get_copr_build_info_url(self.build.id) - - output = self.copr_build_helper.api.run_osh_build( - srpm_path=paths[1], - base_srpm=paths[0], - comment=f"Submitted via Packit Service for {build_dashboard_url}", - ) - - if not output: - logger.debug("Something went wrong, skipping the reporting.") - return - - logger.info("Scan submitted successfully.") - - response_dict = self.parse_dict_from_output(output) - - logger.debug(f"Parsed dict from output: {response_dict} ") - - if id := response_dict.get("id"): - self.build.add_scan(task_id=id) - else: - logger.debug( - "It was not possible to get the Open Scan Hub task_id from the response." - ) - - if not (url := response_dict.get("url")): - logger.debug("It was not possible to get the URL from the response.") - return - - self.copr_build_helper._report( - state=BaseCommitStatus.progress, - description=( - "Scan in OpenScanHub submitted successfully. Check the URL for more details." - ), - url=url, - check_names=["osh-diff-scan:fedora-rawhide-x86_64"], - markdown_content=( - "This is an experimental feature. 
Once the scan finishes, you can see the " - "new findings in the `added.html` in `Logs`. \n\n" - ":warning: You can see the list of known issues and also provide your feedback" - " [here](https://github.com/packit/packit/discussions/2371). \n\n" - "You can disable the scanning in your configuration by " - "setting `osh_diff_scan_after_copr_build` to `false`. For more information, " - f"see [docs]({DOCS_URL}/configuration#osh_diff_scan_after_copr_build)." - ), - ) - - @staticmethod - def parse_dict_from_output(output: str) -> dict: - json_pattern = r"\{.*?\}" - matches = re.findall(json_pattern, output, re.DOTALL) - - if not matches: - return {} - - json_str = matches[-1] - return json.loads(json_str) - - def find_base_build_job(self) -> Optional[JobConfig]: - """ - Find the job in the config that can provide the base build for the scan - (with `commit` trigger and same branch configured as the target PR branch). - """ - base_build_job = None - - for job in self.copr_build_helper.package_config.get_job_views(): - if ( - job.type in (JobType.copr_build, JobType.build) - and job.trigger == JobConfigTriggerType.commit - and ( - ( - job.branch - and job.branch - == self.copr_build_helper.pull_request_object.target_branch - ) - or ( - not job.branch - and self.copr_build_helper.project.default_branch - == self.copr_build_helper.pull_request_object.target_branch - ) - ) - ): - base_build_job = job - break - - return base_build_job - - def get_base_srpm_model( - self, base_build_job: JobConfig - ) -> Optional[SRPMBuildModel]: - """ - Get the SRPM build model of the latest successful Copr build - for the given job config. - """ - base_build_project_name = ( - self.copr_build_helper.job_project_for_commit_job_config(base_build_job) - ) - base_build_owner = self.copr_build_helper.job_owner_for_job_config( - base_build_job - ) - - def get_srpm_build(commit_sha): - logger.debug( - f"Searching for base build for {target_branch_commit} commit " - f"in {base_build_owner}/{base_build_project_name} Copr project in our DB. 
" - ) - - builds = CoprBuildTargetModel.get_all_by( - commit_sha=commit_sha, - project_name=base_build_project_name, - owner=base_build_owner, - target="fedora-rawhide-x86_64", - status=BuildStatus.success, - ) - try: - return next(iter(builds)).get_srpm_build() - except StopIteration: - return None - - target_branch_commit = ( - self.copr_build_helper.pull_request_object.target_branch_head_commit - ) - - if srpm_build := get_srpm_build(target_branch_commit): - return srpm_build - - for target_branch_commit in self.copr_build_helper.project.get_commits( - self.copr_build_helper.pull_request_object.target_branch - )[1:]: - if srpm_build := get_srpm_build(target_branch_commit): - return srpm_build - else: - logger.debug("No matching base build found in our DB.") - return None - - @staticmethod - def download_srpms( - directory: str, - base_srpm_model: SRPMBuildModel, - srpm_model: SRPMBuildModel, - ) -> Optional[tuple[Path, Path]]: - - def download_srpm(srpm_model: SRPMBuildModel) -> Optional[Path]: - srpm_path = Path(directory).joinpath(basename(srpm_model.url)) - if not download_file(srpm_model.url, srpm_path): - logger.info(f"Downloading of SRPM {srpm_model.url} was not successful.") - return None - return srpm_path - - if (base_srpm_path := download_srpm(base_srpm_model)) is None: - return None - - if (srpm_path := download_srpm(srpm_model)) is None: - return None - - return base_srpm_path, srpm_path diff --git a/packit_service/worker/handlers/open_scan_hub.py b/packit_service/worker/handlers/open_scan_hub.py new file mode 100644 index 000000000..49b9d959a --- /dev/null +++ b/packit_service/worker/handlers/open_scan_hub.py @@ -0,0 +1,125 @@ +# Copyright Contributors to the Packit project. +# SPDX-License-Identifier: MIT + +import logging +from typing import Tuple, Type + +from packit.config import ( + JobType, + aliases, +) +from packit_service.worker.checker.abstract import Checker +from packit_service.worker.events import ( + OpenScanHubTaskFinishEvent, +) +from packit_service.worker.handlers.abstract import ( + RetriableJobHandler, + TaskName, + configured_as, + reacts_to, +) +from packit_service.worker.helpers.build import CoprBuildJobHelper +from packit_service.worker.helpers.scan import ScanHelper + +from packit_service.worker.handlers.mixin import ( + ConfigFromEventMixin, +) +from packit_service.worker.result import TaskResults +from packit_service.worker.mixin import ( + LocalProjectMixin, + PackitAPIWithUpstreamMixin, +) + +from packit_service.worker.reporting import BaseCommitStatus + +logger = logging.getLogger(__name__) + + +@configured_as(job_type=JobType.copr_build) +@reacts_to(OpenScanHubTaskFinishEvent) +class OpenScanHubTaskFinishHandler( + RetriableJobHandler, + LocalProjectMixin, + ConfigFromEventMixin, + PackitAPIWithUpstreamMixin, +): + task_name = TaskName.openscanhub_task_finish + + @staticmethod + def get_checkers() -> Tuple[Type[Checker], ...]: + return () + + def run(self) -> TaskResults: + task_id = self.data.event_dict["task_id"] + event = self.data.to_event() + if not event.scan or not event.build: + return TaskResults( + success=False, + details={ + "msg": f"Scan {task_id} not found or not associated with a Copr build" + }, + ) + elif not self.job_config: + return TaskResults( + success=False, + details={ + "msg": ( + "No job configuration found for " + f"openscanhub_task_finish in {self.project.repo}" + ) + }, + ) + + branches = aliases.get_build_targets( + *self.job_config.targets, + ) + if "fedora-rawhide-x86_64" not in branches: + return TaskResults( + 
success=False, + details={ + "msg": "Skipping job configuration with no fedora-rawhide-x86_64 target." + }, + ) + + build_helper = CoprBuildJobHelper( + service_config=self.service_config, + package_config=self.package_config, + project=self.project, + metadata=self.data, + db_project_event=self.data.db_project_event, + job_config=self.job_config, + celery_task=self.celery_task, + ) + + scan_helper = ScanHelper(copr_build_helper=build_helper, build=event.build) + + external_links = { + "Added issues": event.issues_added_url, + "Fixed issues": event.issues_fixed_url, + "Scan results": event.scan_results_url, + } + + # TODO: probably we need a babysit task for when the build is not finished yet + if event.build.status == "success": + state = BaseCommitStatus.success + description = ( + "Scan in OpenScanHub is finished. Check the URL for more details." + ) + else: + state = BaseCommitStatus.neutral + description = ( + "Scan in OpenScanHub is finished but the build did not " + "finish yet or did not succeed." + ) + + scan_helper.report( + state=state, + description=description, + url=event.scan_results_url, + links_to_external_services=external_links, + ) + + return TaskResults( + success=True, + details={}, + ) diff --git a/packit_service/worker/helpers/scan.py b/packit_service/worker/helpers/scan.py new file mode 100644 index 000000000..22667862d --- /dev/null +++ b/packit_service/worker/helpers/scan.py @@ -0,0 +1,240 @@ +# Copyright Contributors to the Packit project. +# SPDX-License-Identifier: MIT + +import json +import logging +import re +import tempfile +from os import getenv +from os.path import basename +from pathlib import Path +from typing import Optional, Dict + + +from packit.config import ( + JobConfig, + JobType, +) +from packit.config import JobConfigTriggerType +from packit_service.constants import ( + OPEN_SCAN_HUB_FEATURE_DESCRIPTION, +) +from packit_service.models import ( + CoprBuildTargetModel, + BuildStatus, + SRPMBuildModel, +) +from packit_service.service.urls import get_copr_build_info_url +from packit_service.utils import ( + download_file, +) +from packit_service.worker.helpers.build import CoprBuildJobHelper +from packit_service.worker.reporting import BaseCommitStatus + +logger = logging.getLogger(__name__) + + +class ScanHelper: + def __init__( + self, copr_build_helper: CoprBuildJobHelper, build: CoprBuildTargetModel + ): + self.build = build + self.copr_build_helper = copr_build_helper + + @staticmethod + def osh_disabled() -> bool: + disabled = getenv("DISABLE_OPENSCANHUB", "False").lower() in ( + "true", + "t", + "yes", + "y", + "1", + ) + if disabled: + logger.info("OpenScanHub disabled via env var.") + return disabled + + def handle_scan(self): + """ + Try to find a job that can provide the base SRPM, + download both SRPM and base SRPM and trigger the scan in OpenScanHub. 
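+
+        The flow is: find the matching `commit`-triggered build job for
+        the PR's target branch, look up its latest successful
+        fedora-rawhide Copr build to get the base SRPM, download both
+        SRPMs into a temporary directory, submit them to OpenScanHub via
+        `run_osh_build`, store the returned task id on the build and
+        report the scan as running with a link to the OpenScanHub task.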
+ """ + if not (base_build_job := self.find_base_build_job()): + logger.debug("No base build job needed for diff scan found in the config.") + return + + logger.info("Preparing to trigger scan in OpenScanHub...") + + if not (base_srpm_model := self.get_base_srpm_model(base_build_job)): + return + + srpm_model = self.build.get_srpm_build() + + with tempfile.TemporaryDirectory() as directory: + if not ( + paths := self.download_srpms(directory, base_srpm_model, srpm_model) + ): + return + + build_dashboard_url = get_copr_build_info_url(self.build.id) + + output = self.copr_build_helper.api.run_osh_build( + srpm_path=paths[1], + base_srpm=paths[0], + comment=f"Submitted via Packit Service for {build_dashboard_url}", + ) + + if not output: + logger.debug("Something went wrong, skipping the reporting.") + return + + logger.info("Scan submitted successfully.") + + response_dict = self.parse_dict_from_output(output) + + logger.debug(f"Parsed dict from output: {response_dict} ") + + if id := response_dict.get("id"): + self.build.add_scan(task_id=id) + else: + logger.debug( + "It was not possible to get the Open Scan Hub task_id from the response." + ) + + if not (url := response_dict.get("url")): + logger.debug("It was not possible to get the URL from the response.") + return + + self.report( + state=BaseCommitStatus.running, + description=( + "Scan in OpenScanHub submitted successfully. Check the URL for more details." + ), + url=url, + ) + + def report( + self, + state: BaseCommitStatus, + description: str, + url: str, + links_to_external_services: Optional[Dict[str, str]] = None, + ): + self.copr_build_helper._report( + state=state, + description=description, + url=url, + check_names=["osh-diff-scan:fedora-rawhide-x86_64"], + markdown_content=OPEN_SCAN_HUB_FEATURE_DESCRIPTION, + links_to_external_services=links_to_external_services, + ) + + @staticmethod + def parse_dict_from_output(output: str) -> dict: + json_pattern = r"\{.*?\}" + matches = re.findall(json_pattern, output, re.DOTALL) + + if not matches: + return {} + + json_str = matches[-1] + return json.loads(json_str) + + def find_base_build_job(self) -> Optional[JobConfig]: + """ + Find the job in the config that can provide the base build for the scan + (with `commit` trigger and same branch configured as the target PR branch). + """ + base_build_job = None + + for job in self.copr_build_helper.package_config.get_job_views(): + if ( + job.type in (JobType.copr_build, JobType.build) + and job.trigger == JobConfigTriggerType.commit + and ( + ( + job.branch + and job.branch + == self.copr_build_helper.pull_request_object.target_branch + ) + or ( + not job.branch + and self.copr_build_helper.project.default_branch + == self.copr_build_helper.pull_request_object.target_branch + ) + ) + ): + base_build_job = job + break + + return base_build_job + + def get_base_srpm_model( + self, base_build_job: JobConfig + ) -> Optional[SRPMBuildModel]: + """ + Get the SRPM build model of the latest successful Copr build + for the given job config. + """ + base_build_project_name = ( + self.copr_build_helper.job_project_for_commit_job_config(base_build_job) + ) + base_build_owner = self.copr_build_helper.job_owner_for_job_config( + base_build_job + ) + + def get_srpm_build(commit_sha): + logger.debug( + f"Searching for base build for {target_branch_commit} commit " + f"in {base_build_owner}/{base_build_project_name} Copr project in our DB. 
" + ) + + builds = CoprBuildTargetModel.get_all_by( + commit_sha=commit_sha, + project_name=base_build_project_name, + owner=base_build_owner, + target="fedora-rawhide-x86_64", + status=BuildStatus.success, + ) + try: + return next(iter(builds)).get_srpm_build() + except StopIteration: + return None + + target_branch_commit = ( + self.copr_build_helper.pull_request_object.target_branch_head_commit + ) + + if srpm_build := get_srpm_build(target_branch_commit): + return srpm_build + + for target_branch_commit in self.copr_build_helper.project.get_commits( + self.copr_build_helper.pull_request_object.target_branch + )[1:]: + if srpm_build := get_srpm_build(target_branch_commit): + return srpm_build + else: + logger.debug("No matching base build found in our DB.") + return None + + @staticmethod + def download_srpms( + directory: str, + base_srpm_model: SRPMBuildModel, + srpm_model: SRPMBuildModel, + ) -> Optional[tuple[Path, Path]]: + + def download_srpm(srpm_model: SRPMBuildModel) -> Optional[Path]: + srpm_path = Path(directory).joinpath(basename(srpm_model.url)) + if not download_file(srpm_model.url, srpm_path): + logger.info(f"Downloading of SRPM {srpm_model.url} was not successful.") + return None + return srpm_path + + if (base_srpm_path := download_srpm(base_srpm_model)) is None: + return None + + if (srpm_path := download_srpm(srpm_model)) is None: + return None + + return base_srpm_path, srpm_path diff --git a/packit_service/worker/tasks.py b/packit_service/worker/tasks.py index 26a82a50d..79c543dfe 100644 --- a/packit_service/worker/tasks.py +++ b/packit_service/worker/tasks.py @@ -59,6 +59,7 @@ TestingFarmResultsHandler, VMImageBuildHandler, VMImageBuildResultHandler, + OpenScanHubTaskFinishHandler, ) from packit_service.worker.handlers.abstract import TaskName from packit_service.worker.handlers.bodhi import ( @@ -608,6 +609,19 @@ def run_tag_into_sidetag_handler( return get_handlers_task_results(handler.run_job(), event) +@celery_app.task(bind=True, name=TaskName.openscanhub_task_finish, base=TaskWithRetry) +def run_openscanhub_task_finish_handler( + self, event: dict, package_config: dict, job_config: dict +): + handler = OpenScanHubTaskFinishHandler( + package_config=load_package_config(package_config), + job_config=load_job_config(job_config), + event=event, + celery_task=self, + ) + return get_handlers_task_results(handler.run_job(), event) + + def get_handlers_task_results(results: dict, event: dict) -> dict: # include original event to provide more info return {"job": results, "event": event} diff --git a/tests/unit/events/test_open_scan_hub.py b/tests/unit/events/test_open_scan_hub.py index 88abe9a25..53b4775ec 100644 --- a/tests/unit/events/test_open_scan_hub.py +++ b/tests/unit/events/test_open_scan_hub.py @@ -3,9 +3,20 @@ import json import pytest +import datetime +from flexmock import flexmock +from packit.config import ( + JobConfig, + CommonPackageConfig, + PackageConfig, + JobType, + JobConfigTriggerType, +) from packit_service.worker.events import OpenScanHubTaskFinishEvent from packit_service.worker.parser import Parser +from packit_service.models import ScanModel + from tests.spellbook import DATA_DIR @@ -15,7 +26,53 @@ def openscanhub_task_finish_event(): return json.load(outfile) -def test_parse_openscanhub_task_finish(openscanhub_task_finish_event): +def test_parse_openscanhub_task_finish( + openscanhub_task_finish_event, add_pull_request_event_with_sha_123456 +): + db_project_object, db_project_event = add_pull_request_event_with_sha_123456 + db_build = ( + 
flexmock( + build_id="55", + status="success", + build_submitted_time=datetime.datetime.utcnow(), + target="the-target", + owner="the-owner", + project_name="the-namespace-repo_name-5", + commit_sha="123456", + project_event=flexmock(), + srpm_build=flexmock(url=None) + .should_receive("set_url") + .with_args("https://some.host/my.srpm") + .mock(), + ) + .should_receive("get_project_event_object") + .and_return(db_project_object) + .mock() + .should_receive("get_project_event_model") + .and_return(db_project_event) + .mock() + ) + flexmock(OpenScanHubTaskFinishEvent).should_receive( + "get_packages_config" + ).and_return( + PackageConfig( + jobs=[ + JobConfig( + type=JobType.copr_build, + trigger=JobConfigTriggerType.commit, + packages={ + "package": CommonPackageConfig( + _targets=["fedora-rawhide-x86_64"], + ) + }, + ) + ], + packages={"package": CommonPackageConfig()}, + ) + ) + flexmock(ScanModel).should_receive("get_by_task_id").and_return( + flexmock(copr_build_target=db_build) + ) event_object = Parser.parse_event(openscanhub_task_finish_event) assert isinstance(event_object, OpenScanHubTaskFinishEvent) @@ -32,3 +89,7 @@ def test_parse_openscanhub_task_finish(openscanhub_task_finish_event): "http://openscanhub.fedoraproject.org/task/15649/log/gvisor-tap-vsock" "-0.7.5-1.20241007054606793155.pr405.23.g829aafd6/scan-results.js?format=raw" ) + assert event_object.db_project_event + assert event_object.db_project_object + assert event_object.project + assert json.dumps(event_object.get_dict()) diff --git a/tests/unit/test_scan.py b/tests/unit/test_scan.py index 4a8689eb6..8944be953 100644 --- a/tests/unit/test_scan.py +++ b/tests/unit/test_scan.py @@ -1,21 +1,47 @@ # Copyright Contributors to the Packit project. # SPDX-License-Identifier: MIT +import datetime import pytest +import json from flexmock import flexmock +from celery.canvas import group as celery_group from packit.api import PackitAPI -from packit.config import JobType, JobConfigTriggerType +from packit.config import ( + JobType, + JobConfigTriggerType, + PackageConfig, + JobConfig, + CommonPackageConfig, +) from packit_service.models import ( CoprBuildTargetModel, ProjectEventModelType, BuildStatus, + ScanModel, +) +from packit_service.worker.tasks import run_openscanhub_task_finish_handler +from packit_service.worker.jobs import SteveJobs +from packit_service.worker.monitoring import Pushgateway +from packit_service.worker.reporting import BaseCommitStatus + +from packit_service.worker.events import ( + AbstractCoprBuildEvent, + OpenScanHubTaskFinishEvent, ) -from packit_service.worker.events import AbstractCoprBuildEvent -from packit_service.worker.handlers import copr +from packit_service.worker.helpers import scan from packit_service.worker.handlers.copr import ScanHelper from packit_service.worker.helpers.build import CoprBuildJobHelper +from tests.spellbook import DATA_DIR, get_parameters_from_results + + +@pytest.fixture() +def openscanhub_task_finish_event(): + with open(DATA_DIR / "fedmsg" / "open_scan_hub_task_finish.json") as outfile: + return json.load(outfile) + @pytest.mark.parametrize( "build_models", @@ -35,7 +61,7 @@ def test_handle_scan(build_models): flexmock(AbstractCoprBuildEvent).should_receive("from_event_dict").and_return( flexmock(chroot="fedora-rawhide-x86_64", build_id="123", pr_id=12) ) - flexmock(copr).should_receive("download_file").twice().and_return(True) + flexmock(scan).should_receive("download_file").twice().and_return(True) for commit_sha, models in build_models: 
flexmock(CoprBuildTargetModel).should_receive("get_all_by").with_args( @@ -43,7 +69,7 @@ def test_handle_scan(build_models): project_name="commit-project", owner="user-123", target="fedora-rawhide-x86_64", - status=BuildStatus.progress, + status=BuildStatus.success, ).and_return(models).once() flexmock(PackitAPI).should_receive("run_osh_build").once().and_return( @@ -89,3 +115,152 @@ def test_handle_scan(build_models): job_config=flexmock(), ), ).handle_scan() + + +@pytest.mark.parametrize( + "job_config_type,job_config_trigger,job_config_targets,copr_build_state,num_of_handlers", + [ + ( + JobType.copr_build, + JobConfigTriggerType.commit, + ["fedora-rawhide-x86_64"], + "success", + 1, + ), + ( + JobType.copr_build, + JobConfigTriggerType.pull_request, + ["fedora-rawhide-x86_64"], + "success", + 2, + ), + ( + JobType.copr_build, + JobConfigTriggerType.pull_request, + ["fedora-rawhide-x86_64"], + "failed", + 2, + ), + ], +) +def test_handle_scan_task_finish( + openscanhub_task_finish_event, + add_pull_request_event_with_sha_123456, + job_config_type, + job_config_trigger, + job_config_targets, + copr_build_state, + num_of_handlers, +): + db_project_object, db_project_event = add_pull_request_event_with_sha_123456 + db_build = ( + flexmock( + build_id="55", + status=copr_build_state, + build_submitted_time=datetime.datetime.utcnow(), + target="the-target", + owner="the-owner", + project_name="the-namespace-repo_name-5", + commit_sha="123456", + project_event=flexmock(), + srpm_build=flexmock(url=None) + .should_receive("set_url") + .with_args("https://some.host/my.srpm") + .mock(), + ) + .should_receive("get_project_event_object") + .and_return(db_project_object) + .mock() + .should_receive("get_project_event_model") + .and_return(db_project_event) + .mock() + ) + flexmock(OpenScanHubTaskFinishEvent).should_receive( + "get_packages_config" + ).and_return( + PackageConfig( + jobs=[ + JobConfig( + type=job_config_type, + trigger=job_config_trigger, + packages={ + "package": CommonPackageConfig( + _targets=job_config_targets, + specfile_path="test.spec", + ) + }, + ), + JobConfig( + type=JobType.copr_build, + trigger=JobConfigTriggerType.pull_request, + packages={ + "package": CommonPackageConfig( + _targets=["fedora-stable"], + specfile_path="test.spec", + ) + }, + ), + ], + packages={"package": CommonPackageConfig()}, + ) + ) + flexmock(celery_group).should_receive("apply_async") + flexmock(ScanModel).should_receive("get_by_task_id").and_return( + flexmock(copr_build_target=db_build) + ) + flexmock(Pushgateway).should_receive("push").and_return() + + processing_results = SteveJobs().process_message(openscanhub_task_finish_event) + assert len(processing_results) == num_of_handlers + + if processing_results: + url = ( + "http://openscanhub.fedoraproject.org/task/15649/log/gvisor-tap-vsock-0.7.5-1." + "20241007054606793155.pr405.23.g829aafd6/scan-results.js?format=raw" + ) + links_to_external_services = { + "Added issues": ( + "http://openscanhub.fedoraproject.org/task/15649/log/added.js" + "?format=raw" + ), + "Fixed issues": ( + "http://openscanhub.fedoraproject.org/task/15649/log/fixed.js" + "?format=raw" + ), + "Scan results": ( + "http://openscanhub.fedoraproject.org/task/15649/log/gvisor-tap-vsock-" + "0.7.5-1.20241007054606793155.pr405.23.g829aafd6/scan-results.js?format=raw" + ), + } + if copr_build_state == "success": + state = BaseCommitStatus.success + description = ( + "Scan in OpenScanHub is finished. Check the URL for more details." 
+ ) + else: + state = BaseCommitStatus.neutral + description = ( + "Scan in OpenScanHub is finished but the build did not finish yet" + " or did not succeed." + ) + if num_of_handlers > 1: + # one handler is always skipped because it is for fedora-stable -> + # no rawhide build + flexmock(ScanHelper).should_receive("report").with_args( + state=state, + description=description, + url=url, + links_to_external_services=links_to_external_services, + ).once().and_return() + + for sub_results in processing_results: + event_dict, job, job_config, package_config = get_parameters_from_results( + [sub_results] + ) + assert json.dumps(event_dict) + + run_openscanhub_task_finish_handler( + package_config=package_config, + event=event_dict, + job_config=job_config, + )