From a64543ffd30c0f4f7d18481f68fc153b2c6ec1aa Mon Sep 17 00:00:00 2001 From: evidencebp Date: Sun, 1 Dec 2024 14:35:26 +0200 Subject: [PATCH 01/10] bioconda_utils\artifacts.py superfluous-parens Removed unnecessary parenthesis --- bioconda_utils/artifacts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioconda_utils/artifacts.py b/bioconda_utils/artifacts.py index 1827aaba08..d46f677d43 100644 --- a/bioconda_utils/artifacts.py +++ b/bioconda_utils/artifacts.py @@ -234,7 +234,7 @@ def parse_gha_build_id(url: str) -> str: def get_gha_artifacts(check_run, platform, repo): gha_workflow_id = parse_gha_build_id(check_run.details_url) - if (gha_workflow_id) : + if gha_workflow_id: # The workflow run is different from the check run run = repo.get_workflow_run(int(gha_workflow_id)) artifacts = run.get_artifacts() From 21e5b65c7877e86aba0a2455be6750a70c668c4f Mon Sep 17 00:00:00 2001 From: evidencebp Date: Sun, 1 Dec 2024 16:36:27 +0200 Subject: [PATCH 02/10] versioneer.py too-many-branches MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Method do_setup had 15 branches and function get_versions had 13 branches while Pylint recommends to have no more than 12. --- versioneer.py | 72 ++++++++++++++++++++++++++++++--------------------- 1 file changed, 43 insertions(+), 29 deletions(-) diff --git a/versioneer.py b/versioneer.py index 3aa5da3720..7d6326b904 100644 --- a/versioneer.py +++ b/versioneer.py @@ -1427,18 +1427,10 @@ def get_versions(verbose=False): # and for users of a tarball/zipball created by 'git archive' or github's # download-from-tag feature or the equivalent in other VCSes. 
- get_keywords_f = handlers.get("get_keywords") - from_keywords_f = handlers.get("keywords") - if get_keywords_f and from_keywords_f: - try: - keywords = get_keywords_f(versionfile_abs) - ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) - if verbose: - print("got version from expanded keyword %s" % ver) - return ver - except NotThisMethod: - pass - + found, ver = _handle_keywords(cfg, handlers, versionfile_abs, verbose) + if found: + return ver + try: ver = versions_from_file(versionfile_abs) if verbose: @@ -1475,6 +1467,24 @@ def get_versions(verbose=False): "date": None} +def _handle_keywords(cfg, handlers, versionfile_abs, verbose): + found = False + ver = None + + get_keywords_f = handlers.get("get_keywords") + from_keywords_f = handlers.get("keywords") + if get_keywords_f and from_keywords_f: + try: + keywords = get_keywords_f(versionfile_abs) + ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) + if verbose: + print("got version from expanded keyword %s" % ver) + return True, ver + except NotThisMethod: + pass + + return False, ver + def get_version(): """Get the short version string for this project.""" return get_versions()["version"] @@ -1719,23 +1729,7 @@ def do_setup(): "VERSIONFILE_SOURCE": cfg.versionfile_source, }) - ipy = os.path.join(os.path.dirname(cfg.versionfile_source), - "__init__.py") - if os.path.exists(ipy): - try: - with open(ipy, "r") as f: - old = f.read() - except EnvironmentError: - old = "" - if INIT_PY_SNIPPET not in old: - print(" appending to %s" % ipy) - with open(ipy, "a") as f: - f.write(INIT_PY_SNIPPET) - else: - print(" %s unmodified" % ipy) - else: - print(" %s doesn't exist, ok" % ipy) - ipy = None + ipy = _compute_ipy(cfg) # Make sure both the top-level "versioneer.py" and versionfile_source # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so @@ -1775,6 +1769,26 @@ def do_setup(): do_vcs_install(manifest_in, cfg.versionfile_source, ipy) return 0 +def _compute_ipy(cfg): + ipy = 
os.path.join(os.path.dirname(cfg.versionfile_source),
+                       "__init__.py")
+    if os.path.exists(ipy):
+        try:
+            with open(ipy, "r") as f:
+                old = f.read()
+        except EnvironmentError:
+            old = ""
+        if INIT_PY_SNIPPET not in old:
+            print(" appending to %s" % ipy)
+            with open(ipy, "a") as f:
+                f.write(INIT_PY_SNIPPET)
+        else:
+            print(" %s unmodified" % ipy)
+    else:
+        print(" %s doesn't exist, ok" % ipy)
+        ipy = None
+    return ipy
+
 def scan_setup_py():
     """Validate the contents of setup.py against Versioneer's expectations."""

From e03618f77526a44afc22a9ef62bd45b2029a58e1 Mon Sep 17 00:00:00 2001
From: evidencebp
Date: Sun, 1 Dec 2024 18:05:32 +0200
Subject: [PATCH 03/10] bioconda_utils\cli.py simplifiable-if-statement
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

The function build checks the value of "pkg_dir is None" in an if statement
and assigns it to the variable use_host_conda_bld (line 469). I assigned
the expression directly to the variable, removing the unneeded if and
simplifying the code.
--- bioconda_utils/cli.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/bioconda_utils/cli.py b/bioconda_utils/cli.py index 8f75948ce0..4cedc7c686 100644 --- a/bioconda_utils/cli.py +++ b/bioconda_utils/cli.py @@ -466,10 +466,7 @@ def build(recipe_folder, config, packages="*", git_range=None, testonly=False, build_script_template = open(build_script_template).read() else: build_script_template = docker_utils.BUILD_SCRIPT_TEMPLATE - if pkg_dir is None: - use_host_conda_bld = True - else: - use_host_conda_bld = False + use_host_conda_bld = pkg_dir is None if not utils.is_stable_version(VERSION): image_tag = utils.extract_stable_version(VERSION) From 372285c502fec4cec4d84374e4bd7241e5630ff7 Mon Sep 17 00:00:00 2001 From: evidencebp Date: Sun, 1 Dec 2024 18:13:05 +0200 Subject: [PATCH 04/10] bioconda_utils\githandler.py unnecessary-pass Removed unnecessary pass --- bioconda_utils/githandler.py | 1 - 1 file changed, 1 deletion(-) diff --git a/bioconda_utils/githandler.py b/bioconda_utils/githandler.py index a25881b781..d10e0d37ae 100644 --- a/bioconda_utils/githandler.py +++ b/bioconda_utils/githandler.py @@ -510,7 +510,6 @@ def __init__(self, folder: str=".", except: # This will fail on CI nodes from forks, but we don't need to switch back and forth between branches there logger.warning("Couldn't get the active branch name, we must be on detached HEAD") - pass def checkout_master(self): """Check out master branch (original branch restored by `close()`)""" From 2a2baf9b2194b43114eef5ef9c570300b849785b Mon Sep 17 00:00:00 2001 From: evidencebp Date: Sun, 1 Dec 2024 18:13:56 +0200 Subject: [PATCH 05/10] bioconda_utils\bioconductor_skeleton.py superfluous-parens Removed unnecessary parenthesis --- bioconda_utils/bioconductor_skeleton.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioconda_utils/bioconductor_skeleton.py b/bioconda_utils/bioconductor_skeleton.py index 9a98fedca1..4b5a92d3d9 100755 --- 
a/bioconda_utils/bioconductor_skeleton.py +++ b/bioconda_utils/bioconductor_skeleton.py @@ -775,7 +775,7 @@ def dependencies(self): # Check SystemRequirements in the DESCRIPTION file to make sure # packages with such requirements are provided correct recipes. - if (self.packages[self.package].get('SystemRequirements') is not None): + if self.packages[self.package].get('SystemRequirements') is not None: logger.warning( "The 'SystemRequirements' {} are needed".format( self.packages[self.package].get('SystemRequirements')) + From 2dc9f1b014112dc78c1b3778d1bcae4a32b25580 Mon Sep 17 00:00:00 2001 From: evidencebp Date: Sun, 1 Dec 2024 18:32:45 +0200 Subject: [PATCH 06/10] bioconda_utils\build.py too-many-statements The function build had 66 statements and the function build_recipes had 86, while Pylint recommends having no more than 50. Functions were structured so I extracted methods to handle specific logics, simplifying the code. --- bioconda_utils/build.py | 109 ++++++++++++++++++++++++---------------- 1 file changed, 67 insertions(+), 42 deletions(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index bbdd0c426a..2722fd81f7 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -106,14 +106,7 @@ def build(recipe: str, pkg_paths: List[str] = None, logger.info("BUILD START %s", recipe) - args = ['--override-channels'] - if testonly: - args += ["--test"] - else: - args += ["--no-anaconda-upload"] - - for channel in channels or ['local']: - args += ['-c', channel] + args = _build_args(testonly, channels) logger.debug('Build and Channel Args: %s', args) @@ -155,17 +148,7 @@ def build(recipe: str, pkg_paths: List[str] = None, "cannot be found", pkg_path) return BuildResult(False, None) else: - conda_build_cmd = [utils.bin_for('conda-build')] - # - Temporarily reset os.environ to avoid leaking env vars - # - Also pass filtered env to run() - # - Point conda-build to meta.yaml, to avoid building subdirs - with 
utils.sandboxed_env(whitelisted_env): - cmd = conda_build_cmd + args - for config_file in utils.get_conda_build_config_files(): - cmd += [config_file.arg, config_file.path] - cmd += [os.path.join(recipe, 'meta.yaml')] - with utils.Progress(): - utils.run(cmd, mask=False, live=live_logs) + _handle_conda_build(recipe, live_logs, whitelisted_env, args) logger.info('BUILD SUCCESS %s', ' '.join(os.path.basename(p) for p in pkg_paths)) @@ -183,6 +166,28 @@ def build(recipe: str, pkg_paths: List[str] = None, raise exc return BuildResult(False, None) + return _handle_mulled_test(mulled_test + , recipe + , pkg_paths + , base_image + , mulled_conda_image + , live_logs) + +def _handle_conda_build(recipe, live_logs, whitelisted_env, args): + conda_build_cmd = [utils.bin_for('conda-build')] + # - Temporarily reset os.environ to avoid leaking env vars + # - Also pass filtered env to run() + # - Point conda-build to meta.yaml, to avoid building subdirs + with utils.sandboxed_env(whitelisted_env): + cmd = conda_build_cmd + args + for config_file in utils.get_conda_build_config_files(): + cmd += [config_file.arg, config_file.path] + cmd += [os.path.join(recipe, 'meta.yaml')] + with utils.Progress(): + utils.run(cmd, mask=False, live=live_logs) + +def _handle_mulled_test(mulled_test, recipe, pkg_paths, base_image, mulled_conda_image, live_logs): + if mulled_test: logger.info('TEST START via mulled-build %s', recipe) mulled_images = [] @@ -200,6 +205,17 @@ def build(recipe: str, pkg_paths: List[str] = None, return BuildResult(True, None) +def _build_args(testonly, channels): + args = ['--override-channels'] + if testonly: + args += ["--test"] + else: + args += ["--no-anaconda-upload"] + + for channel in channels or ['local']: + args += ['-c', channel] + return args + def store_build_failure_record(recipe, output, meta, dag, skiplist_leafs): """ @@ -421,6 +437,38 @@ def build_recipes(recipe_folder: str, config_path: str, recipes: List[str], skipped_recipes = [] failed_uploads = [] + 
_handle_recipes(recipe_folder, recipes, mulled_test, testonly, force, docker_builder, label + , anaconda_upload, mulled_upload_target, check_channels, keep_old_work, mulled_conda_image + , record_build_failures, skiplist_leafs, live_logs, config, linter, failed, dag + , skip_dependent, subdag, built_recipes, skipped_recipes, failed_uploads) + + if failed or failed_uploads: + logger.error('BUILD SUMMARY: of %s recipes, ' + '%s failed and %s were skipped. ' + 'Details of recipes and environments follow.', + len(recipes), len(failed), len(skipped_recipes)) + if built_recipes: + logger.error('BUILD SUMMARY: while the entire build failed, ' + 'the following recipes were built successfully:\n%s', + '\n'.join(built_recipes)) + for recipe in failed: + logger.error('BUILD SUMMARY: FAILED recipe %s', recipe) + for name, dep in skip_dependent.items(): + logger.error('BUILD SUMMARY: SKIPPED recipe %s ' + 'due to failed dependencies %s', name, dep) + if failed_uploads: + logger.error('UPLOAD SUMMARY: the following packages failed to upload:\n%s', + '\n'.join(failed_uploads)) + return False + + logger.info("BUILD SUMMARY: successfully built %s of %s recipes", + len(built_recipes), len(recipes)) + return True + +def _handle_recipes(recipe_folder, recipes, mulled_test, testonly, force, docker_builder, label, anaconda_upload + , mulled_upload_target, check_channels, keep_old_work, mulled_conda_image, record_build_failures + , skiplist_leafs, live_logs, config, linter, failed, dag, skip_dependent, subdag, built_recipes + , skipped_recipes, failed_uploads): for recipe, name in recipes: platform = utils.RepoData().native_platform() if not force and do_not_consider_for_additional_platform(recipe_folder, recipe, platform): @@ -488,26 +536,3 @@ def build_recipes(recipe_folder: str, config_path: str, recipes: List[str], # remove traces of the build if not keep_old_work: conda_build_purge() - - if failed or failed_uploads: - logger.error('BUILD SUMMARY: of %s recipes, ' - '%s failed and %s 
were skipped. ' - 'Details of recipes and environments follow.', - len(recipes), len(failed), len(skipped_recipes)) - if built_recipes: - logger.error('BUILD SUMMARY: while the entire build failed, ' - 'the following recipes were built successfully:\n%s', - '\n'.join(built_recipes)) - for recipe in failed: - logger.error('BUILD SUMMARY: FAILED recipe %s', recipe) - for name, dep in skip_dependent.items(): - logger.error('BUILD SUMMARY: SKIPPED recipe %s ' - 'due to failed dependencies %s', name, dep) - if failed_uploads: - logger.error('UPLOAD SUMMARY: the following packages failed to upload:\n%s', - '\n'.join(failed_uploads)) - return False - - logger.info("BUILD SUMMARY: successfully built %s of %s recipes", - len(built_recipes), len(recipes)) - return True From ea7a2266a921adf20acaafef5e7e4bbd782e6b60 Mon Sep 17 00:00:00 2001 From: evidencebp Date: Sun, 1 Dec 2024 18:54:25 +0200 Subject: [PATCH 07/10] bioconda_utils\recipe.py too-many-branches In the class Recipe the method replace had 13 branches and the static method _rewrite_selector_block had 14, while Pylint recommends having no more than 12. I extracted small methods to reduce the complexity. 
--- bioconda_utils/recipe.py | 27 ++++++++++++++++++--------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/bioconda_utils/recipe.py b/bioconda_utils/recipe.py index ef3dbddfed..e774f0e4bd 100644 --- a/bioconda_utils/recipe.py +++ b/bioconda_utils/recipe.py @@ -313,6 +313,18 @@ def _rewrite_selector_block(text, block_top, block_left): # can't handle "[py2k or osx]" style things return None + new_lines = Recipe._compute_new_lines(block_left, variants) + + logger.debug("Replacing: lines %i - %i with %i lines:\n%s\n---\n%s", + block_top, block_top+block_height, len(new_lines), + "\n".join(lines[block_top:block_top+block_height]), + "\n".join(new_lines)) + + lines[block_top:block_top+block_height] = new_lines + return "\n".join(lines) + + @staticmethod + def _compute_new_lines(block_left, variants): new_lines = [] for variant in variants.values(): first = True @@ -322,14 +334,7 @@ def _rewrite_selector_block(text, block_top, block_left): first = False else: new_lines.append("".join((" " * (block_left + 2), line))) - - logger.debug("Replacing: lines %i - %i with %i lines:\n%s\n---\n%s", - block_top, block_top+block_height, len(new_lines), - "\n".join(lines[block_top:block_top+block_height]), - "\n".join(new_lines)) - - lines[block_top:block_top+block_height] = new_lines - return "\n".join(lines) + return new_lines def get_template(self): """Create a Jinja2 template from the current raw recipe""" @@ -636,7 +641,11 @@ def replace(self, before: str, after: str, re_before = re.compile(before_pattern) re_select = re.compile(before_pattern + r".*#.*\[") - # replace within those lines, erroring on "# [asd]" selectors + + return self._handle_replacements(lines, re_before, re_select, after) + + def _handle_replacements(self, lines, re_before, re_select, after): + # replace within those lines, erroring on "# [asd]" selectors replacements = 0 for lineno in sorted(lines): line = self.meta_yaml[lineno] From bc08e5b341e617fce9ba37625ab438acc10a236f Mon Sep 17 00:00:00 
2001
From: evidencebp
Date: Sun, 1 Dec 2024 18:55:17 +0200
Subject: [PATCH 08/10] bioconda_utils\update_pinnings.py too-many-branches

The function "check" had 14 branches, while Pylint recommends
having no more than 12.  I extracted a method to handle the metas.
---
 bioconda_utils/update_pinnings.py | 22 +++++++++++++---------
 1 file changed, 13 insertions(+), 9 deletions(-)

diff --git a/bioconda_utils/update_pinnings.py b/bioconda_utils/update_pinnings.py
index 5d5f77cca9..d5f728e115 100644
--- a/bioconda_utils/update_pinnings.py
+++ b/bioconda_utils/update_pinnings.py
@@ -338,6 +338,18 @@ def check(
         return State.FAIL, recipe
 
     flags = State(0)
+    maybe_bump = _handle_metas(skip_variant_keys, metas, flags)
+    if maybe_bump:
+        # Skip bump if we only add to the build matrix.
+        if will_build_only_missing(metas):
+            flags |= State.BUMPED
+        else:
+            flags |= State.BUMP
+    if not keep_metas:
+        recipe.conda_release()
+    return flags, recipe
+
+def _handle_metas(skip_variant_keys, metas, flags):
     maybe_bump = False
     for meta in metas:
         if meta.skip() or skip_for_variants(meta, skip_variant_keys):
@@ -352,12 +364,4 @@ def check(
             logger.info("Package %s=%s=%s missing!", meta.name(), meta.version(),
                         meta.build_id())
             maybe_bump = True
-    if maybe_bump:
-        # Skip bump if we only add to the build matrix.
-        if will_build_only_missing(metas):
-            flags |= State.BUMPED
-        else:
-            flags |= State.BUMP
-    if not keep_metas:
-        recipe.conda_release()
-    return flags, recipe
+    return maybe_bump

From 71ca27e149677514f6369d7b924c57c1455f5e97 Mon Sep 17 00:00:00 2001
From: evidencebp
Date: Sun, 1 Dec 2024 19:06:45 +0200
Subject: [PATCH 09/10] bioconda_utils\utils.py too-many-branches

The function run had 13 branches, while Pylint recommends having no
more than 12.  I extracted _handle_process to reduce the complexity.
---
 bioconda_utils/utils.py | 36 ++++++++++++++++++++----------------
 1 file changed, 20 insertions(+), 16 deletions(-)

diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py
index 3591faabec..7503dc3470 100644
--- a/bioconda_utils/utils.py
+++ b/bioconda_utils/utils.py
@@ -670,22 +670,7 @@ def handle_output(output_lines):
         else:
             masked_cmds = [do_mask(c) for c in cmds]
 
-        if proc.poll() is None:
-            mylogger.log(loglevel, 'Command closed STDOUT/STDERR but is still running')
-            waitfor = 30
-            waittimes = 5
-            for attempt in range(waittimes):
-                mylogger.log(loglevel, "Waiting %s seconds (%i/%i)", waitfor, attempt+1, waittimes)
-                try:
-                    proc.wait(timeout=waitfor)
-                    break;
-                except sp.TimeoutExpired:
-                    pass
-            else:
-                mylogger.log(loglevel, "Terminating process")
-                proc.kill()
-                proc.wait()
-        returncode = proc.poll()
+        returncode = _handle_process(mylogger, loglevel, proc)
 
         if returncode:
             if not quiet_failure:
@@ -697,6 +682,25 @@ def handle_output(output_lines):
 
     return sp.CompletedProcess(masked_cmds, returncode, stdout=output)
 
+def _handle_process(mylogger, loglevel, proc):
+    if proc.poll() is None:
+        mylogger.log(loglevel, 'Command closed STDOUT/STDERR but is still running')
+        waitfor = 30
+        waittimes = 5
+        for attempt in range(waittimes):
+            mylogger.log(loglevel, "Waiting %s seconds (%i/%i)", waitfor, attempt+1, waittimes)
+            try:
+                proc.wait(timeout=waitfor)
+                break;
+            except sp.TimeoutExpired:
+                pass
+        else:
+            mylogger.log(loglevel, "Terminating process")
+            proc.kill()
+            proc.wait()
+    returncode = proc.poll()
+    return returncode
+
 def envstr(env):
     env = dict(env)

From 1689998861b4a613dc305f7c1b80627b2a376d18 Mon Sep 17 00:00:00 2001
From: evidencebp
Date: Sun, 1 Dec 2024 19:13:40 +0200
Subject: [PATCH 10/10] bioconda_utils\autobump.py too-many-branches

The method apply of the class CreatePullRequest had 14 branches, while
Pylint recommends having no more than 12.  I extracted _handle_open_PRs
to reduce the complexity.
--- bioconda_utils/autobump.py | 54 ++++++++++++++++++++------------------ 1 file changed, 29 insertions(+), 25 deletions(-) diff --git a/bioconda_utils/autobump.py b/bioconda_utils/autobump.py index 936b88a4d9..9f8162bfcc 100644 --- a/bioconda_utils/autobump.py +++ b/bioconda_utils/autobump.py @@ -1163,31 +1163,7 @@ async def apply(self, recipe: Recipe) -> None: # check if we already have an open PR (=> update in progress) pullreqs = await self.ghub.get_prs(from_branch=branch_name, from_user="bioconda") if pullreqs: - if len(pullreqs) > 1: - logger.error("Multiple PRs updating %s: %s", - recipe, - ", ".join(str(pull['number']) for pull in pullreqs)) - for pull in pullreqs: - logger.debug("Found PR %i updating %s: %s", - pull["number"], recipe, pull["title"]) - # update the PR if title or body changed - pull = pullreqs[0] - if body == pull["body"]: - body = None - if title == pull["title"]: - title = None - if not (body is None and title is None): - if await self.ghub.modify_issue(number=pull['number'], body=body, title=title): - logger.info("Updated PR %i updating %s to %s", - pull['number'], recipe, recipe.version) - else: - logger.error("Failed to update PR %i with title=%s and body=%s", - pull['number'], title, body) - else: - logger.debug("Not updating PR %i updating %s - no changes", - pull['number'], recipe) - - raise self.UpdateInProgress(recipe) + title, body = await self._handle_open_PRs(recipe, title, body, pullreqs) # check for matching closed PR (=> update rejected) pullreqs = await self.ghub.get_prs(from_branch=branch_name, state=self.ghub.STATE.closed) @@ -1206,6 +1182,34 @@ async def apply(self, recipe: Recipe) -> None: logger.info("Created PR %i: %s", pull['number'], title) + async def _handle_open_PRs(self, recipe, title, body, pullreqs): + if len(pullreqs) > 1: + logger.error("Multiple PRs updating %s: %s", + recipe, + ", ".join(str(pull['number']) for pull in pullreqs)) + for pull in pullreqs: + logger.debug("Found PR %i updating %s: %s", + 
pull["number"], recipe, pull["title"]) + # update the PR if title or body changed + pull = pullreqs[0] + if body == pull["body"]: + body = None + if title == pull["title"]: + title = None + if not (body is None and title is None): + if await self.ghub.modify_issue(number=pull['number'], body=body, title=title): + logger.info("Updated PR %i updating %s to %s", + pull['number'], recipe, recipe.version) + else: + logger.error("Failed to update PR %i with title=%s and body=%s", + pull['number'], title, body) + else: + logger.debug("Not updating PR %i updating %s - no changes", + pull['number'], recipe) + + raise self.UpdateInProgress(recipe) + return title,body + class MaxUpdates(Filter): """Terminate pipeline after **max_updates** recipes have been updated."""