diff --git a/cachi2/core/package_managers/yarn_classic/main.py b/cachi2/core/package_managers/yarn_classic/main.py
index 593180e6b..3663153eb 100644
--- a/cachi2/core/package_managers/yarn_classic/main.py
+++ b/cachi2/core/package_managers/yarn_classic/main.py
@@ -9,7 +9,7 @@
     run_yarn_cmd,
 )
 from cachi2.core.package_managers.yarn_classic.project import Project
-from cachi2.core.package_managers.yarn_classic.workspaces import extract_workspace_metadata
+from cachi2.core.package_managers.yarn_classic.resolver import resolve_packages
 from cachi2.core.rooted_path import RootedPath

 log = logging.getLogger(__name__)
@@ -26,22 +26,26 @@ def _ensure_mirror_dir_exists(output_dir: RootedPath) -> None:
         output_dir.join_within_root(MIRROR_DIR).path.mkdir(parents=True, exist_ok=True)

     for package in request.yarn_classic_packages:
-        path = request.source_dir.join_within_root(package.path)
+        package_path = request.source_dir.join_within_root(package.path)
         _ensure_mirror_dir_exists(request.output_dir)
-        prefetch_env = _get_prefetch_environment_variables(request.output_dir)
-        _verify_corepack_yarn_version(path, prefetch_env)
-        _fetch_dependencies(path, prefetch_env)
-        # Workspaces metadata is not used at the moment, but will
-        # eventualy be converted into components. Using a noop assertion
-        # to prevent linters from complaining.
-        workspaces = extract_workspace_metadata(package, request.source_dir)
-        assert workspaces is not None  # nosec -- see comment above
+        _resolve_yarn_project(Project.from_source_dir(package_path), request.output_dir)

     return RequestOutput.from_obj_list(
         components, _generate_build_environment_variables(), project_files=[]
     )


+def _resolve_yarn_project(project: Project, output_dir: RootedPath) -> None:
+    """Process a request for a single yarn source directory."""
+    log.info(f"Fetching the yarn dependencies at the subpath {project.source_dir}")
+
+    _verify_repository(project)
+    prefetch_env = _get_prefetch_environment_variables(output_dir)
+    _verify_corepack_yarn_version(project.source_dir, prefetch_env)
+    _fetch_dependencies(project.source_dir, prefetch_env)
+    resolve_packages(project)
+
+
 def _fetch_dependencies(source_dir: RootedPath, env: dict[str, str]) -> None:
     """Fetch dependencies using 'yarn install'.

diff --git a/cachi2/core/package_managers/yarn_classic/project.py b/cachi2/core/package_managers/yarn_classic/project.py
index a1a58e57f..4ee307012 100644
--- a/cachi2/core/package_managers/yarn_classic/project.py
+++ b/cachi2/core/package_managers/yarn_classic/project.py
@@ -93,6 +93,7 @@ def from_file(cls, path: RootedPath) -> "PackageJson":
         return cls(path, package_json_data)


+@dataclass
 class YarnLock(_CommonConfigFile):
     """A yarn.lock file.
@@ -131,7 +132,7 @@ def from_file(cls, path: RootedPath) -> "YarnLock":
                 solution="Please verify the content of the file.",
             )

-        return cls(path, yarn_lockfile.data)
+        return cls(path, yarn_lockfile.data, yarn_lockfile)


 ConfigFile = Union[PackageJson, YarnLock]
diff --git a/cachi2/core/package_managers/yarn_classic/resolver.py b/cachi2/core/package_managers/yarn_classic/resolver.py
new file mode 100644
index 000000000..bb747c68a
--- /dev/null
+++ b/cachi2/core/package_managers/yarn_classic/resolver.py
@@ -0,0 +1,237 @@
+import re
+from itertools import chain
+from pathlib import Path
+from typing import Iterable, Optional, Union
+from urllib.parse import urlparse
+
+from pyarn.lockfile import Package as PYarnPackage
+from pydantic import BaseModel
+
+from cachi2.core.errors import PackageRejected, UnexpectedFormat
+from cachi2.core.package_managers.npm import NPM_REGISTRY_CNAMES
+from cachi2.core.package_managers.yarn_classic.project import PackageJson, Project, YarnLock
+from cachi2.core.package_managers.yarn_classic.workspaces import (
+    Workspace,
+    extract_workspace_metadata,
+)
+from cachi2.core.rooted_path import RootedPath
+
+# https://github.com/yarnpkg/yarn/blob/7cafa512a777048ce0b666080a24e80aae3d66a9/src/resolvers/exotics/git-resolver.js#L15-L17
+GIT_HOSTS = frozenset(("github.com", "gitlab.com", "bitbucket.com", "bitbucket.org"))
+GIT_PATTERN_MATCHERS = (
+    re.compile(r"^git:"),
+    re.compile(r"^git\+.+:"),
+    re.compile(r"^ssh:"),
+    re.compile(r"^https?:.+\.git$"),
+    re.compile(r"^https?:.+\.git#.+"),
+)
+
+
+class _BasePackage(BaseModel):
+    """A base Yarn 1.x package."""
+
+    name: str
+    version: Optional[str] = None
+    integrity: Optional[str] = None
+    dev: bool = False
+
+
+class _UrlMixin(BaseModel):
+    url: str
+
+
+class _RelpathMixin(BaseModel):
+    relpath: Path
+
+
+class RegistryPackage(_BasePackage, _UrlMixin):
+    """A Yarn 1.x package from the registry."""
+
+
+class GitPackage(_BasePackage, _UrlMixin):
+    """A Yarn 1.x package from a git repo."""
+
+
+class UrlPackage(_BasePackage, _UrlMixin):
+    """A Yarn 1.x package from a http/https URL."""
+
+
+class FilePackage(_BasePackage, _RelpathMixin):
+    """A Yarn 1.x package from a local file path."""
+
+
+class WorkspacePackage(_BasePackage, _RelpathMixin):
+    """A Yarn 1.x local workspace package."""
+
+
+class LinkPackage(_BasePackage, _RelpathMixin):
+    """A Yarn 1.x local link package."""
+
+
+YarnClassicPackage = Union[
+    FilePackage,
+    GitPackage,
+    LinkPackage,
+    RegistryPackage,
+    UrlPackage,
+    WorkspacePackage,
+]
+
+
+class _YarnClassicPackageFactory:
+    def __init__(self, source_dir: RootedPath):
+        self._source_dir = source_dir
+
+    def create_package_from_pyarn_package(self, package: PYarnPackage) -> YarnClassicPackage:
+        def assert_package_has_relative_path(package: PYarnPackage) -> None:
+            if package.path and Path(package.path).is_absolute():
+                raise PackageRejected(
+                    (
+                        f"The package {package.name}@{package.version} has an absolute path "
+                        f"({package.path}), which is not permitted."
+                    ),
+                    solution="Ensure that file/link packages in yarn.lock do not have absolute paths.",
+                )
+
+        if _is_from_npm_registry(package.url):
+            return RegistryPackage(
+                name=package.name,
+                version=package.version,
+                integrity=package.checksum,
+                url=package.url,
+            )
+        elif package.path is not None:
+            # Ensure path is not absolute
+            assert_package_has_relative_path(package)
+            # Ensure path is within the repository root
+            path = self._source_dir.join_within_root(package.path)
+            # File packages have a url, whereas link packages do not
+            if package.url:
+                return FilePackage(
+                    name=package.name,
+                    version=package.version,
+                    relpath=path.subpath_from_root,
+                    integrity=package.checksum,
+                )
+            return LinkPackage(
+                name=package.name,
+                version=package.version,
+                relpath=path.subpath_from_root,
+            )
+        elif _is_git_url(package.url):
+            return GitPackage(
+                name=package.name,
+                version=package.version,
+                url=package.url,
+            )
+        elif _is_tarball_url(package.url):
+            return UrlPackage(
+                name=package.name,
+                version=package.version,
+                url=package.url,
+                integrity=package.checksum,
+            )
+        else:
+            raise UnexpectedFormat(
+                (
+                    "Cachi2 could not determine the package type for the following package in "
+                    f"yarn.lock: {vars(package)}"
+                ),
+                solution=(
+                    "Ensure yarn.lock is well-formed and if so, report this error to the Cachi2 team"
+                ),
+            )
+
+
+def _is_tarball_url(url: str) -> bool:
+    """Return True if a package URL is a tarball URL."""
+    # Parse the URL to extract components
+    parsed_url = urlparse(url)
+
+    # https://github.com/yarnpkg/yarn/blob/7cafa512a777048ce0b666080a24e80aae3d66a9/src/resolvers/exotics/tarball-resolver.js#L34
+    if parsed_url.scheme not in {"http", "https"}:
+        return False
+
+    # https://github.com/yarnpkg/yarn/blob/7cafa512a777048ce0b666080a24e80aae3d66a9/src/resolvers/exotics/tarball-resolver.js#L40
+    # https://github.com/yarnpkg/yarn/blob/7cafa512a777048ce0b666080a24e80aae3d66a9/src/resolvers/exotics/bitbucket-resolver.js#L11
+    # https://github.com/yarnpkg/yarn/blob/7cafa512a777048ce0b666080a24e80aae3d66a9/src/resolvers/exotics/gitlab-resolver.js#L10C10-L10C23
+    if parsed_url.path.endswith((".tar", ".tar.gz", ".tgz")):
+        return True
+
+    # https://github.com/yarnpkg/yarn/blob/7cafa512a777048ce0b666080a24e80aae3d66a9/src/resolvers/exotics/github-resolver.js#L24
+    if parsed_url.hostname == "codeload.github.com" and "tar.gz" in parsed_url.path:
+        return True
+
+    return False
+
+
+def _is_git_url(url: str) -> bool:
+    """Return True if a package URL is a git URL."""
+    # https://github.com/yarnpkg/yarn/blob/7cafa512a777048ce0b666080a24e80aae3d66a9/src/resolvers/exotics/git-resolver.js#L32
+    if any(matcher.match(url) for matcher in GIT_PATTERN_MATCHERS):
+        return True
+
+    # https://github.com/yarnpkg/yarn/blob/7cafa512a777048ce0b666080a24e80aae3d66a9/src/resolvers/exotics/git-resolver.js#L39
+    parsed_url = urlparse(url)
+    if parsed_url.hostname in GIT_HOSTS:
+        path_segments = [segment for segment in parsed_url.path.split("/") if segment]
+        # Return True if the path has exactly two segments (e.g. org/repo, not org/repo/file.tar.gz)
+        return len(path_segments) == 2
+
+    return False
+
+
+def _is_from_npm_registry(url: str) -> bool:
+    """Return True if a package URL is from the NPM or Yarn registry."""
+    return urlparse(url).hostname in NPM_REGISTRY_CNAMES
+
+
+def _get_packages_from_lockfile(
+    source_dir: RootedPath, yarn_lock: YarnLock
+) -> list[YarnClassicPackage]:
+    """Return a list of Packages for all dependencies in yarn.lock."""
+    pyarn_packages: list[PYarnPackage] = yarn_lock.yarn_lockfile.packages()
+    package_factory = _YarnClassicPackageFactory(source_dir)
+
+    return [
+        package_factory.create_package_from_pyarn_package(package) for package in pyarn_packages
+    ]
+
+
+def _get_main_package(package_json: PackageJson) -> WorkspacePackage:
+    """Return a WorkspacePackage for the main package in package.json."""
+    if "name" not in package_json._data:
+        raise PackageRejected(
+            f"The package.json file located at {package_json.path.path} is missing the name field",
+            solution="Ensure the package.json file has a valid name.",
+        )
+    return WorkspacePackage(
+        name=package_json.data["name"],
+        version=package_json.data.get("version"),
+        relpath=package_json.path.subpath_from_root.parent,
+    )
+
+
+def _get_workspace_packages(
+    source_dir: RootedPath, workspaces: list[Workspace]
+) -> list[WorkspacePackage]:
+    """Return a WorkspacePackage for each Workspace."""
+    return [
+        WorkspacePackage(
+            name=ws.package_contents["name"],
+            version=ws.package_contents.get("version"),
+            relpath=ws.path.relative_to(source_dir.path),
+        )
+        for ws in workspaces
+    ]
+
+
+def resolve_packages(project: Project) -> Iterable[YarnClassicPackage]:
+    """Return a list of Packages corresponding to all project dependencies."""
+    workspaces = extract_workspace_metadata(project.source_dir)
+    yarn_lock = YarnLock.from_file(project.source_dir.join_within_root("yarn.lock"))
+    return chain(
+        [_get_main_package(project.package_json)],
+        _get_workspace_packages(project.source_dir, workspaces),
+        _get_packages_from_lockfile(project.source_dir, yarn_lock),
+    )
diff --git a/cachi2/core/package_managers/yarn_classic/workspaces.py b/cachi2/core/package_managers/yarn_classic/workspaces.py
index 314bcc82c..75253253d 100644
--- a/cachi2/core/package_managers/yarn_classic/workspaces.py
+++ b/cachi2/core/package_managers/yarn_classic/workspaces.py
@@ -6,7 +6,6 @@
 import pydantic

 from cachi2.core.errors import PackageRejected
-from cachi2.core.models.input import YarnClassicPackageInput
 from cachi2.core.rooted_path import PathOutsideRoot, RootedPath


@@ -93,21 +92,20 @@ def _read_package_from(path: RootedPath) -> dict[str, Any]:


 def extract_workspace_metadata(
-    package: YarnClassicPackageInput,
-    source_dir: RootedPath,
+    package_path: RootedPath,
 ) -> list[Workspace]:
     """Extract workspace metadata from a package."""
-    processed_package = _read_package_from(source_dir.join_within_root(package.path))
+    processed_package = _read_package_from(package_path)
     workspaces_globs = _extract_workspaces_globs(processed_package)
-    workspaces_paths = _get_workspace_paths(workspaces_globs, source_dir)
-    ensure_no_path_leads_out(workspaces_paths, source_dir)
+    workspaces_paths = _get_workspace_paths(workspaces_globs, package_path)
+    ensure_no_path_leads_out(workspaces_paths, package_path)
     _ensure_workspaces_are_well_formed(workspaces_paths)
     parsed_workspaces = []
     for wp in workspaces_paths:
         parsed_workspaces.append(
             Workspace(
                 path=wp,
-                package_contents=_read_package_from(source_dir.join_within_root(wp)),
+                package_contents=_read_package_from(package_path.join_within_root(wp)),
             )
         )
     return parsed_workspaces
diff --git a/pyproject.toml b/pyproject.toml
index 5593db0fc..ea75508c8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -28,6 +28,7 @@ dependencies = [
     "gitpython",
     "packageurl-python",
     "packaging",
+    "pyarn",
     "pydantic",
     "pypi-simple",
     "pyarn",
@@ -81,6 +82,11 @@ disallow_untyped_defs = true
 disallow_incomplete_defs = true
 disallow_untyped_decorators = true

+# TODO: pyarn does not currently have type annotations
+[[tool.mypy.overrides]]
+module = "pyarn.*"
+ignore_missing_imports = true
+
 [tool.coverage.report]
 skip_covered = true
 show_missing = true
diff --git a/requirements-extras.txt b/requirements-extras.txt
index e54e7bcd2..0b3eb2f54 100644
--- a/requirements-extras.txt
+++ b/requirements-extras.txt
@@ -786,9 +786,9 @@ propcache==0.2.0 \
     --hash=sha256:fc2db02409338bf36590aa985a461b2c96fce91f8e7e0f14c50c5fcc4f229016 \
     --hash=sha256:ffcad6c564fe6b9b8916c1aefbb37a362deebf9394bd2974e9d84232e3e08504
     # via yarl
-pyarn==0.2.0 \
-    --hash=sha256:542ff739af2b81a1200776eff2b4d2566a330846decbd0f815999b196d7b067d \
-    --hash=sha256:d06e8b79bb830f142187b57ee664dc0104f658efdb2b2bae7ed99eaf7746eb1a
+pyarn==0.3.0 \
+    --hash=sha256:8f799d94a9b2fd4bd54185149d8745187c7ededcde98576ff70bf0823fd8a710 \
+    --hash=sha256:e8eaed3ae80c1891a8ea7f6b7538c049a79dd7e7d3b4279f275699b3477b38c2
     # via cachi2 (pyproject.toml)
 pycodestyle==2.12.1 \
     --hash=sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3 \
diff --git a/requirements.txt b/requirements.txt
index 43688f0f6..397f06075 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -593,9 +593,9 @@ propcache==0.2.0 \
     --hash=sha256:fc2db02409338bf36590aa985a461b2c96fce91f8e7e0f14c50c5fcc4f229016 \
     --hash=sha256:ffcad6c564fe6b9b8916c1aefbb37a362deebf9394bd2974e9d84232e3e08504
     # via yarl
-pyarn==0.2.0 \
-    --hash=sha256:542ff739af2b81a1200776eff2b4d2566a330846decbd0f815999b196d7b067d \
-    --hash=sha256:d06e8b79bb830f142187b57ee664dc0104f658efdb2b2bae7ed99eaf7746eb1a
+pyarn==0.3.0 \
+    --hash=sha256:8f799d94a9b2fd4bd54185149d8745187c7ededcde98576ff70bf0823fd8a710 \
+    --hash=sha256:e8eaed3ae80c1891a8ea7f6b7538c049a79dd7e7d3b4279f275699b3477b38c2
     # via cachi2 (pyproject.toml)
 pydantic==2.9.1 \
     --hash=sha256:1363c7d975c7036df0db2b4a61f2e062fbc0aa5ab5f2772e0ffc7191a4f4bce2 \
diff --git a/tests/integration/test_yarn_classic.py b/tests/integration/test_yarn_classic.py
index 3d4905800..c53216d2d 100644
--- a/tests/integration/test_yarn_classic.py
+++ b/tests/integration/test_yarn_classic.py
@@ -113,7 +113,7 @@ def test_yarn_classic_packages(
         pytest.param(
             utils.TestParameters(
                 repo="https://github.com/cachito-testing/cachi2-yarn.git",
-                ref="67679cb740122cfa6c17238c7a1ff9cfcdb875f3",
+                ref="valid_yarn_all_dependency_types",
                 packages=({"path": ".", "type": "yarn-classic"},),
                 flags=["--dev-package-managers"],
                 check_vendor_checksums=False,
diff --git a/tests/unit/package_managers/yarn_classic/test_main.py b/tests/unit/package_managers/yarn_classic/test_main.py
index 6bd0326e6..7139b193d 100644
--- a/tests/unit/package_managers/yarn_classic/test_main.py
+++ b/tests/unit/package_managers/yarn_classic/test_main.py
@@ -1,4 +1,6 @@
+import json
 from pathlib import Path
+from typing import Any
 from unittest import mock

 import pytest
@@ -11,12 +13,22 @@
     _fetch_dependencies,
     _generate_build_environment_variables,
     _get_prefetch_environment_variables,
+    _resolve_yarn_project,
     _verify_corepack_yarn_version,
     fetch_yarn_source,
 )
+from cachi2.core.package_managers.yarn_classic.project import Project
 from cachi2.core.rooted_path import RootedPath


+def _prepare_project(source_dir: RootedPath, package_json: dict[str, Any]) -> Project:
+    package_json_path = source_dir.join_within_root("package.json")
+    with open(package_json_path.path, "w") as f:
+        json.dump(package_json, f)
+
+    return Project.from_source_dir(source_dir)
+
+
 @pytest.fixture(scope="module")
 def yarn_classic_env_variables() -> list[EnvironmentVariable]:
     return [
@@ -50,15 +62,11 @@ def test_generate_build_environment_variables(
     ],
     indirect=["input_request"],
 )
-@mock.patch("cachi2.core.package_managers.yarn_classic.main._verify_corepack_yarn_version")
-@mock.patch("cachi2.core.package_managers.yarn_classic.main._get_prefetch_environment_variables")
-@mock.patch("cachi2.core.package_managers.yarn_classic.main._fetch_dependencies")
-@mock.patch("cachi2.core.package_managers.yarn_classic.main.extract_workspace_metadata")
+@mock.patch("cachi2.core.package_managers.yarn_classic.main._resolve_yarn_project")
+@mock.patch("cachi2.core.package_managers.yarn_classic.main.Project.from_source_dir")
 def test_fetch_yarn_source(
-    mock_extract_metadata: mock.Mock,
-    mock_fetch_dependencies: mock.Mock,
-    mock_prefetch_env_vars: mock.Mock,
-    mock_verify_yarn_version: mock.Mock,
+    mock_create_project: mock.Mock,
+    mock_resolve_yarn: mock.Mock,
     input_request: Request,
     yarn_classic_env_variables: list[EnvironmentVariable],
     components: list[Component],
@@ -67,24 +75,50 @@
         components=components,
         build_config=BuildConfig(environment_variables=yarn_classic_env_variables),
     )
+    package_dirs = [
+        input_request.source_dir.join_within_root(p.path) for p in input_request.packages
+    ]
+    projects = [_prepare_project(path, {}) for path in package_dirs]
+    mock_create_project.side_effect = projects

     output = fetch_yarn_source(input_request)

-    mock_prefetch_env_vars.assert_has_calls(
-        [mock.call(input_request.output_dir) for _ in input_request.packages]
-    )
-
-    calls = []
-    for package in input_request.packages:
-        package_path = input_request.source_dir.join_within_root(package.path)
-        calls.append(mock.call(package_path, mock_prefetch_env_vars(input_request.output_dir)))
-    mock_verify_yarn_version.assert_has_calls(calls)
-    mock_fetch_dependencies.assert_has_calls(calls)
+    mock_create_project.assert_has_calls([mock.call(path) for path in package_dirs])
+    mock_resolve_yarn.assert_has_calls([mock.call(p, input_request.output_dir) for p in projects])

     assert input_request.output_dir.join_within_root("deps/yarn-classic").path.exists()
     assert output == expected_output


+@mock.patch("cachi2.core.package_managers.yarn_classic.main.resolve_packages")
+@mock.patch("cachi2.core.package_managers.yarn_classic.main._verify_corepack_yarn_version")
+@mock.patch("cachi2.core.package_managers.yarn_classic.main._get_prefetch_environment_variables")
+@mock.patch("cachi2.core.package_managers.yarn_classic.main._fetch_dependencies")
+@mock.patch("cachi2.core.package_managers.yarn_classic.main._verify_repository")
+def test_resolve_yarn_project(
+    mock_verify_repository: mock.Mock,
+    mock_fetch_dependencies: mock.Mock,
+    mock_prefetch_env_vars: mock.Mock,
+    mock_verify_yarn_version: mock.Mock,
+    mock_resolve_packages: mock.Mock,
+    rooted_tmp_path: RootedPath,
+) -> None:
+    project = _prepare_project(rooted_tmp_path, {})
+    output_dir = rooted_tmp_path.join_within_root("output")
+
+    _resolve_yarn_project(project, output_dir)
+
+    mock_verify_repository.assert_called_once_with(project)
+    mock_prefetch_env_vars.assert_called_once_with(output_dir)
+    mock_verify_yarn_version.assert_called_once_with(
+        project.source_dir, mock_prefetch_env_vars.return_value
+    )
+    mock_fetch_dependencies.assert_called_once_with(
+        project.source_dir, mock_prefetch_env_vars.return_value
+    )
+    mock_resolve_packages.assert_called_once_with(project)
+
+
 @mock.patch("cachi2.core.package_managers.yarn_classic.main.run_yarn_cmd")
 def test_fetch_dependencies(mock_run_yarn_cmd: mock.Mock, tmp_path: Path) -> None:
     env = {"foo": "bar"}
diff --git a/tests/unit/package_managers/yarn_classic/test_resolver.py b/tests/unit/package_managers/yarn_classic/test_resolver.py
new file mode 100644
index 000000000..1b5ec44b2
--- /dev/null
+++ b/tests/unit/package_managers/yarn_classic/test_resolver.py
@@ -0,0 +1,341 @@
+import re
+from pathlib import Path
+from unittest import mock
+
+import pytest
+from pyarn.lockfile import Package as PYarnPackage
+
+from cachi2.core.errors import PackageRejected, UnexpectedFormat
+from cachi2.core.package_managers.yarn_classic.project import PackageJson
+from cachi2.core.package_managers.yarn_classic.resolver import (
+    FilePackage,
+    GitPackage,
+    LinkPackage,
+    RegistryPackage,
+    UrlPackage,
+    WorkspacePackage,
+    YarnClassicPackage,
+    _get_main_package,
+    _get_packages_from_lockfile,
+    _get_workspace_packages,
+    _is_from_npm_registry,
+    _is_git_url,
+    _is_tarball_url,
+    _YarnClassicPackageFactory,
+    resolve_packages,
+)
+from cachi2.core.package_managers.yarn_classic.workspaces import Workspace
+from cachi2.core.rooted_path import PathOutsideRoot, RootedPath
+
+VALID_GIT_URLS = [
+    "git://git.host.com/some/path",
+    "ssh://git.host.com/some/path",
+    "git+http://git.host.com/some/path",
+    "git+https://git.host.com/some/path",
+    "git+ssh://git.host.com/some/path",
+    "git+file://git.host.com/some/path",
+    "git+file://git.host.com/some/path",
+    "http://git.host.com/some/path.git",
+    "https://git.host.com/some/path.git",
+    "http://git.host.com/some/path.git#fffffff",
+    "https://github.com/some/path",
+    "https://gitlab.com/some/path",
+    "https://bitbucket.com/some/path",
+    "https://bitbucket.org/some/path",
+]
+VALID_TARBALL_URLS = [
+    "https://foo.com/bar.tar.gz",
+    "https://foo.com/bar.tgz",
+    "https://foo.com/bar.tar",
+    "http://foo.com/bar.tar.gz",
+    "http://foo.com/bar.tgz",
+    "http://foo.com/bar.tar",
+    "https://codeload.github.com/org/foo/tar.gz/fffffff",
+]
+INVALID_GIT_URLS = [
+    "https://github.com/some/path/file",
+    "ftp://foo.com/bar.tar",
+    "https://foo.com/bar",
+    "https://foo.com/bar.txt",
+    *VALID_TARBALL_URLS,
+]
+INVALID_TARBALL_URLS = [
+    "ftp://foo.com/bar.tar",
+    "git+https://git.host.com/some/path",
+    "https://foo.com/bar",
+    "https://foo.com/bar.txt",
+    *VALID_GIT_URLS,
+]
+
+
+@pytest.mark.parametrize("url", VALID_TARBALL_URLS)
+def test__is_tarball_url_can_parse_correct_tarball_urls(url: str) -> None:
+    assert _is_tarball_url(url)
+
+
+@pytest.mark.parametrize("url", INVALID_TARBALL_URLS)
+def test__is_tarball_url_rejects_incorrect_tarball_urls(url: str) -> None:
+    assert not _is_tarball_url(url)
+
+
+@pytest.mark.parametrize("url", VALID_GIT_URLS)
+def test__is_git_url_can_parse_correct_git_urls(url: str) -> None:
+    assert _is_git_url(url)
+
+
+@pytest.mark.parametrize("url", INVALID_GIT_URLS)
+def test__is_git_url_rejects_incorrect_git_urls(url: str) -> None:
+    assert not _is_git_url(url)
+
+
+@pytest.mark.parametrize(
+    "url",
+    [
+        "https://registry.npmjs.org/chai/-/chai-4.2.0.tgz",
+        "https://registry.yarnpkg.com/chai/-/chai-4.2.0.tgz",
+    ],
+)
+def test__is_from_npm_registry_can_parse_correct_registry_urls(url: str) -> None:
+    assert _is_from_npm_registry(url)
+
+
+def test__is_from_npm_registry_can_parse_incorrect_registry_urls() -> None:
+    assert not _is_from_npm_registry("https://example.org/fecha.tar.gz")
+
+
+@pytest.mark.parametrize(
+    "pyarn_package, expected_package",
+    [
+        (
+            PYarnPackage(
+                name="foo",
+                version="1.0.0",
+                url="https://registry.yarnpkg.com/foo/-/foo-1.0.0.tgz#fffffff",
+                checksum="sha512-fffffff",
+            ),
+            RegistryPackage(
+                name="foo",
+                version="1.0.0",
+                url="https://registry.yarnpkg.com/foo/-/foo-1.0.0.tgz#fffffff",
+                dev=False,
+                integrity="sha512-fffffff",
+            ),
+        ),
+        (
+            PYarnPackage(
+                name="foo",
+                version="1.0.0",
+                url="./path/foo-1.0.0.tgz#fffffff",
+                path="path/foo-1.0.0.tgz",
+            ),
+            FilePackage(
+                name="foo",
+                version="1.0.0",
+                dev=False,
+                relpath=Path("path/foo-1.0.0.tgz"),
+            ),
+        ),
+        (
+            PYarnPackage(
+                name="foo",
+                version="0.0.0",
+                path="link",
+            ),
+            LinkPackage(
+                name="foo",
+                version="0.0.0",
+                dev=False,
+                relpath=Path("link"),
+            ),
+        ),
+        (
+            PYarnPackage(
+                name="foo",
+                version="1.0.0",
+                url="https://github.com/org/foo.git#fffffff",
+            ),
+            GitPackage(
+                name="foo",
+                version="1.0.0",
+                dev=False,
+                url="https://github.com/org/foo.git#fffffff",
+            ),
+        ),
+        (
+            PYarnPackage(
+                name="foo",
+                version="1.0.0",
+                url="https://example.com/foo-1.0.0.tgz",
+            ),
+            UrlPackage(
+                name="foo",
+                version="1.0.0",
+                dev=False,
+                url="https://example.com/foo-1.0.0.tgz",
+            ),
+        ),
+    ],
+)
+def test_create_package_from_pyarn_package(
+    pyarn_package: PYarnPackage, expected_package: YarnClassicPackage, rooted_tmp_path: RootedPath
+) -> None:
+    package_factory = _YarnClassicPackageFactory(rooted_tmp_path)
+    assert package_factory.create_package_from_pyarn_package(pyarn_package) == expected_package
+
+
+def test_create_package_from_pyarn_package_fail_absolute_path(rooted_tmp_path: RootedPath) -> None:
+    pyarn_package = PYarnPackage(
+        name="foo",
+        version="1.0.0",
+        path="/root/some/path",
+    )
+    error_msg = (
+        f"The package {pyarn_package.name}@{pyarn_package.version} has an absolute path "
+        f"({pyarn_package.path}), which is not permitted."
+    )
+
+    package_factory = _YarnClassicPackageFactory(rooted_tmp_path)
+    with pytest.raises(PackageRejected, match=re.escape(error_msg)):
+        package_factory.create_package_from_pyarn_package(pyarn_package)
+
+
+def test_create_package_from_pyarn_package_fail_path_outside_root(
+    rooted_tmp_path: RootedPath,
+) -> None:
+    pyarn_package = PYarnPackage(
+        name="foo",
+        version="1.0.0",
+        path="../path/outside/root",
+    )
+
+    package_factory = _YarnClassicPackageFactory(rooted_tmp_path)
+    with pytest.raises(PathOutsideRoot):
+        package_factory.create_package_from_pyarn_package(pyarn_package)
+
+
+def test_create_package_from_pyarn_package_fail_unexpected_format(
+    rooted_tmp_path: RootedPath,
+) -> None:
+    pyarn_package = PYarnPackage(
+        name="foo",
+        version="1.0.0",
+        url="ftp://some-tarball.tgz",
+    )
+
+    package_factory = _YarnClassicPackageFactory(rooted_tmp_path)
+    with pytest.raises(UnexpectedFormat):
+        package_factory.create_package_from_pyarn_package(pyarn_package)
+
+
+@mock.patch(
+    "cachi2.core.package_managers.yarn_classic.resolver._YarnClassicPackageFactory.create_package_from_pyarn_package"
+)
+def test__get_packages_from_lockfile(
+    mock_create_package: mock.Mock, rooted_tmp_path: RootedPath
+) -> None:
+
+    # Setup lockfile instance
+    mock_pyarn_lockfile = mock.Mock()
+    mock_yarn_lock = mock.Mock(yarn_lockfile=mock_pyarn_lockfile)
+    mock_pyarn_package_1 = mock.Mock()
+    mock_pyarn_package_2 = mock.Mock()
+    mock_pyarn_lockfile.packages.return_value = [mock_pyarn_package_1, mock_pyarn_package_2]
+
+    # Setup classifier
+    mock_package_1 = mock.Mock()
+    mock_package_2 = mock.Mock()
+    mock_create_package.side_effect = [mock_package_1, mock_package_2]
+    create_package_expected_calls = [
+        mock.call(mock_pyarn_package_1),
+        mock.call(mock_pyarn_package_2),
+    ]
+
+    output = _get_packages_from_lockfile(rooted_tmp_path, mock_yarn_lock)
+
+    mock_pyarn_lockfile.packages.assert_called_once()
+    mock_create_package.assert_has_calls(create_package_expected_calls)
+    assert output == [mock_package_1, mock_package_2]
+
+
+@mock.patch("cachi2.core.package_managers.yarn_classic.project.YarnLock.from_file")
+@mock.patch("cachi2.core.package_managers.yarn_classic.resolver._get_workspace_packages")
+@mock.patch("cachi2.core.package_managers.yarn_classic.resolver.extract_workspace_metadata")
+@mock.patch("cachi2.core.package_managers.yarn_classic.resolver._get_packages_from_lockfile")
+@mock.patch("cachi2.core.package_managers.yarn_classic.resolver._get_main_package")
+def test_resolve_packages(
+    mock_get_main_package: mock.Mock,
+    mock_get_lockfile_packages: mock.Mock,
+    mock_extract_workspaces: mock.Mock,
+    mock_get_workspace_packages: mock.Mock,
+    mock_get_yarn_lock: mock.Mock,
+    rooted_tmp_path: RootedPath,
+) -> None:
+    project = mock.Mock(source_dir=rooted_tmp_path)
+    yarn_lock_path = rooted_tmp_path.join_within_root("yarn.lock")
+
+    main_package = mock.Mock()
+    workspace_packages = [mock.Mock()]
+    lockfile_packages = [mock.Mock(), mock.Mock()]
+    expected_output = [main_package, *workspace_packages, *lockfile_packages]
+
+    mock_get_main_package.return_value = main_package
+    mock_get_lockfile_packages.return_value = lockfile_packages
+    mock_get_workspace_packages.return_value = workspace_packages
+
+    output = resolve_packages(project)
+    mock_extract_workspaces.assert_called_once_with(rooted_tmp_path)
+    mock_get_yarn_lock.assert_called_once_with(yarn_lock_path)
+    mock_get_main_package.assert_called_once_with(project.package_json)
+    mock_get_workspace_packages.assert_called_once_with(
+        rooted_tmp_path, mock_extract_workspaces.return_value
+    )
+    mock_get_lockfile_packages.assert_called_once_with(
+        rooted_tmp_path, mock_get_yarn_lock.return_value
+    )
+    assert list(output) == expected_output
+
+
+def test__get_main_package(rooted_tmp_path: RootedPath) -> None:
+    package_json = PackageJson(
+        _path=rooted_tmp_path.join_within_root("package.json"),
+        _data={"name": "foo", "version": "1.0.0"},
+    )
+    expected_output = WorkspacePackage(
+        name="foo",
+        version="1.0.0",
+        relpath=rooted_tmp_path.subpath_from_root,
+    )
+
+    output = _get_main_package(package_json)
+    assert output == expected_output
+
+
+def test__get_main_package_no_name(rooted_tmp_path: RootedPath) -> None:
+    package_json = PackageJson(
+        _path=rooted_tmp_path.join_within_root("package.json"),
+        _data={},
+    )
+    error_msg = (
+        f"The package.json file located at {package_json._path.path} is missing the name field"
+    )
+
+    with pytest.raises(PackageRejected, match=error_msg):
+        _get_main_package(package_json)
+
+
+def test__get_workspace_packages(rooted_tmp_path: RootedPath) -> None:
+    workspace_path = rooted_tmp_path.join_within_root("foo").path
+    workspace = Workspace(
+        path=workspace_path,
+        package_contents={"name": "foo", "version": "1.0.0"},
+    )
+    expected = [
+        WorkspacePackage(
+            name="foo",
+            version="1.0.0",
+            relpath=workspace_path.relative_to(rooted_tmp_path.path),
+        )
+    ]
+
+    output = _get_workspace_packages(rooted_tmp_path, [workspace])
+    assert output == expected
diff --git a/tests/unit/package_managers/yarn_classic/test_workspaces.py b/tests/unit/package_managers/yarn_classic/test_workspaces.py
index 778e357c2..7d24af4f4 100644
--- a/tests/unit/package_managers/yarn_classic/test_workspaces.py
+++ b/tests/unit/package_managers/yarn_classic/test_workspaces.py
@@ -4,7 +4,6 @@
 import pytest

 from cachi2.core.errors import PackageRejected
-from cachi2.core.models.input import YarnClassicPackageInput
 from cachi2.core.package_managers.yarn_classic.workspaces import (
     Workspace,
     _extract_workspaces_globs,
@@ -20,13 +19,12 @@ def test_packages_with_workspaces_outside_source_dir_are_rejected(
     mock_get_ws_paths: mock.Mock,
     mock_read_package_from: mock.Mock,
 ) -> None:
-    package = YarnClassicPackageInput(type="yarn-classic", path=".")
     mock_read_package_from.return_value = {"workspaces": ["../../usr"]}
     mock_get_ws_paths.return_value = [Path("/tmp/foo/bar"), Path("/usr")]
-    source_dir = RootedPath("/tmp/foo")
+    package_path = RootedPath("/tmp/foo")

     with pytest.raises(PackageRejected):
-        extract_workspace_metadata(package, source_dir=source_dir)
+        extract_workspace_metadata(package_path)


 @mock.patch("cachi2.core.package_managers.yarn_classic.workspaces._read_package_from")
@@ -39,19 +37,17 @@ def test_workspaces_could_be_parsed(
     mock_get_ws_paths: mock.Mock,
     mock_read_package_from: mock.Mock,
 ) -> None:
-    package = YarnClassicPackageInput(type="yarn-classic", path=".")
     mock_read_package_from.side_effect = [{"workspaces": ["quux"]}, {"name": "inner_package"}]
     mock_get_ws_paths.return_value = [Path("/tmp/foo/bar")]
-    source_dir = RootedPath("/tmp/foo")
+    package_path = RootedPath("/tmp/foo")

     expected_result = [
         Workspace(
             path="/tmp/foo/bar",
-            package=YarnClassicPackageInput(type="yarn-classic", path=Path("bar")),
             package_contents={"name": "inner_package"},
         ),
     ]
-    result = extract_workspace_metadata(package, source_dir=source_dir)
+    result = extract_workspace_metadata(package_path)

     assert result == expected_result


@@ -81,10 +77,26 @@ def test_extracting_workspace_globs_works_for_all_types_of_workspaces(
     assert expected == result

-def test_workspace_paths_could_be_resolved(rooted_tmp_path: RootedPath) -> None:
-    expected = rooted_tmp_path.path / "foo"
-    expected.mkdir()
+@pytest.mark.parametrize(
+    "package_relpath",
+    [
+        pytest.param(
+            ".",
+            id="workspace_root_is_source_root",
+        ),
+        pytest.param(
+            "src",
+            id="workspace_root_is_not_source_root",
+        ),
+    ],
+)
+def test_workspace_paths_could_be_resolved(
+    package_relpath: str, rooted_tmp_path: RootedPath
+) -> None:
+    package_path = rooted_tmp_path.join_within_root(package_relpath)
+    workspace_path = package_path.join_within_root("foo")
+    workspace_path.path.mkdir(parents=True)

-    result = list(_get_workspace_paths(["foo"], rooted_tmp_path))
+    result = list(_get_workspace_paths(["foo"], package_path))

-    assert result == [expected]
+    assert result == [workspace_path.path]
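
Illustrative note (not part of the patch): the sketch below mirrors the unit tests above to show how the new resolver classifies yarn.lock entries once this branch is installed. The /tmp/project path and the package data are hypothetical; the classes and the factory come from the resolver module added by this diff.

    # Minimal sketch, assuming this branch of cachi2 is importable.
    from pyarn.lockfile import Package as PYarnPackage

    from cachi2.core.package_managers.yarn_classic.resolver import _YarnClassicPackageFactory
    from cachi2.core.rooted_path import RootedPath

    # Hypothetical source directory; only used as the root that relative paths must stay under.
    factory = _YarnClassicPackageFactory(RootedPath("/tmp/project"))

    # A registry URL is classified as a RegistryPackage.
    registry_dep = PYarnPackage(
        name="chai",
        version="4.2.0",
        url="https://registry.yarnpkg.com/chai/-/chai-4.2.0.tgz#fffffff",
        checksum="sha512-fffffff",
    )
    print(factory.create_package_from_pyarn_package(registry_dep))

    # A git URL (no local path) is classified as a GitPackage.
    git_dep = PYarnPackage(
        name="foo",
        version="1.0.0",
        url="https://github.com/org/foo.git#fffffff",
    )
    print(factory.create_package_from_pyarn_package(git_dep))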