For 1.9.0 release #1096

Merged · 11 commits · Dec 23, 2024
12 changes: 10 additions & 2 deletions .github/workflows/complementary-config-test.yaml

```diff
@@ -4,7 +4,8 @@ name: Complementary config test
 on:
   pull_request:
     branches:
-      - 'master'
+      - 'develop-1.9'
+      - 'develop'
     paths:
       - '**'
       - '!docs/**'
@@ -16,7 +17,8 @@ on:
 
   push:
     branches:
-      - 'master'
+      - 'develop-1.9'
+      - 'develop'
     paths:
       - '**'
       - '!docs/**'
@@ -30,6 +32,12 @@ env:
   ORG: opendatacube
   IMAGE: ows
 
+# When a PR is updated, cancel the jobs from the previous version. Merges
+# do not define head_ref, so use run_id to never cancel those jobs.
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 jobs:
   dea-config:
     runs-on: ubuntu-latest
```
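This `concurrency` block is added to every workflow in this PR (it implements the "Active CI jobs are now cancelled on PR update" item in HISTORY.rst below). The detail worth noting is the `||` fallback: `github.head_ref` is only defined for `pull_request` events, so successive pushes to a PR branch share one group and cancel each other, while push/merge runs fall back to the globally unique `github.run_id` and are never cancelled. A minimal Python model of the grouping rule (illustrative only — hypothetical event values, not GitHub's implementation):

```python
def concurrency_group(workflow: str, head_ref: str | None, run_id: int) -> str:
    """Model `${{ github.workflow }}-${{ github.head_ref || github.run_id }}`."""
    # In GitHub Actions expressions, `a || b` yields b when a is unset or empty.
    return f"{workflow}-{head_ref or run_id}"

# Two pushes to the same PR branch land in one group: the older run is cancelled.
assert concurrency_group("Tests", "fix-typo", 101) == concurrency_group("Tests", "fix-typo", 102)

# Push/merge events carry no head_ref, so every run gets a unique group
# and `cancel-in-progress` never kills a merge build.
assert concurrency_group("Tests", None, 101) != concurrency_group("Tests", None, 102)
```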
9 changes: 8 additions & 1 deletion .github/workflows/docker.yml

```diff
@@ -4,7 +4,8 @@ name: Build Docker Image
 on:
   push:
     branches:
-      - master
+      - develop
+      - develop-1.9
     paths:
       - "**"
       - '!docs/**'
@@ -19,6 +20,12 @@ env:
   ORG: opendatacube
   IMAGE: ows
 
+# When a PR is updated, cancel the jobs from the previous version. Merges
+# do not define head_ref, so use run_id to never cancel those jobs.
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 jobs:
   docker:
     runs-on: ubuntu-latest
```
10 changes: 8 additions & 2 deletions .github/workflows/dockerfile-lint.yml

```diff
@@ -4,20 +4,26 @@ name: dockerfile Linting
 on:
   pull_request:
     branches:
-      - 'master'
+      - 'develop'
+      - 'develop-1.9'
     paths:
       - 'Dockerfile'
       - '.github/workflows/dockerfile-lint.yml'
 
   push:
     branches:
-      - 'master'
+      - 'develop'
     paths:
       - 'Dockerfile'
       - '.github/workflows/dockerfile-lint.yml'
 
 
+# When a PR is updated, cancel the jobs from the previous version. Merges
+# do not define head_ref, so use run_id to never cancel those jobs.
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
 
 jobs:
   dockerfile-lint:
     runs-on: ubuntu-latest
```
6 changes: 6 additions & 0 deletions .github/workflows/docpreview.yaml

```diff
@@ -9,6 +9,12 @@ on:
 permissions:
   pull-requests: write
 
+# When a PR is updated, cancel the jobs from the previous version. Merges
+# do not define head_ref, so use run_id to never cancel those jobs.
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 jobs:
 
   documentation-preview:
```
10 changes: 8 additions & 2 deletions .github/workflows/lint.yml

```diff
@@ -4,7 +4,7 @@ name: Code Linting
 on:
   pull_request:
     branches:
-      - 'master'
+      - 'develop'
       - 'develop-1.9'
     paths:
       - '**'
@@ -17,7 +17,7 @@
 
   push:
     branches:
-      - 'master'
+      - 'develop'
       - 'develop-1.9'
     paths:
       - '**'
@@ -28,6 +28,12 @@ on:
       - '!.github/**'
       - '.github/workflows/lint.yml'
 
+# When a PR is updated, cancel the jobs from the previous version. Merges
+# do not define head_ref, so use run_id to never cancel those jobs.
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 jobs:
   pylint:
     runs-on: ubuntu-latest
```
11 changes: 9 additions & 2 deletions .github/workflows/pyspy-profiling.yaml

```diff
@@ -4,7 +4,7 @@ name: Pyspy Profiling Test
 on:
   pull_request:
     branches:
-      - 'master'
+      - 'develop'
       - 'develop-1.9'
     paths:
       - '**'
@@ -17,7 +17,8 @@
 
   push:
     branches:
-      - 'master'
+      - 'develop'
+      - 'develop-1.9'
     paths:
       - '**'
       - '!docs/**'
@@ -27,6 +28,12 @@ on:
       - '!.github/**'
       - '.github/workflows/pyspy-profiling.yaml'
 
+# When a PR is updated, cancel the jobs from the previous version. Merges
+# do not define head_ref, so use run_id to never cancel those jobs.
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 jobs:
   build:
     runs-on: ubuntu-latest
```
11 changes: 9 additions & 2 deletions .github/workflows/scan.yml

```diff
@@ -4,15 +4,16 @@ name: Scan
 on:
   push:
     branches:
-      - master
+      - develop
+      - develop-1.9
     paths:
       - ".github/workflows/scan.yml"
       - "Dockerfile"
 
   pull_request:
     branches:
-      - master
+      - develop
       - develop-1.9
     paths:
       - ".github/workflows/scan.yml"
       - "Dockerfile"
@@ -23,6 +24,12 @@
 env:
   IMAGE_NAME: opendatacube/ows
 
+# When a PR is updated, cancel the jobs from the previous version. Merges
+# do not define head_ref, so use run_id to never cancel those jobs.
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 jobs:
   cve-scanner:
     runs-on: ubuntu-latest
```
11 changes: 9 additions & 2 deletions .github/workflows/spellcheck.yaml

```diff
@@ -4,7 +4,7 @@ name: Spell check
 on:
   pull_request:
     branches:
-      - 'master'
+      - 'develop'
       - 'develop-1.9'
     paths:
       - 'docs/**'
@@ -14,12 +14,19 @@
 
   push:
     branches:
-      - 'master'
+      - 'develop'
+      - 'develop-1.9'
     paths:
       - 'docs/**'
       - '*.md'
       - '.github/workflows/spellcheck.yaml'
 
+# When a PR is updated, cancel the jobs from the previous version. Merges
+# do not define head_ref, so use run_id to never cancel those jobs.
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 jobs:
 
   pyspellcheck:
```
12 changes: 10 additions & 2 deletions .github/workflows/test-prod.yaml

```diff
@@ -4,7 +4,8 @@ name: Prod dockercompose test
 on:
   pull_request:
     branches:
-      - 'master'
+      - 'develop'
+      - 'develop-1.9'
     paths:
       - '**'
       - '!docs/**'
@@ -16,7 +17,8 @@ on:
 
   push:
     branches:
-      - 'master'
+      - 'develop'
+      - 'develop-1.9'
     paths:
       - '**'
       - '!docs/**'
@@ -30,6 +32,12 @@ env:
   ORG: opendatacube
   IMAGE: ows
 
+# When a PR is updated, cancel the jobs from the previous version. Merges
+# do not define head_ref, so use run_id to never cancel those jobs.
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 jobs:
   prod-docker-compose-tests:
     runs-on: ubuntu-latest
```
11 changes: 9 additions & 2 deletions .github/workflows/test.yml

```diff
@@ -4,7 +4,7 @@ name: Tests
 on:
   pull_request:
     branches:
-      - 'master'
+      - 'develop'
       - 'develop-1.9'
     paths:
       - '**'
@@ -17,7 +17,8 @@
 
   push:
     branches:
-      - 'master'
+      - 'develop'
+      - 'develop-1.9'
     paths:
       - '**'
       - '!docs/**'
@@ -31,6 +32,12 @@ env:
   ORG: opendatacube
   IMAGE: ows
 
+# When a PR is updated, cancel the jobs from the previous version. Merges
+# do not define head_ref, so use run_id to never cancel those jobs.
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 jobs:
   unit-integration-performance-tests:
     runs-on: ubuntu-latest
```
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml

```diff
@@ -15,7 +15,7 @@ repos:
 # hooks:
 # - id: bandit
   - repo: https://github.com/PyCQA/pylint
-    rev: v3.3.1
+    rev: v3.3.2
     hooks:
       - id: pylint
        args: ["--disable=C,R,W,E1136"]
```
2 changes: 1 addition & 1 deletion Dockerfile.micromamba

```diff
@@ -1,4 +1,4 @@
-FROM mambaorg/micromamba:2.0.3
+FROM mambaorg/micromamba:2.0.5
 COPY --chown=$MAMBA_USER:$MAMBA_USER env.micromamba.yaml /tmp/env.yaml
 RUN --mount=type=cache,target=/opt/conda/pkgs micromamba install -y -n base -f /tmp/env.yaml && \
     micromamba clean --all --yes --force-pkgs-dirs && \
```
17 changes: 17 additions & 0 deletions HISTORY.rst

```diff
@@ -7,6 +7,23 @@ History
 
 Datacube-ows version 1.9.x indicates that it is designed work with datacube-core versions 1.9.x.
 
+1.9.0 (2024-12-23)
+------------------
+
+This is the first OWS release compatible with the 1.9.x series datacube-core releases.
+
+New features from the 1.8.x series releases include:
+
+* Full support for the postgis index driver
+* Support for multi-database environment (Can serve data out of multiple indexes)
+* Seamless rendering around the antimeridian for most wms and wmts clients.
+
+Changes from 1.9.0-rc2:
+
+* Update HISTORY.rst and default version number ready for release
+* Active CI jobs are now cancelled on PR update (#1091)
+* Auto upstream library bumps (#1093, #1095)
+
 1.9.0-rc2 (2024-11-15)
 ----------------------
 
```
2 changes: 1 addition & 1 deletion datacube_ows/__init__.py

```diff
@@ -7,4 +7,4 @@
 try:
     from ._version import version as __version__
 except ImportError:
-    __version__ = "1.9.0-rc2?"
+    __version__ = "1.9.0?"
```
6 changes: 3 additions & 3 deletions datacube_ows/feature_info.py

```diff
@@ -8,7 +8,7 @@
 import re
 from datetime import datetime
 from itertools import chain
-from typing import cast
+from typing import cast, Iterable
 
 import numpy
 import xarray
@@ -206,7 +206,7 @@ def feature_info(args: dict[str, str]) -> FlaskResponse:
     ds: Dataset | None = None
     for pbq, dss in time_datasets.items():
         if pbq.main:
-            ds = dss.sel(time=dt).values.tolist()[0]
+            ds = cast(Dataset, dss.sel(time=dt).values.tolist()[0])
             break
     assert ds is not None
     if params.layer.multi_product:
@@ -271,7 +271,7 @@ def feature_info(args: dict[str, str]) -> FlaskResponse:
     pt_native = None
     for d in all_time_datasets.coords["time"].values:
         dt_datasets = all_time_datasets.sel(time=d)
-        for ds in dt_datasets.values.item():
+        for ds in cast(Iterable[Dataset], dt_datasets.values.item()):
             assert ds is not None  # For type checker
             if pt_native is None:
                 pt_native = geo_point.to_crs(ds.crs)
```
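The `cast` calls above (and the matching one in `datacube_ows/loading.py` below) fix type-checker errors, not runtime behaviour: OWS keeps ODC `Dataset` objects in object-dtype xarray cells, and `.values.item()` / `.values.tolist()` come back typed as `Any`, so mypy cannot infer the element type. A self-contained sketch of the pattern, using a stand-in `Dataset` class rather than the real `datacube.model.Dataset`:

```python
from typing import Iterable, cast

import numpy as np
import xarray as xr


class Dataset:  # stand-in for datacube.model.Dataset (illustrative only)
    pass


# One object-dtype cell per timestep, each holding a tuple of Datasets,
# mirroring how OWS groups datasets by time before loading.
cell = np.empty(1, dtype=object)
cell[0] = (Dataset(), Dataset())
grouped = xr.DataArray(cell, dims=["time"],
                       coords={"time": [np.datetime64("2024-12-23")]})

# .item() on an object array is typed as Any; cast() is a runtime no-op that
# just tells the type checker what the cell actually holds.
for ds in cast(Iterable[Dataset], grouped.sel(time=grouped.time[0]).values.item()):
    assert isinstance(ds, Dataset)
```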
2 changes: 1 addition & 1 deletion datacube_ows/loading.py

```diff
@@ -355,7 +355,7 @@ def manual_data_stack(self,
         for dt in datasets.time.values:
             tds = datasets.sel(time=dt)
             merged = None
-            for ds in tds.values.item():
+            for ds in cast(Iterable[datacube.model.Dataset], tds.values.item()):
                 d = self.read_data_for_single_dataset(ds, measurements, self._geobox, fuse_func=fuse_func)
                 extent_mask = None
                 for band in non_flag_bands:
```
2 changes: 1 addition & 1 deletion datacube_ows/styles/ramp.py

```diff
@@ -152,7 +152,7 @@ def read_mpl_ramp(mpl_ramp: str) -> RAMP_SPEC:
         }
     )
     for val in val_range:
-        rgba_hex = to_hex(cmap(val))
+        rgba_hex = to_hex(cast(tuple[float, float, float, float], cmap(val)))
        unscaled_cmap.append(
             {
                 "value": float(val),
```
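Calling a matplotlib colormap with a scalar returns a single RGBA tuple, but the stubs declare a broader scalar-or-array return type, so `to_hex` does not type-check without narrowing; the `cast` asserts the scalar case. A short sketch of the same idiom (assumes matplotlib ≥ 3.6 for the `colormaps` registry):

```python
from typing import cast

import numpy as np
from matplotlib import colormaps
from matplotlib.colors import to_hex

cmap = colormaps["viridis"]
for val in np.linspace(0.0, 1.0, 5):
    # cmap(float) returns one (r, g, b, a) tuple; the cast narrows the
    # union return type that the matplotlib stubs declare.
    rgba = cast(tuple[float, float, float, float], cmap(float(val)))
    print(f"{val:.2f} -> {to_hex(rgba)}")
```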
3 changes: 1 addition & 2 deletions datacube_ows/utils.py

```diff
@@ -14,7 +14,6 @@
 from datacube import Datacube
 from datacube.api.query import GroupBy, solar_day
 from datacube.model import Dataset
-from numpy import datetime64
 from numpy import datetime64 as npdt64
 from sqlalchemy.engine.base import Connection
 
@@ -103,7 +102,7 @@ def group_by_solar(pnames: list[str] | None = None) -> GroupBy:
         sort_key = base_sort_key
     return GroupBy(
         dimension='time',
-        group_by_func=lambda x: datetime64(solar_day(x), "ns"),
+        group_by_func=lambda x: npdt64(solar_day(x), "ns"),  # type: ignore[call-overload]
         units='seconds since 1970-01-01 00:00:00',
         sort_key=sort_key
     )
```
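`group_by_solar` buckets datasets by `solar_day`, which returns a day-precision `numpy.datetime64`; the lambda widens it to nanosecond precision to match the grouped time axis. The change itself just drops the duplicate `datetime64` import in favour of the existing `npdt64` alias and silences a numpy-stubs overload complaint. Conceptually, `solar_day` shifts a dataset's UTC timestamp by its longitude so one satellite pass maps to one nominal date — a rough model only (the real logic lives in `datacube.api.query`):

```python
from datetime import datetime, timezone

import numpy as np


def approx_solar_day(utc: datetime, lon: float) -> np.datetime64:
    # Shift UTC by ~4 minutes per degree of longitude, then truncate to a day.
    seconds = utc.timestamp() + lon * 240.0
    return np.datetime64(int(seconds // 86400), "D")

# 23:30 UTC at longitude 150E is already the next local solar day.
day = approx_solar_day(datetime(2024, 12, 22, 23, 30, tzinfo=timezone.utc), 150.0)
print(day)                       # 2024-12-23
print(np.datetime64(day, "ns"))  # same day widened to ns, as group_by_func does
```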