diff --git a/cubedash/_model.py b/cubedash/_model.py
index f1ee6030d..e9bc5878b 100644
--- a/cubedash/_model.py
+++ b/cubedash/_model.py
@@ -96,8 +96,9 @@ def get_time_summary(
year: Optional[int] = None,
month: Optional[int] = None,
day: Optional[int] = None,
+ region: Optional[str] = None,
) -> Optional[TimePeriodOverview]:
- return STORE.get(product_name, year, month, day)
+ return STORE.get(product_name, year, month, day, region)
@cache.memoize(timeout=60)
diff --git a/cubedash/_pages.py b/cubedash/_pages.py
index 075a236ef..e8f5069a1 100644
--- a/cubedash/_pages.py
+++ b/cubedash/_pages.py
@@ -286,7 +286,7 @@ def region_page(
selected_summary,
year_selector_summary,
time_selector_summary,
- ) = _load_product(product_name, year, month, day)
+ ) = _load_product(product_name, year, month, day, region_code)
region_info = _model.STORE.get_product_region_info(product_name)
if not region_info:
@@ -394,7 +394,7 @@ def timeline_page(product_name: str):
def _load_product(
- product_name, year, month, day
+ product_name, year, month, day, region: str = None
) -> Tuple[
DatasetType,
ProductSummary,
@@ -410,9 +410,9 @@ def _load_product(
abort(404, f"Unknown product {product_name!r}")
product_summary = _model.get_product_summary(product_name)
- time_summary = _model.get_time_summary(product_name, year, month, day)
- year_selector_summary = _model.get_time_summary(product_name, None, None, None)
- time_selector_summary = _model.get_time_summary(product_name, year, None, None)
+ time_summary = _model.get_time_summary(product_name, year, month, day, region)
+ year_selector_summary = _model.get_time_summary(product_name, None, None, None, region)
+ time_selector_summary = _model.get_time_summary(product_name, year, None, None, region)
return (
product,
product_summary,
diff --git a/cubedash/summary/_extents.py b/cubedash/summary/_extents.py
index 8f2f0b8dc..92ab974b0 100644
--- a/cubedash/summary/_extents.py
+++ b/cubedash/summary/_extents.py
@@ -3,6 +3,7 @@
import sys
import uuid
from dataclasses import dataclass
+
from datetime import date, datetime
from pathlib import Path
from typing import Dict, Generator, Iterable, List, Optional
diff --git a/cubedash/summary/_model.py b/cubedash/summary/_model.py
index a39dbfb48..23c1666db 100644
--- a/cubedash/summary/_model.py
+++ b/cubedash/summary/_model.py
@@ -142,6 +142,7 @@ def add_periods(
for p in periods:
timeline_counter.update(p.timeline_dataset_counts)
period = p.timeline_period
+
timeline_counter, period = cls._group_counter_if_needed(
timeline_counter, period
)
diff --git a/cubedash/summary/_schema.py b/cubedash/summary/_schema.py
index 9ec19684e..b64280dc5 100644
--- a/cubedash/summary/_schema.py
+++ b/cubedash/summary/_schema.py
@@ -184,7 +184,10 @@
Column("crses", postgres.ARRAY(String)),
# Size of this dataset in bytes, if the product includes it.
Column("size_bytes", BigInteger),
- PrimaryKeyConstraint("product_ref", "start_day", "period_type"),
+ Column("regions_hash", String),
+ PrimaryKeyConstraint(
+ "product_ref", "start_day", "period_type", "regions_hash",
+ ),
CheckConstraint(
r"array_length(timeline_dataset_start_days, 1) = "
r"array_length(timeline_dataset_counts, 1)",
diff --git a/cubedash/summary/_stores.py b/cubedash/summary/_stores.py
index 30edcfe9c..e69cd3e08 100644
--- a/cubedash/summary/_stores.py
+++ b/cubedash/summary/_stores.py
@@ -755,6 +755,7 @@ def get(
year: Optional[int] = None,
month: Optional[int] = None,
day: Optional[int] = None,
+ region_code: Optional[str] = None,
) -> Optional[TimePeriodOverview]:
period, start_day = TimePeriodOverview.flat_period_representation(
year, month, day
@@ -771,15 +772,56 @@ def get(
if not product:
return None
- res = self._engine.execute(
- select([TIME_OVERVIEW]).where(
- and_(
- TIME_OVERVIEW.c.product_ref == product.id_,
- TIME_OVERVIEW.c.start_day == start_day,
- TIME_OVERVIEW.c.period_type == period,
- )
+ if region_code and year:
+ return self._summariser.calculate_summary(
+ product_name,
+ year_month_day=(year, month, day),
+ product_refresh_time=datetime.now(),
+ region_code=region_code,
)
- ).fetchone()
+
+ if region_code:
+ res = self._engine.execute(
+ select([TIME_OVERVIEW]).where(
+ and_(
+ TIME_OVERVIEW.c.product_ref == product.id_,
+ TIME_OVERVIEW.c.start_day == start_day,
+ TIME_OVERVIEW.c.period_type == period,
+ TIME_OVERVIEW.c.regions.contains([region_code]),
+ func.cardinality(TIME_OVERVIEW.c.regions) == 1,
+ )
+ )
+ ).fetchone()
+ else:
+ if self.get_product_all_regions(product.name, period, start_day):
+ """
+ if the product contains region
+ """
+ res = self._engine.execute(
+ select([TIME_OVERVIEW]).where(
+ and_(
+ TIME_OVERVIEW.c.product_ref == product.id_,
+ TIME_OVERVIEW.c.start_day == start_day,
+ TIME_OVERVIEW.c.period_type == period,
+ func.cardinality(TIME_OVERVIEW.c.regions) == len(
+ self.get_product_all_regions(product.name, period, start_day)
+ ),
+ )
+ )
+ ).fetchone()
+ else:
+ """
+ if the product doesnt contain region
+ """
+ res = self._engine.execute(
+ select([TIME_OVERVIEW]).where(
+ and_(
+ TIME_OVERVIEW.c.product_ref == product.id_,
+ TIME_OVERVIEW.c.start_day == start_day,
+ TIME_OVERVIEW.c.period_type == period,
+ )
+ ).order_by(TIME_OVERVIEW.c.generation_time.desc())
+ ).fetchone()
if not res:
return None
@@ -834,6 +876,13 @@ def get_dataset_type(self, name) -> DatasetType:
return d
raise KeyError(f"Unknown dataset type {name!r}")
+ @ttl_cache(ttl=DEFAULT_TTL)
+ def get_dataset_type_return_none(self, name) -> DatasetType:
+ for d in self.all_dataset_types():
+ if d.name == name:
+ return d
+ return None
+
@ttl_cache(ttl=DEFAULT_TTL)
def _dataset_type_by_id(self, id_) -> DatasetType:
for d in self.all_dataset_types():
@@ -1034,24 +1083,34 @@ def _put(
log.info("product.put")
product = self._product(summary.product_name)
period, start_day = summary.as_flat_period()
+ region_values, _ = _counter_key_vals(summary.region_dataset_counts)
row = _summary_to_row(summary)
+
+ import hashlib
+ import json
ret = self._engine.execute(
postgres.insert(TIME_OVERVIEW)
.returning(TIME_OVERVIEW.c.generation_time)
.on_conflict_do_update(
- index_elements=["product_ref", "start_day", "period_type"],
+ index_elements=[
+ "product_ref", "start_day", "period_type",
+ "regions_hash"
+ ],
set_=row,
where=and_(
TIME_OVERVIEW.c.product_ref == product.id_,
TIME_OVERVIEW.c.start_day == start_day,
TIME_OVERVIEW.c.period_type == period,
+ TIME_OVERVIEW.c.regions == region_values,
),
)
.values(
- product_ref=product.id_, start_day=start_day, period_type=period, **row
+ product_ref=product.id_, start_day=start_day, period_type=period,
+ regions_hash=hashlib.sha224(json.dumps(region_values).encode("utf-8")).hexdigest(), **row
)
)
+
[gen_time] = ret.fetchone()
summary.summary_gen_time = gen_time
@@ -1313,6 +1372,7 @@ def _recalculate_period(
year: Optional[int] = None,
month: Optional[int] = None,
product_refresh_time: datetime = None,
+ region_code: str = None,
) -> TimePeriodOverview:
"""Recalculate the given period and store it in the DB"""
if year and month:
@@ -1320,6 +1380,7 @@ def _recalculate_period(
product.name,
year_month_day=(year, month, None),
product_refresh_time=product_refresh_time,
+ region_code=region_code,
)
elif year:
summary = TimePeriodOverview.add_periods(
@@ -1329,12 +1390,26 @@ def _recalculate_period(
# Product. Does it have data?
elif product.dataset_count > 0:
summary = TimePeriodOverview.add_periods(
- self.get(product.name, year_, None, None)
+ self.get(product.name, year_, None, None, region_code=None)
for year_ in range(
product.time_earliest.astimezone(timezone).year,
product.time_latest.astimezone(timezone).year + 1
)
)
+
+ if self.get_product_all_regions(product_name=product.name):
+ for region in self.get_product_all_regions(product_name=product.name):
+ region_summary = TimePeriodOverview.add_periods(
+ self.get(product.name, year_, None, None, region_code=region)
+ for year_ in range(
+ product.time_earliest.astimezone(timezone).year,
+ product.time_latest.astimezone(timezone).year + 1
+ )
+ )
+ region_summary.product_refresh_time = product_refresh_time
+ region_summary.period_tuple = (product.name, year, month, None)
+ self._put(region_summary)
+
else:
summary = TimePeriodOverview.empty(product.name)
@@ -1342,6 +1417,7 @@ def _recalculate_period(
summary.period_tuple = (product.name, year, month, None)
self._put(summary)
+
for listener in self._update_listeners:
listener(
product_name=product.name,
@@ -1642,6 +1718,55 @@ def _region_summaries(self, product_name: str) -> Dict[str, RegionSummary]:
if geom is not None
}
+ def get_product_all_regions(self, product_name: str, period_type: str = None, start_day=None) -> List:
+ """
+ return list of regions per date range
+ """
+ dt = self.get_dataset_type_return_none(product_name)
+ if not dt:
+ return None
+ rows = self._engine.execute(
+ select(
+ [
+ REGION.c.region_code,
+ ]
+ )
+ .where(REGION.c.dataset_type_ref == dt.id)
+ .order_by(REGION.c.region_code)
+ )
+
+ if period_type != 'all' and start_day:
+ year, month, day = TimePeriodOverview.from_flat_period_representation(
+ period_type, start_day
+ )
+ time = _utils.as_time_range(year, month, day)
+
+ begin_time = time.begin.replace(tzinfo=tz.gettz("Australia/Darwin"))
+ end_time = time.end.replace(tzinfo=tz.gettz("Australia/Darwin"))
+ rows = self._engine.execute(
+ select(
+ [
+ DATASET_SPATIAL.c.region_code
+ ]
+ )
+ .where(
+ and_(
+ func.tstzrange(
+ begin_time, end_time, "[]", type_=TSTZRANGE
+ ).contains(
+ DATASET_SPATIAL.c.center_time
+ ),
+ DATASET_SPATIAL.c.dataset_type_ref == dt.id
+
+ )
+ )
+ .distinct()
+ )
+ if not rows:
+ return None
+
+ return [region["region_code"] for region in rows]
+
def get_product_region_info(self, product_name: str) -> RegionInfo:
return RegionInfo.for_product(
dataset_type=self.get_dataset_type(product_name),
diff --git a/cubedash/summary/_summarise.py b/cubedash/summary/_summarise.py
index 63db08c13..d670192d4 100644
--- a/cubedash/summary/_summarise.py
+++ b/cubedash/summary/_summarise.py
@@ -56,6 +56,7 @@ def calculate_summary(
product_name: str,
year_month_day: Tuple[Optional[int], Optional[int], Optional[int]],
product_refresh_time: datetime,
+ region_code: str = None,
) -> TimePeriodOverview:
"""
Create a summary of the given product/time range.
@@ -64,7 +65,7 @@ def calculate_summary(
log = self.log.bind(product_name=product_name, time=time)
log.debug("summary.query")
- begin_time, end_time, where_clause = self._where(product_name, time)
+ begin_time, end_time, where_clause = self._where(product_name, time, region_code)
select_by_srid = (
select(
(
@@ -208,25 +209,43 @@ def _with_default_tz(self, d: datetime) -> datetime:
return d
def _where(
- self, product_name: str, time: Range
+ self, product_name: str, time: Range, region: str = None,
) -> Tuple[datetime, datetime, ColumnElement]:
begin_time = self._with_default_tz(time.begin)
end_time = self._with_default_tz(time.end)
- where_clause = and_(
- func.tstzrange(begin_time, end_time, "[]", type_=TSTZRANGE).contains(
- DATASET_SPATIAL.c.center_time
- ),
- DATASET_SPATIAL.c.dataset_type_ref
- == _scalar_subquery(
- select([ODC_DATASET_TYPE.c.id]).where(
- ODC_DATASET_TYPE.c.name == product_name
- )
- ),
- or_(
- func.st_isvalid(DATASET_SPATIAL.c.footprint).is_(True),
- func.st_isvalid(DATASET_SPATIAL.c.footprint).is_(None),
- ),
- )
+ if region:
+ where_clause = and_(
+ func.tstzrange(begin_time, end_time, "[]", type_=TSTZRANGE).contains(
+ DATASET_SPATIAL.c.center_time
+ ),
+ DATASET_SPATIAL.c.dataset_type_ref
+ == _scalar_subquery(
+ select([ODC_DATASET_TYPE.c.id]).where(
+ ODC_DATASET_TYPE.c.name == product_name
+ )
+ ),
+ DATASET_SPATIAL.c.region_code == region,
+ or_(
+ func.st_isvalid(DATASET_SPATIAL.c.footprint).is_(True),
+ func.st_isvalid(DATASET_SPATIAL.c.footprint).is_(None),
+ ),
+ )
+ else:
+ where_clause = and_(
+ func.tstzrange(begin_time, end_time, "[]", type_=TSTZRANGE).contains(
+ DATASET_SPATIAL.c.center_time
+ ),
+ DATASET_SPATIAL.c.dataset_type_ref
+ == _scalar_subquery(
+ select([ODC_DATASET_TYPE.c.id]).where(
+ ODC_DATASET_TYPE.c.name == product_name
+ )
+ ),
+ or_(
+ func.st_isvalid(DATASET_SPATIAL.c.footprint).is_(True),
+ func.st_isvalid(DATASET_SPATIAL.c.footprint).is_(None),
+ ),
+ )
return begin_time, end_time, where_clause
@lru_cache() # noqa: B019
diff --git a/cubedash/templates/layout/macros.html b/cubedash/templates/layout/macros.html
index 559c6e41a..05c6dbdf0 100644
--- a/cubedash/templates/layout/macros.html
+++ b/cubedash/templates/layout/macros.html
@@ -128,3 +128,56 @@
{{ _show_raw_keyval(doc) }}
{%- endmacro %}
+
+
+{% macro region_chart_timeline(timeline, product, region, period='month') -%}
+
+ {% if timeline %}
+ {% set month_width = 650 / timeline | length %}
+ {% if month_width > 5 %}
+ {% set month_width = 5 %}
+ {%endif%}
+
+ {% set max_count = timeline.most_common(1)[0][1] %}
+
+
+ {# We're assuming only 1 control per page, sorry. #}
+
+ {% endif %}
+
+{%- endmacro %}
diff --git a/cubedash/templates/region.html b/cubedash/templates/region.html
index eb67839ec..b1ced636c 100644
--- a/cubedash/templates/region.html
+++ b/cubedash/templates/region.html
@@ -88,6 +88,7 @@ {{ product_region_info.region_label(region_code) }} {{ prod
{% block body_footer %}
{{ super() }}
+ {% from "layout/macros.html" import region_chart_timeline %}
diff --git a/integration_tests/asserts.py b/integration_tests/asserts.py
index f2b819f8e..4eb83b82c 100644
--- a/integration_tests/asserts.py
+++ b/integration_tests/asserts.py
@@ -145,6 +145,55 @@ def check_dataset_count(html, count: int):
), f"Incorrect dataset count: found {actual} instead of {expected}"
+def check_product_date_selector_contains(html, year: str, month: str = None, day: str = None):
+ """
+ Testing date selector only showing date containing datasets
+ """
+ __tracebackhide__ = True
+ date_selector = html.find("#product-headers .limited")
+ if year:
+ year_option = date_selector[0].find(".option-menu ul", first=True).find("li")
+ assert (year in [years.text for years in year_option]), f"{year} not in {[years.text for years in year_option]}"
+ if month:
+ month_option = date_selector[1].find(".option-menu ul", first=True).find("li")
+ assert (
+ month in [months.text for months in month_option]
+ ), f"{month} not in {[months.text for months in month_option]}"
+ if month and day:
+ day_option = date_selector[2].find(".option-menu ul", first=True).find("li")
+ assert (day in [days.text for days in day_option]), f"{day} not in {[days.text for days in day_option]}"
+
+
+def check_product_date_selector_not_contain(html, year: str, month: str = None, day: str = None):
+ """
+ Testing date selector is not showing date containing no datasets
+ if year is provided: year must be invalid
+ if month is provided: year must be valid, but month isn't
+    if day is provided: year and month must be valid, but day isn't
+ """
+ __tracebackhide__ = True
+ date_selector = html.find("#product-headers .limited")
+ if year and not month and not day:
+ year_option = date_selector[0].find(".option-menu ul", first=True).find("li")
+ assert (year not in [years.text for years in year_option]), f"{year} in {[years.text for years in year_option]}"
+ if year and month and not day:
+ year_option = date_selector[0].find(".option-menu ul", first=True).find("li")
+ month_option = date_selector[1].find(".option-menu ul", first=True).find("li")
+ assert (year in [years.text for years in year_option]), f"{year} not in {[years.text for years in year_option]}"
+ assert (
+ month not in [months.text for months in month_option]
+ ), f"{month} in {[months.text for months in month_option]}"
+ if year and month and day:
+ year_option = date_selector[0].find(".option-menu ul", first=True).find("li")
+ month_option = date_selector[1].find(".option-menu ul", first=True).find("li")
+ day_option = date_selector[2].find(".option-menu ul", first=True).find("li")
+ assert (year in [years.text for years in year_option]), f"{year} not in {[years.text for years in year_option]}"
+ assert (
+ month in [months.text for months in month_option]
+ ), f"{month} not in {[months.text for months in month_option]}"
+ assert (day not in [days.text for days in day_option]), f"{day} in {[days.text for days in day_option]}"
+
+
def expect_values(
s: TimePeriodOverview,
dataset_count: int,
diff --git a/integration_tests/data/ls5_sr-sample.yaml b/integration_tests/data/ls5_sr-sample.yaml
new file mode 100644
index 000000000..fb6a3776d
--- /dev/null
+++ b/integration_tests/data/ls5_sr-sample.yaml
@@ -0,0 +1,686 @@
+---
+# Dataset
+# url: https://explorer.digitalearth.africa/dataset/94772957-fb4d-5a42-ae99-7f01010fc2c7.odc-metadata.yaml
+$schema: https://schemas.opendatacube.org/dataset
+id: 94772957-fb4d-5a42-ae99-7f01010fc2c7
+
+label: LT05_L2SP_205050_19840408_20200918_02_T1_SR
+product:
+ name: ls5_sr
+
+location: s3://deafrica-landsat/collection02/level-2/standard/tm/1984/205/050/LT05_L2SP_205050_19840408_20200918_02_T1/LT05_L2SP_205050_19840408_20200918_02_T1_SR_stac.json
+
+crs: epsg:32628
+geometry:
+ type: Polygon
+ coordinates: [[[245850.0, 1701060.0], [205770.0, 1524600.0], [388200.0, 1495560.0],
+ [427890.0, 1672320.0], [245850.0, 1701060.0]]]
+grids:
+ default:
+ shape: [6971, 7791]
+ transform: [30.0, 0.0, 200385.0, 0.0, -30.0, 1702515.0]
+
+properties:
+ datetime: '1984-04-08T10:54:37.835069Z'
+ eo:cloud_cover: 19.0
+ eo:constellation: Landsat
+ eo:instrument: TM
+ eo:off_nadir: 0
+ eo:platform: LANDSAT_5
+ eo:sun_azimuth: 98.5418956
+ eo:sun_elevation: 56.38333026
+ landsat:cloud_cover_land: 3.0
+ landsat:collection_category: T1
+ landsat:collection_number: '02'
+ landsat:correction: L2SP
+ landsat:rmse: 4.606
+ landsat:rmse_x: 3.2
+ landsat:rmse_y: 3.313
+ landsat:scene_id: LT52050501984099XXX07
+ landsat:wrs_path: '205'
+ landsat:wrs_row: '050'
+ landsat:wrs_type: '2'
+ odc:file_format: GeoTIFF
+ odc:processing_datetime: '1984-04-08T10:54:37.835069Z'
+ odc:product: ls5_sr
+ odc:region_code: '205050'
+ proj:epsg: 32628
+ proj:shape:
+ - 6971
+ - 7791
+ proj:transform:
+ - 30.0
+ - 0.0
+ - 200385.0
+ - 0.0
+ - -30.0
+ - 1702515.0
+
+measurements:
+ SR_B1:
+ path: LT05_L2SP_205050_19840408_20200918_02_T1_SR_B1.TIF
+ SR_B2:
+ path: LT05_L2SP_205050_19840408_20200918_02_T1_SR_B2.TIF
+ SR_B3:
+ path: LT05_L2SP_205050_19840408_20200918_02_T1_SR_B3.TIF
+ SR_B4:
+ path: LT05_L2SP_205050_19840408_20200918_02_T1_SR_B4.TIF
+ SR_B5:
+ path: LT05_L2SP_205050_19840408_20200918_02_T1_SR_B5.TIF
+ SR_B7:
+ path: LT05_L2SP_205050_19840408_20200918_02_T1_SR_B7.TIF
+ QA_PIXEL:
+ path: LT05_L2SP_205050_19840408_20200918_02_T1_QA_PIXEL.TIF
+ QA_RADSAT:
+ path: LT05_L2SP_205050_19840408_20200918_02_T1_QA_RADSAT.TIF
+ SR_CLOUD_QA:
+ path: LT05_L2SP_205050_19840408_20200918_02_T1_SR_CLOUD_QA.TIF
+ SR_ATMOS_OPACITY:
+ path: LT05_L2SP_205050_19840408_20200918_02_T1_SR_ATMOS_OPACITY.TIF
+
+accessories:
+ index:
+ path: LT05_L2SP_205050_19840408_20200918_02_T1
+ ANG.txt:
+ path: LT05_L2SP_205050_19840408_20200918_02_T1_ANG.txt
+ MTL.txt:
+ path: LT05_L2SP_205050_19840408_20200918_02_T1_MTL.txt
+ MTL.xml:
+ path: LT05_L2SP_205050_19840408_20200918_02_T1_MTL.xml
+ MTL.json:
+ path: LT05_L2SP_205050_19840408_20200918_02_T1_MTL.json
+ thumbnail:
+ path: LT05_L2SP_205050_19840408_20200918_02_T1_thumb_small.jpeg
+ reduced_resolution_browse:
+ path: LT05_L2SP_205050_19840408_20200918_02_T1_thumb_large.jpeg
+
+lineage: {}
+...
+---
+# Dataset
+# url: https://explorer.digitalearth.africa/dataset/08d432b3-36f3-5818-9d6a-04bb03c6947b.odc-metadata.yaml
+$schema: https://schemas.opendatacube.org/dataset
+id: 08d432b3-36f3-5818-9d6a-04bb03c6947b
+
+label: LT05_L2SP_205050_19840611_20200918_02_T1_SR
+product:
+ name: ls5_sr
+
+location: s3://deafrica-landsat/collection02/level-2/standard/tm/1984/205/050/LT05_L2SP_205050_19840611_20200918_02_T1/LT05_L2SP_205050_19840611_20200918_02_T1_SR_stac.json
+
+crs: epsg:32628
+geometry:
+ type: Polygon
+ coordinates: [[[247050.0, 1701810.0], [207060.0, 1525320.0], [389370.0, 1496340.0],
+ [429030.0, 1673100.0], [247050.0, 1701810.0]]]
+grids:
+ default:
+ shape: [6961, 7781]
+ transform: [30.0, 0.0, 201885.0, 0.0, -30.0, 1703115.0]
+
+properties:
+ datetime: '1984-06-11T10:55:42.580038Z'
+ eo:cloud_cover: 22.0
+ eo:constellation: Landsat
+ eo:instrument: TM
+ eo:off_nadir: 0
+ eo:platform: LANDSAT_5
+ eo:sun_azimuth: 69.37743188
+ eo:sun_elevation: 57.95878382
+ landsat:cloud_cover_land: 10.0
+ landsat:collection_category: T1
+ landsat:collection_number: '02'
+ landsat:correction: L2SP
+ landsat:rmse: 4.827
+ landsat:rmse_x: 3.357
+ landsat:rmse_y: 3.469
+ landsat:scene_id: LT52050501984163XXX03
+ landsat:wrs_path: '205'
+ landsat:wrs_row: '050'
+ landsat:wrs_type: '2'
+ odc:file_format: GeoTIFF
+ odc:processing_datetime: '1984-06-11T10:55:42.580038Z'
+ odc:product: ls5_sr
+ odc:region_code: '205050'
+ proj:epsg: 32628
+ proj:shape:
+ - 6961
+ - 7781
+ proj:transform:
+ - 30.0
+ - 0.0
+ - 201885.0
+ - 0.0
+ - -30.0
+ - 1703115.0
+
+measurements:
+ SR_B1:
+ path: LT05_L2SP_205050_19840611_20200918_02_T1_SR_B1.TIF
+ SR_B2:
+ path: LT05_L2SP_205050_19840611_20200918_02_T1_SR_B2.TIF
+ SR_B3:
+ path: LT05_L2SP_205050_19840611_20200918_02_T1_SR_B3.TIF
+ SR_B4:
+ path: LT05_L2SP_205050_19840611_20200918_02_T1_SR_B4.TIF
+ SR_B5:
+ path: LT05_L2SP_205050_19840611_20200918_02_T1_SR_B5.TIF
+ SR_B7:
+ path: LT05_L2SP_205050_19840611_20200918_02_T1_SR_B7.TIF
+ QA_PIXEL:
+ path: LT05_L2SP_205050_19840611_20200918_02_T1_QA_PIXEL.TIF
+ QA_RADSAT:
+ path: LT05_L2SP_205050_19840611_20200918_02_T1_QA_RADSAT.TIF
+ SR_CLOUD_QA:
+ path: LT05_L2SP_205050_19840611_20200918_02_T1_SR_CLOUD_QA.TIF
+ SR_ATMOS_OPACITY:
+ path: LT05_L2SP_205050_19840611_20200918_02_T1_SR_ATMOS_OPACITY.TIF
+
+accessories:
+ index:
+ path: LT05_L2SP_205050_19840611_20200918_02_T1
+ ANG.txt:
+ path: LT05_L2SP_205050_19840611_20200918_02_T1_ANG.txt
+ MTL.txt:
+ path: LT05_L2SP_205050_19840611_20200918_02_T1_MTL.txt
+ MTL.xml:
+ path: LT05_L2SP_205050_19840611_20200918_02_T1_MTL.xml
+ MTL.json:
+ path: LT05_L2SP_205050_19840611_20200918_02_T1_MTL.json
+ thumbnail:
+ path: LT05_L2SP_205050_19840611_20200918_02_T1_thumb_small.jpeg
+ reduced_resolution_browse:
+ path: LT05_L2SP_205050_19840611_20200918_02_T1_thumb_large.jpeg
+
+lineage: {}
+...
+---
+# Dataset
+# url: https://explorer.digitalearth.africa/dataset/8e922543-90d6-558f-bd0d-352cf8feac4c.odc-metadata.yaml
+$schema: https://schemas.opendatacube.org/dataset
+id: 8e922543-90d6-558f-bd0d-352cf8feac4c
+
+label: LT05_L2SP_205050_19890913_20200916_02_T1_SR
+product:
+ name: ls5_sr
+
+location: s3://deafrica-landsat/collection02/level-2/standard/tm/1989/205/050/LT05_L2SP_205050_19890913_20200916_02_T1/LT05_L2SP_205050_19890913_20200916_02_T1_SR_stac.json
+
+crs: epsg:32628
+geometry:
+ type: Polygon
+ coordinates: [[[247530.0, 1700820.0], [207450.0, 1523280.0], [390450.0, 1494480.0],
+ [430080.0, 1671660.0], [247530.0, 1700820.0]]]
+grids:
+ default:
+ shape: [6991, 7801]
+ transform: [30.0, 0.0, 202185.0, 0.0, -30.0, 1702215.0]
+
+properties:
+ datetime: '1989-09-13T10:53:22.450013Z'
+ eo:cloud_cover: 5.0
+ eo:constellation: Landsat
+ eo:instrument: TM
+ eo:off_nadir: 0
+ eo:platform: LANDSAT_5
+ eo:sun_azimuth: 105.47192095
+ eo:sun_elevation: 56.38244577
+ landsat:cloud_cover_land: 6.0
+ landsat:collection_category: T1
+ landsat:collection_number: '02'
+ landsat:correction: L2SP
+ landsat:rmse: 4.752
+ landsat:rmse_x: 3.327
+ landsat:rmse_y: 3.393
+ landsat:scene_id: LT52050501989256MPS00
+ landsat:wrs_path: '205'
+ landsat:wrs_row: '050'
+ landsat:wrs_type: '2'
+ odc:file_format: GeoTIFF
+ odc:processing_datetime: '1989-09-13T10:53:22.450013Z'
+ odc:product: ls5_sr
+ odc:region_code: '205050'
+ proj:epsg: 32628
+ proj:shape:
+ - 6991
+ - 7801
+ proj:transform:
+ - 30.0
+ - 0.0
+ - 202185.0
+ - 0.0
+ - -30.0
+ - 1702215.0
+
+measurements:
+ SR_B1:
+ path: LT05_L2SP_205050_19890913_20200916_02_T1_SR_B1.TIF
+ SR_B2:
+ path: LT05_L2SP_205050_19890913_20200916_02_T1_SR_B2.TIF
+ SR_B3:
+ path: LT05_L2SP_205050_19890913_20200916_02_T1_SR_B3.TIF
+ SR_B4:
+ path: LT05_L2SP_205050_19890913_20200916_02_T1_SR_B4.TIF
+ SR_B5:
+ path: LT05_L2SP_205050_19890913_20200916_02_T1_SR_B5.TIF
+ SR_B7:
+ path: LT05_L2SP_205050_19890913_20200916_02_T1_SR_B7.TIF
+ QA_PIXEL:
+ path: LT05_L2SP_205050_19890913_20200916_02_T1_QA_PIXEL.TIF
+ QA_RADSAT:
+ path: LT05_L2SP_205050_19890913_20200916_02_T1_QA_RADSAT.TIF
+ SR_CLOUD_QA:
+ path: LT05_L2SP_205050_19890913_20200916_02_T1_SR_CLOUD_QA.TIF
+ SR_ATMOS_OPACITY:
+ path: LT05_L2SP_205050_19890913_20200916_02_T1_SR_ATMOS_OPACITY.TIF
+
+accessories:
+ index:
+ path: LT05_L2SP_205050_19890913_20200916_02_T1
+ ANG.txt:
+ path: LT05_L2SP_205050_19890913_20200916_02_T1_ANG.txt
+ MTL.txt:
+ path: LT05_L2SP_205050_19890913_20200916_02_T1_MTL.txt
+ MTL.xml:
+ path: LT05_L2SP_205050_19890913_20200916_02_T1_MTL.xml
+ MTL.json:
+ path: LT05_L2SP_205050_19890913_20200916_02_T1_MTL.json
+ thumbnail:
+ path: LT05_L2SP_205050_19890913_20200916_02_T1_thumb_small.jpeg
+ reduced_resolution_browse:
+ path: LT05_L2SP_205050_19890913_20200916_02_T1_thumb_large.jpeg
+
+lineage: {}
+...
+---
+# Dataset
+# url: https://explorer.digitalearth.africa/dataset/7ffb6023-0536-5a01-9d03-2ef5ceb6f721.odc-metadata.yaml
+$schema: https://schemas.opendatacube.org/dataset
+id: 7ffb6023-0536-5a01-9d03-2ef5ceb6f721
+
+label: LT05_L2SP_205050_20070408_20200830_02_T1_SR
+product:
+ name: ls5_sr
+
+location: s3://deafrica-landsat/collection02/level-2/standard/tm/2007/205/050/LT05_L2SP_205050_20070408_20200830_02_T1/LT05_L2SP_205050_20070408_20200830_02_T1_SR_stac.json
+
+crs: epsg:32628
+geometry:
+ type: Polygon
+ coordinates: [[[234000.0, 1704600.0], [194430.0, 1528260.0], [384300.0, 1497900.0],
+ [423780.0, 1674390.0], [234000.0, 1704600.0]]]
+grids:
+ default:
+ shape: [7001, 7911]
+ transform: [30.0, 0.0, 190485.0, 0.0, -30.0, 1705815.0]
+
+properties:
+ datetime: '2007-04-08T11:22:17.998050Z'
+ eo:cloud_cover: 4.0
+ eo:constellation: Landsat
+ eo:instrument: TM
+ eo:off_nadir: 0
+ eo:platform: LANDSAT_5
+ eo:sun_azimuth: 102.75501138
+ eo:sun_elevation: 62.79753454
+ landsat:cloud_cover_land: 0.0
+ landsat:collection_category: T1
+ landsat:collection_number: '02'
+ landsat:correction: L2SP
+ landsat:rmse: 4.002
+ landsat:rmse_x: 2.892
+ landsat:rmse_y: 2.766
+ landsat:scene_id: LT52050502007098MPS00
+ landsat:wrs_path: '205'
+ landsat:wrs_row: '050'
+ landsat:wrs_type: '2'
+ odc:file_format: GeoTIFF
+ odc:processing_datetime: '2007-04-08T11:22:17.998050Z'
+ odc:product: ls5_sr
+ odc:region_code: '205050'
+ proj:epsg: 32628
+ proj:shape:
+ - 7001
+ - 7911
+ proj:transform:
+ - 30.0
+ - 0.0
+ - 190485.0
+ - 0.0
+ - -30.0
+ - 1705815.0
+
+measurements:
+ SR_B1:
+ path: LT05_L2SP_205050_20070408_20200830_02_T1_SR_B1.TIF
+ SR_B2:
+ path: LT05_L2SP_205050_20070408_20200830_02_T1_SR_B2.TIF
+ SR_B3:
+ path: LT05_L2SP_205050_20070408_20200830_02_T1_SR_B3.TIF
+ SR_B4:
+ path: LT05_L2SP_205050_20070408_20200830_02_T1_SR_B4.TIF
+ SR_B5:
+ path: LT05_L2SP_205050_20070408_20200830_02_T1_SR_B5.TIF
+ SR_B7:
+ path: LT05_L2SP_205050_20070408_20200830_02_T1_SR_B7.TIF
+ QA_PIXEL:
+ path: LT05_L2SP_205050_20070408_20200830_02_T1_QA_PIXEL.TIF
+ QA_RADSAT:
+ path: LT05_L2SP_205050_20070408_20200830_02_T1_QA_RADSAT.TIF
+ SR_CLOUD_QA:
+ path: LT05_L2SP_205050_20070408_20200830_02_T1_SR_CLOUD_QA.TIF
+ SR_ATMOS_OPACITY:
+ path: LT05_L2SP_205050_20070408_20200830_02_T1_SR_ATMOS_OPACITY.TIF
+
+accessories:
+ index:
+ path: LT05_L2SP_205050_20070408_20200830_02_T1
+ ANG.txt:
+ path: LT05_L2SP_205050_20070408_20200830_02_T1_ANG.txt
+ MTL.txt:
+ path: LT05_L2SP_205050_20070408_20200830_02_T1_MTL.txt
+ MTL.xml:
+ path: LT05_L2SP_205050_20070408_20200830_02_T1_MTL.xml
+ MTL.json:
+ path: LT05_L2SP_205050_20070408_20200830_02_T1_MTL.json
+ thumbnail:
+ path: LT05_L2SP_205050_20070408_20200830_02_T1_thumb_small.jpeg
+ reduced_resolution_browse:
+ path: LT05_L2SP_205050_20070408_20200830_02_T1_thumb_large.jpeg
+
+lineage: {}
+...
+---
+# Dataset
+# url: https://explorer.digitalearth.africa/dataset/a6299b8d-2e9f-512c-98f3-8fa70746e9e9.odc-metadata.yaml
+$schema: https://schemas.opendatacube.org/dataset
+id: a6299b8d-2e9f-512c-98f3-8fa70746e9e9
+
+label: LT05_L2SP_168053_19840421_20200918_02_T1_SR
+product:
+ name: ls5_sr
+
+location: s3://deafrica-landsat/collection02/level-2/standard/tm/1984/168/053/LT05_L2SP_168053_19840421_20200918_02_T1/LT05_L2SP_168053_19840421_20200918_02_T1_SR_stac.json
+
+crs: epsg:32637
+geometry:
+ type: Polygon
+ coordinates: [[[491040.0, 1219440.0], [452670.0, 1042770.0], [635010.0, 1015830.0],
+ [672900.0, 1192830.0], [491040.0, 1219440.0]]]
+grids:
+ default:
+ shape: [6891, 7731]
+ transform: [30.0, 0.0, 447285.0, 0.0, -30.0, 1220715.0]
+
+properties:
+ datetime: '1984-04-21T07:07:10.080000Z'
+ eo:cloud_cover: 0.0
+ eo:constellation: Landsat
+ eo:instrument: TM
+ eo:off_nadir: 0
+ eo:platform: LANDSAT_5
+ eo:sun_azimuth: 83.59297863
+ eo:sun_elevation: 57.26686646
+ landsat:cloud_cover_land: 0.0
+ landsat:collection_category: T1
+ landsat:collection_number: '02'
+ landsat:correction: L2SP
+ landsat:rmse: 4.41
+ landsat:rmse_x: 3.079
+ landsat:rmse_y: 3.157
+ landsat:scene_id: LT51680531984112XXX02
+ landsat:wrs_path: '168'
+ landsat:wrs_row: '053'
+ landsat:wrs_type: '2'
+ odc:file_format: GeoTIFF
+ odc:processing_datetime: '1984-04-21T07:07:10.080000Z'
+ odc:product: ls5_sr
+ odc:region_code: '168053'
+ proj:epsg: 32637
+ proj:shape:
+ - 6891
+ - 7731
+ proj:transform:
+ - 30.0
+ - 0.0
+ - 447285.0
+ - 0.0
+ - -30.0
+ - 1220715.0
+
+measurements:
+ SR_B1:
+ path: LT05_L2SP_168053_19840421_20200918_02_T1_SR_B1.TIF
+ SR_B2:
+ path: LT05_L2SP_168053_19840421_20200918_02_T1_SR_B2.TIF
+ SR_B3:
+ path: LT05_L2SP_168053_19840421_20200918_02_T1_SR_B3.TIF
+ SR_B4:
+ path: LT05_L2SP_168053_19840421_20200918_02_T1_SR_B4.TIF
+ SR_B5:
+ path: LT05_L2SP_168053_19840421_20200918_02_T1_SR_B5.TIF
+ SR_B7:
+ path: LT05_L2SP_168053_19840421_20200918_02_T1_SR_B7.TIF
+ QA_PIXEL:
+ path: LT05_L2SP_168053_19840421_20200918_02_T1_QA_PIXEL.TIF
+ QA_RADSAT:
+ path: LT05_L2SP_168053_19840421_20200918_02_T1_QA_RADSAT.TIF
+ SR_CLOUD_QA:
+ path: LT05_L2SP_168053_19840421_20200918_02_T1_SR_CLOUD_QA.TIF
+ SR_ATMOS_OPACITY:
+ path: LT05_L2SP_168053_19840421_20200918_02_T1_SR_ATMOS_OPACITY.TIF
+
+accessories:
+ index:
+ path: LT05_L2SP_168053_19840421_20200918_02_T1
+ ANG.txt:
+ path: LT05_L2SP_168053_19840421_20200918_02_T1_ANG.txt
+ MTL.txt:
+ path: LT05_L2SP_168053_19840421_20200918_02_T1_MTL.txt
+ MTL.xml:
+ path: LT05_L2SP_168053_19840421_20200918_02_T1_MTL.xml
+ MTL.json:
+ path: LT05_L2SP_168053_19840421_20200918_02_T1_MTL.json
+ thumbnail:
+ path: LT05_L2SP_168053_19840421_20200918_02_T1_thumb_small.jpeg
+ reduced_resolution_browse:
+ path: LT05_L2SP_168053_19840421_20200918_02_T1_thumb_large.jpeg
+
+lineage: {}
+...
+---
+# Dataset
+# url: https://explorer.digitalearth.africa/dataset/2cafc5d5-1207-54eb-b8dc-48382648390a.odc-metadata.yaml
+$schema: https://schemas.opendatacube.org/dataset
+id: 2cafc5d5-1207-54eb-b8dc-48382648390a
+
+label: LT05_L2SP_168053_19841030_20200918_02_T1_SR
+product:
+ name: ls5_sr
+
+location: s3://deafrica-landsat/collection02/level-2/standard/tm/1984/168/053/LT05_L2SP_168053_19841030_20200918_02_T1/LT05_L2SP_168053_19841030_20200918_02_T1_SR_stac.json
+
+crs: epsg:32637
+geometry:
+ type: Polygon
+ coordinates: [[[488130.0, 1220010.0], [449850.0, 1043310.0], [632160.0, 1016400.0],
+ [670050.0, 1193400.0], [488130.0, 1220010.0]]]
+grids:
+ default:
+ shape: [6901, 7731]
+ transform: [30.0, 0.0, 444285.0, 0.0, -30.0, 1221315.0]
+
+properties:
+ datetime: '1984-10-30T07:09:59.330075Z'
+ eo:cloud_cover: 4.0
+ eo:constellation: Landsat
+ eo:instrument: TM
+ eo:off_nadir: 0
+ eo:platform: LANDSAT_5
+ eo:sun_azimuth: 129.40676507
+ eo:sun_elevation: 52.63393032
+ landsat:cloud_cover_land: 4.0
+ landsat:collection_category: T1
+ landsat:collection_number: '02'
+ landsat:correction: L2SP
+ landsat:rmse: 4.555
+ landsat:rmse_x: 3.047
+ landsat:rmse_y: 3.386
+ landsat:scene_id: LT51680531984304XXX01
+ landsat:wrs_path: '168'
+ landsat:wrs_row: '053'
+ landsat:wrs_type: '2'
+ odc:file_format: GeoTIFF
+ odc:processing_datetime: '1984-10-30T07:09:59.330075Z'
+ odc:product: ls5_sr
+ odc:region_code: '168053'
+ proj:epsg: 32637
+ proj:shape:
+ - 6901
+ - 7731
+ proj:transform:
+ - 30.0
+ - 0.0
+ - 444285.0
+ - 0.0
+ - -30.0
+ - 1221315.0
+
+measurements:
+ SR_B1:
+ path: LT05_L2SP_168053_19841030_20200918_02_T1_SR_B1.TIF
+ SR_B2:
+ path: LT05_L2SP_168053_19841030_20200918_02_T1_SR_B2.TIF
+ SR_B3:
+ path: LT05_L2SP_168053_19841030_20200918_02_T1_SR_B3.TIF
+ SR_B4:
+ path: LT05_L2SP_168053_19841030_20200918_02_T1_SR_B4.TIF
+ SR_B5:
+ path: LT05_L2SP_168053_19841030_20200918_02_T1_SR_B5.TIF
+ SR_B7:
+ path: LT05_L2SP_168053_19841030_20200918_02_T1_SR_B7.TIF
+ QA_PIXEL:
+ path: LT05_L2SP_168053_19841030_20200918_02_T1_QA_PIXEL.TIF
+ QA_RADSAT:
+ path: LT05_L2SP_168053_19841030_20200918_02_T1_QA_RADSAT.TIF
+ SR_CLOUD_QA:
+ path: LT05_L2SP_168053_19841030_20200918_02_T1_SR_CLOUD_QA.TIF
+ SR_ATMOS_OPACITY:
+ path: LT05_L2SP_168053_19841030_20200918_02_T1_SR_ATMOS_OPACITY.TIF
+
+accessories:
+ index:
+ path: LT05_L2SP_168053_19841030_20200918_02_T1
+ ANG.txt:
+ path: LT05_L2SP_168053_19841030_20200918_02_T1_ANG.txt
+ MTL.txt:
+ path: LT05_L2SP_168053_19841030_20200918_02_T1_MTL.txt
+ MTL.xml:
+ path: LT05_L2SP_168053_19841030_20200918_02_T1_MTL.xml
+ MTL.json:
+ path: LT05_L2SP_168053_19841030_20200918_02_T1_MTL.json
+ thumbnail:
+ path: LT05_L2SP_168053_19841030_20200918_02_T1_thumb_small.jpeg
+ reduced_resolution_browse:
+ path: LT05_L2SP_168053_19841030_20200918_02_T1_thumb_large.jpeg
+
+lineage: {}
+...
+---
+# Dataset
+# url: https://explorer.digitalearth.africa/dataset/24dc56c2-922a-5572-896b-9168b7d02595.odc-metadata.yaml
+$schema: https://schemas.opendatacube.org/dataset
+id: 24dc56c2-922a-5572-896b-9168b7d02595
+
+label: LT05_L2SP_205047_19840408_20200918_02_T1_SR
+product:
+ name: ls5_sr
+
+location: s3://deafrica-landsat/collection02/level-2/standard/tm/1984/205/047/LT05_L2SP_205047_19840408_20200918_02_T1/LT05_L2SP_205047_19840408_20200918_02_T1_SR_stac.json
+
+crs: epsg:32628
+geometry:
+ type: Polygon
+ coordinates: [[[353070.0, 2179860.0], [313170.0, 2003460.0], [495840.0, 1974510.0],
+ [535350.0, 2151330.0], [353070.0, 2179860.0]]]
+grids:
+ default:
+ shape: [6961, 7781]
+ transform: [30.0, 0.0, 308085.0, 0.0, -30.0, 2181315.0]
+
+properties:
+ datetime: '1984-04-08T10:53:26.058000Z'
+ eo:cloud_cover: 14.0
+ eo:constellation: Landsat
+ eo:instrument: TM
+ eo:off_nadir: 0
+ eo:platform: LANDSAT_5
+ eo:sun_azimuth: 105.29540246
+ eo:sun_elevation: 56.12926809
+ landsat:cloud_cover_land: 7.0
+ landsat:collection_category: T1
+ landsat:collection_number: '02'
+ landsat:correction: L2SP
+ landsat:rmse: 9.243
+ landsat:rmse_x: 7.89
+ landsat:rmse_y: 4.815
+ landsat:scene_id: LT52050471984099XXX02
+ landsat:wrs_path: '205'
+ landsat:wrs_row: '047'
+ landsat:wrs_type: '2'
+ odc:file_format: GeoTIFF
+ odc:processing_datetime: '1984-04-08T10:53:26.058000Z'
+ odc:product: ls5_sr
+ odc:region_code: '205047'
+ proj:epsg: 32628
+ proj:shape:
+ - 6961
+ - 7781
+ proj:transform:
+ - 30.0
+ - 0.0
+ - 308085.0
+ - 0.0
+ - -30.0
+ - 2181315.0
+
+measurements:
+ SR_B1:
+ path: LT05_L2SP_205047_19840408_20200918_02_T1_SR_B1.TIF
+ SR_B2:
+ path: LT05_L2SP_205047_19840408_20200918_02_T1_SR_B2.TIF
+ SR_B3:
+ path: LT05_L2SP_205047_19840408_20200918_02_T1_SR_B3.TIF
+ SR_B4:
+ path: LT05_L2SP_205047_19840408_20200918_02_T1_SR_B4.TIF
+ SR_B5:
+ path: LT05_L2SP_205047_19840408_20200918_02_T1_SR_B5.TIF
+ SR_B7:
+ path: LT05_L2SP_205047_19840408_20200918_02_T1_SR_B7.TIF
+ QA_PIXEL:
+ path: LT05_L2SP_205047_19840408_20200918_02_T1_QA_PIXEL.TIF
+ QA_RADSAT:
+ path: LT05_L2SP_205047_19840408_20200918_02_T1_QA_RADSAT.TIF
+ SR_CLOUD_QA:
+ path: LT05_L2SP_205047_19840408_20200918_02_T1_SR_CLOUD_QA.TIF
+ SR_ATMOS_OPACITY:
+ path: LT05_L2SP_205047_19840408_20200918_02_T1_SR_ATMOS_OPACITY.TIF
+
+accessories:
+ index:
+ path: LT05_L2SP_205047_19840408_20200918_02_T1
+ ANG.txt:
+ path: LT05_L2SP_205047_19840408_20200918_02_T1_ANG.txt
+ MTL.txt:
+ path: LT05_L2SP_205047_19840408_20200918_02_T1_MTL.txt
+ MTL.xml:
+ path: LT05_L2SP_205047_19840408_20200918_02_T1_MTL.xml
+ MTL.json:
+ path: LT05_L2SP_205047_19840408_20200918_02_T1_MTL.json
+ thumbnail:
+ path: LT05_L2SP_205047_19840408_20200918_02_T1_thumb_small.jpeg
+ reduced_resolution_browse:
+ path: LT05_L2SP_205047_19840408_20200918_02_T1_thumb_large.jpeg
+
+lineage: {}
+...
diff --git a/integration_tests/data/products/ls5_sr.odc-product.yaml b/integration_tests/data/products/ls5_sr.odc-product.yaml
new file mode 100644
index 000000000..19887758a
--- /dev/null
+++ b/integration_tests/data/products/ls5_sr.odc-product.yaml
@@ -0,0 +1,250 @@
+---
+# Product
+# url: https://explorer.digitalearth.africa/products/ls5_sr.odc-product.yaml
+name: ls5_sr
+license: CC-BY-4.0
+metadata_type: eo3
+description: USGS Landsat 5 Collection 2 Level-2 Surface Reflectance
+metadata:
+ product:
+ name: ls5_sr
+measurements:
+- name: SR_B1
+ dtype: uint16
+ units: '1'
+ nodata: 0
+ aliases:
+ - band_1
+ - blue
+- name: SR_B2
+ dtype: uint16
+ units: '1'
+ nodata: 0
+ aliases:
+ - band_2
+ - green
+- name: SR_B3
+ dtype: uint16
+ units: '1'
+ nodata: 0
+ aliases:
+ - band_3
+ - red
+- name: SR_B4
+ dtype: uint16
+ units: '1'
+ nodata: 0
+ aliases:
+ - band_4
+ - nir
+- name: SR_B5
+ dtype: uint16
+ units: '1'
+ nodata: 0
+ aliases:
+ - band_5
+ - swir_1
+- name: SR_B7
+ dtype: uint16
+ units: '1'
+ nodata: 0
+ aliases:
+ - band_7
+ - swir_2
+- name: QA_PIXEL
+ dtype: uint16
+ units: bit_index
+ nodata: 1
+ aliases:
+ - pq
+ - pixel_quality
+ flags_definition:
+ snow:
+ bits: 5
+ values:
+ '0': not_high_confidence
+ '1': high_confidence
+ clear:
+ bits: 6
+ values:
+ '0': false
+ '1': true
+ cloud:
+ bits: 3
+ values:
+ '0': not_high_confidence
+ '1': high_confidence
+ water:
+ bits: 7
+ values:
+ '0': land_or_cloud
+ '1': water
+ nodata:
+ bits: 0
+ values:
+ '0': false
+ '1': true
+ cloud_shadow:
+ bits: 4
+ values:
+ '0': not_high_confidence
+ '1': high_confidence
+ dilated_cloud:
+ bits: 1
+ values:
+ '0': not_dilated
+ '1': dilated
+ cloud_confidence:
+ bits:
+ - 8
+ - 9
+ values:
+ '0': none
+ '1': low
+ '2': medium
+ '3': high
+ snow_ice_confidence:
+ bits:
+ - 12
+ - 13
+ values:
+ '0': none
+ '1': low
+ '2': reserved
+ '3': high
+ cloud_shadow_confidence:
+ bits:
+ - 10
+ - 11
+ values:
+ '0': none
+ '1': low
+ '2': reserved
+ '3': high
+- name: QA_RADSAT
+ dtype: uint16
+ units: bit_index
+ nodata: 0
+ aliases:
+ - radsat
+ - radiometric_saturation
+ flags_definition:
+ dropped_pixel:
+ bits: 9
+ values:
+ '0': false
+ '1': true
+ nir_saturation:
+ bits: 3
+ values:
+ '0': false
+ '1': true
+ red_saturation:
+ bits: 2
+ values:
+ '0': false
+ '1': true
+ tir_saturation:
+ bits: 5
+ values:
+ '0': false
+ '1': true
+ blue_saturation:
+ bits: 0
+ values:
+ '0': false
+ '1': true
+ green_saturation:
+ bits: 1
+ values:
+ '0': false
+ '1': true
+ band_1_saturation:
+ bits: 0
+ values:
+ '0': false
+ '1': true
+ band_2_saturation:
+ bits: 1
+ values:
+ '0': false
+ '1': true
+ band_3_saturation:
+ bits: 2
+ values:
+ '0': false
+ '1': true
+ band_4_saturation:
+ bits: 3
+ values:
+ '0': false
+ '1': true
+ band_5_saturation:
+ bits: 4
+ values:
+ '0': false
+ '1': true
+ band_6_saturation:
+ bits: 5
+ values:
+ '0': false
+ '1': true
+ band_7_saturation:
+ bits: 6
+ values:
+ '0': false
+ '1': true
+ swir_1_saturation:
+ bits: 4
+ values:
+ '0': false
+ '1': true
+ swir_2_saturation:
+ bits: 6
+ values:
+ '0': false
+ '1': true
+- name: SR_ATMOS_OPACITY
+ dtype: int16
+ units: '1'
+ nodata: -9999
+ aliases:
+ - atmos_opacity
+- name: SR_CLOUD_QA
+ dtype: uint8
+ units: bit_index
+ nodata: 0
+ aliases:
+ - cloud_qa
+ flags_definition:
+ snow:
+ bits: 4
+ values:
+ '0': false
+ '1': true
+ cloud:
+ bits: 1
+ values:
+ '0': false
+ '1': true
+ water:
+ bits: 5
+ values:
+ '0': false
+ '1': true
+ cloud_shadow:
+ bits: 2
+ values:
+ '0': false
+ '1': true
+ adjacent_to_cloud:
+ bits: 3
+ values:
+ '0': false
+ '1': true
+ dark_dense_vegetation:
+ bits: 0
+ values:
+ '0': false
+ '1': true
+...
diff --git a/integration_tests/test_page_loads.py b/integration_tests/test_page_loads.py
index a9e6370aa..7af8a2021 100644
--- a/integration_tests/test_page_loads.py
+++ b/integration_tests/test_page_loads.py
@@ -126,6 +126,11 @@ def test_invalid_footprint_wofs_summary_load(client: FlaskClient):
html = get_html(client, "/wofs_summary")
check_dataset_count(html, 1244)
+    # In this test setup, the page returns an empty dataset listing
+ html = get_html(client, "/products/wofs_summary/datasets")
+ search_results = html.find(".search-result a")
+ assert len(search_results) == 0
+
def test_all_products_are_shown(client: FlaskClient):
"""
diff --git a/integration_tests/test_region_page.py b/integration_tests/test_region_page.py
new file mode 100644
index 000000000..43f64ebeb
--- /dev/null
+++ b/integration_tests/test_region_page.py
@@ -0,0 +1,109 @@
+"""
+Tests that load pages and check the contained text.
+"""
+from pathlib import Path
+
+import pytest
+from datacube.index.hl import Doc2Dataset
+from datacube.utils import read_documents
+from flask.testing import FlaskClient
+
+from integration_tests.asserts import (
+ check_product_date_selector_not_contain,
+ check_product_date_selector_contains,
+ get_html,
+)
+
+TEST_DATA_DIR = Path(__file__).parent / "data"
+
+
+@pytest.fixture(scope="module", autouse=True)
+def populate_index(dataset_loader, module_dea_index):
+ """
+    Index populated with example datasets. Assumes our tests won't modify the data!
+
+ It's module-scoped as it's expensive to populate.
+ """
+ dataset_count = 0
+ create_dataset = Doc2Dataset(module_dea_index)
+ for _, s2_dataset_doc in read_documents(TEST_DATA_DIR / "ls5_sr-sample.yaml"):
+ try:
+ dataset, err = create_dataset(
+ s2_dataset_doc, "file://example.com/test_dataset/"
+ )
+ assert dataset is not None, err
+ created = module_dea_index.datasets.add(dataset)
+ assert created.type.name == "ls5_sr"
+ dataset_count += 1
+ except AttributeError as ae:
+ assert dataset_count == 7
+ print(ae)
+ assert dataset_count == 7
+ return module_dea_index
+
+
+def test_product_region_page_dataset_count(client: FlaskClient):
+ # These datasets have gigantic footprints that can trip up postgis.
+ html = get_html(client, "/product/ls5_sr/regions/168053")
+
+ search_results = html.find(".search-result")
+ assert len(search_results) == 2
+
+ html = get_html(client, "/product/ls5_sr/regions/205050")
+
+ search_results = html.find(".search-result")
+ assert len(search_results) == 4
+
+
+def test_product_page_date_selector(client: FlaskClient):
+ """
+ This tests the product page date selector displays correctly
+ """
+ html = get_html(client, "/product/ls5_sr")
+ check_product_date_selector_contains(
+ html, "1984"
+ )
+ check_product_date_selector_contains(
+ html, "1989"
+ )
+ check_product_date_selector_contains(
+ html, "2007"
+ )
+
+
+def test_product_region_page_date_selector(client: FlaskClient):
+ html = get_html(client, "/product/ls5_sr/regions/168053")
+ check_product_date_selector_contains(
+ html, "1984"
+ )
+ check_product_date_selector_not_contain(
+ html, "1989"
+ )
+ check_product_date_selector_not_contain(
+ html, "2007"
+ )
+
+ html = get_html(client, "/product/ls5_sr/regions/205050")
+ check_product_date_selector_contains(
+ html, "1984"
+ )
+ check_product_date_selector_contains(
+ html, "1989"
+ )
+ check_product_date_selector_contains(
+ html, "2007"
+ )
+
+ html = get_html(client, "/product/ls5_sr/regions/168053/1984")
+ check_product_date_selector_contains(
+ html, "1984", "October"
+ )
+
+ check_product_date_selector_not_contain(
+ html, "1984", "June"
+ )
+
+ html = get_html(client, "/product/ls5_sr/regions/168053/1984/10")
+ check_product_date_selector_contains(
+ html, "1984", "October", "30th"
+ )
diff --git a/integration_tests/test_summarise_data.py b/integration_tests/test_summarise_data.py
index 779b47a8e..4d5422a1e 100644
--- a/integration_tests/test_summarise_data.py
+++ b/integration_tests/test_summarise_data.py
@@ -166,6 +166,8 @@ def test_generate_incremental_archivals(run_generate, summary_store: SummaryStor
original_summary = summary_store.get("ls8_nbar_scene")
original_dataset_count = original_summary.dataset_count
+ assert original_dataset_count == 3036
+
# ... and we archive one dataset ...
product_name = "ls8_nbar_scene"
dataset_id = _one_dataset(index, product_name)
diff --git a/integration_tests/test_utc_tst.py b/integration_tests/test_utc_tst.py
index e913597ab..004aa7d26 100644
--- a/integration_tests/test_utc_tst.py
+++ b/integration_tests/test_utc_tst.py
@@ -8,7 +8,7 @@
from datacube.utils import read_documents
from flask.testing import FlaskClient
-from integration_tests.asserts import check_dataset_count, get_html
+from integration_tests.asserts import check_dataset_count, check_product_date_selector_contains, get_html
TEST_DATA_DIR = Path(__file__).parent / "data"
@@ -51,3 +51,4 @@ def test_yearly_dataset_count(client: FlaskClient):
html = get_html(client, "/ls5_fc_albers/2011")
check_dataset_count(html, 3)
+ check_product_date_selector_contains(html, "2011", "January")