diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 00000000..e69de29b diff --git a/404.html b/404.html new file mode 100644 index 00000000..c691db28 --- /dev/null +++ b/404.html @@ -0,0 +1,1075 @@ + + + + + + + + + + + + + + + + + + + Sopa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ +

404 - Not found

+ +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/api/_sdata/_sdata.md b/api/_sdata/_sdata.md new file mode 100644 index 00000000..8683d4ee --- /dev/null +++ b/api/_sdata/_sdata.md @@ -0,0 +1,26 @@ +!!! note + These are convenient tools that operates on `SpatialData` objects + +::: sopa._sdata.get_boundaries + options: + show_root_heading: true + +::: sopa._sdata.get_intrinsic_cs + options: + show_root_heading: true + +::: sopa._sdata.to_intrinsic + options: + show_root_heading: true + +::: sopa._sdata.get_intensities + options: + show_root_heading: true + +::: sopa._sdata.iter_scales + options: + show_root_heading: true + +::: sopa._sdata.get_spatial_image + options: + show_root_heading: true diff --git a/api/_sdata/index.html b/api/_sdata/index.html new file mode 100644 index 00000000..1bca4501 --- /dev/null +++ b/api/_sdata/index.html @@ -0,0 +1,2006 @@ + + + + + + + + + + + + + + + + + + + + + + + sopa._sdata - Sopa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + +

sopa._sdata

+ +
+

Note

+

These are convenient tools that operates on SpatialData objects

+
+ + + +
+ + + +

+ sopa._sdata.get_boundaries(sdata, return_key=False, warn=False) + +

+ + +
+ +

Gets the baysor boundaries or cellpose boundaries of a SpatialData object after running Sopa

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
sdata + SpatialData + +
+

A SpatialData object

+
+
+ required +
return_key + bool + +
+

Whether to return the key of the shapes or not.

+
+
+ False +
warn + bool + +
+

If True, prints a warning if no boundary is found. Else, raises an error.

+
+
+ False +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ GeoDataFrame | tuple[str, GeoDataFrame] | None + +
+

A GeoDataFrame containing the boundaries, or a tuple (shapes_key, geo_df)

+
+
+ +
+ Source code in sopa/_sdata.py +
18
+19
+20
+21
+22
+23
+24
+25
+26
+27
+28
+29
+30
+31
+32
+33
+34
+35
+36
+37
+38
+39
+40
+41
+42
def get_boundaries(
+    sdata: SpatialData, return_key: bool = False, warn: bool = False
+) -> gpd.GeoDataFrame | tuple[str, gpd.GeoDataFrame] | None:
+    """Gets the baysor boundaries or cellpose boundaries of a SpatialData object after running Sopa
+
+    Args:
+        sdata: A SpatialData object
+        return_key: Whether to return the key of the shapes or not.
+        warn: If `True`, prints a warning if no boundary is found. Else, raises an error.
+
+    Returns:
+        A `GeoDataFrame` containing the boundaries, or a tuple `(shapes_key, geo_df)`
+    """
+    for shapes_key in [SopaKeys.BAYSOR_BOUNDARIES, SopaKeys.CELLPOSE_BOUNDARIES]:
+        res = _try_get_boundaries(sdata, shapes_key, return_key)
+        if res is not None:
+            return res
+
+    error_message = "sdata object has no cellpose boundaries and no baysor boundaries. Consider running segmentation first."
+
+    if not warn:
+        raise ValueError(error_message)
+
+    log.warn(error_message)
+    return (None, None) if return_key else None
+
+
+
+ +
+ + +
+ + + +

+ sopa._sdata.get_intrinsic_cs(sdata, element, name=None) + +

+ + +
+ +

Gets the name of the intrinsic coordinate system of an element

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
sdata + SpatialData + +
+

A SpatialData object

+
+
+ required +
element + SpatialElement | str + +
+

SpatialElement, or its key

+
+
+ required +
name + str | None + +
+

Name to provide to the intrinsic coordinate system if not existing. By default, uses the element id.

+
+
+ None +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ str + +
+

Name of the intrinsic coordinate system

+
+
+ +
+ Source code in sopa/_sdata.py +
53
+54
+55
+56
+57
+58
+59
+60
+61
+62
+63
+64
+65
+66
+67
+68
+69
+70
+71
+72
+73
+74
+75
+76
+77
def get_intrinsic_cs(
+    sdata: SpatialData, element: SpatialElement | str, name: str | None = None
+) -> str:
+    """Gets the name of the intrinsic coordinate system of an element
+
+    Args:
+        sdata: A SpatialData object
+        element: `SpatialElement`, or its key
+        name: Name to provide to the intrinsic coordinate system if not existing. By default, uses the element id.
+
+    Returns:
+        Name of the intrinsic coordinate system
+    """
+    if name is None:
+        name = f"_{element if isinstance(element, str) else id(element)}_intrinsic"
+
+    if isinstance(element, str):
+        element = sdata[element]
+
+    for cs, transform in get_transformation(element, get_all=True).items():
+        if isinstance(transform, Identity):
+            return cs
+
+    set_transformation(element, Identity(), name)
+    return name
+
+
+
+ +
+ + +
+ + + +

+ sopa._sdata.to_intrinsic(sdata, element, element_cs) + +

+ + +
+ +

Transforms a SpatialElement into the intrinsic coordinate system of another SpatialElement

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
sdata + SpatialData + +
+

A SpatialData object

+
+
+ required +
element + SpatialElement | str + +
+

SpatialElement to transform, or its key

+
+
+ required +
element_cs + SpatialElement | str + +
+

SpatialElement of the target coordinate system, or its key

+
+
+ required +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ SpatialElement + +
+

The SpatialElement after transformation in the target coordinate system

+
+
+ +
+ Source code in sopa/_sdata.py +
80
+81
+82
+83
+84
+85
+86
+87
+88
+89
+90
+91
+92
+93
+94
+95
+96
def to_intrinsic(
+    sdata: SpatialData, element: SpatialElement | str, element_cs: SpatialElement | str
+) -> SpatialElement:
+    """Transforms a `SpatialElement` into the intrinsic coordinate system of another `SpatialElement`
+
+    Args:
+        sdata: A SpatialData object
+        element: `SpatialElement` to transform, or its key
+        element_cs: `SpatialElement` of the target coordinate system, or its key
+
+    Returns:
+        The `SpatialElement` after transformation in the target coordinate system
+    """
+    if isinstance(element, str):
+        element = sdata[element]
+    cs = get_intrinsic_cs(sdata, element_cs)
+    return sdata.transform_element_to_coordinate_system(element, cs)
+
+
+
+ +
+ + +
+ + + +

+ sopa._sdata.get_intensities(sdata) + +

+ + +
+ +

Gets the intensity dataframe of shape n_obs x n_channels

+ +
+ Source code in sopa/_sdata.py +
def get_intensities(sdata: SpatialData) -> pd.DataFrame | None:
+    """Gets the intensity dataframe of shape `n_obs x n_channels`"""
+    if not sdata.table.uns[SopaKeys.UNS_KEY][SopaKeys.UNS_HAS_INTENSITIES]:
+        return None
+
+    if sdata.table.uns[SopaKeys.UNS_KEY][SopaKeys.UNS_HAS_TRANSCRIPTS]:
+        return sdata.table.obsm[SopaKeys.INTENSITIES_OBSM]
+
+    return sdata.table.to_df()
+
+
+
+ +
+ + +
+ + + +

+ sopa._sdata.iter_scales(image) + +

+ + +
+ +

Iterates through all the scales of a MultiscaleSpatialImage

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
image + MultiscaleSpatialImage + +
+

a MultiscaleSpatialImage

+
+
+ required +
+ + + +

Yields:

+ + + + + + + + + + + + + +
TypeDescription
+ DataArray + +
+

Each scale (as a xr.DataArray)

+
+
+ +
+ Source code in sopa/_sdata.py +
def iter_scales(image: MultiscaleSpatialImage) -> Iterator[xr.DataArray]:
+    """Iterates through all the scales of a `MultiscaleSpatialImage`
+
+    Args:
+        image: a `MultiscaleSpatialImage`
+
+    Yields:
+        Each scale (as a `xr.DataArray`)
+    """
+    assert isinstance(
+        image, MultiscaleSpatialImage
+    ), f"Multiscale iteration is reserved for type MultiscaleSpatialImage. Found {type(image)}"
+
+    for scale in image:
+        yield next(iter(image[scale].values()))
+
+
+
+ +
+ + +
+ + + +

+ sopa._sdata.get_spatial_image(sdata, key=None, return_key=False) + +

+ + +
+ +

Gets a SpatialImage from a SpatialData object (if the image has multiple scale, the scale0 is returned)

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
sdata + SpatialData + +
+

SpatialData object.

+
+
+ required +
key + str | None + +
+

Optional image key. If None, returns the only image (if only one), or raises an error.

+
+
+ None +
return_key + bool + +
+

Whether to also return the key of the image.

+
+
+ False +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ SpatialImage | tuple[str, SpatialImage] + +
+

If return_key is False, only the image is returned, else a tuple (image_key, image)

+
+
+ +
+ Source code in sopa/_sdata.py +
def get_spatial_image(
+    sdata: SpatialData, key: str | None = None, return_key: bool = False
+) -> SpatialImage | tuple[str, SpatialImage]:
+    """Gets a SpatialImage from a SpatialData object (if the image has multiple scale, the `scale0` is returned)
+
+    Args:
+        sdata: SpatialData object.
+        key: Optional image key. If `None`, returns the only image (if only one), or raises an error.
+        return_key: Whether to also return the key of the image.
+
+    Returns:
+        If `return_key` is False, only the image is returned, else a tuple `(image_key, image)`
+    """
+    key = get_key(sdata, "images", key)
+
+    assert key is not None, "One image in `sdata.images` is required"
+
+    image = sdata.images[key]
+    if isinstance(image, MultiscaleSpatialImage):
+        image = SpatialImage(next(iter(image["scale0"].values())))
+
+    if return_key:
+        return key, image
+    return image
+
+
+
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/api/annotation/fluorescence/fluorescence.md b/api/annotation/fluorescence/fluorescence.md new file mode 100644 index 00000000..882af767 --- /dev/null +++ b/api/annotation/fluorescence/fluorescence.md @@ -0,0 +1,7 @@ +::: sopa.annotation.higher_z_score + options: + show_root_heading: true + +::: sopa.annotation.preprocess_fluo + options: + show_root_heading: true diff --git a/api/annotation/fluorescence/index.html b/api/annotation/fluorescence/index.html new file mode 100644 index 00000000..6db11533 --- /dev/null +++ b/api/annotation/fluorescence/index.html @@ -0,0 +1,1413 @@ + + + + + + + + + + + + + + + + + + + + + + + sopa.annotation.fluorescence - Sopa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + +

sopa.annotation.fluorescence

+ +
+ + + +

+ sopa.annotation.higher_z_score(adata, marker_cell_dict, cell_type_key='cell_type') + +

+ + +
+ +

Simple channel-based segmentation using a marker-to-population dictionary

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
adata + AnnData + +
+

An AnnData object

+
+
+ required +
marker_cell_dict + dict + +
+

Dictionary whose keys are channels, and values are the corresponding populations.

+
+
+ required +
cell_type_key + str + +
+

Key of adata.obs where annotations will be stored

+
+
+ 'cell_type' +
+ +
+ Source code in sopa/annotation/fluorescence.py +
33
+34
+35
+36
+37
+38
+39
+40
+41
+42
+43
+44
+45
+46
+47
+48
+49
def higher_z_score(adata: AnnData, marker_cell_dict: dict, cell_type_key: str = "cell_type"):
+    """Simple channel-based segmentation using a marker-to-population dictionary
+
+    Args:
+        adata: An `AnnData` object
+        marker_cell_dict: Dictionary whose keys are channels, and values are the corresponding populations.
+        cell_type_key: Key of `adata.obs` where annotations will be stored
+    """
+    adata.obsm[SopaKeys.Z_SCORES] = preprocess_fluo(adata)
+
+    markers, cell_types = list(marker_cell_dict.keys()), np.array(list(marker_cell_dict.values()))
+    ct_indices = adata.obsm[SopaKeys.Z_SCORES][markers].values.argmax(1)
+
+    adata.obs[cell_type_key] = cell_types[ct_indices]
+    adata.uns[SopaKeys.UNS_KEY][SopaKeys.UNS_CELL_TYPES] = [cell_type_key]
+
+    log.info(f"Annotation counts: {adata.obs[cell_type_key].value_counts()}")
+
+
+
+ +
+ + +
+ + + +

+ sopa.annotation.preprocess_fluo(adata) + +

+ + +
+ +

Preprocess fluorescence data. For each column \(X\), we compute \(asinh(\frac{X}{5Q(0.2, X)})\) and apply standardization

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
adata + AnnData + +
+

An AnnData object

+
+
+ required +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ DataFrame + +
+

A dataframe of preprocessed channels intensities

+
+
+ +
+ Source code in sopa/annotation/fluorescence.py +
12
+13
+14
+15
+16
+17
+18
+19
+20
+21
+22
+23
+24
+25
+26
+27
+28
+29
+30
def preprocess_fluo(adata: AnnData) -> pd.DataFrame:
+    """Preprocess fluorescence data. For each column $X$, we compute $asinh(\\frac{X}{5Q(0.2, X)})$ and apply standardization
+
+    Args:
+        adata: An `AnnData` object
+
+    Returns:
+        A dataframe of preprocessed channels intensities
+    """
+    if SopaKeys.INTENSITIES_OBSM in adata.obsm:
+        df = adata.obsm[SopaKeys.INTENSITIES_OBSM]
+    else:
+        df = adata.to_df()
+
+    divider = 5 * np.quantile(df, 0.2, axis=0)
+    divider[divider == 0] = df.max(axis=0)[divider == 0]
+
+    scaled = np.arcsinh(df / divider)
+    return (scaled - scaled.mean(0)) / scaled.std(0)
+
+
+
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/api/annotation/tangram/index.html b/api/annotation/tangram/index.html new file mode 100644 index 00000000..89f7b209 --- /dev/null +++ b/api/annotation/tangram/index.html @@ -0,0 +1,1349 @@ + + + + + + + + + + + + + + + + + + + + + + + sopa.annotation.tangram - Sopa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + +

sopa.annotation.tangram

+ +
+ + + +

+ sopa.annotation.tangram.tangram_annotate(sdata, adata_sc, cell_type_key, reference_preprocessing=None, bag_size=10000, max_obs_reference=10000, **kwargs) + +

+ + +
+ +

Tangram multi-level annotation. Tangram is run on multiple bags of cells to decrease the RAM usage.

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
sdata + SpatialData + +
+

A SpatialData object

+
+
+ required +
adata_sc + AnnData + +
+

A scRNAseq annotated reference

+
+
+ required +
cell_type_key + str + +
+

Key of adata_sc.obs containing the cell types. For multi-level annotation, provide other levels like such: if cell_type_key = "ct", then "ct_level1" and "ct_level2" are the two next levels

+
+
+ required +
reference_preprocessing + str + +
+

Preprocessing method used on the reference. Can be "log1p" (normalize_total + log1p) or "normalized" (just normalize_total). By default, consider that no processing was applied (raw counts)

+
+
+ None +
bag_size + int + +
+

Size of each bag on which tangram will be run. Use smaller bags to lower the RAM usage

+
+
+ 10000 +
max_obs_reference + int + +
+

Maximum number of cells used in adata_sc at each level. Decrease it to lower the RAM usage.

+
+
+ 10000 +
+ +
+ Source code in sopa/annotation/tangram/run.py +
23
+24
+25
+26
+27
+28
+29
+30
+31
+32
+33
+34
+35
+36
+37
+38
+39
+40
+41
+42
+43
+44
+45
+46
+47
+48
+49
+50
+51
+52
def tangram_annotate(
+    sdata: SpatialData,
+    adata_sc: AnnData,
+    cell_type_key: str,
+    reference_preprocessing: str = None,
+    bag_size: int = 10_000,
+    max_obs_reference: int = 10_000,
+    **kwargs,
+):
+    """Tangram multi-level annotation. Tangram is run on multiple bags of cells to decrease the RAM usage.
+
+    Args:
+        sdata: A `SpatialData` object
+        adata_sc: A scRNAseq annotated reference
+        cell_type_key: Key of `adata_sc.obs` containing the cell types. For multi-level annotation, provide other levels like such: if `cell_type_key = "ct"`, then `"ct_level1"` and `"ct_level2"` are the two next levels
+        reference_preprocessing: Preprocessing method used on the reference. Can be `"log1p"` (normalize_total + log1p) or `"normalized"` (just normalize_total). By default, consider that no processing was applied (raw counts)
+        bag_size: Size of each bag on which tangram will be run. Use smaller bags to lower the RAM usage
+        max_obs_reference: Maximum number of cells used in `adata_sc` at each level. Decrease it to lower the RAM usage.
+    """
+    ad_sp = sdata.table
+
+    MultiLevelAnnotation(
+        ad_sp,
+        adata_sc,
+        cell_type_key,
+        reference_preprocessing,
+        bag_size,
+        max_obs_reference,
+        **kwargs,
+    ).run()
+
+
+
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/api/annotation/tangram/tangram.md b/api/annotation/tangram/tangram.md new file mode 100644 index 00000000..2a3cfb23 --- /dev/null +++ b/api/annotation/tangram/tangram.md @@ -0,0 +1,3 @@ +::: sopa.annotation.tangram.tangram_annotate + options: + show_root_heading: true diff --git a/api/io.explorer/index.html b/api/io.explorer/index.html new file mode 100644 index 00000000..7ad2aab1 --- /dev/null +++ b/api/io.explorer/index.html @@ -0,0 +1,3663 @@ + + + + + + + + + + + + + + + + + + + + + + + sopa.io.explorer - Sopa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + + + + + +
+
+ + + + + + + +

sopa.io.explorer

+ +
+ + + +

+ sopa.io.explorer.write(path, sdata, image_key=None, shapes_key=None, points_key=None, gene_column=None, pixel_size=0.2125, layer=None, polygon_max_vertices=13, lazy=True, ram_threshold_gb=4, mode=None, save_h5ad=False) + +

+ + +
+ +

Transform a SpatialData object into inputs for the Xenium Explorer. +After running this function, double-click on the experiment.xenium file to open it.

+
+

Software download

+

Make sure you have the latest version of the Xenium Explorer

+
+ +
+ Note +

This function will create up to 7 files, depending on the SpatialData object and the arguments:

+
    +
  • +

    experiment.xenium contains some experiment metadata. Double-click on this file to open the Xenium Explorer. This file can also be created with write_metadata.

    +
  • +
  • +

    morphology.ome.tif is the primary image. This file can also be created with write_image. Add more images with align.

    +
  • +
  • +

    analysis.zarr.zip contains the cells categories (or clusters), i.e. adata.obs. This file can also be created with write_cell_categories.

    +
  • +
  • +

    cell_feature_matrix.zarr.zip contains the cell-by-gene counts. This file can also be created with write_gene_counts.

    +
  • +
  • +

    cells.zarr.zip contains the cells polygon boundaries. This file can also be created with write_polygons.

    +
  • +
  • +

    transcripts.zarr.zip contains transcripts locations. This file can also be created with write_transcripts.

    +
  • +
  • +

    adata.h5ad is the AnnData object from the SpatialData. This is not used by the Explorer, but only saved for convenience.

    +
  • +
+
+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
path + str + +
+

Path to the directory where files will be saved.

+
+
+ required +
sdata + SpatialData + +
+

SpatialData object.

+
+
+ required +
image_key + str | None + +
+

Name of the image of interest (key of sdata.images).

+
+
+ None +
shapes_key + str | None + +
+

Name of the cell shapes (key of sdata.shapes).

+
+
+ None +
points_key + str | None + +
+

Name of the transcripts (key of sdata.points).

+
+
+ None +
gene_column + str | None + +
+

Column name of the points dataframe containing the gene names.

+
+
+ None +
pixel_size + float + +
+

Number of microns in a pixel. Invalid value can lead to inconsistent scales in the Explorer.

+
+
+ 0.2125 +
layer + str | None + +
+

Layer of sdata.table where the gene counts are saved. If None, uses sdata.table.X.

+
+
+ None +
polygon_max_vertices + int + +
+

Maximum number of vertices for the cell polygons.

+
+
+ 13 +
lazy + bool + +
+

If True, will not load the full images in memory (except if the image memory is below ram_threshold_gb).

+
+
+ True +
ram_threshold_gb + int | None + +
+

Threshold (in gygabytes) from which image can be loaded in memory. If None, the image is never loaded in memory.

+
+
+ 4 +
mode + str + +
+

string that indicated which files should be created. "-ib" means everything except images and boundaries, while "+tocm" means only transcripts/observations/counts/metadata (each letter corresponds to one explorer file). By default, keeps everything.

+
+
+ None +
save_h5ad + bool + +
+

Whether to save the adata as h5ad in the explorer directory (for convenience only, since h5ad is faster to open than the original .zarr table)

+
+
+ False +
+ +
+ Source code in sopa/io/explorer/converter.py +
def write(
+    path: str,
+    sdata: SpatialData,
+    image_key: str | None = None,
+    shapes_key: str | None = None,
+    points_key: str | None = None,
+    gene_column: str | None = None,
+    pixel_size: float = 0.2125,
+    layer: str | None = None,
+    polygon_max_vertices: int = 13,
+    lazy: bool = True,
+    ram_threshold_gb: int | None = 4,
+    mode: str = None,
+    save_h5ad: bool = False,
+) -> None:
+    """
+    Transform a SpatialData object into inputs for the Xenium Explorer.
+    After running this function, double-click on the `experiment.xenium` file to open it.
+
+    !!! note "Software download"
+        Make sure you have the latest version of the [Xenium Explorer](https://www.10xgenomics.com/support/software/xenium-explorer)
+
+    Note:
+        This function will create up to 7 files, depending on the `SpatialData` object and the arguments:
+
+        - `experiment.xenium` contains some experiment metadata. Double-click on this file to open the Xenium Explorer. This file can also be created with [`write_metadata`](./#sopa.io.explorer.write_metadata).
+
+        - `morphology.ome.tif` is the primary image. This file can also be created with [`write_image`](./#sopa.io.explorer.write_image). Add more images with `align`.
+
+        - `analysis.zarr.zip` contains the cells categories (or clusters), i.e. `adata.obs`. This file can also be created with [`write_cell_categories`](./#sopa.io.explorer.write_cell_categories).
+
+        - `cell_feature_matrix.zarr.zip` contains the cell-by-gene counts. This file can also be created with [`write_gene_counts`](./#sopa.io.explorer.write_gene_counts).
+
+        - `cells.zarr.zip` contains the cells polygon boundaries. This file can also be created with [`write_polygons`](./#sopa.io.explorer.write_polygons).
+
+        - `transcripts.zarr.zip` contains transcripts locations. This file can also be created with [`write_transcripts`](./#sopa.io.explorer.write_transcripts).
+
+        - `adata.h5ad` is the `AnnData` object from the `SpatialData`. This is **not** used by the Explorer, but only saved for convenience.
+
+    Args:
+        path: Path to the directory where files will be saved.
+        sdata: SpatialData object.
+        image_key: Name of the image of interest (key of `sdata.images`).
+        shapes_key: Name of the cell shapes (key of `sdata.shapes`).
+        points_key: Name of the transcripts (key of `sdata.points`).
+        gene_column: Column name of the points dataframe containing the gene names.
+        pixel_size: Number of microns in a pixel. Invalid value can lead to inconsistent scales in the Explorer.
+        layer: Layer of `sdata.table` where the gene counts are saved. If `None`, uses `sdata.table.X`.
+        polygon_max_vertices: Maximum number of vertices for the cell polygons.
+        lazy: If `True`, will not load the full images in memory (except if the image memory is below `ram_threshold_gb`).
+        ram_threshold_gb: Threshold (in gygabytes) from which image can be loaded in memory. If `None`, the image is never loaded in memory.
+        mode: string that indicated which files should be created. "-ib" means everything except images and boundaries, while "+tocm" means only transcripts/observations/counts/metadata (each letter corresponds to one explorer file). By default, keeps everything.
+        save_h5ad: Whether to save the adata as h5ad in the explorer directory (for convenience only, since h5ad is faster to open than the original .zarr table)
+    """
+    path: Path = Path(path)
+    _check_explorer_directory(path)
+
+    image_key, image = get_spatial_image(sdata, image_key, return_key=True)
+
+    ### Saving cell categories and gene counts
+    if sdata.table is not None:
+        adata = sdata.table
+
+        shapes_key = adata.uns["spatialdata_attrs"]["region"]
+        geo_df = sdata[shapes_key]
+
+        if _should_save(mode, "c"):
+            write_gene_counts(path, adata, layer=layer)
+        if _should_save(mode, "o"):
+            write_cell_categories(path, adata)
+
+    ### Saving cell boundaries
+    if shapes_key is None:
+        shapes_key, geo_df = get_boundaries(sdata, return_key=True, warn=True)
+    else:
+        geo_df = sdata[shapes_key]
+
+    if _should_save(mode, "b") and geo_df is not None:
+        geo_df = to_intrinsic(sdata, geo_df, image_key)
+
+        if sdata.table is not None:
+            geo_df = geo_df.loc[adata.obs[adata.uns["spatialdata_attrs"]["instance_key"]]]
+
+        write_polygons(path, geo_df.geometry, polygon_max_vertices, pixel_size=pixel_size)
+
+    ### Saving transcripts
+    df = get_element(sdata, "points", points_key)
+
+    if _should_save(mode, "t") and df is not None:
+        if gene_column is not None:
+            df = to_intrinsic(sdata, df, image_key)
+            write_transcripts(path, df, gene_column, pixel_size=pixel_size)
+        else:
+            log.warn("The argument 'gene_column' has to be provided to save the transcripts")
+
+    ### Saving image
+    if _should_save(mode, "i"):
+        write_image(
+            path, image, lazy=lazy, ram_threshold_gb=ram_threshold_gb, pixel_size=pixel_size
+        )
+
+    ### Saving experiment.xenium file
+    if _should_save(mode, "m"):
+        write_metadata(path, image_key, shapes_key, _get_n_obs(sdata, geo_df), pixel_size)
+
+    if save_h5ad:
+        sdata.table.write_h5ad(path / FileNames.H5AD)
+
+    log.info(f"Saved files in the following directory: {path}")
+    log.info(f"You can open the experiment with 'open {path / FileNames.METADATA}'")
+
+
+
+ +
+ + +
+ + + +

+ sopa.io.explorer.write_image(path, image, lazy=True, tile_width=1024, n_subscales=5, pixel_size=0.2125, ram_threshold_gb=4, is_dir=True) + +

+ + +
+ +

Convert an image into a morphology.ome.tif file that can be read by the Xenium Explorer

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
path + str + +
+

Path to the Xenium Explorer directory where the image will be written

+
+
+ required +
image + SpatialImage | ndarray + +
+

Image of shape (C, Y, X)

+
+
+ required +
lazy + bool + +
+

If False, the image will not be read in-memory (except if the image size is below ram_threshold_gb). If True, all the images levels are always loaded in-memory.

+
+
+ True +
tile_width + int + +
+

Xenium tile width (do not update).

+
+
+ 1024 +
n_subscales + int + +
+

Number of sub-scales in the pyramidal image.

+
+
+ 5 +
pixel_size + float + +
+

Xenium pixel size (do not update).

+
+
+ 0.2125 +
ram_threshold_gb + int | None + +
+

If an image (of any level of the pyramid) is below this threshold, it will be loaded in-memory.

+
+
+ 4 +
is_dir + bool + +
+

If False, then path is a path to a single file, not to the Xenium Explorer directory.

+
+
+ True +
+ +
+ Source code in sopa/io/explorer/images.py +
def write_image(
+    path: str,
+    image: SpatialImage | np.ndarray,
+    lazy: bool = True,
+    tile_width: int = 1024,
+    n_subscales: int = 5,
+    pixel_size: float = 0.2125,
+    ram_threshold_gb: int | None = 4,
+    is_dir: bool = True,
+):
+    """Convert an image into a `morphology.ome.tif` file that can be read by the Xenium Explorer
+
+    Args:
+        path: Path to the Xenium Explorer directory where the image will be written
+        image: Image of shape `(C, Y, X)`
+        lazy: If `False`, the image will not be read in-memory (except if the image size is below `ram_threshold_gb`). If `True`, all the images levels are always loaded in-memory.
+        tile_width: Xenium tile width (do not update).
+        n_subscales: Number of sub-scales in the pyramidal image.
+        pixel_size: Xenium pixel size (do not update).
+        ram_threshold_gb: If an image (of any level of the pyramid) is below this threshold, it will be loaded in-memory.
+        is_dir: If `False`, then `path` is a path to a single file, not to the Xenium Explorer directory.
+    """
+    path = explorer_file_path(path, FileNames.IMAGE, is_dir)
+
+    if isinstance(image, np.ndarray):
+        assert len(image.shape) == 3, "Can only write channels with shape (C,Y,X)"
+        log.info(f"Converting image of shape {image.shape} into a SpatialImage (with dims: C,Y,X)")
+        image = SpatialImage(image, dims=["c", "y", "x"], name="image")
+
+    image: MultiscaleSpatialImage = to_multiscale(image, [2] * n_subscales)
+
+    image_writer = MultiscaleImageWriter(image, pixel_size=pixel_size, tile_width=tile_width)
+    image_writer.write(path, lazy=lazy, ram_threshold_gb=ram_threshold_gb)
+
+
+
+ +
+ + +
+ + + +

+ sopa.io.explorer.write_cell_categories(path, adata, is_dir=True) + +

+ + +
+ +

Write a analysis.zarr.zip file containing the cell categories/clusters (i.e., from adata.obs)

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
path + str + +
+

Path to the Xenium Explorer directory where the cell-categories file will be written

+
+
+ required +
adata + AnnData + +
+

An AnnData object

+
+
+ required +
is_dir + bool + +
+

If False, then path is a path to a single file, not to the Xenium Explorer directory.

+
+
+ True +
+ +
+ Source code in sopa/io/explorer/table.py +
def write_cell_categories(path: str, adata: AnnData, is_dir: bool = True) -> None:
+    """Write a `analysis.zarr.zip` file containing the cell categories/clusters (i.e., from `adata.obs`)
+
+    Args:
+        path: Path to the Xenium Explorer directory where the cell-categories file will be written
+        adata: An `AnnData` object
+        is_dir: If `False`, then `path` is a path to a single file, not to the Xenium Explorer directory.
+    """
+    path = explorer_file_path(path, FileNames.CELL_CATEGORIES, is_dir)
+
+    adata.strings_to_categoricals()
+    cat_columns = [name for name, cat in adata.obs.dtypes.items() if cat == "category"]
+
+    log.info(f"Writing {len(cat_columns)} cell categories: {', '.join(cat_columns)}")
+
+    ATTRS = cell_categories_attrs()
+    ATTRS["number_groupings"] = len(cat_columns)
+
+    with zarr.ZipStore(path, mode="w") as store:
+        g = zarr.group(store=store)
+        cell_groups = g.create_group("cell_groups")
+
+        for i, name in enumerate(cat_columns):
+            if adata.obs[name].isna().any():
+                NA = "NA"
+                log.warn(f"Column {name} has nan values. They will be displayed as '{NA}'")
+                adata.obs[name] = adata.obs[name].cat.add_categories(NA).fillna(NA)
+
+            categories = list(adata.obs[name].cat.categories)
+            ATTRS["grouping_names"].append(name)
+            ATTRS["group_names"].append(categories)
+
+            _write_categorical_column(cell_groups, i, adata.obs[name], categories)
+
+        cell_groups.attrs.put(ATTRS)
+
+
+
+ +
+ + +
+ + + +

+ sopa.io.explorer.write_transcripts(path, df, gene='gene', max_levels=15, is_dir=True, pixel_size=0.2125) + +

+ + +
+ +

Write a transcripts.zarr.zip file containing pyramidal transcript locations

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
path + Path + +
+

Path to the Xenium Explorer directory where the transcript file will be written

+
+
+ required +
df + DataFrame + +
+

DataFrame representing the transcripts, with "x", "y" column required, as well as the gene column (see the corresponding argument)

+
+
+ required +
gene + str + +
+

Column of df containing the genes names.

+
+
+ 'gene' +
max_levels + int + +
+

Maximum number of levels in the pyramid.

+
+
+ 15 +
is_dir + bool + +
+

If False, then path is a path to a single file, not to the Xenium Explorer directory.

+
+
+ True +
pixel_size + float + +
+

Number of microns in a pixel. Invalid value can lead to inconsistent scales in the Explorer.

+
+
+ 0.2125 +
+ +
+ Source code in sopa/io/explorer/points.py +
def write_transcripts(
+    path: Path,
+    df: dd.DataFrame,
+    gene: str = "gene",
+    max_levels: int = 15,
+    is_dir: bool = True,
+    pixel_size: float = 0.2125,
+):
+    """Write a `transcripts.zarr.zip` file containing pyramidal transcript locations
+
+    Args:
+        path: Path to the Xenium Explorer directory where the transcript file will be written
+        df: DataFrame representing the transcripts, with `"x"`, `"y"` column required, as well as the `gene` column (see the corresponding argument)
+        gene: Column of `df` containing the genes names.
+        max_levels: Maximum number of levels in the pyramid.
+        is_dir: If `False`, then `path` is a path to a single file, not to the Xenium Explorer directory.
+        pixel_size: Number of microns in a pixel. Invalid value can lead to inconsistent scales in the Explorer.
+    """
+    path = explorer_file_path(path, FileNames.POINTS, is_dir)
+
+    # TODO: make everything using dask instead of pandas
+    df = df.compute()
+
+    num_transcripts = len(df)
+    grid_size = ExplorerConstants.GRID_SIZE / ExplorerConstants.PIXELS_TO_MICRONS * pixel_size
+    df[gene] = df[gene].astype("category")
+
+    location = df[["x", "y"]]
+    location *= pixel_size
+    location = np.concatenate([location, np.zeros((num_transcripts, 1))], axis=1)
+
+    if location.min() < 0:
+        log.warn("Some transcripts are located outside of the image (pixels < 0)")
+    log.info(f"Writing {len(df)} transcripts")
+
+    xmax, ymax = location[:, :2].max(axis=0)
+
+    gene_names = list(df[gene].cat.categories)
+    num_genes = len(gene_names)
+
+    codeword_gene_mapping = list(range(num_genes))
+
+    valid = np.ones((num_transcripts, 1))
+    uuid = np.stack([np.arange(num_transcripts), np.full(num_transcripts, 65535)], axis=1)
+    transcript_id = np.stack([np.arange(num_transcripts), np.full(num_transcripts, 65535)], axis=1)
+    gene_identity = df[gene].cat.codes.values[:, None]
+    codeword_identity = np.stack([gene_identity[:, 0], np.full(num_transcripts, 65535)], axis=1)
+    status = np.zeros((num_transcripts, 1))
+    quality_score = np.full((num_transcripts, 1), ExplorerConstants.QUALITY_SCORE)
+
+    ATTRS = {
+        "codeword_count": num_genes,
+        "codeword_gene_mapping": codeword_gene_mapping,
+        "codeword_gene_names": gene_names,
+        "gene_names": gene_names,
+        "gene_index_map": {name: index for name, index in zip(gene_names, codeword_gene_mapping)},
+        "number_genes": num_genes,
+        "spatial_units": "micron",
+        "coordinate_space": "refined-final_global_micron",
+        "major_version": 4,
+        "minor_version": 1,
+        "name": "RnaDataset",
+        "number_rnas": num_transcripts,
+        "dataset_uuid": "unique-id-test",
+        "data_format": 0,
+    }
+
+    GRIDS_ATTRS = {
+        "grid_key_names": ["grid_x_loc", "grid_y_loc"],
+        "grid_zip": False,
+        "grid_size": [grid_size],
+        "grid_array_shapes": [],
+        "grid_number_objects": [],
+        "grid_keys": [],
+    }
+
+    with zarr.ZipStore(path, mode="w") as store:
+        g = zarr.group(store=store)
+        g.attrs.put(ATTRS)
+
+        grids = g.create_group("grids")
+
+        for level in range(max_levels):
+            log.info(f"   > Level {level}: {len(location)} transcripts")
+            level_group = grids.create_group(level)
+
+            tile_size = grid_size * 2**level
+
+            indices = np.floor(location[:, :2] / tile_size).clip(0).astype(int)
+            tiles_str_indices = np.array([f"{tx},{ty}" for (tx, ty) in indices])
+
+            GRIDS_ATTRS["grid_array_shapes"].append([])
+            GRIDS_ATTRS["grid_number_objects"].append([])
+            GRIDS_ATTRS["grid_keys"].append([])
+
+            n_tiles_x, n_tiles_y = ceil(xmax / tile_size), ceil(ymax / tile_size)
+
+            for tx in range(n_tiles_x):
+                for ty in range(n_tiles_y):
+                    str_index = f"{tx},{ty}"
+                    loc = np.where(tiles_str_indices == str_index)[0]
+
+                    n_points_tile = len(loc)
+                    chunks = (n_points_tile, 1)
+
+                    if n_points_tile == 0:
+                        continue
+
+                    GRIDS_ATTRS["grid_array_shapes"][-1].append({})
+                    GRIDS_ATTRS["grid_keys"][-1].append(str_index)
+                    GRIDS_ATTRS["grid_number_objects"][-1].append(n_points_tile)
+
+                    tile_group = level_group.create_group(str_index)
+                    tile_group.array(
+                        "valid",
+                        valid[loc],
+                        dtype="uint8",
+                        chunks=chunks,
+                    )
+                    tile_group.array(
+                        "status",
+                        status[loc],
+                        dtype="uint8",
+                        chunks=chunks,
+                    )
+                    tile_group.array(
+                        "location",
+                        location[loc],
+                        dtype="float32",
+                        chunks=chunks,
+                    )
+                    tile_group.array(
+                        "gene_identity",
+                        gene_identity[loc],
+                        dtype="uint16",
+                        chunks=chunks,
+                    )
+                    tile_group.array(
+                        "quality_score",
+                        quality_score[loc],
+                        dtype="float32",
+                        chunks=chunks,
+                    )
+                    tile_group.array(
+                        "codeword_identity",
+                        codeword_identity[loc],
+                        dtype="uint16",
+                        chunks=chunks,
+                    )
+                    tile_group.array(
+                        "uuid",
+                        uuid[loc],
+                        dtype="uint32",
+                        chunks=chunks,
+                    )
+                    tile_group.array(
+                        "id",
+                        transcript_id[loc],
+                        dtype="uint32",
+                        chunks=chunks,
+                    )
+
+            if n_tiles_x * n_tiles_y == 1:
+                GRIDS_ATTRS["number_levels"] = level + 1
+                break
+
+            sub_indices = subsample_indices(len(location))
+
+            location = location[sub_indices]
+            valid = valid[sub_indices]
+            status = status[sub_indices]
+            gene_identity = gene_identity[sub_indices]
+            quality_score = quality_score[sub_indices]
+            codeword_identity = codeword_identity[sub_indices]
+            uuid = uuid[sub_indices]
+            transcript_id = transcript_id[sub_indices]
+
+        grids.attrs.put(GRIDS_ATTRS)
+
+
+
+ +
+ + +
+ + + +

+ sopa.io.explorer.write_gene_counts(path, adata, layer=None, is_dir=True) + +

+ + +
+ +

Write a cell_feature_matrix.zarr.zip file containing the cell-by-gene transcript counts (i.e., from adata.X)

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
path + str + +
+

Path to the Xenium Explorer directory where the cell-by-gene file will be written

+
+
+ required +
adata + AnnData + +
+

An AnnData object. Note that adata.X has to be a sparse matrix (and contain the raw counts), else use the layer argument.

+
+
+ required +
layer + str | None + +
+

If not None, adata.layers[layer] should be sparse (and contain the raw counts).

+
+
+ None +
is_dir + bool + +
+

If False, then path is a path to a single file, not to the Xenium Explorer directory.

+
+
+ True +
+ +
+ Source code in sopa/io/explorer/table.py +
15
+16
+17
+18
+19
+20
+21
+22
+23
+24
+25
+26
+27
+28
+29
+30
+31
+32
+33
+34
+35
+36
+37
+38
+39
+40
+41
+42
+43
+44
+45
+46
+47
+48
+49
+50
+51
+52
+53
+54
+55
+56
+57
+58
+59
+60
+61
+62
+63
+64
+65
def write_gene_counts(
+    path: str, adata: AnnData, layer: str | None = None, is_dir: bool = True
+) -> None:
+    """Write a `cell_feature_matrix.zarr.zip` file containing the cell-by-gene transcript counts (i.e., from `adata.X`)
+
+    Args:
+        path: Path to the Xenium Explorer directory where the cell-by-gene file will be written
+        adata: An `AnnData` object. Note that `adata.X` has to be a sparse matrix (and contain the raw counts), else use the `layer` argument.
+        layer: If not `None`, `adata.layers[layer]` should be sparse (and contain the raw counts).
+        is_dir: If `False`, then `path` is a path to a single file, not to the Xenium Explorer directory.
+    """
+    path = explorer_file_path(path, FileNames.TABLE, is_dir)
+
+    log.info(f"Writing table with {adata.n_vars} columns")
+    counts = adata.X if layer is None else adata.layers[layer]
+    counts = csr_matrix(counts.T)
+
+    feature_keys = list(adata.var_names) + ["Total transcripts"]
+    feature_ids = feature_keys
+    feature_types = ["gene"] * len(adata.var_names) + ["aggregate_gene"]
+
+    ATTRS = {
+        "major_version": 3,
+        "minor_version": 0,
+        "number_cells": adata.n_obs,
+        "number_features": adata.n_vars + 1,
+        "feature_keys": feature_keys,
+        "feature_ids": feature_ids,
+        "feature_types": feature_types,
+    }
+
+    total_counts = counts.sum(1).A1
+    loc = total_counts > 0
+
+    data = np.concatenate([counts.data, total_counts[loc]])
+    indices = np.concatenate([counts.indices, np.where(loc)[0]])
+    indptr = counts.indptr
+    indptr = np.append(indptr, indptr[-1] + loc.sum())
+
+    cell_id = np.ones((adata.n_obs, 2))
+    cell_id[:, 0] = np.arange(adata.n_obs)
+
+    with zarr.ZipStore(path, mode="w") as store:
+        g = zarr.group(store=store)
+        cells_group = g.create_group("cell_features")
+        cells_group.attrs.put(ATTRS)
+
+        cells_group.array("cell_id", cell_id, dtype="uint32", chunks=cell_id.shape)
+        cells_group.array("data", data, dtype="uint32", chunks=data.shape)
+        cells_group.array("indices", indices, dtype="uint32", chunks=indices.shape)
+        cells_group.array("indptr", indptr, dtype="uint32", chunks=indptr.shape)
+
+
+
+ +
+ + +
+ + + +

+ sopa.io.explorer.write_polygons(path, polygons, max_vertices, is_dir=True, pixel_size=0.2125) + +

+ + +
+ +

Write a cells.zarr.zip file containing the cell polygonal boundaries

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
path + Path + +
+

Path to the Xenium Explorer directory where the transcript file will be written

+
+
+ required +
polygons + Iterable[Polygon] + +
+

A list of shapely polygons to be written

+
+
+ required +
max_vertices + int + +
+

The number of vertices per polygon (they will be transformed to have the right number of vertices)

+
+
+ required +
is_dir + bool + +
+

If False, then path is a path to a single file, not to the Xenium Explorer directory.

+
+
+ True +
pixel_size + float + +
+

Number of microns in a pixel. Invalid value can lead to inconsistent scales in the Explorer.

+
+
+ 0.2125 +
+ +
+ Source code in sopa/io/explorer/shapes.py +
def write_polygons(
+    path: Path,
+    polygons: Iterable[Polygon],
+    max_vertices: int,
+    is_dir: bool = True,
+    pixel_size: float = 0.2125,
+) -> None:
+    """Write a `cells.zarr.zip` file containing the cell polygonal boundaries
+
+    Args:
+        path: Path to the Xenium Explorer directory where the transcript file will be written
+        polygons: A list of `shapely` polygons to be written
+        max_vertices: The number of vertices per polygon (they will be transformed to have the right number of vertices)
+        is_dir: If `False`, then `path` is a path to a single file, not to the Xenium Explorer directory.
+        pixel_size: Number of microns in a pixel. Invalid value can lead to inconsistent scales in the Explorer.
+    """
+    path = explorer_file_path(path, FileNames.SHAPES, is_dir)
+
+    log.info(f"Writing {len(polygons)} cell polygons")
+    coordinates = np.stack([pad_polygon(p, max_vertices) for p in polygons])
+    coordinates *= pixel_size
+
+    num_cells = len(coordinates)
+    cells_fourth = ceil(num_cells / 4)
+    cells_half = ceil(num_cells / 2)
+
+    GROUP_ATTRS = group_attrs()
+    GROUP_ATTRS["number_cells"] = num_cells
+
+    polygon_vertices = np.stack([coordinates, coordinates])
+    num_points = polygon_vertices.shape[2]
+    n_vertices = num_points // 2
+
+    with zarr.ZipStore(path, mode="w") as store:
+        g = zarr.group(store=store)
+        g.attrs.put(GROUP_ATTRS)
+
+        g.array(
+            "polygon_vertices",
+            polygon_vertices,
+            dtype="float32",
+            chunks=(1, cells_fourth, ceil(num_points / 4)),
+        )
+
+        cell_id = np.ones((num_cells, 2))
+        cell_id[:, 0] = np.arange(num_cells)
+        g.array("cell_id", cell_id, dtype="uint32", chunks=(cells_half, 1))
+
+        cell_summary = np.zeros((num_cells, 7))
+        cell_summary[:, 2] = [p.area for p in polygons]
+        g.array(
+            "cell_summary",
+            cell_summary,
+            dtype="float64",
+            chunks=(num_cells, 1),
+        )
+        g["cell_summary"].attrs.put(cell_summary_attrs())
+
+        g.array(
+            "polygon_num_vertices",
+            np.full((2, num_cells), n_vertices),
+            dtype="int32",
+            chunks=(1, cells_half),
+        )
+
+        g.array(
+            "seg_mask_value",
+            np.arange(num_cells),
+            dtype="uint32",
+            chunks=(cells_half,),
+        )
+
+
+
+ +
+ + +
+ + + +

+ sopa.io.explorer.write_metadata(path, image_key='NA', shapes_key='NA', n_obs=0, is_dir=True, pixel_size=0.2125) + +

+ + +
+ +

Create an experiment.xenium file that can be opened by the Xenium Explorer.

+ +
+ Note +

This function alone is not enough to actually open an experiment. You will need at least to run write_image, or create all the outputs with write.

+
+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
path + str + +
+

Path to the Xenium Explorer directory where the metadata file will be written

+
+
+ required +
image_key + str + +
+

Key of SpatialData object containing the primary image used on the explorer.

+
+
+ 'NA' +
shapes_key + str + +
+

Key of SpatialData object containing the boundaries shown on the explorer.

+
+
+ 'NA' +
n_obs + int + +
+

Number of cells

+
+
+ 0 +
is_dir + bool + +
+

If False, then path is a path to a single file, not to the Xenium Explorer directory.

+
+
+ True +
pixel_size + float + +
+

Number of microns in a pixel. Invalid value can lead to inconsistent scales in the Explorer.

+
+
+ 0.2125 +
+ +
+ Source code in sopa/io/explorer/converter.py +
def write_metadata(
+    path: str,
+    image_key: str = "NA",
+    shapes_key: str = "NA",
+    n_obs: int = 0,
+    is_dir: bool = True,
+    pixel_size: float = 0.2125,
+):
+    """Create an `experiment.xenium` file that can be open by the Xenium Explorer.
+
+    Note:
+        This function alone is not enough to actually open an experiment. You will need at least to run `write_image`, or create all the outputs with `write`.
+
+    Args:
+        path: Path to the Xenium Explorer directory where the metadata file will be written
+        image_key: Key of `SpatialData` object containing the primary image used on the explorer.
+        shapes_key: Key of `SpatialData` object containing the boundaries shown on the explorer.
+        n_obs: Number of cells
+        is_dir: If `False`, then `path` is a path to a single file, not to the Xenium Explorer directory.
+        pixel_size: Number of microns in a pixel. Invalid value can lead to inconsistent scales in the Explorer.
+    """
+    path = explorer_file_path(path, FileNames.METADATA, is_dir)
+
+    with open(path, "w") as f:
+        metadata = experiment_dict(image_key, shapes_key, n_obs, pixel_size)
+        json.dump(metadata, f, indent=4)
+
+
+
+ +
+ + +
+ + + +

+ sopa.io.explorer.int_cell_id(explorer_cell_id) + +

+ + +
+ +

Transforms an alphabetical cell id from the Xenium Explorer to an integer ID

+

E.g., int_cell_id('aaaachba-1') = 10000

+ +
+ Source code in sopa/io/explorer/utils.py +
13
+14
+15
+16
+17
+18
+19
def int_cell_id(explorer_cell_id: str) -> int:
+    """Transforms an alphabetical cell id from the Xenium Explorer to an integer ID
+
+    E.g., int_cell_id('aaaachba-1') = 10000"""
+    code = explorer_cell_id[:-2] if explorer_cell_id[-2] == "-" else explorer_cell_id
+    coefs = [ord(c) - 97 for c in code][::-1]
+    return sum(value * 16**i for i, value in enumerate(coefs))
+
+
+
+ +
+ + +
+ + + +

+ sopa.io.explorer.str_cell_id(cell_id) + +

+ + +
+ +

Transforms an integer cell ID into an Xenium Explorer alphabetical cell id

+

E.g., str_cell_id(10000) = 'aaaachba-1'

+ +
+ Source code in sopa/io/explorer/utils.py +
22
+23
+24
+25
+26
+27
+28
+29
+30
def str_cell_id(cell_id: int) -> str:
+    """Transforms an integer cell ID into an Xenium Explorer alphabetical cell id
+
+    E.g., str_cell_id(10000) = 'aaaachba-1'"""
+    coefs = []
+    for _ in range(8):
+        cell_id, coef = divmod(cell_id, 16)
+        coefs.append(coef)
+    return "".join([chr(97 + coef) for coef in coefs][::-1]) + "-1"
+
+
+
+ +
+ + +
+ + + +

+ sopa.io.explorer.align(sdata, image, transformation_matrix_path, image_key=None, image_models_kwargs=None, overwrite=False) + +

+ + +
+ +

Add an image to the SpatialData object after alignment with the Xenium Explorer.

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
sdata + SpatialData + +
+

A SpatialData object

+
+
+ required +
image + SpatialImage + +
+

A SpatialImage object. Note that image.name is used as the key for the aligned image.

+
+
+ required +
transformation_matrix_path + str + +
+

Path to the .csv transformation matrix exported from the Xenium Explorer

+
+
+ required +
image_key + str + +
+

Optional name of the image on which it has been aligned. Required if multiple images in the SpatialData object.

+
+
+ None +
image_models_kwargs + dict | None + +
+

Kwargs to the Image2DModel model.

+
+
+ None +
overwrite + bool + +
+

Whether to overwrite the image, if already existing.

+
+
+ False +
+ +
+ Source code in sopa/io/explorer/images.py +
def align(
+    sdata: SpatialData,
+    image: SpatialImage,
+    transformation_matrix_path: str,
+    image_key: str = None,
+    image_models_kwargs: dict | None = None,
+    overwrite: bool = False,
+):
+    """Add an image to the `SpatialData` object after alignment with the Xenium Explorer.
+
+    Args:
+        sdata: A `SpatialData` object
+        image: A `SpatialImage` object. Note that `image.name` is used as the key for the aligned image.
+        transformation_matrix_path: Path to the `.csv` transformation matrix exported from the Xenium Explorer
+        image_key: Optional name of the image on which it has been aligned. Required if multiple images in the `SpatialData` object.
+        image_models_kwargs: Kwargs to the `Image2DModel` model.
+        overwrite: Whether to overwrite the image, if already existing.
+    """
+    image_models_kwargs = _default_image_models_kwargs(image_models_kwargs)
+
+    to_pixel = Affine(
+        np.genfromtxt(transformation_matrix_path, delimiter=","),
+        input_axes=("x", "y"),
+        output_axes=("x", "y"),
+    )
+
+    default_image = get_spatial_image(sdata, image_key)
+    pixel_cs = get_intrinsic_cs(sdata, default_image)
+
+    image = Image2DModel.parse(
+        image,
+        dims=("c", "y", "x"),
+        transformations={pixel_cs: to_pixel},
+        c_coords=image.coords["c"].values,
+        **image_models_kwargs,
+    )
+
+    log.info(f"Adding image {image.name}:\n{image}")
+    sdata.add_image(image.name, image, overwrite=overwrite)
+
+
+
+ +
+ + +
+ + + +

+ sopa.io.explorer.save_column_csv(path, adata, key) + +

+ + +
+ +

Save one column of the AnnData object as a CSV that can be opened interactively in the explorer, under the "cell" panel.

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
path + str + +
+

Path where to write the CSV that will be opened in the Xenium Explorer

+
+
+ required +
adata + AnnData + +
+

An AnnData object

+
+
+ required +
key + str + +
+

Key of adata.obs containing the column to convert

+
+
+ required +
+ +
+ Source code in sopa/io/explorer/table.py +
def save_column_csv(path: str, adata: AnnData, key: str):
+    """Save one column of the AnnData object as a CSV that can be open interactively in the explorer, under the "cell" panel.
+
+    Args:
+        path: Path where to write the CSV that will be opened in the Xenium Explorer
+        adata: An `AnnData` object
+        key: Key of `adata.obs` containing the column to convert
+    """
+    df = pd.DataFrame({"cell_id": adata.obs_names, "group": adata.obs[key].values})
+    df.to_csv(path, index=None)
+
+
+
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/api/io.explorer/io.explorer.md b/api/io.explorer/io.explorer.md new file mode 100644 index 00000000..b5343497 --- /dev/null +++ b/api/io.explorer/io.explorer.md @@ -0,0 +1,43 @@ +::: sopa.io.explorer.write + options: + show_root_heading: true + +::: sopa.io.explorer.write_image + options: + show_root_heading: true + +::: sopa.io.explorer.write_cell_categories + options: + show_root_heading: true + +::: sopa.io.explorer.write_transcripts + options: + show_root_heading: true + +::: sopa.io.explorer.write_gene_counts + options: + show_root_heading: true + +::: sopa.io.explorer.write_polygons + options: + show_root_heading: true + +::: sopa.io.explorer.write_metadata + options: + show_root_heading: true + +::: sopa.io.explorer.int_cell_id + options: + show_root_heading: true + +::: sopa.io.explorer.str_cell_id + options: + show_root_heading: true + +::: sopa.io.explorer.align + options: + show_root_heading: true + +::: sopa.io.explorer.save_column_csv + options: + show_root_heading: true diff --git a/api/io.report/index.html b/api/io.report/index.html new file mode 100644 index 00000000..b357303f --- /dev/null +++ b/api/io.report/index.html @@ -0,0 +1,2621 @@ + + + + + + + + + + + + + + + + + + + + + + + sopa.io.report - Sopa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + +

sopa.io.report

+ +
+ + + +

+ sopa.io.report.write_report(path, sdata) + +

+ + +
+ +

Create an HTML report (or web report) after running Sopa.

+ +
+ Note +

This report is automatically generated based on a custom python-to-html engine

+
+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
path + str + +
+

Path to the .html report that has to be created

+
+
+ required +
sdata + SpatialData + +
+

A SpatialData object, after running Sopa

+
+
+ required +
+ +
+ Source code in sopa/io/report/generate.py +
32
+33
+34
+35
+36
+37
+38
+39
+40
+41
+42
+43
+44
+45
def write_report(path: str, sdata: SpatialData):
+    """Create a HTML report (or web report) after running Sopa.
+
+    Note:
+        This report is automatically generated based on a custom python-to-html engine
+
+    Args:
+        path: Path to the `.html` report that has to be created
+        sdata: A `SpatialData` object, after running Sopa
+    """
+    sections = SectionBuilder(sdata).compute_sections()
+
+    log.info(f"Writing report to {path}")
+    Root(sections).write(path)
+
+
+
+ +
+ +
+ + + +

+ sopa.io.report.engine.Renderable + + +

+ + +
+ + +

Object that can be transformed to string representing HTML

+ +
+ Source code in sopa/io/report/engine.py +
12
+13
+14
+15
+16
+17
+18
+19
+20
+21
+22
+23
+24
+25
+26
+27
+28
+29
+30
+31
class Renderable:
+    """Object that can be transformed to string representing HTML"""
+
+    @property
+    def children(self) -> list["Renderable"]:
+        if hasattr(self, "_children") and self._children is not None:
+            if isinstance(self._children, list):
+                return self._children
+            return [self._children]
+        return []
+
+    @property
+    def children_html(self) -> str:
+        return "".join([str(child) for child in self.children])
+
+    def children_rec(self) -> list["Renderable"]:
+        return [self] + [cc for child in self.children for cc in child.children_rec()]
+
+    def __str__(self) -> str:
+        return self.children_html
+
+
+ + + +
+ + + + + + + + + + + +
+ +
+ + +
+ +
+ + + +

+ sopa.io.report.engine.Title + + +

+ + +
+

+ Bases: Renderable

+ + +

Report title

+ +
+ Source code in sopa/io/report/engine.py +
34
+35
+36
+37
+38
+39
+40
+41
+42
+43
class Title(Renderable):
+    """Report title"""
+
+    def __init__(self, text: str, level: int, subtitle: bool = False) -> None:
+        self.text = text
+        self.level = level
+        self.subtitle = subtitle
+
+    def __str__(self) -> str:
+        return f"""<h1 class="{'subtitle' if self.subtitle else 'title'} is-{self.level}">{self.text}</h1>"""
+
+
+ + + +
+ + + + + + + + + + + +
+ +
+ + +
+ +
+ + + +

+ sopa.io.report.engine.Paragraph + + +

+ + +
+

+ Bases: Renderable

+ + +

Report paragraph

+ +
+ Source code in sopa/io/report/engine.py +
46
+47
+48
+49
+50
+51
+52
+53
class Paragraph(Renderable):
+    """Report paragraph"""
+
+    def __init__(self, text: str) -> None:
+        self.text = text
+
+    def __str__(self) -> str:
+        return f"""<p>{self.text}</p>"""
+
+
+ + + +
+ + + + + + + + + + + +
+ +
+ + +
+ +
+ + + +

+ sopa.io.report.engine.Message + + +

+ + +
+

+ Bases: Renderable

+ + +

Colored message

+ +
+ Source code in sopa/io/report/engine.py +
56
+57
+58
+59
+60
+61
+62
+63
+64
+65
+66
+67
+68
+69
class Message(Renderable):
+    """Colored message"""
+
+    def __init__(self, text: str, is_light: bool = True, color: str = "primary") -> None:
+        self.text = text
+        self.color = color
+        self.is_light = is_light
+
+    def __str__(self) -> str:
+        return f"""
+        <div class='notification is-{self.color} {'is-light' if self.is_light else ''}'>
+            {self.text}
+        </div>
+        """
+
+
+ + + +
+ + + + + + + + + + + +
+ +
+ + +
+ +
+ + + +

+ sopa.io.report.engine.Block + + +

+ + +
+

+ Bases: Renderable

+ + +

Block, i.e. padded div

+ +
+ Source code in sopa/io/report/engine.py +
72
+73
+74
+75
+76
+77
+78
+79
class Block(Renderable):
+    """Block, i.e. padded div"""
+
+    def __init__(self, content: list[Renderable]) -> None:
+        self._children = content
+
+    def __str__(self) -> str:
+        return f"""<div class="block">{self.children_html}</div>"""
+
+
+ + + +
+ + + + + + + + + + + +
+ +
+ + +
+ +
+ + + +

+ sopa.io.report.engine.CodeBlock + + +

+ + +
+

+ Bases: Renderable

+ + +

Block of code, like in the terminal

+ +
+ Source code in sopa/io/report/engine.py +
82
+83
+84
+85
+86
+87
+88
+89
class CodeBlock(Renderable):
+    """Block of code, like in the terminal"""
+
+    def __init__(self, text: str) -> None:
+        self.text = text
+
+    def __str__(self) -> str:
+        return f"""<pre>{self.text}</pre>"""
+
+
+ + + +
+ + + + + + + + + + + +
+ +
+ + +
+ +
+ + + +

+ sopa.io.report.engine.ProgressBar + + +

+ + +
+

+ Bases: Renderable

+ + +

Progress bar

+ +
+ Source code in sopa/io/report/engine.py +
class ProgressBar(Renderable):
+    """Progress bar"""
+
+    def __init__(
+        self,
+        value: float,
+        valuemax: int = 1,
+        text: Optional[str] = None,
+        color: str = "primary",
+        is_light: bool = False,
+    ) -> None:
+        self.value = value
+        self.valuemax = valuemax
+        self.text = text
+        self.color = color
+        self.is_light = is_light
+
+    def __str__(self) -> str:
+        return f"""
+        {"" if self.text is None else Paragraph(self.text)}
+        <progress class="progress is-{self.color} {'is-light' if self.is_light else ''}"
+            value="{self.value}"
+            max="{self.valuemax}">{self.value}
+        </progress>
+    """
+
+
+ + + +
+ + + + + + + + + + + +
+ +
+ + +
+ +
+ + + +

+ sopa.io.report.engine.Section + + +

+ + +
+

+ Bases: Renderable

+ + +

Section of the report

+ +
+ Source code in sopa/io/report/engine.py +
class Section(Renderable):
+    """Section of the report"""
+
+    def __init__(self, name: str, content: list["Section"] = None) -> None:
+        self.name = name
+        self._children = content
+        self.subtitle = False
+
+    @property
+    def id(self):
+        return self.name.lower().replace(" ", "-")
+
+    def __str__(self) -> str:
+        return f"""
+        <article class="message is-dark" id="{self.id}">
+            <div class="message-header">
+                <p>{self.name}</p>
+            </div>
+            <div class="message-body">
+                {self.children_html}
+            </div>
+        </article>
+        """
+
+
+ + + +
+ + + + + + + + + + + +
+ +
+ + +
+ +
+ + + +

+ sopa.io.report.engine.SubSection + + +

+ + +
+

+ Bases: Section

+ + +

Sub-section of the report

+ +
+ Source code in sopa/io/report/engine.py +
class SubSection(Section):
+    """Sub-section of the report"""
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+    def __str__(self) -> str:
+        return f"""
+        <section id="{self.id}" class="py-2">
+            {Title(self.name, 4, subtitle=True)}
+            {self.children_html}
+        </section>
+        """
+
+
+ + + +
+ + + + + + + + + + + +
+ +
+ + +
+ +
+ + + +

+ sopa.io.report.engine.NavbarItem + + +

+ + +
+

+ Bases: Renderable

+ + +

One item in the nav bar

+ +
+ Source code in sopa/io/report/engine.py +
class NavbarItem(Renderable):
+    """One item in the nav bar"""
+
+    def __init__(self, section: Section) -> None:
+        self.section = section
+
+    def subsections(self):
+        li = [
+            f"<li><a href='#{subsection.id}'>{subsection.name}</a></li>"
+            for subsection in self.section.children
+        ]
+        return "".join(li)
+
+    def __str__(self) -> str:
+        return f"""
+    <p class="menu-label">{self.section.name}</p>
+    <ul class="menu-list">
+        {self.subsections()}
+    </ul>
+    """
+
+
+ + + +
+ + + + + + + + + + + +
+ +
+ + +
+ +
+ + + +

+ sopa.io.report.engine.Navbar + + +

+ + +
+

+ Bases: Renderable

+ + +

Left nav bar

+ +
+ Source code in sopa/io/report/engine.py +
class Navbar(Renderable):
+    """Left nav bar"""
+
+    def __init__(self, sections: list[Section]) -> None:
+        self._children = [NavbarItem(section) for section in sections]
+
+    def __str__(self) -> str:
+        return f"""
+        {Title("Sopa report", 3)}
+        {Message("This report was generated <br />by the <a href='https://github.com/gustaveroussy/sopa'>Sopa</a> HTML engine.")}
+        {self.children_html}
+    """
+
+
+ + + +
+ + + + + + + + + + + +
+ +
+ + +
+ +
+ + + +

+ sopa.io.report.engine.Columns + + +

+ + +
+

+ Bases: Renderable

+ + +

Flex columns containers

+ +
+ Source code in sopa/io/report/engine.py +
class Columns(Renderable):
+    """Flex columns containers"""
+
+    def __init__(self, content: list[Renderable], is_centered: bool = True) -> None:
+        self._children = content
+        self.is_centered = is_centered
+
+    def __str__(self) -> str:
+        return f"""
+    <div block style="display: flex; {'justify-content: center;' if self.is_centered else ''}">
+        {self.children_html}
+    </div>
+    """
+
+
+ + + +
+ + + + + + + + + + + +
+ +
+ + +
+ +
+ + + +

+ sopa.io.report.engine.Image + + +

+ + +
+

+ Bases: Renderable

+ + +

Image renderer

+ +
+ Source code in sopa/io/report/engine.py +
class Image(Renderable):
+    """Image renderer"""
+
+    def __init__(
+        self, fig: Figure, width: float = 50, extension: str = "png", pretty_legend: bool = True
+    ):
+        self.fig = fig
+        self.width = width
+        self.extension = extension
+        self.pretty_legend = pretty_legend
+
+    def make_figure_pretty(self):
+        if self.pretty_legend and _has_handles(self.fig):
+            self.fig.legend(
+                bbox_to_anchor=(1.04, 0.5), loc="center left", borderaxespad=0, frameon=False
+            )
+        sns.despine(fig=self.fig, offset=10, trim=True)
+
+    def encod(self):
+        self.make_figure_pretty()
+        tmpfile = BytesIO()
+        self.fig.savefig(tmpfile, format=self.extension, transparent=True, bbox_inches="tight")
+        plt.close()
+        return base64.b64encode(tmpfile.getvalue()).decode("utf-8")
+
+    def __str__(self) -> str:
+        return f"""<img src=\'data:image/{self.extension};base64,{self.encod()}\'  width="{self.width}%" height="auto"/>"""
+
+
+ + + +
+ + + + + + + + + + + +
+ +
+ + +
+ +
+ + + +

+ sopa.io.report.engine.Root + + +

+ + +
+

+ Bases: Renderable

+ + +

Whole report generator

+ +
+ Source code in sopa/io/report/engine.py +
class Root(Renderable):
+    """Whole report generator"""
+
+    def __init__(self, sections: list[Section], doc_title: str = "Sopa report"):
+        self.doc_title = doc_title
+        self._children = sections
+        self.nav = Navbar(sections)
+
+    def sanity_check(self):
+        section_ids = [section.id for section in self.children]
+        assert len(section_ids) == len(set(section_ids)), "Sections IDs must be unique"
+
+        subsections_ids = [sub.id for section in self.children for sub in section.children]
+        assert len(subsections_ids) == len(set(subsections_ids)), "Subsections IDs must be unique"
+
+    def write(self, path: str) -> None:
+        self.sanity_check()
+
+        with open(path, "w") as f:
+            f.write(str(self))
+
+    def __str__(self) -> str:
+        return f"""
+    <!DOCTYPE html>
+    <html>
+    <head>
+        <meta charset="utf-8" />
+        <meta name="viewport" content="width=device-width, initial-scale=1" />
+        <title>{self.doc_title}</title>
+        <style>
+        {BULMA_CSS}
+        .menu {{
+            position: sticky;
+            flex: 0 0 260px;
+            overflow-y: auto;
+            height: 100vh;
+            top: 0;
+        }}
+        </style>
+    </head>
+    <body>
+        <div class="is-flex is-flex-direction-row">
+            <div class="mt-5 ml-5 menu">
+                {self.nav}
+            </div>
+            <div class="p-5 block" style="flex: 1; overflow: hidden">
+                {self.children_html}
+            </div>
+        </div>
+    </body>
+    </html>
+    """
+
+
+ + + +
+ + + + + + + + + + + +
+ +
+ + +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/api/io.report/io.report.md b/api/io.report/io.report.md new file mode 100644 index 00000000..fdf61a1f --- /dev/null +++ b/api/io.report/io.report.md @@ -0,0 +1,59 @@ +::: sopa.io.report.write_report + options: + show_root_heading: true + +::: sopa.io.report.engine.Renderable + options: + show_root_heading: true + +::: sopa.io.report.engine.Title + options: + show_root_heading: true + +::: sopa.io.report.engine.Paragraph + options: + show_root_heading: true + +::: sopa.io.report.engine.Message + options: + show_root_heading: true + +::: sopa.io.report.engine.Block + options: + show_root_heading: true + +::: sopa.io.report.engine.CodeBlock + options: + show_root_heading: true + +::: sopa.io.report.engine.ProgressBar + options: + show_root_heading: true + +::: sopa.io.report.engine.Section + options: + show_root_heading: true + +::: sopa.io.report.engine.SubSection + options: + show_root_heading: true + +::: sopa.io.report.engine.NavbarItem + options: + show_root_heading: true + +::: sopa.io.report.engine.Navbar + options: + show_root_heading: true + +::: sopa.io.report.engine.Columns + options: + show_root_heading: true + +::: sopa.io.report.engine.Image + options: + show_root_heading: true + +::: sopa.io.report.engine.Root + options: + show_root_heading: true diff --git a/api/io/index.html b/api/io/index.html new file mode 100644 index 00000000..aa8b3d10 --- /dev/null +++ b/api/io/index.html @@ -0,0 +1,2538 @@ + + + + + + + + + + + + + + + + + + + + + + + sopa.io - Sopa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + +

sopa.io

+ +
+

Notes

+

Due to many updates in the data format provided by the different companies, you might have issues loading your data. In this case, consider opening an issue detailing the version of the machine you used and the error log, as well as an example of file names that you are trying to read.

+
+
+

Related to spatialdata-io

+

A library called spatialdata-io already contains a lot of readers. Here, we updated some readers already existing in spatialdata-io, and added a few others. In the future, we will completely rely on spatialdata-io.

+
+ + + +
+ + + +

+ sopa.io.xenium(path, imread_kwargs=MappingProxyType({}), image_models_kwargs=MappingProxyType({})) + +

+ + +
+ +

Read Xenium data as a SpatialData object. For more information, refer to spatialdata-io.

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
path + str | Path + +
+

Path to the Xenium directory containing all the experiment files

+
+
+ required +
imread_kwargs + +
+

See link above.

+
+
+ MappingProxyType({}) +
image_models_kwargs + +
+

See link above.

+
+
+ MappingProxyType({}) +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ SpatialData + +
+

A SpatialData object representing the Xenium experiment

+
+
+ +
+ Source code in sopa/io/transcriptomics.py +
def xenium(
+    path: str | Path,
+    imread_kwargs=MappingProxyType({}),
+    image_models_kwargs=MappingProxyType({}),
+) -> SpatialData:
+    """Read Xenium data as a `SpatialData` object. For more information, refer to [spatialdata-io](https://spatialdata.scverse.org/projects/io/en/latest/generated/spatialdata_io.xenium.html).
+
+    Args:
+        path: Path to the Xenium directory containing all the experiment files
+        imread_kwargs: See link above.
+        image_models_kwargs: See link above.
+
+    Returns:
+        A `SpatialData` object representing the Xenium experiment
+    """
+    if "chunks" not in image_models_kwargs:
+        if isinstance(image_models_kwargs, MappingProxyType):
+            image_models_kwargs = {}
+        assert isinstance(image_models_kwargs, dict)
+        image_models_kwargs["chunks"] = (1, 4096, 4096)
+    if "scale_factors" not in image_models_kwargs:
+        if isinstance(image_models_kwargs, MappingProxyType):
+            image_models_kwargs = {}
+        assert isinstance(image_models_kwargs, dict)
+        image_models_kwargs["scale_factors"] = [2, 2, 2, 2]
+
+    path = Path(path)
+    with open(path / XeniumKeys.XENIUM_SPECS) as f:
+        specs = json.load(f)
+
+    points = {"transcripts": _get_points_xenium(path, specs)}
+
+    images = {
+        "morphology_mip": _get_images_xenium(
+            path,
+            XeniumKeys.MORPHOLOGY_MIP_FILE,
+            imread_kwargs,
+            image_models_kwargs,
+        )
+    }
+
+    return SpatialData(images=images, points=points)
+
+
+
+ +
+ + +
+ + + +

+ sopa.io.merscope(path, vpt_outputs=None, z_layers=3, region_name=None, slide_name=None, imread_kwargs=MappingProxyType({}), image_models_kwargs=MappingProxyType({})) + +

+ + +
+ +

Read MERSCOPE data as a SpatialData object. For more information, refer to spatialdata-io.

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
path + str | Path + +
+

Path to the MERSCOPE directory containing all the experiment files

+
+
+ required +
**kwargs + +
+

See link above.

+
+
+ required +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ SpatialData + +
+

A SpatialData object representing the MERSCOPE experiment

+
+
+ +
+ Source code in sopa/io/transcriptomics.py +
def merscope(
+    path: str | Path,
+    vpt_outputs: Path | str | dict[str, Any] | None = None,
+    z_layers: int | list[int] | None = 3,
+    region_name: str | None = None,
+    slide_name: str | None = None,
+    imread_kwargs: Mapping[str, Any] = MappingProxyType({}),
+    image_models_kwargs: Mapping[str, Any] = MappingProxyType({}),
+) -> SpatialData:
+    """Read MERSCOPE data as a `SpatialData` object. For more information, refer to [spatialdata-io](https://spatialdata.scverse.org/projects/io/en/latest/generated/spatialdata_io.merscope.html).
+
+    Args:
+        path: Path to the MERSCOPE directory containing all the experiment files
+        **kwargs: See link above.
+
+    Returns:
+        A `SpatialData` object representing the MERSCOPE experiment
+    """
+    if "chunks" not in image_models_kwargs:
+        if isinstance(image_models_kwargs, MappingProxyType):
+            image_models_kwargs = {}
+        assert isinstance(image_models_kwargs, dict)
+        image_models_kwargs["chunks"] = (1, 4096, 4096)
+    if "scale_factors" not in image_models_kwargs:
+        if isinstance(image_models_kwargs, MappingProxyType):
+            image_models_kwargs = {}
+        assert isinstance(image_models_kwargs, dict)
+        image_models_kwargs["scale_factors"] = [2, 2, 2, 2]
+
+    path = Path(path).absolute()
+    count_path, obs_path, boundaries_path = _get_file_paths(path, vpt_outputs)
+    images_dir = path / MerscopeKeys.IMAGES_DIR
+
+    microns_to_pixels = Affine(
+        np.genfromtxt(images_dir / MerscopeKeys.TRANSFORMATION_FILE),
+        input_axes=("x", "y"),
+        output_axes=("x", "y"),
+    )
+
+    vizgen_region = path.name if region_name is None else region_name
+    slide_name = path.parent.name if slide_name is None else slide_name
+    dataset_id = f"{slide_name}_{vizgen_region}"
+    region = f"{dataset_id}_polygons"
+
+    # Images
+    images = {}
+
+    z_layers = [z_layers] if isinstance(z_layers, int) else z_layers or []
+
+    stainings = _get_channel_names(images_dir)
+    if stainings:
+        for z_layer in z_layers:
+            im = da.stack(
+                [
+                    imread(images_dir / f"mosaic_{stain}_z{z_layer}.tif", **imread_kwargs).squeeze()
+                    for stain in stainings
+                ],
+                axis=0,
+            )
+            parsed_im = Image2DModel.parse(
+                im,
+                dims=("c", "y", "x"),
+                transformations={"microns": microns_to_pixels.inverse()},
+                c_coords=stainings,
+                **image_models_kwargs,
+            )
+            images[f"{dataset_id}_z{z_layer}"] = parsed_im
+
+    # Transcripts
+    points = {}
+    transcript_path = path / MerscopeKeys.TRANSCRIPTS_FILE
+    if transcript_path.exists():
+        points[f"{dataset_id}_transcripts"] = _get_points(transcript_path)
+    else:
+        logger.warning(
+            f"Transcript file {transcript_path} does not exist. Transcripts are not loaded."
+        )
+
+    # Polygons
+    shapes = {}
+    if boundaries_path.exists():
+        shapes[f"{dataset_id}_polygons"] = _get_polygons(boundaries_path)
+    else:
+        logger.warning(
+            f"Boundary file {boundaries_path} does not exist. Cell boundaries are not loaded."
+        )
+
+    # Table
+    table = None
+    if count_path.exists() and obs_path.exists():
+        table = _get_table(count_path, obs_path, vizgen_region, slide_name, dataset_id, region)
+    else:
+        logger.warning(
+            f"At least one of the following files does not exist: {count_path}, {obs_path}. The table is not loaded."
+        )
+
+    return SpatialData(shapes=shapes, points=points, images=images, table=table)
+
+
+
+ +
+ + +
+ + + +

+ sopa.io.cosmx(path, **kwargs) + +

+ + +
+ +

Alias to the spatialdata-io reader.

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
path + str + +
+

Path to the CosMX data directory

+
+
+ required +
**kwargs + int + +
+

See link above.

+
+
+ {} +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ SpatialData + +
+

A SpatialData object representing the CosMX experiment

+
+
+ +
+ Source code in sopa/io/transcriptomics.py +
def cosmx(path: str, **kwargs: int) -> SpatialData:
+    """Alias to the [spatialdata-io reader](https://spatialdata.scverse.org/projects/io/en/latest/generated/spatialdata_io.cosmx.html).
+
+    Args:
+        path: Path to the CosMX data directory
+        **kwargs: See link above.
+
+    Returns:
+        A `SpatialData` object representing the CosMX experiment
+    """
+    return spatialdata_io.cosmx(path, **kwargs)
+
+
+
+ +
+ + +
+ + + +

+ sopa.io.macsima(path, **kwargs) + +

+ + +
+ +

Read MACSIMA data as a SpatialData object

+ +
+ Notes +

For all duplicated names, their index will be added in brackets afterwards; for instance, you will often find DAPI (000) to indicate the DAPI channel of index 000

+
+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
path + Path + +
+

Path to the directory containing the MACSIMA .tif images

+
+
+ required +
kwargs + int + +
+

Kwargs for _general_tif_directory_reader

+
+
+ {} +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ SpatialData + +
+

A SpatialData object with a 2D-image of shape (C, Y, X)

+
+
+ +
+ Source code in sopa/io/imaging.py +
57
+58
+59
+60
+61
+62
+63
+64
+65
+66
+67
+68
+69
+70
+71
+72
def macsima(path: Path, **kwargs: int) -> SpatialData:
+    """Read MACSIMA data as a `SpatialData` object
+
+    Notes:
+        For all duplicated names, their index will be added in brackets afterwards; for instance, you will often find `DAPI (000)` to indicate the DAPI channel of index `000`
+
+    Args:
+        path: Path to the directory containing the MACSIMA `.tif` images
+        kwargs: Kwargs for `_general_tif_directory_reader`
+
+    Returns:
+        A `SpatialData` object with a 2D-image of shape `(C, Y, X)`
+    """
+    return _general_tif_directory_reader(
+        path, files_to_channels=_get_channel_names_macsima, **kwargs
+    )
+
+
+
+ +
+ + +
+ + + +

+ sopa.io.phenocycler(path, channels_renaming=None, image_models_kwargs=None) + +

+ + +
+ +

Read Phenocycler data as a SpatialData object

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
path + str | Path + +
+

Path to a .qptiff file, or a .tif file (if exported from QuPath)

+
+
+ required +
channels_renaming + dict | None + +
+

A dictionary whose keys correspond to channels and values to their corresponding new name. Not all channels need to be renamed.

+
+
+ None +
image_models_kwargs + dict | None + +
+

Kwargs provided to the Image2DModel

+
+
+ None +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ SpatialData + +
+

A SpatialData object with a 2D-image of shape (C, Y, X)

+
+
+ +
+ Source code in sopa/io/imaging.py +
def phenocycler(
+    path: str | Path, channels_renaming: dict | None = None, image_models_kwargs: dict | None = None
+) -> SpatialData:
+    """Read Phenocycler data as a `SpatialData` object
+
+    Args:
+        path: Path to a `.qptiff` file, or a `.tif` file (if exported from QuPath)
+        channels_renaming: A dictionary whose keys correspond to channels and values to their corresponding new name. Not all channels need to be renamed.
+        image_models_kwargs: Kwargs provided to the `Image2DModel`
+
+    Returns:
+        A `SpatialData` object with a 2D-image of shape `(C, Y, X)`
+    """
+    image_models_kwargs = _default_image_models_kwargs(image_models_kwargs)
+
+    path = Path(path)
+    image_name = path.absolute().stem
+
+    if path.suffix == ".qptiff":
+        with tf.TiffFile(path) as tif:
+            series = tif.series[0]
+            names = _get_channel_names_qptiff(series)
+
+            delayed_image = delayed(lambda series: series.asarray())(tif)
+            image = da.from_delayed(delayed_image, dtype=series.dtype, shape=series.shape)
+    elif path.suffix == ".tif":
+        image = imread(path)
+        names = _get_IJ_channel_names(path)
+    else:
+        raise ValueError(f"Unsupported file extension {path.suffix}. Must be '.qptiff' or '.tif'.")
+
+    names = _rename_channels(names, channels_renaming)
+    image = image.rechunk(chunks=image_models_kwargs["chunks"])
+
+    image = Image2DModel.parse(
+        image,
+        dims=("c", "y", "x"),
+        transformations={"pixels": Identity()},
+        c_coords=names,
+        **image_models_kwargs,
+    )
+
+    return SpatialData(images={image_name: image})
+
+
+
+ +
+ + +
+ + + +

+ sopa.io.hyperion(path, image_models_kwargs=None, imread_kwargs=None) + +

+ + +
+ +

Read Hyperion data as a SpatialData object

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
path + Path + +
+

Path to the directory containing the Hyperion .tiff images

+
+
+ required +
image_models_kwargs + dict | None + +
+

Kwargs provided to the Image2DModel

+
+
+ None +
imread_kwargs + dict | None + +
+

Kwargs provided to dask_image.imread.imread

+
+
+ None +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ SpatialData + +
+

A SpatialData object with a 2D-image of shape (C, Y, X)

+
+
+ +
+ Source code in sopa/io/imaging.py +
def hyperion(
+    path: Path, image_models_kwargs: dict | None = None, imread_kwargs: dict | None = None
+) -> SpatialData:
+    """Read Hyperion data as a `SpatialData` object
+
+    Args:
+        path: Path to the directory containing the Hyperion `.tiff` images
+        image_models_kwargs: Kwargs provided to the `Image2DModel`
+        imread_kwargs: Kwargs provided to `dask_image.imread.imread`
+
+    Returns:
+        A `SpatialData` object with a 2D-image of shape `(C, Y, X)`
+    """
+    image_models_kwargs = _default_image_models_kwargs(image_models_kwargs)
+    imread_kwargs = {} if imread_kwargs is None else imread_kwargs
+
+    files = [file for file in Path(path).iterdir() if file.suffix == ".tiff"]
+
+    names = _get_channel_names_hyperion(files)
+    image = da.concatenate(
+        [imread(file, **imread_kwargs) for file in files],
+        axis=0,
+    )
+    image = (image / image.max(axis=(1, 2)).compute()[:, None, None] * 255).astype(np.uint8)
+    image = image.rechunk(chunks=image_models_kwargs["chunks"])
+
+    log.info(f"Found channel names {names}")
+
+    image_name = Path(path).absolute().stem
+    image = Image2DModel.parse(
+        image,
+        dims=("c", "y", "x"),
+        transformations={"pixels": Identity()},
+        c_coords=names,
+        **image_models_kwargs,
+    )
+
+    return SpatialData(images={image_name: image})
+
+
+
+ +
+ + +
+ + + +

+ sopa.io.ome_tif(path, as_image=False) + +

+ + +
+ +

Read an .ome.tif image. This image should be a 2D image (with possibly multiple channels). +Typically, this function can be used to open Xenium IF images.

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
path + Path + +
+

Path to the .ome.tif image

+
+
+ required +
as_image + bool + +
+

If True, will return a SpatialImage object

+
+
+ False +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ SpatialImage | SpatialData + +
+

A SpatialImage or a SpatialData object

+
+
+ +
+ Source code in sopa/io/imaging.py +
def ome_tif(path: Path, as_image: bool = False) -> SpatialImage | SpatialData:
+    """Read an `.ome.tif` image. This image should be a 2D image (with possibly multiple channels).
+    Typically, this function can be used to open Xenium IF images.
+
+    Args:
+        path: Path to the `.ome.tif` image
+        as_image: If `True`, will return a `SpatialImage` object
+
+    Returns:
+        A `SpatialImage` or a `SpatialData` object
+    """
+    image_models_kwargs = _default_image_models_kwargs(None)
+    image_name = Path(path).absolute().name.split(".")[0]
+    image: da.Array = imread(path)
+
+    if image.ndim == 4:
+        assert image.shape[0] == 1, "4D images not supported"
+        image = da.moveaxis(image[0], 2, 0)
+        log.info(f"Transformed 4D image into a 3D image of shape (c, y, x) = {image.shape}")
+    elif image.ndim != 3:
+        raise ValueError(f"Number of dimensions not supported: {image.ndim}")
+
+    image = image.rechunk(chunks=image_models_kwargs["chunks"])
+
+    channel_names = _ome_channels_names(path)
+    if len(channel_names) != len(image):
+        channel_names = [str(i) for i in range(len(image))]
+        log.warn(f"Channel names couldn't be read. Using {channel_names} instead.")
+
+    image = SpatialImage(image, dims=["c", "y", "x"], name=image_name, coords={"c": channel_names})
+
+    if as_image:
+        return image
+
+    image = Image2DModel.parse(
+        image,
+        dims=("c", "y", "x"),
+        c_coords=channel_names,
+        transformations={"pixels": Identity()},
+        **image_models_kwargs,
+    )
+
+    return SpatialData(images={image_name: image})
+
+
+
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/api/io/io.md b/api/io/io.md new file mode 100644 index 00000000..83ee81b3 --- /dev/null +++ b/api/io/io.md @@ -0,0 +1,33 @@ +!!! notes + Due to many updates in the data format provided by the different companies, you might have issues loading your data. In this case, consider [opening an issue](https://github.com/gustaveroussy/sopa/issues) detailing the version of the machine you used and the error log, as well as an example of file names that you are trying to read. + +!!! notes "Related to `spatialdata-io`" + A library called [`spatialdata-io`](https://spatialdata.scverse.org/projects/io/en/latest/) already contains a lot of readers. Here, we updated some readers already existing in `spatialdata-io`, and added a few others. In the future, we will completely rely on `spatialdata-io`. + +::: sopa.io.xenium + options: + show_root_heading: true + +::: sopa.io.merscope + options: + show_root_heading: true + +::: sopa.io.cosmx + options: + show_root_heading: true + +::: sopa.io.macsima + options: + show_root_heading: true + +::: sopa.io.phenocycler + options: + show_root_heading: true + +::: sopa.io.hyperion + options: + show_root_heading: true + +::: sopa.io.ome_tif + options: + show_root_heading: true diff --git a/api/segmentation/aggregate/aggregate.md b/api/segmentation/aggregate/aggregate.md new file mode 100644 index 00000000..1dfa90f1 --- /dev/null +++ b/api/segmentation/aggregate/aggregate.md @@ -0,0 +1,19 @@ +::: sopa.segmentation.aggregate.average_channels + options: + show_root_heading: true + +::: sopa.segmentation.aggregate._average_channels_aligned + options: + show_root_heading: true + +::: sopa.segmentation.aggregate.count_transcripts + options: + show_root_heading: true + +::: sopa.segmentation.aggregate._count_transcripts_aligned + options: + show_root_heading: true + +::: sopa.segmentation.aggregate.Aggregator + options: + show_root_heading: true diff --git 
a/api/segmentation/aggregate/index.html b/api/segmentation/aggregate/index.html new file mode 100644 index 00000000..739e4854 --- /dev/null +++ b/api/segmentation/aggregate/index.html @@ -0,0 +1,2743 @@ + + + + + + + + + + + + + + + + + + + + + + + sopa.segmentation.aggregate - Sopa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + +

sopa.segmentation.aggregate

+ +
+ + + +

+ sopa.segmentation.aggregate.average_channels(sdata, image_key=None, shapes_key=None, expand_radius_ratio=0) + +

+ + +
+ +

Average channel intensities per cell.

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
sdata + SpatialData + +
+

A SpatialData object

+
+
+ required +
image_key + str + +
+

Key of sdata containing the image. If only one images element, this does not have to be provided.

+
+
+ None +
shapes_key + str + +
+

Key of sdata containing the cell boundaries. If only one shapes element, this does not have to be provided.

+
+
+ None +
expand_radius_ratio + float + +
+

Cell polygons will be expanded by expand_radius_ratio * mean_radius. This helps better aggregate boundary stainings.

+
+
+ 0 +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ ndarray + +
+

A numpy ndarray of shape (n_cells, n_channels)

+
+
+ +
+ Source code in sopa/segmentation/aggregate.py +
def average_channels(
+    sdata: SpatialData,
+    image_key: str = None,
+    shapes_key: str = None,
+    expand_radius_ratio: float = 0,
+) -> np.ndarray:
+    """Average channel intensities per cell.
+
+    Args:
+        sdata: A `SpatialData` object
+        image_key: Key of `sdata` containing the image. If only one `images` element, this does not have to be provided.
+        shapes_key: Key of `sdata` containing the cell boundaries. If only one `shapes` element, this does not have to be provided.
+        expand_radius_ratio: Cell polygons will be expanded by `expand_radius_ratio * mean_radius`. This helps better aggregate boundary stainings.
+
+    Returns:
+        A numpy `ndarray` of shape `(n_cells, n_channels)`
+    """
+    image = get_spatial_image(sdata, image_key)
+
+    geo_df = get_element(sdata, "shapes", shapes_key)
+    geo_df = to_intrinsic(sdata, geo_df, image)
+
+    expand_radius = expand_radius_ratio * np.mean(np.sqrt(geo_df.area / np.pi))
+
+    if expand_radius > 0:
+        geo_df = geo_df.buffer(expand_radius)
+
+    log.info(
+        f"Averaging channels intensity over {len(geo_df)} cells with expansion {expand_radius}"
+    )
+    return _average_channels_aligned(image, geo_df)
+
+
+
+ +
+ + +
+ + + +

+ sopa.segmentation.aggregate._average_channels_aligned(image, geo_df) + +

+ + +
+ +

Average channel intensities per cell. The image and cells have to be aligned, i.e. be on the same coordinate system.

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
image + SpatialImage + +
+

A SpatialImage of shape (n_channels, y, x)

+
+
+ required +
geo_df + GeoDataFrame | list[Polygon] + +
+

A GeoDataFrame whose geometries are cell boundaries (polygons)

+
+
+ required +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ ndarray + +
+

A numpy ndarray of shape (n_cells, n_channels)

+
+
+ +
+ Source code in sopa/segmentation/aggregate.py +
def _average_channels_aligned(
+    image: SpatialImage, geo_df: gpd.GeoDataFrame | list[Polygon]
+) -> np.ndarray:
+    """Average channel intensities per cell. The image and cells have to be aligned, i.e. be on the same coordinate system.
+
+    Args:
+        image: A `SpatialImage` of shape `(n_channels, y, x)`
+        geo_df: A `GeoDataFrame` whose geometries are cell boundaries (polygons)
+
+    Returns:
+        A numpy `ndarray` of shape `(n_cells, n_channels)`
+    """
+    cells = geo_df if isinstance(geo_df, list) else list(geo_df.geometry)
+    tree = shapely.STRtree(cells)
+
+    intensities = np.zeros((len(cells), len(image.coords["c"])))
+    areas = np.zeros(len(cells))
+
+    def func(chunk, block_info=None):
+        if block_info is not None:
+            (ymin, ymax), (xmin, xmax) = block_info[0]["array-location"][1:]
+            patch = box(xmin, ymin, xmax, ymax)
+            intersections = tree.query(patch, predicate="intersects")
+
+            for index in intersections:
+                cell = cells[index]
+                bounds = shapes.pixel_outer_bounds(cell.bounds)
+
+                sub_image = chunk[
+                    :,
+                    max(bounds[1] - ymin, 0) : bounds[3] - ymin,
+                    max(bounds[0] - xmin, 0) : bounds[2] - xmin,
+                ]
+
+                if sub_image.shape[1] == 0 or sub_image.shape[2] == 0:
+                    continue
+
+                mask = shapes.rasterize(cell, sub_image.shape[1:], bounds)
+
+                intensities[index] += np.sum(sub_image * mask, axis=(1, 2))
+                areas[index] += np.sum(mask)
+        return da.zeros(chunk.shape[1:])
+
+    with ProgressBar():
+        image.data.map_blocks(func, drop_axis=0).compute()
+
+    return intensities / areas[:, None].clip(1)
+
+
+
+ +
+ + +
+ + + +

+ sopa.segmentation.aggregate.count_transcripts(sdata, gene_column, shapes_key=None, points_key=None, geo_df=None) + +

+ + +
+ +

Counts transcripts per cell.

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
sdata + SpatialData + +
+

A SpatialData object

+
+
+ required +
gene_column + str + +
+

Column of the transcript dataframe containing the gene names

+
+
+ required +
shapes_key + str + +
+

Key of sdata containing the cell boundaries. If only one shapes element, this does not have to be provided.

+
+
+ None +
points_key + str + +
+

Key of sdata containing the transcripts. If only one points element, this does not have to be provided.

+
+
+ None +
geo_df + GeoDataFrame + +
+

If the cell boundaries are not yet in sdata, a GeoDataFrame can be directly provided for cell boundaries

+
+
+ None +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ AnnData + +
+

An AnnData object of shape (n_cells, n_genes) with the counts per cell

+
+
+ +
+ Source code in sopa/segmentation/aggregate.py +
def count_transcripts(
+    sdata: SpatialData,
+    gene_column: str,
+    shapes_key: str = None,
+    points_key: str = None,
+    geo_df: gpd.GeoDataFrame = None,
+) -> AnnData:
+    """Counts transcripts per cell.
+
+    Args:
+        sdata: A `SpatialData` object
+        gene_column: Column of the transcript dataframe containing the gene names
+        shapes_key: Key of `sdata` containing the cell boundaries. If only one `shapes` element, this does not have to be provided.
+        points_key: Key of `sdata` containing the transcripts. If only one `points` element, this does not have to be provided.
+        geo_df: If the cell boundaries are not yet in `sdata`, a `GeoDataFrame` can be directly provided for cell boundaries
+
+    Returns:
+        An `AnnData` object of shape `(n_cells, n_genes)` with the counts per cell
+    """
+    points_key, points = get_item(sdata, "points", points_key)
+
+    if geo_df is None:
+        geo_df = get_element(sdata, "shapes", shapes_key)
+        geo_df = to_intrinsic(sdata, geo_df, points_key)
+
+    log.info(f"Aggregating transcripts over {len(geo_df)} cells")
+    return _count_transcripts_aligned(geo_df, points, gene_column)
+
+
+
+ +
+ + +
+ + + +

+ sopa.segmentation.aggregate._count_transcripts_aligned(geo_df, points, value_key) + +

+ + +
+ +

Count transcripts per cell. The cells and points have to be aligned (i.e., in the same coordinate system)

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
geo_df + GeoDataFrame + +
+

Cells geometries

+
+
+ required +
points + DataFrame + +
+

Transcripts dataframe

+
+
+ required +
value_key + str + +
+

Key of points containing the genes names

+
+
+ required +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ AnnData + +
+

An AnnData object of shape (n_cells, n_genes) with the counts per cell

+
+
+ +
+ Source code in sopa/segmentation/aggregate.py +
def _count_transcripts_aligned(
+    geo_df: gpd.GeoDataFrame, points: dd.DataFrame, value_key: str
+) -> AnnData:
+    """Count transcripts per cell. The cells and points have to be aligned (i.e., in the same coordinate system)
+
+    Args:
+        geo_df: Cells geometries
+        points: Transcripts dataframe
+        value_key: Key of `points` containing the genes names
+
+    Returns:
+        An `AnnData` object of shape `(n_cells, n_genes)` with the counts per cell
+    """
+    points[value_key] = points[value_key].astype("category").cat.as_known()
+    gene_names = points[value_key].cat.categories.astype(str)
+
+    X = coo_matrix((len(geo_df), len(gene_names)), dtype=int)
+    adata = AnnData(X=X, var=pd.DataFrame(index=gene_names))
+    adata.obs_names = geo_df.index
+
+    geo_df = geo_df.reset_index()
+
+    with ProgressBar():
+        points.map_partitions(
+            partial(_add_coo, adata, geo_df, gene_column=value_key, gene_names=gene_names),
+            meta=(),
+        ).compute()
+
+    adata.X = adata.X.tocsr()
+    return adata
+
+
+
+ +
+ +
+ + + +

+ sopa.segmentation.aggregate.Aggregator + + +

+ + +
+ + +

Perform transcript count and channel averaging over a SpatialData object

+ +
+ Source code in sopa/segmentation/aggregate.py +
class Aggregator:
+    """Perform transcript count and channel averaging over a `SpatialData` object"""
+
+    def __init__(
+        self,
+        sdata: SpatialData,
+        overwrite: bool = True,
+        image_key: str | None = None,
+        shapes_key: str | None = None,
+    ):
+        """
+        Args:
+            sdata: A `SpatialData` object
+            overwrite: If `True`, will overwrite `sdata.table` if already existing
+            image_key: Key of `sdata` with the image to be averaged. If only one image, this does not have to be provided.
+            shapes_key: Key of `sdata` with the shapes corresponding to the cells boundaries
+        """
+        self.sdata = sdata
+        self.overwrite = overwrite
+
+        self.image_key, self.image = get_spatial_image(sdata, image_key, return_key=True)
+
+        if shapes_key is None:
+            self.shapes_key, self.geo_df = get_boundaries(sdata, return_key=True)
+        else:
+            self.shapes_key = shapes_key
+            self.geo_df = self.sdata[shapes_key]
+
+        if sdata.table is not None and len(self.geo_df) != sdata.table.n_obs:
+            log.warn(
+                f"Table already existing with {sdata.table.n_obs} obs, but aggregating on {len(self.geo_df)} cells. Deleting table."
+            )
+            del sdata.table
+
+        self.table = sdata.table
+
+    def standardize_table(self):
+        self.table.obs_names = list(map(str_cell_id, range(self.table.n_obs)))
+
+        self.table.obsm["spatial"] = np.array(
+            [[centroid.x, centroid.y] for centroid in self.geo_df.centroid]
+        )
+        self.table.obs[SopaKeys.REGION_KEY] = pd.Series(
+            self.shapes_key, index=self.table.obs_names, dtype="category"
+        )
+        self.table.obs[SopaKeys.SLIDE_KEY] = pd.Series(
+            self.image_key, index=self.table.obs_names, dtype="category"
+        )
+        self.table.obs[SopaKeys.INSTANCE_KEY] = self.geo_df.index
+
+        self.table.obs[SopaKeys.AREA_OBS] = self.geo_df.area.values
+
+        if "spatialdata_attrs" in self.table.uns:
+            del self.table.uns["spatialdata_attrs"]
+
+        self.table = TableModel.parse(
+            self.table,
+            region_key=SopaKeys.REGION_KEY,
+            region=self.shapes_key,
+            instance_key=SopaKeys.INSTANCE_KEY,
+        )
+
+    def filter_cells(self, where_filter: np.ndarray):
+        log.info(f"Filtering {where_filter.sum()} cells")
+
+        self.geo_df = self.geo_df[~where_filter]
+        self.sdata.add_shapes(self.shapes_key, self.geo_df, overwrite=True)
+
+        if self.table is not None:
+            self.table = self.table[~where_filter]
+
+    def save_table(self):
+        if self.sdata.table is not None and self.overwrite:
+            del self.sdata.table
+        self.sdata.table = self.table
+
+    def update_table(
+        self,
+        gene_column: str | None = None,
+        average_intensities: bool = True,
+        expand_radius_ratio: float = 0,
+        min_transcripts: int = 0,
+        min_intensity_ratio: float = 0,
+    ):
+        """Perform aggregation and update the spatialdata table
+
+        Args:
+            gene_column: Column key of the transcript dataframe containing the gene names
+            average_intensities: Whether to average the channels intensities inside cells polygons
+            expand_radius_ratio: Cells polygons will be expanded by `expand_radius_ratio * mean_radius` for channels averaging **only**. This help better aggregate boundary stainings
+            min_transcripts: Minimum amount of transcript to keep a cell
+            min_intensity_ratio: Cells whose mean channel intensity is less than `min_intensity_ratio * quantile_90` will be filtered
+        """
+        does_count = (
+            self.table is not None and isinstance(self.table.X, csr_matrix)
+        ) or gene_column is not None
+
+        assert (
+            average_intensities or does_count
+        ), "You must choose at least one aggregation: transcripts or fluorescence intensities"
+
+        if gene_column is not None:
+            if self.table is not None:
+                log.warn("sdata.table is already existing. Transcripts are not count again.")
+            else:
+                self.table = count_transcripts(self.sdata, gene_column, shapes_key=self.shapes_key)
+
+        if does_count and min_transcripts > 0:
+            self.filter_cells(self.table.X.sum(axis=1) < min_transcripts)
+
+        if average_intensities:
+            mean_intensities = average_channels(
+                self.sdata,
+                image_key=self.image_key,
+                shapes_key=self.shapes_key,
+                expand_radius_ratio=expand_radius_ratio,
+            )
+
+            if min_intensity_ratio > 0:
+                means = mean_intensities.mean(axis=1)
+                intensity_threshold = min_intensity_ratio * np.quantile(means, 0.9)
+                where_filter = means < intensity_threshold
+                self.filter_cells(where_filter)
+                mean_intensities = mean_intensities[~where_filter]
+
+            if not does_count:
+                self.table = AnnData(
+                    mean_intensities,
+                    dtype=mean_intensities.dtype,
+                    var=pd.DataFrame(index=self.image.coords["c"].values.astype(str)),
+                    obs=pd.DataFrame(index=self.geo_df.index),
+                )
+            else:
+                self.table.obsm[SopaKeys.INTENSITIES_OBSM] = pd.DataFrame(
+                    mean_intensities,
+                    columns=self.image.coords["c"].values.astype(str),
+                    index=self.table.obs_names,
+                )
+
+        self.table.uns[SopaKeys.UNS_KEY] = {
+            "version": sopa.__version__,
+            SopaKeys.UNS_HAS_TRANSCRIPTS: does_count,
+            SopaKeys.UNS_HAS_INTENSITIES: average_intensities,
+        }
+
+        self.standardize_table()
+        self.save_table()
+
+
+ + + +
+ + + + + + + + + + +
+ + + +

+ __init__(sdata, overwrite=True, image_key=None, shapes_key=None) + +

+ + +
+ + + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
sdata + SpatialData + +
+

A SpatialData object

+
+
+ required +
overwrite + bool + +
+

If True, will overwrite sdata.table if already existing

+
+
+ True +
image_key + str | None + +
+

Key of sdata with the image to be averaged. If only one image, this does not have to be provided.

+
+
+ None +
shapes_key + str | None + +
+

Key of sdata with the shapes corresponding to the cells boundaries

+
+
+ None +
+ +
+ Source code in sopa/segmentation/aggregate.py +
37
+38
+39
+40
+41
+42
+43
+44
+45
+46
+47
+48
+49
+50
+51
+52
+53
+54
+55
+56
+57
+58
+59
+60
+61
+62
+63
+64
+65
+66
+67
+68
def __init__(
+    self,
+    sdata: SpatialData,
+    overwrite: bool = True,
+    image_key: str | None = None,
+    shapes_key: str | None = None,
+):
+    """
+    Args:
+        sdata: A `SpatialData` object
+        overwrite: If `True`, will overwrite `sdata.table` if already existing
+        image_key: Key of `sdata` with the image to be averaged. If only one image, this does not have to be provided.
+        shapes_key: Key of `sdata` with the shapes corresponding to the cells boundaries
+    """
+    self.sdata = sdata
+    self.overwrite = overwrite
+
+    self.image_key, self.image = get_spatial_image(sdata, image_key, return_key=True)
+
+    if shapes_key is None:
+        self.shapes_key, self.geo_df = get_boundaries(sdata, return_key=True)
+    else:
+        self.shapes_key = shapes_key
+        self.geo_df = self.sdata[shapes_key]
+
+    if sdata.table is not None and len(self.geo_df) != sdata.table.n_obs:
+        log.warn(
+            f"Table already existing with {sdata.table.n_obs} obs, but aggregating on {len(self.geo_df)} cells. Deleting table."
+        )
+        del sdata.table
+
+    self.table = sdata.table
+
+
+
+ +
+ + +
+ + + +

+ update_table(gene_column=None, average_intensities=True, expand_radius_ratio=0, min_transcripts=0, min_intensity_ratio=0) + +

+ + +
+ +

Perform aggregation and update the spatialdata table

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
gene_column + str | None + +
+

Column key of the transcript dataframe containing the gene names

+
+
+ None +
average_intensities + bool + +
+

Whether to average the channels intensities inside cells polygons

+
+
+ True +
expand_radius_ratio + float + +
+

Cells polygons will be expanded by expand_radius_ratio * mean_radius for channels averaging only. This help better aggregate boundary stainings

+
+
+ 0 +
min_transcripts + int + +
+

Minimum amount of transcript to keep a cell

+
+
+ 0 +
min_intensity_ratio + float + +
+

Cells whose mean channel intensity is less than min_intensity_ratio * quantile_90 will be filtered

+
+
+ 0 +
+ +
+ Source code in sopa/segmentation/aggregate.py +
def update_table(
+    self,
+    gene_column: str | None = None,
+    average_intensities: bool = True,
+    expand_radius_ratio: float = 0,
+    min_transcripts: int = 0,
+    min_intensity_ratio: float = 0,
+):
+    """Perform aggregation and update the spatialdata table
+
+    Args:
+        gene_column: Column key of the transcript dataframe containing the gene names
+        average_intensities: Whether to average the channels intensities inside cells polygons
+        expand_radius_ratio: Cells polygons will be expanded by `expand_radius_ratio * mean_radius` for channels averaging **only**. This help better aggregate boundary stainings
+        min_transcripts: Minimum amount of transcript to keep a cell
+        min_intensity_ratio: Cells whose mean channel intensity is less than `min_intensity_ratio * quantile_90` will be filtered
+    """
+    does_count = (
+        self.table is not None and isinstance(self.table.X, csr_matrix)
+    ) or gene_column is not None
+
+    assert (
+        average_intensities or does_count
+    ), "You must choose at least one aggregation: transcripts or fluorescence intensities"
+
+    if gene_column is not None:
+        if self.table is not None:
+            log.warn("sdata.table is already existing. Transcripts are not count again.")
+        else:
+            self.table = count_transcripts(self.sdata, gene_column, shapes_key=self.shapes_key)
+
+    if does_count and min_transcripts > 0:
+        self.filter_cells(self.table.X.sum(axis=1) < min_transcripts)
+
+    if average_intensities:
+        mean_intensities = average_channels(
+            self.sdata,
+            image_key=self.image_key,
+            shapes_key=self.shapes_key,
+            expand_radius_ratio=expand_radius_ratio,
+        )
+
+        if min_intensity_ratio > 0:
+            means = mean_intensities.mean(axis=1)
+            intensity_threshold = min_intensity_ratio * np.quantile(means, 0.9)
+            where_filter = means < intensity_threshold
+            self.filter_cells(where_filter)
+            mean_intensities = mean_intensities[~where_filter]
+
+        if not does_count:
+            self.table = AnnData(
+                mean_intensities,
+                dtype=mean_intensities.dtype,
+                var=pd.DataFrame(index=self.image.coords["c"].values.astype(str)),
+                obs=pd.DataFrame(index=self.geo_df.index),
+            )
+        else:
+            self.table.obsm[SopaKeys.INTENSITIES_OBSM] = pd.DataFrame(
+                mean_intensities,
+                columns=self.image.coords["c"].values.astype(str),
+                index=self.table.obs_names,
+            )
+
+    self.table.uns[SopaKeys.UNS_KEY] = {
+        "version": sopa.__version__,
+        SopaKeys.UNS_HAS_TRANSCRIPTS: does_count,
+        SopaKeys.UNS_HAS_INTENSITIES: average_intensities,
+    }
+
+    self.standardize_table()
+    self.save_table()
+
+
+
+ +
+ + + +
+ +
+ + +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/api/segmentation/baysor/baysor.md b/api/segmentation/baysor/baysor.md new file mode 100644 index 00000000..9704226e --- /dev/null +++ b/api/segmentation/baysor/baysor.md @@ -0,0 +1,3 @@ +::: sopa.segmentation.baysor.resolve.resolve + options: + show_root_heading: true diff --git a/api/segmentation/baysor/index.html b/api/segmentation/baysor/index.html new file mode 100644 index 00000000..6d1b9485 --- /dev/null +++ b/api/segmentation/baysor/index.html @@ -0,0 +1,1421 @@ + + + + + + + + + + + + + + + + + + + + + + + sopa.segmentation.baysor - Sopa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + +

sopa.segmentation.baysor

+ +
+ + + +

+ sopa.segmentation.baysor.resolve.resolve(sdata, baysor_temp_dir, gene_column, patches_dirs=None, min_area=0) + +

+ + +
+ +

Concatenate all the per-patch Baysor run and resolve the conflicts. Resulting cells boundaries are saved in the SpatialData object.

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
sdata + SpatialData + +
+

A SpatialData object

+
+
+ required +
baysor_temp_dir + str + +
+

Temporary directory used to store all the baysor subdirectories (one subdirectory for one patch and for one baysor run)

+
+
+ required +
gene_column + str + +
+

Column of the transcript dataframe containing the genes names

+
+
+ required +
patches_dirs + list[str] | None + +
+

Optional list of subdirectories inside baysor_temp_dir to be read. By default, read all.

+
+
+ None +
min_area + float + +
+

Minimum area (in microns^2) for a cell to be kept

+
+
+ 0 +
+ +
+ Source code in sopa/segmentation/baysor/resolve.py +
def resolve(
+    sdata: SpatialData,
+    baysor_temp_dir: str,
+    gene_column: str,
+    patches_dirs: list[str] | None = None,
+    min_area: float = 0,
+):
+    """Concatenate all the per-patch Baysor run and resolve the conflicts. Resulting cells boundaries are saved in the `SpatialData` object.
+
+    Args:
+        sdata: A `SpatialData` object
+        baysor_temp_dir: Temporary directory used to store all the baysor subdirectories (one subdirectory for one patch and for one baysor run)
+        gene_column: Column of the transcript dataframe containing the genes names
+        patches_dirs: Optional list of subdirectories inside `baysor_temp_dir` to be read. By default, read all.
+        min_area: Minimum area (in microns^2) for a cell to be kept
+    """
+    if min_area > 0:
+        log.info(f"Cells whose area is less than {min_area} microns^2 will be removed")
+
+    patches_cells, adatas = read_all_baysor_patches(baysor_temp_dir, min_area, patches_dirs)
+    geo_df, cells_indices, new_ids = resolve_patches(patches_cells, adatas)
+
+    image_key = get_key(sdata, "images")
+    points = get_element(sdata, "points")
+    transformations = get_transformation(points, get_all=True)
+
+    geo_df = ShapesModel.parse(geo_df, transformations=transformations)
+
+    table_conflicts = []
+    if len(new_ids):
+        new_cells = geo_df.geometry[cells_indices == -1]
+        geo_df_new = gpd.GeoDataFrame({"geometry": new_cells})
+        geo_df_new = ShapesModel.parse(geo_df_new, transformations=transformations)
+
+        log.info("Aggregating transcripts on merged cells")
+        table_conflicts = aggregate.count_transcripts(sdata, gene_column, geo_df=geo_df_new)
+        table_conflicts.obs_names = new_ids
+        table_conflicts = [table_conflicts]
+
+    valid_ids = set(list(geo_df.index))
+    table = anndata.concat(
+        [adata[list(valid_ids & set(list(adata.obs_names)))] for adata in adatas] + table_conflicts,
+        join="outer",
+    )
+    table.obs.dropna(axis="columns", inplace=True)
+
+    geo_df = geo_df.loc[table.obs_names]
+
+    table.obsm["spatial"] = np.array([[centroid.x, centroid.y] for centroid in geo_df.centroid])
+    table.obs[SopaKeys.REGION_KEY] = pd.Series(
+        SopaKeys.BAYSOR_BOUNDARIES, index=table.obs_names, dtype="category"
+    )
+    table.obs[SopaKeys.SLIDE_KEY] = pd.Series(image_key, index=table.obs_names, dtype="category")
+    table.obs[SopaKeys.INSTANCE_KEY] = geo_df.index
+
+    table = TableModel.parse(
+        table,
+        region_key=SopaKeys.REGION_KEY,
+        region=SopaKeys.BAYSOR_BOUNDARIES,
+        instance_key=SopaKeys.INSTANCE_KEY,
+    )
+
+    sdata.add_shapes(SopaKeys.BAYSOR_BOUNDARIES, geo_df, overwrite=True)
+
+    if sdata.table is not None:
+        log.warn("Table already existing. It will be replaced by the new one.")
+        del sdata.table
+
+    sdata.table = table
+
+    log.info(
+        f"Added sdata.table, and {len(geo_df)} cell boundaries to sdata['{SopaKeys.BAYSOR_BOUNDARIES}']"
+    )
+
+
+
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/api/segmentation/methods/index.html b/api/segmentation/methods/index.html new file mode 100644 index 00000000..e96a2302 --- /dev/null +++ b/api/segmentation/methods/index.html @@ -0,0 +1,1355 @@ + + + + + + + + + + + + + + + + + + + + + + + sopa.segmentation.methods - Sopa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + +

sopa.segmentation.methods

+ +
+ + + +

+ sopa.segmentation.methods.cellpose_patch(diameter, channels, model_type='cyto2', **cellpose_kwargs) + +

+ + +
+ +

Creation of a callable that runs Cellpose segmentation on a patch

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
diameter + float + +
+

Cellpose diameter parameter

+
+
+ required +
channels + list[str] + +
+

List of channel names

+
+
+ required +
model_type + str + +
+

Cellpose model type

+
+
+ 'cyto2' +
**cellpose_kwargs + int + +
+

Kwargs to be provided to model.eval (where model is a cellpose.models.Cellpose object)

+
+
+ {} +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ Callable + +
+

A callable whose input is an image of shape (C, Y, X) and output is a cell mask of shape (Y, X). Each mask value >0 represent a unique cell ID

+
+
+ +
+ Source code in sopa/segmentation/methods.py +
 6
+ 7
+ 8
+ 9
+10
+11
+12
+13
+14
+15
+16
+17
+18
+19
+20
+21
+22
+23
+24
+25
+26
+27
+28
+29
+30
+31
+32
+33
+34
+35
+36
+37
+38
+39
+40
def cellpose_patch(
+    diameter: float, channels: list[str], model_type: str = "cyto2", **cellpose_kwargs: int
+) -> Callable:
+    """Creation of a callable that runs Cellpose segmentation on a patch
+
+    Args:
+        diameter: Cellpose diameter parameter
+        channels: List of channel names
+        model_type: Cellpose model type
+        **cellpose_kwargs: Kwargs to be provided to `model.eval` (where `model` is a `cellpose.models.Cellpose` object)
+
+    Returns:
+        A `callable` whose input is an image of shape `(C, Y, X)` and output is a cell mask of shape `(Y, X)`. Each mask value `>0` represent a unique cell ID
+    """
+    try:
+        from cellpose import models
+    except ImportError:
+        raise ImportError(
+            "To use cellpose, you need its corresponding sopa extra: `pip install 'sopa[cellpose]'`"
+        )
+
+    model = models.Cellpose(model_type=model_type)
+
+    if isinstance(channels, str) or len(channels) == 1:
+        channels = [0, 0]  # gray scale
+    elif len(channels) == 2:
+        channels = [1, 2]
+    else:
+        raise ValueError(f"Provide 1 or 2 channels. Found {len(channels)}")
+
+    def _(patch: np.ndarray):
+        mask, *_ = model.eval(patch, diameter=diameter, channels=channels, **cellpose_kwargs)
+        return mask
+
+    return _
+
+
+
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/api/segmentation/methods/methods.md b/api/segmentation/methods/methods.md new file mode 100644 index 00000000..7392dc8b --- /dev/null +++ b/api/segmentation/methods/methods.md @@ -0,0 +1,3 @@ +::: sopa.segmentation.methods.cellpose_patch + options: + show_root_heading: true diff --git a/api/segmentation/patching/index.html b/api/segmentation/patching/index.html new file mode 100644 index 00000000..d5683bff --- /dev/null +++ b/api/segmentation/patching/index.html @@ -0,0 +1,2629 @@ + + + + + + + + + + + + + + + + + + + + + + + sopa.segmentation.patching - Sopa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + +

sopa.segmentation.patching

+ +
+ + + +

+ sopa.segmentation.patching.Patches2D + + +

+ + +
+ + +

Perform patching with overlap

+ +
+ Source code in sopa/segmentation/patching.py +
class Patches2D:
+    """Perform patching with overlap"""
+
+    def __init__(
+        self,
+        sdata: SpatialData,
+        element_name: str,
+        patch_width: float | int,
+        patch_overlap: float | int = 50,
+    ):
+        """
+        Args:
+            sdata: A `SpatialData` object
+            element_name: Name of the element on with patches will be made
+            patch_width: Width of the patches (in the unit of the coordinate system of the element)
+            patch_overlap: Overlap width between the patches
+        """
+        self.sdata = sdata
+        self.element_name = element_name
+        self.element = sdata[element_name]
+
+        if isinstance(self.element, MultiscaleSpatialImage):
+            self.element = get_spatial_image(sdata, element_name)
+
+        if isinstance(self.element, SpatialImage):
+            xmin, ymin = 0, 0
+            xmax, ymax = len(self.element.coords["x"]), len(self.element.coords["y"])
+            tight, int_coords = False, True
+        elif isinstance(self.element, dd.DataFrame):
+            xmin, ymin = self.element.x.min().compute(), self.element.y.min().compute()
+            xmax, ymax = self.element.x.max().compute(), self.element.y.max().compute()
+            tight, int_coords = True, False
+        else:
+            raise ValueError(f"Invalid element type: {type(self.element)}")
+
+        self.patch_x = Patches1D(xmin, xmax, patch_width, patch_overlap, tight, int_coords)
+        self.patch_y = Patches1D(ymin, ymax, patch_width, patch_overlap, tight, int_coords)
+
+        self.roi = sdata.shapes[ROI.KEY] if ROI.KEY in sdata.shapes else None
+        if self.roi is not None:
+            self.roi = to_intrinsic(sdata, self.roi, element_name).geometry[0]
+
+        self._ilocs = []
+
+        for i in range(self.patch_x._count * self.patch_y._count):
+            ix, iy = self.pair_indices(i)
+            bounds = self.iloc(ix, iy)
+            patch = box(*bounds)
+            if self.roi is None or self.roi.intersects(patch):
+                self._ilocs.append((ix, iy))
+
+    def pair_indices(self, i: int) -> tuple[int, int]:
+        """Index localization of one patch
+
+        Args:
+            i: The patch index
+
+        Returns:
+            A tuple `(index_x, index_y)` representing the 2D localization of the patch
+        """
+        iy, ix = divmod(i, self.patch_x._count)
+        return ix, iy
+
+    def iloc(self, ix: int, iy: int) -> list[int]:
+        """Coordinates of the rectangle bounding box of the patch at the given indices
+
+        Args:
+            ix: Patch index in the x-axis
+            iy: Patch indes in the y-axis
+
+        Returns:
+            A list `[xmin, ymin, xmax, ymax]` representing the bounding box of the patch
+        """
+        xmin, xmax = self.patch_x[ix]
+        ymin, ymax = self.patch_y[iy]
+        return [xmin, ymin, xmax, ymax]
+
+    def __getitem__(self, i) -> tuple[int, int, int, int]:
+        """One patche bounding box: (xmin, ymin, xmax, ymax)"""
+        if isinstance(i, slice):
+            start, stop, step = i.indices(len(self))
+            return [self[i] for i in range(start, stop, step)]
+
+        return self.iloc(*self._ilocs[i])
+
+    def __len__(self):
+        """Number of patches"""
+        return len(self._ilocs)
+
+    def __iter__(self):
+        """Iterate over all patches (see `__getitem__`)"""
+        for i in range(len(self)):
+            yield self[i]
+
+    def polygon(self, i: int) -> Polygon:
+        """One patch as a shapely polygon. The polygon may not be just a square, if a ROI has been previously selected.
+
+        Args:
+            i: Patch index
+
+        Returns:
+            Polygon representing the patch
+        """
+        rectangle = box(*self[i])
+        return rectangle if self.roi is None else rectangle.intersection(self.roi)
+
+    @property
+    def polygons(self) -> list[Polygon]:
+        """All the patches as polygons
+
+        Returns:
+            List of `shapely` polygons
+        """
+        return [self.polygon(i) for i in range(len(self))]
+
+    def write(self, overwrite: bool = True):
+        """Save patches in `sdata.shapes["sopa_patches"]`
+
+        Args:
+            overwrite: Whether to overwrite patches if existing
+        """
+        geo_df = gpd.GeoDataFrame(
+            {"geometry": self.polygons, SopaKeys.BOUNDS: [self[i] for i in range(len(self))]}
+        )
+        geo_df = ShapesModel.parse(
+            geo_df, transformations=get_transformation(self.element, get_all=True)
+        )
+        self.sdata.add_shapes(SopaKeys.PATCHES, geo_df, overwrite=overwrite)
+
+        log.info(f"{len(geo_df)} patches were saved in sdata['{SopaKeys.PATCHES}']")
+
+    def patchify_transcripts(
+        self,
+        baysor_temp_dir: str,
+        cell_key: str = None,
+        unassigned_value: int | str = None,
+        use_prior: bool = False,
+        config: dict = {},
+        config_path: str | None = None,
+    ) -> list[int]:
+        """Patchification of the transcripts
+
+        Args:
+            baysor_temp_dir: Temporary directory where each patch will be stored. Note that each patch will have its own subdirectory.
+            cell_key: Optional key of the transcript dataframe containing the cell IDs. This is useful if a prior segmentation has been run, assigning each transcript to a cell.
+            unassigned_value: If `cell_key` has been provided, this corresponds to the value given in the 'cell ID' column for transcript that are not inside any cell.
+            use_prior: Whether to use Cellpose as a prior segmentation for Baysor. If `True`, make sure you have already run Cellpose with Sopa, and no need to provide `cell_key` and `unassigned_value`. Note that, if you have MERFISH data, the prior has already been run, so just use `cell_key` and `unassigned_value`.
+            config: Dictionnary of baysor parameters
+            config_path: Path to the baysor config (you can also directly provide the argument via the `config` option)
+
+        Returns:
+            A list of patches indices. Each index correspond to the name of a subdirectory inside `baysor_temp_dir`
+        """
+        return BaysorPatches(self, self.element).write(
+            baysor_temp_dir, cell_key, unassigned_value, use_prior, config, config_path
+        )
+
+
+ + + +
+ + + + + + + +
+ + + +

+ polygons: list[Polygon] + + + property + + +

+ + +
+ +

All the patches as polygons

+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ list[Polygon] + +
+

List of shapely polygons

+
+
+
+ +
+ + + + +
+ + + +

+ __getitem__(i) + +

+ + +
+ +

One patche bounding box: (xmin, ymin, xmax, ymax)

+ +
+ Source code in sopa/segmentation/patching.py +
def __getitem__(self, i) -> tuple[int, int, int, int]:
+    """One patche bounding box: (xmin, ymin, xmax, ymax)"""
+    if isinstance(i, slice):
+        start, stop, step = i.indices(len(self))
+        return [self[i] for i in range(start, stop, step)]
+
+    return self.iloc(*self._ilocs[i])
+
+
+
+ +
+ + +
+ + + +

+ __init__(sdata, element_name, patch_width, patch_overlap=50) + +

+ + +
+ + + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
sdata + SpatialData + +
+

A SpatialData object

+
+
+ required +
element_name + str + +
+

Name of the element on with patches will be made

+
+
+ required +
patch_width + float | int + +
+

Width of the patches (in the unit of the coordinate system of the element)

+
+
+ required +
patch_overlap + float | int + +
+

Overlap width between the patches

+
+
+ 50 +
+ +
+ Source code in sopa/segmentation/patching.py +
def __init__(
+    self,
+    sdata: SpatialData,
+    element_name: str,
+    patch_width: float | int,
+    patch_overlap: float | int = 50,
+):
+    """
+    Args:
+        sdata: A `SpatialData` object
+        element_name: Name of the element on with patches will be made
+        patch_width: Width of the patches (in the unit of the coordinate system of the element)
+        patch_overlap: Overlap width between the patches
+    """
+    self.sdata = sdata
+    self.element_name = element_name
+    self.element = sdata[element_name]
+
+    if isinstance(self.element, MultiscaleSpatialImage):
+        self.element = get_spatial_image(sdata, element_name)
+
+    if isinstance(self.element, SpatialImage):
+        xmin, ymin = 0, 0
+        xmax, ymax = len(self.element.coords["x"]), len(self.element.coords["y"])
+        tight, int_coords = False, True
+    elif isinstance(self.element, dd.DataFrame):
+        xmin, ymin = self.element.x.min().compute(), self.element.y.min().compute()
+        xmax, ymax = self.element.x.max().compute(), self.element.y.max().compute()
+        tight, int_coords = True, False
+    else:
+        raise ValueError(f"Invalid element type: {type(self.element)}")
+
+    self.patch_x = Patches1D(xmin, xmax, patch_width, patch_overlap, tight, int_coords)
+    self.patch_y = Patches1D(ymin, ymax, patch_width, patch_overlap, tight, int_coords)
+
+    self.roi = sdata.shapes[ROI.KEY] if ROI.KEY in sdata.shapes else None
+    if self.roi is not None:
+        self.roi = to_intrinsic(sdata, self.roi, element_name).geometry[0]
+
+    self._ilocs = []
+
+    for i in range(self.patch_x._count * self.patch_y._count):
+        ix, iy = self.pair_indices(i)
+        bounds = self.iloc(ix, iy)
+        patch = box(*bounds)
+        if self.roi is None or self.roi.intersects(patch):
+            self._ilocs.append((ix, iy))
+
+
+
+ +
+ + +
+ + + +

+ __iter__() + +

+ + +
+ +

Iterate over all patches (see __getitem__)

+ +
+ Source code in sopa/segmentation/patching.py +
def __iter__(self):
+    """Iterate over all patches (see `__getitem__`)"""
+    for i in range(len(self)):
+        yield self[i]
+
+
+
+ +
+ + +
+ + + +

+ __len__() + +

+ + +
+ +

Number of patches

+ +
+ Source code in sopa/segmentation/patching.py +
def __len__(self):
+    """Number of patches"""
+    return len(self._ilocs)
+
+
+
+ +
+ + +
+ + + +

+ iloc(ix, iy) + +

+ + +
+ +

Coordinates of the rectangle bounding box of the patch at the given indices

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
ix + int + +
+

Patch index in the x-axis

+
+
+ required +
iy + int + +
+

Patch indes in the y-axis

+
+
+ required +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ list[int] + +
+

A list [xmin, ymin, xmax, ymax] representing the bounding box of the patch

+
+
+ +
+ Source code in sopa/segmentation/patching.py +
def iloc(self, ix: int, iy: int) -> list[int]:
+    """Coordinates of the rectangle bounding box of the patch at the given indices
+
+    Args:
+        ix: Patch index in the x-axis
+        iy: Patch indes in the y-axis
+
+    Returns:
+        A list `[xmin, ymin, xmax, ymax]` representing the bounding box of the patch
+    """
+    xmin, xmax = self.patch_x[ix]
+    ymin, ymax = self.patch_y[iy]
+    return [xmin, ymin, xmax, ymax]
+
+
+
+ +
+ + +
+ + + +

+ pair_indices(i) + +

+ + +
+ +

Index localization of one patch

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
i + int + +
+

The patch index

+
+
+ required +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ tuple[int, int] + +
+

A tuple (index_x, index_y) representing the 2D localization of the patch

+
+
+ +
+ Source code in sopa/segmentation/patching.py +
def pair_indices(self, i: int) -> tuple[int, int]:
+    """Index localization of one patch
+
+    Args:
+        i: The patch index
+
+    Returns:
+        A tuple `(index_x, index_y)` representing the 2D localization of the patch
+    """
+    iy, ix = divmod(i, self.patch_x._count)
+    return ix, iy
+
+
+
+ +
+ + +
+ + + +

+ patchify_transcripts(baysor_temp_dir, cell_key=None, unassigned_value=None, use_prior=False, config={}, config_path=None) + +

+ + +
+ +

Patchification of the transcripts

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
baysor_temp_dir + str + +
+

Temporary directory where each patch will be stored. Note that each patch will have its own subdirectory.

+
+
+ required +
cell_key + str + +
+

Optional key of the transcript dataframe containing the cell IDs. This is useful if a prior segmentation has been run, assigning each transcript to a cell.

+
+
+ None +
unassigned_value + int | str + +
+

If cell_key has been provided, this corresponds to the value given in the 'cell ID' column for transcript that are not inside any cell.

+
+
+ None +
use_prior + bool + +
+

Whether to use Cellpose as a prior segmentation for Baysor. If True, make sure you have already run Cellpose with Sopa, and no need to provide cell_key and unassigned_value. Note that, if you have MERFISH data, the prior has already been run, so just use cell_key and unassigned_value.

+
+
+ False +
config + dict + +
+

Dictionnary of baysor parameters

+
+
+ {} +
config_path + str | None + +
+

Path to the baysor config (you can also directly provide the argument via the config option)

+
+
+ None +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ list[int] + +
+

A list of patches indices. Each index correspond to the name of a subdirectory inside baysor_temp_dir

+
+
+ +
+ Source code in sopa/segmentation/patching.py +
def patchify_transcripts(
+    self,
+    baysor_temp_dir: str,
+    cell_key: str = None,
+    unassigned_value: int | str = None,
+    use_prior: bool = False,
+    config: dict = {},
+    config_path: str | None = None,
+) -> list[int]:
+    """Patchification of the transcripts
+
+    Args:
+        baysor_temp_dir: Temporary directory where each patch will be stored. Note that each patch will have its own subdirectory.
+        cell_key: Optional key of the transcript dataframe containing the cell IDs. This is useful if a prior segmentation has been run, assigning each transcript to a cell.
+        unassigned_value: If `cell_key` has been provided, this corresponds to the value given in the 'cell ID' column for transcript that are not inside any cell.
+        use_prior: Whether to use Cellpose as a prior segmentation for Baysor. If `True`, make sure you have already run Cellpose with Sopa, and no need to provide `cell_key` and `unassigned_value`. Note that, if you have MERFISH data, the prior has already been run, so just use `cell_key` and `unassigned_value`.
+        config: Dictionnary of baysor parameters
+        config_path: Path to the baysor config (you can also directly provide the argument via the `config` option)
+
+    Returns:
+        A list of patches indices. Each index correspond to the name of a subdirectory inside `baysor_temp_dir`
+    """
+    return BaysorPatches(self, self.element).write(
+        baysor_temp_dir, cell_key, unassigned_value, use_prior, config, config_path
+    )
+
+
+
+ +
+ + +
+ + + +

+ polygon(i) + +

+ + +
+ +

One patch as a shapely polygon. The polygon may not be just a square, if a ROI has been previously selected.

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
i + int + +
+

Patch index

+
+
+ required +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ Polygon + +
+

Polygon representing the patch

+
+
+ +
+ Source code in sopa/segmentation/patching.py +
def polygon(self, i: int) -> Polygon:
+    """One patch as a shapely polygon. The polygon may not be just a square, if a ROI has been previously selected.
+
+    Args:
+        i: Patch index
+
+    Returns:
+        Polygon representing the patch
+    """
+    rectangle = box(*self[i])
+    return rectangle if self.roi is None else rectangle.intersection(self.roi)
+
+
+
+ +
+ + +
+ + + +

+ write(overwrite=True) + +

+ + +
+ +

Save patches in sdata.shapes["sopa_patches"]

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
overwrite + bool + +
+

Whether to overwrite patches if existing

+
+
+ True +
+ +
+ Source code in sopa/segmentation/patching.py +
def write(self, overwrite: bool = True):
+    """Save patches in `sdata.shapes["sopa_patches"]`
+
+    Args:
+        overwrite: Whether to overwrite patches if existing
+    """
+    geo_df = gpd.GeoDataFrame(
+        {"geometry": self.polygons, SopaKeys.BOUNDS: [self[i] for i in range(len(self))]}
+    )
+    geo_df = ShapesModel.parse(
+        geo_df, transformations=get_transformation(self.element, get_all=True)
+    )
+    self.sdata.add_shapes(SopaKeys.PATCHES, geo_df, overwrite=overwrite)
+
+    log.info(f"{len(geo_df)} patches were saved in sdata['{SopaKeys.PATCHES}']")
+
+
+
+ +
+ + + +
+ +
+ + +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/api/segmentation/patching/patching.md b/api/segmentation/patching/patching.md new file mode 100644 index 00000000..048cc26b --- /dev/null +++ b/api/segmentation/patching/patching.md @@ -0,0 +1,3 @@ +::: sopa.segmentation.patching.Patches2D + options: + show_root_heading: true diff --git a/api/segmentation/shapes/index.html b/api/segmentation/shapes/index.html new file mode 100644 index 00000000..c16c78c0 --- /dev/null +++ b/api/segmentation/shapes/index.html @@ -0,0 +1,1725 @@ + + + + + + + + + + + + + + + + + + + + + + + sopa.segmentation.shapes - Sopa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + +

sopa.segmentation.shapes

+ +
+ + + +

+ sopa.segmentation.shapes.solve_conflicts(cells, threshold=0.5, patch_indices=None, return_indices=False) + +

+ + +
+ +

Resolve segmentation conflicts (i.e. overlap) after running segmentation on patches

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
cells + list[Polygon] + +
+

List of cell polygons

+
+
+ required +
threshold + float + +
+

When two cells are overlapping, we look at the area of intersection over the area of the smallest cell. If this value is higher than the threshold, the cells are merged

+
+
+ 0.5 +
patch_indices + ndarray | None + +
+

Patch from which each cell belongs.

+
+
+ None +
return_indices + bool + +
+

If True, returns also the cells indices. Merged cells have an index of -1.

+
+
+ False +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ ndarray[Polygon] | tuple[ndarray[Polygon], ndarray] + +
+

Array of resolved cells polygons. If return_indices, it also returns an array of cell indices.

+
+
+ +
+ Source code in sopa/segmentation/shapes.py +
13
+14
+15
+16
+17
+18
+19
+20
+21
+22
+23
+24
+25
+26
+27
+28
+29
+30
+31
+32
+33
+34
+35
+36
+37
+38
+39
+40
+41
+42
+43
+44
+45
+46
+47
+48
+49
+50
+51
+52
+53
+54
+55
+56
+57
+58
+59
+60
+61
+62
def solve_conflicts(
+    cells: list[Polygon],
+    threshold: float = 0.5,
+    patch_indices: np.ndarray | None = None,
+    return_indices: bool = False,
+) -> np.ndarray[Polygon] | tuple[np.ndarray[Polygon], np.ndarray]:
+    """Resolve segmentation conflicts (i.e. overlap) after running segmentation on patches
+
+    Args:
+        cells: List of cell polygons
+        threshold: When two cells are overlapping, we look at the area of intersection over the area of the smallest cell. If this value is higher than the `threshold`, the cells are merged
+        patch_indices: Patch from which each cell belongs.
+        return_indices: If `True`, returns also the cells indices. Merged cells have an index of -1.
+
+    Returns:
+        Array of resolved cells polygons. If `return_indices`, it also returns an array of cell indices.
+    """
+    cells = list(cells)
+    n_cells = len(cells)
+    resolved_indices = np.arange(n_cells)
+
+    assert n_cells > 0, "No cells was segmented, cannot continue"
+
+    tree = shapely.STRtree(cells)
+    conflicts = tree.query(cells, predicate="intersects")
+
+    if patch_indices is not None:
+        conflicts = conflicts[:, patch_indices[conflicts[0]] != patch_indices[conflicts[1]]].T
+    else:
+        conflicts = conflicts[:, conflicts[0] != conflicts[1]].T
+
+    for i1, i2 in tqdm(conflicts, desc="Resolving conflicts"):
+        resolved_i1: int = resolved_indices[i1]
+        resolved_i2: int = resolved_indices[i2]
+        cell1, cell2 = cells[resolved_i1], cells[resolved_i2]
+
+        intersection = cell1.intersection(cell2).area
+        if intersection >= threshold * min(cell1.area, cell2.area):
+            cell = _ensure_polygon(cell1.union(cell2))
+
+            resolved_indices[np.isin(resolved_indices, [resolved_i1, resolved_i2])] = len(cells)
+            cells.append(cell)
+
+    unique_indices = np.unique(resolved_indices)
+    unique_cells = np.array(cells)[unique_indices]
+
+    if return_indices:
+        return unique_cells, np.where(unique_indices < n_cells, unique_indices, -1)
+
+    return unique_cells
+
+
+
+ +
+ + +
+ + + +

+ sopa.segmentation.shapes.geometrize(mask, tolerance=None, smooth_radius_ratio=0.1) + +

+ + +
+ +

Convert a cells mask to multiple shapely geometries. Inspired from https://github.com/Vizgen/vizgen-postprocessing

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
mask + ndarray + +
+

A cell mask. Non-null values correspond to cell ids

+
+
+ required +
tolerance + float | None + +
+

Tolerance parameter used by shapely during simplification. By default, define the tolerance automatically.

+
+
+ None +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ list[Polygon] + +
+

List of shapely polygons representing each cell ID of the mask

+
+
+ +
+ Source code in sopa/segmentation/shapes.py +
def geometrize(
+    mask: np.ndarray, tolerance: float | None = None, smooth_radius_ratio: float = 0.1
+) -> list[Polygon]:
+    """Convert a cells mask to multiple `shapely` geometries. Inspired from https://github.com/Vizgen/vizgen-postprocessing
+
+    Args:
+        mask: A cell mask. Non-null values correspond to cell ids
+        tolerance: Tolerance parameter used by `shapely` during simplification. By default, define the tolerance automatically.
+
+    Returns:
+        List of `shapely` polygons representing each cell ID of the mask
+    """
+    max_cells = mask.max()
+
+    if max_cells == 0:
+        log.warn("No cell was returned by the segmentation")
+        return []
+
+    cells = [_contours((mask == cell_id).astype("uint8")) for cell_id in range(1, max_cells + 1)]
+
+    mean_radius = np.sqrt(np.array([cell.area for cell in cells]) / np.pi).mean()
+    smooth_radius = mean_radius * smooth_radius_ratio
+
+    if tolerance is None:
+        tolerance = _default_tolerance(mean_radius)
+
+    cells = [_smoothen_cell(cell, smooth_radius, tolerance) for cell in cells]
+    cells = [cell for cell in cells if cell is not None]
+
+    log.info(
+        f"Percentage of non-geometrized cells: {(max_cells - len(cells)) / max_cells:.2%} (usually due to segmentation artefacts)"
+    )
+
+    return cells
+
+
+
+ +
+ + +
+ + + +

+ sopa.segmentation.shapes.rasterize(cell, shape, xy_min=[0, 0]) + +

+ + +
+ +

Transform a cell polygon into a numpy array with value 1 where the polygon touches a pixel, else 0.

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
cell + Polygon + +
+

Cell polygon to rasterize.

+
+
+ required +
shape + tuple[int, int] + +
+

Image shape as a tuple (y, x).

+
+
+ required +
xy_min + tuple[int, int] + +
+

Tuple containing the origin of the image [x0, y0].

+
+
+ [0, 0] +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ ndarray + +
+

The mask array.

+
+
+ +
+ Source code in sopa/segmentation/shapes.py +
def rasterize(
+    cell: Polygon, shape: tuple[int, int], xy_min: tuple[int, int] = [0, 0]
+) -> np.ndarray:
+    """Transform a cell polygon into a numpy array with value 1 where the polygon touches a pixel, else 0.
+
+    Args:
+        cell: Cell polygon to rasterize.
+        shape: Image shape as a tuple (y, x).
+        xy_min: Tuple containing the origin of the image [x0, y0].
+
+    Returns:
+        The mask array.
+    """
+    import cv2
+
+    xmin, ymin, xmax, ymax = [xy_min[0], xy_min[1], xy_min[0] + shape[1], xy_min[1] + shape[0]]
+
+    cell_translated = shapely.affinity.translate(cell, -xmin, -ymin)
+
+    coords = np.array(cell_translated.exterior.coords)[None, :].astype(np.int32)
+    return cv2.fillPoly(np.zeros((ymax - ymin, xmax - xmin), dtype=np.int8), coords, color=1)
+
+
+
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/api/segmentation/shapes/shapes.md b/api/segmentation/shapes/shapes.md new file mode 100644 index 00000000..eb6b217c --- /dev/null +++ b/api/segmentation/shapes/shapes.md @@ -0,0 +1,11 @@ +::: sopa.segmentation.shapes.solve_conflicts + options: + show_root_heading: true + +::: sopa.segmentation.shapes.geometrize + options: + show_root_heading: true + +::: sopa.segmentation.shapes.rasterize + options: + show_root_heading: true diff --git a/api/segmentation/stainings/index.html b/api/segmentation/stainings/index.html new file mode 100644 index 00000000..fb252cc6 --- /dev/null +++ b/api/segmentation/stainings/index.html @@ -0,0 +1,2410 @@ + + + + + + + + + + + + + + + + + + + + + + + sopa.segmentation.stainings - Sopa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + +

sopa.segmentation.stainings

+ +
+ + + +

+ sopa.segmentation.stainings.StainingSegmentation + + +

+ + +
+ + +
+ Source code in sopa/segmentation/stainings.py +
class StainingSegmentation:
+    def __init__(
+        self,
+        sdata: SpatialData,
+        method: Callable,
+        channels: list[str] | str,
+        min_area: float = 0,
+        clip_limit: float = 0.2,
+        gaussian_sigma: float = 1,
+    ):
+        """Generalized staining-based segmentation
+
+        !!! note "Sequential segmentation (slower)"
+            ```python
+            from sopa.segmentation.stainings import StainingSegmentation
+
+            method = ... # custom callable that runs segmentation on each patch
+
+            segmentation = StainingSegmentation(sdata, method, "DAPI")
+            cells = segmentation.run_patches(2000, 100)
+            StainingSegmentation.add_shapes(sdata, cells, image_key, "method_name")
+            ```
+
+        !!! note "Parallel segmentation (faster)"
+            ```python
+            from sopa.segmentation.stainings import StainingSegmentation
+
+            method = ... # custom callable that runs segmentation on each patch
+
+            segmentation = StainingSegmentation(sdata, method, "DAPI")
+
+            # Run all this in a parallel manner, e.g. on different jobs
+            for i in range(len(sdata['sopa_patches'])):
+                segmentation.write_patch_cells("./temp_dir", i)
+
+            cells = StainingSegmentation.read_patches_cells("./temp_dir")
+            StainingSegmentation.add_shapes(sdata, cells, image_key, "method_name")
+            ```
+
+        Args:
+            sdata: A `SpatialData` object
+            method: A segmentation `callable` whose input is an image of shape `(C, Y, X)` and output is a cell mask of shape `(Y, X)`. Each mask value `>0` represent a unique cell ID. Such callables can be found in `sopa.segmentation.methods`.
+            channels: One or a list of channel names used for segmentation. If only one channel is provided, the image given to the `method` will be of shape `(1, Y, X)`.
+            min_area: Minimum area (in pixels^2) for a cell to be kept
+            clip_limit: Parameter for skimage.exposure.equalize_adapthist (applied before running cellpose)
+            gaussian_sigma: Parameter for scipy gaussian_filter (applied before running cellpose)
+        """
+        self.sdata = sdata
+        self.method = method
+        self.channels = [channels] if isinstance(channels, str) else channels
+
+        self.min_area = min_area
+        self.clip_limit = clip_limit
+        self.gaussian_sigma = gaussian_sigma
+
+        self.image_key, self.image = get_spatial_image(sdata, return_key=True)
+
+        image_channels = self.image.coords["c"].values
+        assert np.isin(
+            channels, image_channels
+        ).all(), f"Channel names must be a subset of: {', '.join(image_channels)}"
+
+    def _run_patch(self, patch: Polygon) -> list[Polygon]:
+        """Run segmentation on one patch
+
+        Args:
+            patch: Patch, represented as a `shapely` polygon
+
+        Returns:
+            A list of cells, represented as polygons
+        """
+        bounds = [int(x) for x in patch.bounds]
+
+        image = self.image.sel(
+            c=self.channels,
+            x=slice(bounds[0], bounds[2]),
+            y=slice(bounds[1], bounds[3]),
+        ).values
+
+        image = gaussian_filter(image, sigma=self.gaussian_sigma)
+        image = exposure.equalize_adapthist(image, clip_limit=self.clip_limit)
+
+        if patch.area < box(*bounds).area:
+            image = image * shapes.rasterize(patch, image.shape[1:], bounds)
+
+        cells = shapes.geometrize(self.method(image))
+
+        if self.min_area > 0:
+            cells = [cell for cell in cells if cell.area >= self.min_area]
+
+        return [affinity.translate(cell, *bounds[:2]) for cell in cells]
+
+    def write_patch_cells(self, patch_dir: str, patch_index: int):
+        """Run segmentation on one patch, and save the result in a dedicated directory
+
+        Args:
+            patch_dir: Directory inside which segmentation results will be saved
+            patch_index: Index of the patch on which to run segmentation. NB: the number of patches is `len(sdata['sopa_patches'])`
+        """
+        patch = self.sdata[SopaKeys.PATCHES].geometry[patch_index]
+
+        cells = self._run_patch(patch)
+        gdf = gpd.GeoDataFrame(geometry=cells)
+
+        patch_dir: Path = Path(patch_dir)
+        patch_dir.mkdir(parents=True, exist_ok=True)
+        patch_file = patch_dir / f"{patch_index}.parquet"
+
+        gdf.to_parquet(patch_file)
+
+    def run_patches(
+        self,
+        patch_width: int,
+        patch_overlap: int,
+    ) -> list[Polygon]:
+        """Run segmentation over all patches, in a sequential manner (this is slower than running all patches in parallel)
+
+        Args:
+            patch_width: Width of the patches
+            patch_overlap: Number of pixels of overlap between patches
+
+        Returns:
+            A list of cells represented as `shapely` polygons
+        """
+        self.patches = Patches2D(self.sdata, self.image_key, patch_width, patch_overlap)
+
+        cells = [
+            cell
+            for patch in tqdm(self.patches.polygons, desc="Run on patches")
+            for cell in self._run_patch(patch)
+        ]
+        cells = shapes.solve_conflicts(cells)
+        return cells
+
+    @classmethod
+    def read_patches_cells(cls, patch_dir: str | list[str]) -> list[Polygon]:
+        """Read all patch segmentation results after running `write_patch_cells` on all patches
+
+        Args:
+            patch_dir: Directory provided when running `write_patch_cells` containing the `.parquet` files. For multi-step segmentation, provide a list of directories (one per step).
+
+        Returns:
+            A list of cells represented as `shapely` polygons
+        """
+        cells = []
+
+        files = [f for f in Path(patch_dir).iterdir() if f.suffix == ".parquet"]
+        for file in tqdm(files, desc="Reading patches"):
+            cells += list(gpd.read_parquet(file).geometry)
+
+        log.info(f"Found {len(cells)} total cells")
+        return cells
+
+    @classmethod
+    def add_shapes(cls, sdata: SpatialData, cells: list[Polygon], image_key: str, shapes_key: str):
+        """Adding `shapely` polygon to the `SpatialData` object
+
+        Args:
+            sdata: A `SpatialData` object
+            cells: List of polygons after segmentation
+            image_key: Key of the image on which segmentation has been run
+            shapes_key: Name to provide to the geodataframe to be created
+        """
+        image = get_spatial_image(sdata, image_key)
+
+        geo_df = gpd.GeoDataFrame({"geometry": cells})
+        geo_df.index = image_key + geo_df.index.astype(str)
+
+        geo_df = ShapesModel.parse(geo_df, transformations=get_transformation(image, get_all=True))
+        sdata.add_shapes(shapes_key, geo_df, overwrite=True)
+
+        log.info(f"Added {len(geo_df)} cell boundaries in sdata['{shapes_key}']")
+
+
+ + + +
+ + + + + + + + + + +
+ + + +

+ __init__(sdata, method, channels, min_area=0, clip_limit=0.2, gaussian_sigma=1) + +

+ + +
+ +

Generalized staining-based segmentation

+
+

Sequential segmentation (slower)

+
from sopa.segmentation.stainings import StainingSegmentation
+
+method = ... # custom callable that runs segmentation on each patch
+
+segmentation = StainingSegmentation(sdata, method, "DAPI")
+cells = segmentation.run_patches(2000, 100)
+StainingSegmentation.add_shapes(sdata, cells, image_key, "method_name")
+
+
+
+

Parallel segmentation (faster)

+
from sopa.segmentation.stainings import StainingSegmentation
+
+method = ... # custom callable that runs segmentation on each patch
+
+segmentation = StainingSegmentation(sdata, method, "DAPI")
+
+# Run all this in a parallel manner, e.g. on different jobs
+for i in range(len(sdata['sopa_patches'])):
+    segmentation.write_patch_cells("./temp_dir", i)
+
+cells = StainingSegmentation.read_patches_cells("./temp_dir")
+StainingSegmentation.add_shapes(sdata, cells, image_key, "method_name")
+
+
+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
sdata + SpatialData + +
+

A SpatialData object

+
+
+ required +
method + Callable + +
+

A segmentation callable whose input is an image of shape (C, Y, X) and output is a cell mask of shape (Y, X). Each mask value >0 represent a unique cell ID. Such callables can be found in sopa.segmentation.methods.

+
+
+ required +
channels + list[str] | str + +
+

One or a list of channel names used for segmentation. If only one channel is provided, the image given to the method will be of shape (1, Y, X).

+
+
+ required +
min_area + float + +
+

Minimum area (in pixels^2) for a cell to be kept

+
+
+ 0 +
clip_limit + float + +
+

Parameter for skimage.exposure.equalize_adapthist (applied before running cellpose)

+
+
+ 0.2 +
gaussian_sigma + float + +
+

Parameter for scipy gaussian_filter (applied before running cellpose)

+
+
+ 1 +
+ +
+ Source code in sopa/segmentation/stainings.py +
25
+26
+27
+28
+29
+30
+31
+32
+33
+34
+35
+36
+37
+38
+39
+40
+41
+42
+43
+44
+45
+46
+47
+48
+49
+50
+51
+52
+53
+54
+55
+56
+57
+58
+59
+60
+61
+62
+63
+64
+65
+66
+67
+68
+69
+70
+71
+72
+73
+74
+75
+76
+77
+78
+79
+80
+81
+82
+83
+84
def __init__(
+    self,
+    sdata: SpatialData,
+    method: Callable,
+    channels: list[str] | str,
+    min_area: float = 0,
+    clip_limit: float = 0.2,
+    gaussian_sigma: float = 1,
+):
+    """Generalized staining-based segmentation
+
+    !!! note "Sequential segmentation (slower)"
+        ```python
+        from sopa.segmentation.stainings import StainingSegmentation
+
+        method = ... # custom callable that runs segmentation on each patch
+
+        segmentation = StainingSegmentation(sdata, method, "DAPI")
+        cells = segmentation.run_patches(2000, 100)
+        StainingSegmentation.add_shapes(sdata, cells, image_key, "method_name")
+        ```
+
+    !!! note "Parallel segmentation (faster)"
+        ```python
+        from sopa.segmentation.stainings import StainingSegmentation
+
+        method = ... # custom callable that runs segmentation on each patch
+
+        segmentation = StainingSegmentation(sdata, method, "DAPI")
+
+        # Run all this in a parallel manner, e.g. on different jobs
+        for i in range(len(sdata['sopa_patches'])):
+            segmentation.write_patch_cells("./temp_dir", i)
+
+        cells = StainingSegmentation.read_patches_cells("./temp_dir")
+        StainingSegmentation.add_shapes(sdata, cells, image_key, "method_name")
+        ```
+
+    Args:
+        sdata: A `SpatialData` object
+        method: A segmentation `callable` whose input is an image of shape `(C, Y, X)` and output is a cell mask of shape `(Y, X)`. Each mask value `>0` represent a unique cell ID. Such callables can be found in `sopa.segmentation.methods`.
+        channels: One or a list of channel names used for segmentation. If only one channel is provided, the image given to the `method` will be of shape `(1, Y, X)`.
+        min_area: Minimum area (in pixels^2) for a cell to be kept
+        clip_limit: Parameter for skimage.exposure.equalize_adapthist (applied before running cellpose)
+        gaussian_sigma: Parameter for scipy gaussian_filter (applied before running cellpose)
+    """
+    self.sdata = sdata
+    self.method = method
+    self.channels = [channels] if isinstance(channels, str) else channels
+
+    self.min_area = min_area
+    self.clip_limit = clip_limit
+    self.gaussian_sigma = gaussian_sigma
+
+    self.image_key, self.image = get_spatial_image(sdata, return_key=True)
+
+    image_channels = self.image.coords["c"].values
+    assert np.isin(
+        channels, image_channels
+    ).all(), f"Channel names must be a subset of: {', '.join(image_channels)}"
+
+
+
+ +
+ + +
+ + + +

+ add_shapes(sdata, cells, image_key, shapes_key) + + + classmethod + + +

+ + +
+ +

Adding shapely polygon to the SpatialData object

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
sdata + SpatialData + +
+

A SpatialData object

+
+
+ required +
cells + list[Polygon] + +
+

List of polygons after segmentation

+
+
+ required +
image_key + str + +
+

Key of the image on which segmentation has been run

+
+
+ required +
shapes_key + str + +
+

Name to provide to the geodataframe to be created

+
+
+ required +
+ +
+ Source code in sopa/segmentation/stainings.py +
@classmethod
+def add_shapes(cls, sdata: SpatialData, cells: list[Polygon], image_key: str, shapes_key: str):
+    """Adding `shapely` polygon to the `SpatialData` object
+
+    Args:
+        sdata: A `SpatialData` object
+        cells: List of polygons after segmentation
+        image_key: Key of the image on which segmentation has been run
+        shapes_key: Name to provide to the geodataframe to be created
+    """
+    image = get_spatial_image(sdata, image_key)
+
+    geo_df = gpd.GeoDataFrame({"geometry": cells})
+    geo_df.index = image_key + geo_df.index.astype(str)
+
+    geo_df = ShapesModel.parse(geo_df, transformations=get_transformation(image, get_all=True))
+    sdata.add_shapes(shapes_key, geo_df, overwrite=True)
+
+    log.info(f"Added {len(geo_df)} cell boundaries in sdata['{shapes_key}']")
+
+
+
+ +
+ + +
+ + + +

+ read_patches_cells(patch_dir) + + + classmethod + + +

+ + +
+ +

Read all patch segmentation results after running write_patch_cells on all patches

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
patch_dir + str | list[str] + +
+

Directory provided when running write_patch_cells containing the .parquet files. For multi-step segmentation, provide a list of directories (one per step).

+
+
+ required +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ list[Polygon] + +
+

A list of cells represented as shapely polygons

+
+
+ +
+ Source code in sopa/segmentation/stainings.py +
@classmethod
+def read_patches_cells(cls, patch_dir: str | list[str]) -> list[Polygon]:
+    """Read all patch segmentation results after running `write_patch_cells` on all patches
+
+    Args:
+        patch_dir: Directory provided when running `write_patch_cells` containing the `.parquet` files. For multi-step segmentation, provide a list of directories (one per step).
+
+    Returns:
+        A list of cells represented as `shapely` polygons
+    """
+    cells = []
+
+    files = [f for f in Path(patch_dir).iterdir() if f.suffix == ".parquet"]
+    for file in tqdm(files, desc="Reading patches"):
+        cells += list(gpd.read_parquet(file).geometry)
+
+    log.info(f"Found {len(cells)} total cells")
+    return cells
+
+
+
+ +
+ + +
+ + + +

+ run_patches(patch_width, patch_overlap) + +

+ + +
+ +

Run segmentation over all patches, in a sequential manner (this is slower than running all patches in parallel)

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
patch_width + int + +
+

Width of the patches

+
+
+ required +
patch_overlap + int + +
+

Number of pixels of overlap between patches

+
+
+ required +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ list[Polygon] + +
+

A list of cells represented as shapely polygons

+
+
+ +
+ Source code in sopa/segmentation/stainings.py +
def run_patches(
+    self,
+    patch_width: int,
+    patch_overlap: int,
+) -> list[Polygon]:
+    """Run segmentation over all patches, in a sequential manner (this is slower than running all patches in parallel)
+
+    Args:
+        patch_width: Width of the patches
+        patch_overlap: Number of pixels of overlap between patches
+
+    Returns:
+        A list of cells represented as `shapely` polygons
+    """
+    self.patches = Patches2D(self.sdata, self.image_key, patch_width, patch_overlap)
+
+    cells = [
+        cell
+        for patch in tqdm(self.patches.polygons, desc="Run on patches")
+        for cell in self._run_patch(patch)
+    ]
+    cells = shapes.solve_conflicts(cells)
+    return cells
+
+
+
+ +
+ + +
+ + + +

+ write_patch_cells(patch_dir, patch_index) + +

+ + +
+ +

Run segmentation on one patch, and save the result in a dedicated directory

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
patch_dir + str + +
+

Directory inside which segmentation results will be saved

+
+
+ required +
patch_index + int + +
+

Index of the patch on which to run segmentation. NB: the number of patches is len(sdata['sopa_patches'])

+
+
+ required +
+ +
+ Source code in sopa/segmentation/stainings.py +
def write_patch_cells(self, patch_dir: str, patch_index: int):
+    """Run segmentation on one patch, and save the result in a dedicated directory
+
+    Args:
+        patch_dir: Directory inside which segmentation results will be saved
+        patch_index: Index of the patch on which to run segmentation. NB: the number of patches is `len(sdata['sopa_patches'])`
+    """
+    patch = self.sdata[SopaKeys.PATCHES].geometry[patch_index]
+
+    cells = self._run_patch(patch)
+    gdf = gpd.GeoDataFrame(geometry=cells)
+
+    patch_dir: Path = Path(patch_dir)
+    patch_dir.mkdir(parents=True, exist_ok=True)
+    patch_file = patch_dir / f"{patch_index}.parquet"
+
+    gdf.to_parquet(patch_file)
+
+
+
+ +
+ + + +
+ +
+ + +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/api/segmentation/stainings/stainings.md b/api/segmentation/stainings/stainings.md new file mode 100644 index 00000000..8e59f172 --- /dev/null +++ b/api/segmentation/stainings/stainings.md @@ -0,0 +1,3 @@ +::: sopa.segmentation.stainings.StainingSegmentation + options: + show_root_heading: true diff --git a/api/spatial/index.html b/api/spatial/index.html new file mode 100644 index 00000000..9acb0db6 --- /dev/null +++ b/api/spatial/index.html @@ -0,0 +1,2648 @@ + + + + + + + + + + + + + + + + + + + + + + + sopa.spatial - Sopa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + +

sopa.spatial

+ +
+ + + +

+ sopa.spatial.mean_distance(adata, group_key, target_group_key=None, ignore_zeros=False) + +

+ + +
+ +

Mean distance between two groups (typically, between cell-types, or between cell-types and domains)

+ +
+ Note +

The distance is a number of hops, i.e. a distance of 10 between a pDC and a T cell means that there are 10 cells on the closest path from one to the other cell.

+
+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
adata + AnnData + +
+

An AnnData object, or a SpatialData object

+
+
+ required +
group_key + str + +
+

Key of adata.obs containing the groups

+
+
+ required +
target_group_key + str | None + +
+

Key of adata.obs containing the target groups (by default, uses group_key)

+
+
+ None +
ignore_zeros + bool + +
+

If True, a cell distance to its own group is 0.

+
+
+ False +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ DataFrame + +
+

DataFrame of shape n_groups * n_groups_target of mean hop-distances

+
+
+ +
+ Source code in sopa/spatial/distance.py +
def mean_distance(
+    adata: AnnData, group_key: str, target_group_key: str | None = None, ignore_zeros: bool = False
+) -> pd.DataFrame:
+    """Mean distance between two groups (typically, between cell-types, or between cell-types and domains)
+
+    Note:
+        The distance is a number of hops, i.e. a distance of 10 between a pDC and a T cell means that there are 10 cells on the closest path from one to the other cell.
+
+    Args:
+        adata: An `AnnData` object, or a `SpatialData object`
+        group_key: Key of `adata.obs` containing the groups
+        target_group_key: Key of `adata.obs` containing the target groups (by default, uses `group_key`)
+        ignore_zeros: If `True`, a cell distance to its own group is 0.
+
+    Returns:
+        `DataFrame` of shape `n_groups * n_groups_target` of mean hop-distances
+    """
+    if isinstance(adata, SpatialData):
+        adata = adata.table
+
+    target_group_key = group_key if target_group_key is None else target_group_key
+
+    df_distances = cells_to_groups(adata, target_group_key, None, ignore_zeros=ignore_zeros)
+
+    if ignore_zeros:
+        df_distances.replace(0, np.nan, inplace=True)
+
+    df_distances[group_key] = adata.obs[group_key]
+    df_distances = df_distances.groupby(group_key, observed=False).mean()
+    df_distances.columns.name = target_group_key
+
+    return df_distances
+
+
+
+ +
+ + +
+ + + +

+ sopa.spatial.geometrize_niches(adata, niche_key, buffer='auto', perc_area_th=0.05) + +

+ + +
+ +

Converts the niches to shapely polygons, and puts them into a GeoDataFrame. Note that each niche can appear multiple times, as they can be separated by other niches; in this case, we call them different "components" of the same niche ID.

+ +
+ Plot components +

You can show niches components with GeoPandas +

gdf = geometrize_niches(adata, niche_key)
+gdf.plot(column=niche_key)
+

+
+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
adata + AnnData | SpatialData + +
+

An AnnData object, or a SpatialData object

+
+
+ required +
niche_key + str + +
+

Key of adata.obs containing the niches

+
+
+ required +
buffer + int | str + +
+

Expansion radius applied on components. By default, 3 * mean_distance_neighbors

+
+
+ 'auto' +
perc_area_th + float + +
+

For each niche, components whose area is less than perc_area_th * max_component_area will be removed

+
+
+ 0.05 +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ GeoDataFrame + +
+

A GeoDataFrame with geometries for each niche component. We also compute the area/perimeter/roundness of each component.

+
+
+ +
+ Source code in sopa/spatial/morpho.py +
18
+19
+20
+21
+22
+23
+24
+25
+26
+27
+28
+29
+30
+31
+32
+33
+34
+35
+36
+37
+38
+39
+40
+41
+42
+43
+44
+45
+46
+47
+48
+49
+50
+51
+52
+53
+54
+55
+56
+57
+58
+59
+60
+61
+62
+63
+64
+65
+66
+67
+68
+69
+70
+71
+72
+73
+74
+75
+76
+77
+78
+79
+80
+81
+82
+83
+84
+85
+86
def geometrize_niches(
+    adata: AnnData | SpatialData,
+    niche_key: str,
+    buffer: int | str = "auto",
+    perc_area_th: float = 0.05,
+) -> gpd.GeoDataFrame:
+    """Converts the niches to shapely polygons, and put into a `GeoDataFrame`. Note that each niche can appear multiple times, as they can be separated by other niches ; in this case, we call them different "components" of the same niche ID.
+
+    Plot components:
+        You can show niches components with GeoPandas
+        ```py
+        gdf = geometrize_niches(adata, niche_key)
+        gdf.plot(column=niche_key)
+        ```
+
+    Args:
+        adata: An `AnnData` object, or a `SpatialData object`
+        niche_key: Key of `adata.obs` containing the niches
+        buffer: Expansion radius applied on components. By default, `3 * mean_distance_neighbors`
+        perc_area_th: For each niche, components whose area is less than `perc_area_th * max_component_area` will be removed
+
+    Returns:
+        A `GeoDataFrame` with geometries for each niche component. We also compute the area/perimeter/roundness of each component.
+    """
+    if isinstance(adata, SpatialData):
+        adata = adata.table
+
+    _check_has_delaunay(adata)
+    data = {"geometry": [], niche_key: []}
+
+    delaunay = Delaunay(adata.obsm["spatial"])
+    connectivities = adata.obsp["spatial_connectivities"]
+    values = adata.obs[niche_key].values
+
+    keep = (
+        (connectivities[delaunay.simplices[:, 0], delaunay.simplices[:, 1]].A1 == 1)
+        & (connectivities[delaunay.simplices[:, 0], delaunay.simplices[:, 2]].A1 == 1)
+        & (connectivities[delaunay.simplices[:, 1], delaunay.simplices[:, 2]].A1 == 1)
+        & (values[delaunay.simplices[:, 0]] == values[delaunay.simplices[:, 1]])
+        & (values[delaunay.simplices[:, 0]] == values[delaunay.simplices[:, 2]])
+        & (values[delaunay.simplices[:, 0]] == values[delaunay.simplices[:, 2]])
+    )  # Keep simplices that are in the original Delaunay graph, and which are not in between different value categories
+
+    neighbors = np.where(np.isin(delaunay.neighbors, np.where(~keep)[0]), -1, delaunay.neighbors)
+
+    simplices_to_visit = set(np.where(keep)[0])
+
+    while simplices_to_visit:
+        component = Component(adata, delaunay, neighbors)
+        component.visit(simplices_to_visit)
+
+        data["geometry"].append(component.polygon)
+        data[niche_key].append(values[component.first_vertex_index()])
+
+    gdf = gpd.GeoDataFrame(data)
+
+    if buffer is not None and buffer != 0:
+        gdf = _clean_components(adata, gdf, niche_key, buffer)
+
+    gdf[SopaKeys.GEOMETRY_LENGTH] = gdf.length
+    gdf[SopaKeys.GEOMETRY_AREA] = gdf.area
+    gdf[SopaKeys.GEOMETRY_ROUNDNESS] = (
+        4 * np.pi * gdf[SopaKeys.GEOMETRY_AREA] / gdf[SopaKeys.GEOMETRY_LENGTH] ** 2
+    )
+
+    # Remove minor components (compared to the largest component of its corresponding niche)
+    gdf = gdf[gdf.area >= gdf[niche_key].map(gdf.groupby(niche_key).area.max() * perc_area_th)]
+
+    return gdf
+
+
+
+ +
+ + +
+ + + +

+ sopa.spatial.niches_geometry_stats(adata, niche_key, aggregation='min', key_added_suffix='_distance_to_niche_', **geometrize_niches_kwargs) + +

+ + +
+ +

Computes statistics over niches geometries

+ +
+ Details +
    +
  • n_components: Number of connected components of a niche (a component is a group of neighboring cells with the same niche attribute)
  • +
  • length: Mean distance of the exterior/boundary of the components of a niche
  • +
  • area: Mean area of the components of a niche
  • +
  • roundness: Float value between 0 and 1. The higher the value, the closer to a circle. Computed via 4 * pi * area / length**2
  • +
  • mean_distance_to_niche_X: mean distance to the niche (between the two closest points of the niches)
  • +
+
+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
adata + AnnData | SpatialData + +
+

An AnnData object, or a SpatialData object

+
+
+ required +
niche_key + str + +
+

Key of adata.obs containing the niches

+
+
+ required +
aggregation + str | list[str] + +
+

Aggregation mode. Either one string such as "min", or a list such as ["mean", "min"].

+
+
+ 'min' +
key_added_suffix + str + +
+

Suffix added in the DataFrame columns. Defaults to "distance_to_niche".

+
+
+ '_distance_to_niche_' +
geometrize_niches_kwargs + str + +
+

Kwargs to the sopa.spatial.geometrize_niches function

+
+
+ {} +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ GeoDataFrame + +
+

A DataFrame of shape n_niches * n_statistics

+
+
+ +
+ Source code in sopa/spatial/morpho.py +
def niches_geometry_stats(
+    adata: AnnData | SpatialData,
+    niche_key: str,
+    aggregation: str | list[str] = "min",
+    key_added_suffix: str = "_distance_to_niche_",
+    **geometrize_niches_kwargs: str,
+) -> gpd.GeoDataFrame:
+    """Computes statistics over niches geometries
+
+    Details:
+        - `n_components`: Number of connected components of a niche (a component is a group of neighboring cells with the same niche attribute)
+        - `length`: Mean distance of the exterior/boundary of the components of a niche
+        - `area`: Mean area of the components of a niche
+        - `roundness`: Float value between 0 and 1. The higher the value, the closer to a circle. Computed via `4 * pi * area / length**2`
+        - `mean_distance_to_niche_X`: mean distance to the niche (between the two closest points of the niches)
+
+    Args:
+        adata: An `AnnData` object, or a `SpatialData object`
+        niche_key: Key of `adata.obs` containing the niches
+        aggregation: Aggregation mode. Either one string such as `"min"`, or a list such as `["mean", "min"]`.
+        key_added_suffix: Suffix added in the DataFrame columns. Defaults to "_distance_to_niche_".
+        geometrize_niches_kwargs: Kwargs to the `sopa.spatial.geometrize_niches` function
+
+    Returns:
+        A `DataFrame` of shape `n_niches * n_statistics`
+    """
+    if isinstance(adata, SpatialData):
+        adata = adata.table
+
+    gdf = geometrize_niches(adata, niche_key, **geometrize_niches_kwargs)
+    value_counts = gdf[niche_key].value_counts()
+
+    assert len(gdf), "No niche geometry found, stats can't be computed"
+
+    log.info(f"Computing pairwise distances between {len(gdf)} components")
+    pairwise_distances: pd.DataFrame = gdf.geometry.apply(lambda g: gdf.distance(g))
+    pairwise_distances[niche_key] = gdf[niche_key]
+
+    if isinstance(aggregation, str):
+        aggregation = [aggregation]
+
+    for aggr in aggregation:
+        df = pairwise_distances.groupby(niche_key).aggregate(aggr).T
+        df.columns = [f"{aggr}{key_added_suffix}{c}" for c in df.columns]
+        gdf[df.columns] = df
+
+    df_stats: pd.DataFrame = gdf.groupby(niche_key)[gdf.columns[2:]].mean()
+    df_stats.insert(0, SopaKeys.GEOMETRY_COUNT, value_counts)
+
+    return df_stats
+
+
+
+ +
+ + +
+ + + +

+ sopa.spatial.cells_to_groups(adata, group_key, key_added_prefix=None, ignore_zeros=False) + +

+ + +
+ +

Compute the hop-distance between each cell and a cell category/group.

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
adata + AnnData + +
+

An AnnData object, or a SpatialData object

+
+
+ required +
group_key + str + +
+

Key of adata.obs containing the groups

+
+
+ required +
key_added_prefix + str | None + +
+

Prefix to the key added in adata.obsm. If None, will return the DataFrame instead of saving it.

+
+
+ None +
ignore_zeros + bool + +
+

If True, a cell distance to its own group is 0.

+
+
+ False +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ DataFrame | None + +
+

A Dataframe of shape n_obs * n_groups, or None if key_added_prefix was provided (in this case, the dataframe is saved in "{key_added_prefix}{group_key}")

+
+
+ +
+ Source code in sopa/spatial/distance.py +
14
+15
+16
+17
+18
+19
+20
+21
+22
+23
+24
+25
+26
+27
+28
+29
+30
+31
+32
+33
+34
+35
+36
+37
+38
+39
+40
+41
+42
+43
+44
+45
+46
+47
+48
+49
+50
+51
+52
+53
+54
+55
+56
+57
+58
+59
+60
+61
+62
+63
+64
+65
+66
+67
+68
+69
+70
+71
def cells_to_groups(
+    adata: AnnData,
+    group_key: str,
+    key_added_prefix: str | None = None,
+    ignore_zeros: bool = False,
+) -> pd.DataFrame | None:
+    """Compute the hop-distance between each cell and a cell category/group.
+
+    Args:
+        adata: An `AnnData` object, or a `SpatialData object`
+        group_key: Key of `adata.obs` containing the groups
+        key_added_prefix: Prefix to the key added in `adata.obsm`. If `None`, will return the `DataFrame` instead of saving it.
+        ignore_zeros: If `True`, a cell distance to its own group is 0.
+
+    Returns:
+        A `Dataframe` of shape `n_obs * n_groups`, or `None` if `key_added_prefix` was provided (in this case, the dataframe is saved in `"{key_added_prefix}{group_key}"`)
+    """
+    if isinstance(adata, SpatialData):
+        adata = adata.table
+
+    _check_has_delaunay(adata)
+
+    distances_to_groups = {}
+
+    if not adata.obs[group_key].dtype.name == "category":
+        log.info(f"Converting adata.obs['{group_key}'] to category")
+        adata.obs[group_key] = adata.obs[group_key].astype("category")
+
+    for group_id in tqdm(adata.obs[group_key].cat.categories):
+        group_nodes = np.where(adata.obs[group_key] == group_id)[0]
+
+        distances = np.full(adata.n_obs, np.nan)
+
+        if not ignore_zeros:
+            distances[group_nodes] = 0
+            visited = set(group_nodes)
+        else:
+            visited = set()
+
+        queue = group_nodes
+        current_distance = 0
+
+        while len(queue):
+            distances[queue] = current_distance
+
+            neighbors = set(adata.obsp["spatial_connectivities"][queue].indices)
+            queue = np.array(list(neighbors - visited))
+            visited |= neighbors
+
+            current_distance += 1
+
+        distances_to_groups[group_id] = distances
+
+    df_distances = pd.DataFrame(distances_to_groups, index=adata.obs_names)
+
+    if key_added_prefix is None:
+        return df_distances
+    adata.obsm[f"{key_added_prefix}{group_key}"] = df_distances
+
+
+
+ +
+ + +
+ + + +

+ sopa.spatial.spatial_neighbors(adata, radius, library_key=None, percentile=None, set_diag=False) + +

+ + +
+ +

Create a Delaunay graph from spatial coordinates. This function comes from squidpy.

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
adata + AnnData | SpatialData + +
+

AnnData object

+
+
+ required +
radius + tuple[float, float] | None + +
+

tuple that prunes the final graph to only contain edges in interval [min(radius), max(radius)]. If None, all edges are kept.

+
+
+ required +
library_key + str | None + +
+

Optional batch key in adata.obs

+
+
+ None +
percentile + float | None + +
+

Percentile of the distances to use as threshold.

+
+
+ None +
set_diag + bool + +
+

Whether to set the diagonal of the spatial connectivities to 1.0.

+
+
+ False +
+ +
+ Source code in sopa/spatial/_build.py +
24
+25
+26
+27
+28
+29
+30
+31
+32
+33
+34
+35
+36
+37
+38
+39
+40
+41
+42
+43
+44
+45
+46
+47
+48
+49
+50
+51
+52
+53
+54
+55
+56
+57
+58
+59
+60
+61
+62
+63
+64
+65
+66
+67
+68
+69
+70
+71
+72
+73
+74
+75
+76
def spatial_neighbors(
+    adata: AnnData | SpatialData,
+    radius: tuple[float, float] | None,
+    library_key: str | None = None,
+    percentile: float | None = None,
+    set_diag: bool = False,
+):
+    """Create a Delaunay graph from spatial coordinates. This function comes from [squidpy](https://squidpy.readthedocs.io/en/latest/api/squidpy.gr.spatial_neighbors.html#squidpy.gr.spatial_neighbors).
+
+    Args:
+        adata: AnnData object
+        radius: tuple that prunes the final graph to only contain edges in interval `[min(radius), max(radius)]`. If `None`, all edges are kept.
+        library_key: Optional batch key in adata.obs
+        percentile: Percentile of the distances to use as threshold.
+        set_diag: Whether to set the diagonal of the spatial connectivities to `1.0`.
+    """
+    if isinstance(adata, SpatialData):
+        adata = adata.table
+
+    assert (
+        radius is None or len(radius) == 2
+    ), "Radius is expected to be a tuple (min_radius, max_radius)"
+
+    log.info("Computing delaunay graph")
+
+    if library_key is not None:
+        assert adata.obs[library_key].dtype == "category"
+        libs = adata.obs[library_key].cat.categories
+        make_index_unique(adata.obs_names)
+    else:
+        libs = [None]
+
+    _build_fun = partial(
+        _spatial_neighbor,
+        set_diag=set_diag,
+        radius=radius,
+        percentile=percentile,
+    )
+
+    if library_key is not None:
+        mats: list[tuple[spmatrix, spmatrix]] = []
+        ixs = []  # type: ignore[var-annotated]
+        for lib in libs:
+            ixs.extend(np.where(adata.obs[library_key] == lib)[0])
+            mats.append(_build_fun(adata[adata.obs[library_key] == lib]))
+        ixs = np.argsort(ixs)  # type: ignore[assignment] # invert
+        Adj = block_diag([m[0] for m in mats], format="csr")[ixs, :][:, ixs]
+        Dst = block_diag([m[1] for m in mats], format="csr")[ixs, :][:, ixs]
+    else:
+        Adj, Dst = _build_fun(adata)
+
+    adata.obsp["spatial_connectivities"] = Adj
+    adata.obsp["spatial_distances"] = Dst
+
+
+
+ +
+ + +
+ + + +

+ sopa.spatial.prepare_network(adata, cell_type_key, niche_key, clip_weight=3, node_colors=('#5c7dc4', '#f05541'), node_sizes=(1.3, 5)) + +

+ + +
+ +

Create a dataframe representing weights between cell-types and/or niches. +This can later be used to plot a cell-type/niche representation of a whole slide +using the netgraph library.

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
adata + AnnData + +
+

An AnnData object

+
+
+ required +
cell_type_key + str + +
+

Key of adata.obs containing the cell types

+
+
+ required +
niche_key + str + +
+

Key of adata.obs containing the niches

+
+
+ required +
clip_weight + float + +
+

Maximum weight

+
+
+ 3 +
node_colors + tuple[str] + +
+

Tuple of (cell-type color, niche color)

+
+
+ ('#5c7dc4', '#f05541') +
node_sizes + float + +
+

Tuple of (cell-type size, niche size)

+
+
+ (1.3, 5) +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ tuple[DataFrame, dict, dict, dict] + +
+

A DataFrame of weights between cell-types and/or niches, and three dict for netgraph display

+
+
+ +
+ Source code in sopa/spatial/distance.py +
def prepare_network(
+    adata: AnnData,
+    cell_type_key: str,
+    niche_key: str,
+    clip_weight: float = 3,
+    node_colors: tuple[str] = ("#5c7dc4", "#f05541"),
+    node_sizes: float = (1.3, 5),
+) -> tuple[pd.DataFrame, dict, dict, dict]:
+    """Create a dataframe representing weights between cell-types and/or niches.
+    This can later be used to plot a cell-type/niche representation of a whole slide
+    using the netgraph library.
+
+    Args:
+        adata: An `AnnData` object
+        cell_type_key: Key of `adata.obs` containing the cell types
+        niche_key: Key of `adata.obs` containing the niches
+        clip_weight: Maximum weight
+        node_colors: Tuple of (cell-type color, niche color)
+        node_sizes: Tuple of (cell-type size, niche size)
+
+    Returns:
+        A DataFrame of weights between cell-types and/or niches, and three dict for netgraph display
+    """
+    node_color, node_size, node_shape = {}, {}, {}
+
+    log.info("Computing all distances for the 4 pairs of categories")
+    weights = mean_distance(adata, cell_type_key)
+    top_right = mean_distance(adata, cell_type_key, niche_key)
+    bottom_left = mean_distance(adata, niche_key, cell_type_key)
+    bottom_right = mean_distance(adata, niche_key, niche_key)
+
+    for pop in weights.index:
+        node_color[pop] = node_colors[0]
+        node_size[pop] = node_sizes[0]
+        node_shape[pop] = "o"
+
+    for niche in bottom_right.index:
+        node_color[niche] = node_colors[1]
+        node_size[niche] = node_sizes[1]
+        node_shape[niche] = "h"
+
+    # assemble dataframe per-block
+    bottom_left[bottom_right.columns] = bottom_right
+    weights[top_right.columns] = top_right
+    weights = pd.concat([weights, bottom_left], axis=0).copy()
+
+    # convert distances to symmetric weights
+    weights = 1 / weights
+    np.fill_diagonal(weights.values, 0)
+    weights = weights.clip(0, clip_weight)
+    weights = (weights.T + weights) / 2
+
+    return weights, node_color, node_size, node_shape
+
+
+
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/api/spatial/spatial.md b/api/spatial/spatial.md new file mode 100644 index 00000000..4331fc49 --- /dev/null +++ b/api/spatial/spatial.md @@ -0,0 +1,23 @@ +::: sopa.spatial.mean_distance + options: + show_root_heading: true + +::: sopa.spatial.geometrize_niches + options: + show_root_heading: true + +::: sopa.spatial.niches_geometry_stats + options: + show_root_heading: true + +::: sopa.spatial.cells_to_groups + options: + show_root_heading: true + +::: sopa.spatial.spatial_neighbors + options: + show_root_heading: true + +::: sopa.spatial.prepare_network + options: + show_root_heading: true diff --git a/api/utils/data/data.md b/api/utils/data/data.md new file mode 100644 index 00000000..3e4013e9 --- /dev/null +++ b/api/utils/data/data.md @@ -0,0 +1,7 @@ +::: sopa.utils.data.uniform + options: + show_root_heading: true + +::: sopa.utils.data.blobs + options: + show_root_heading: true diff --git a/api/utils/data/index.html b/api/utils/data/index.html new file mode 100644 index 00000000..20e7fed8 --- /dev/null +++ b/api/utils/data/index.html @@ -0,0 +1,1723 @@ + + + + + + + + + + + + + + + + + + + + + + + sopa.utils.data - Sopa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + +

sopa.utils.data

+ +
+ + + +

+ sopa.utils.data.uniform(*_, length=2048, cell_density=0.0001, n_points_per_cell=50, c_coords=['DAPI', 'CK', 'CD3', 'CD20'], genes=['EPCAM', 'CD3E', 'CD20', 'CXCL4', 'CXCL10'], sigma_factor=0.1, pixel_size=0.1, seed=0, include_vertices=False, include_image=True, apply_blur=True) + +

+ + +
+ +

Generate a dummy dataset composed of cells generated uniformly in a square. It also has transcripts.

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
length + int + +
+

Size of the square, in pixels

+
+
+ 2048 +
cell_density + float + +
+

Density of cells per pixel^2

+
+
+ 0.0001 +
n_points_per_cell + int + +
+

Mean number of transcripts per cell

+
+
+ 50 +
c_coords + list[str] + +
+

Channel names

+
+
+ ['DAPI', 'CK', 'CD3', 'CD20'] +
genes + int | list[str] + +
+

Number of different genes, or list of gene names

+
+
+ ['EPCAM', 'CD3E', 'CD20', 'CXCL4', 'CXCL10'] +
sigma_factor + float + +
+

Factor used to determine sigma for the gaussian blur.

+
+
+ 0.1 +
pixel_size + float + +
+

Number of microns in one pixel.

+
+
+ 0.1 +
seed + int + +
+

Numpy random seed

+
+
+ 0 +
include_vertices + bool + +
+

Whether to include the vertices of the cells (as points) in the spatialdata object

+
+
+ False +
include_image + bool + +
+

Whether to include the image in the spatialdata object

+
+
+ True +
apply_blur + bool + +
+

Whether to apply gaussian blur on the image (without blur, cells are just one pixel)

+
+
+ True +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ SpatialData + +
+

A SpatialData object with a 2D image (sdata["image"]), the cells polygon boundaries (sdata["cells"]), the transcripts (sdata["transcripts"]), and optional cell vertices (sdata["vertices"]) if include_vertices is True.

+
+
+ +
+ Source code in sopa/utils/data.py +
def uniform(
+    *_,
+    length: int = 2_048,
+    cell_density: float = 1e-4,
+    n_points_per_cell: int = 50,
+    c_coords: list[str] = ["DAPI", "CK", "CD3", "CD20"],
+    genes: int | list[str] = ["EPCAM", "CD3E", "CD20", "CXCL4", "CXCL10"],
+    sigma_factor: float = 0.1,
+    pixel_size: float = 0.1,
+    seed: int = 0,
+    include_vertices: bool = False,
+    include_image: bool = True,
+    apply_blur: bool = True,
+) -> SpatialData:
+    """Generate a dummy dataset composed of cells generated uniformly in a square. It also has transcripts.
+
+    Args:
+        length: Size of the square, in pixels
+        cell_density: Density of cells per pixel^2
+        n_points_per_cell: Mean number of transcripts per cell
+        c_coords: Channel names
+        genes: Number of different genes, or list of gene names
+        sigma_factor: Factor used to determine `sigma` for the gaussian blur.
+        pixel_size: Number of microns in one pixel.
+        seed: Numpy random seed
+        include_vertices: Whether to include the vertices of the cells (as points) in the spatialdata object
+        include_image: Whether to include the image in the spatialdata object
+        apply_blur: Whether to apply gaussian blur on the image (without blur, cells are just one pixel)
+
+    Returns:
+        A SpatialData object with a 2D image (`sdata["image"]`), the cells polygon boundaries (`sdata["cells"]`), the transcripts (`sdata["transcripts"]`), and optional cell vertices (`sdata["vertices"]`) if `include_vertices` is `True`.
+    """
+    np.random.seed(seed)
+
+    grid_width = max(1, int(length * np.sqrt(cell_density)))
+    dx = length / grid_width
+    sigma = dx * sigma_factor
+    n_cells = grid_width**2
+    radius = int(dx) // 4
+    cell_types_index = np.random.randint(0, max(1, len(c_coords) - 1), n_cells)
+
+    log.info(
+        f"Image of size ({len(c_coords), length, length}) with {n_cells} cells and {n_points_per_cell} transcripts per cell"
+    )
+
+    ### Compute cell vertices (xy array)
+    vertices_x = dx / 2 + np.arange(grid_width) * dx
+    x, y = np.meshgrid(vertices_x, vertices_x)
+    xy = np.stack([x.ravel(), y.ravel()], axis=1)
+    xy += np.random.uniform(-dx / 2, dx / 2, size=xy.shape)
+    xy = xy.clip(0, length - 1).astype(int)
+
+    vertices = pd.DataFrame(xy, columns=["x", "y"])
+
+    ### Create image
+    images = {}
+
+    if include_image:
+        x_circle, y_circle = circle_coords(radius)
+
+        image = np.zeros((len(c_coords), length, length))
+        for i, (x, y) in enumerate(xy):
+            y_coords = (y + y_circle).clip(0, image.shape[1] - 1)
+            x_coords = (x + x_circle).clip(0, image.shape[2] - 1)
+            image[0, y_coords, x_coords] = 1
+            if len(c_coords) > 1:
+                image[cell_types_index[i] + 1, y_coords, x_coords] = 1
+        if apply_blur:
+            image = gaussian_filter(image, sigma=sigma, axes=(1, 2))
+        image = (image / image.max() * 255).astype(np.uint8)
+        image = da.from_array(image, chunks=(1, 4096, 4096))
+        images["image"] = Image2DModel.parse(image, c_coords=c_coords, dims=["c", "y", "x"])
+
+    ### Create cell boundaries
+    cells = [Point(vertex).buffer(radius).simplify(tolerance=1) for vertex in xy]
+    bbox = box(0, 0, length - 1, length - 1)
+    cells = [cell.intersection(bbox) for cell in cells]
+    gdf = gpd.GeoDataFrame(geometry=cells)
+    shapes = {"cells": ShapesModel.parse(gdf)}
+
+    ### Create transcripts
+    n_genes = n_cells * n_points_per_cell
+    point_cell_index = np.arange(n_cells).repeat(n_points_per_cell)
+    points_coords = radius / 2 * np.random.randn(n_genes, 2) + xy[point_cell_index]
+    points_coords = points_coords.clip(0, length - 1)
+
+    if isinstance(genes, int):
+        gene_names = np.random.choice([chr(97 + i) for i in range(n_genes)], size=n_genes)
+    elif len(genes) and len(genes) >= len(c_coords) - 1:
+        gene_names = np.full(n_genes, "", dtype="<U5")
+        for i in range(len(genes)):
+            where_cell_type = np.where(cell_types_index[point_cell_index] == i)[0]
+            probabilities = np.full(len(genes), 0.2 / (len(genes) - 1))
+            probabilities[i] = 0.8
+            gene_names[where_cell_type] = np.random.choice(
+                genes, len(where_cell_type), p=probabilities
+            )
+    else:
+        gene_names = np.random.choice(genes, size=n_genes)
+
+    df = pd.DataFrame(
+        {
+            "x": points_coords[:, 0],
+            "y": points_coords[:, 1],
+            "z": 1,
+            "genes": gene_names,
+        }
+    )
+
+    # apply an arbritrary transformation for a more complete test case
+    affine = np.array([[pixel_size, 0, 100], [0, pixel_size, 600], [0, 0, 1]])
+    df[["x", "y", "z"]] = df[["x", "y", "z"]] @ affine.T
+    affine = Affine(affine, input_axes=["x", "y"], output_axes=["x", "y"]).inverse()
+
+    df = dd.from_pandas(df, chunksize=2_000_000)
+
+    points = {
+        "transcripts": PointsModel.parse(
+            df, transformations={"global": affine, "microns": Identity()}
+        )
+    }
+    if include_vertices:
+        points["vertices"] = PointsModel.parse(vertices)
+
+    return SpatialData(images=images, points=points, shapes=shapes)
+
+
+
+ +
+ + +
+ + + +

+ sopa.utils.data.blobs(*_, length=1024, n_points=10000, c_coords=['DAPI', 'CK', 'CD3', 'CD20'], **kwargs) + +

+ + +
+ +

Adapts the blobs dataset from SpatialData for sopa. Please refer to the SpatialData documentation

+ +
+ Source code in sopa/utils/data.py +
def blobs(
+    *_,
+    length: int = 1_024,
+    n_points: int = 10_000,
+    c_coords=["DAPI", "CK", "CD3", "CD20"],
+    **kwargs,
+) -> SpatialData:
+    """Adapts the blobs dataset from SpatialData for sopa. Please refer to the SpatialData documentation"""
+    _blobs = BlobsDataset(
+        length=length, n_points=n_points, c_coords=c_coords, n_channels=len(c_coords), **kwargs
+    )
+
+    image = _blobs._image_blobs(
+        _blobs.transformations,
+        _blobs.length,
+        _blobs.n_channels,
+        _blobs.c_coords,
+    )
+    image.data = (image.data * 255).astype(np.uint8)
+
+    points = _blobs._points_blobs(_blobs.transformations, _blobs.length, _blobs.n_points)
+    genes = pd.Series(np.random.choice(list("abcdef"), size=len(points))).astype("category")
+    points["genes"] = dd.from_pandas(genes, npartitions=points.npartitions)
+
+    return SpatialData(images={"blob_image": image}, points={"blob_transcripts": points})
+
+
+
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/api/utils/image/image.md b/api/utils/image/image.md new file mode 100644 index 00000000..3c82fed1 --- /dev/null +++ b/api/utils/image/image.md @@ -0,0 +1,11 @@ +::: sopa.utils.image.scale_dtype + options: + show_root_heading: true + +::: sopa.utils.image.resize + options: + show_root_heading: true + +::: sopa.utils.image.resize_numpy + options: + show_root_heading: true diff --git a/api/utils/image/index.html b/api/utils/image/index.html new file mode 100644 index 00000000..93384c91 --- /dev/null +++ b/api/utils/image/index.html @@ -0,0 +1,1627 @@ + + + + + + + + + + + + + + + + + + + + + + + sopa.utils.image - Sopa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + +

sopa.utils.image

+ +
+ + + +

+ sopa.utils.image.scale_dtype(arr, dtype) + +

+ + +
+ +

Change the dtype of an array but keep the scale compared to the type maximum value.

+
+

Example

+

For an array of dtype uint8 being transformed to np.uint16, the value 255 will become 65535

+
+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
arr + ndarray + +
+

A numpy array

+
+
+ required +
dtype + dtype + +
+

Target numpy data type

+
+
+ required +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ ndarray + +
+

A scaled numpy array with the dtype provided.

+
+
+ +
+ Source code in sopa/utils/image.py +
57
+58
+59
+60
+61
+62
+63
+64
+65
+66
+67
+68
+69
+70
+71
+72
+73
+74
+75
+76
+77
def scale_dtype(arr: np.ndarray, dtype: np.dtype) -> np.ndarray:
+    """Change the dtype of an array but keep the scale compared to the type maximum value.
+
+    !!! note "Example"
+        For an array of dtype `uint8` being transformed to `np.uint16`, the value `255` will become `65535`
+
+    Args:
+        arr: A `numpy` array
+        dtype: Target `numpy` data type
+
+    Returns:
+        A scaled `numpy` array with the dtype provided.
+    """
+    _check_integer_dtype(arr.dtype)
+    _check_integer_dtype(dtype)
+
+    if arr.dtype == dtype:
+        return arr
+
+    factor = np.iinfo(dtype).max / np.iinfo(arr.dtype).max
+    return (arr * factor).astype(dtype)
+
+
+
+ +
+ + +
+ + + +

+ sopa.utils.image.resize(xarr, scale_factor) + +

+ + +
+ +

Resize a xarray image

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
xarr + DataArray + +
+

A xarray array

+
+
+ required +
scale_factor + float + +
+

Scale factor of resizing, e.g. 2 will decrease the width by 2

+
+
+ required +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ Array + +
+

Resized dask array

+
+
+ +
+ Source code in sopa/utils/image.py +
 7
+ 8
+ 9
+10
+11
+12
+13
+14
+15
+16
+17
+18
+19
+20
+21
+22
+23
+24
+25
+26
def resize(xarr: xr.DataArray, scale_factor: float) -> da.Array:
+    """Resize a xarray image
+
+    Args:
+        xarr: A `xarray` array
+        scale_factor: Scale factor of resizing, e.g. `2` will decrease the width by 2
+
+    Returns:
+        Resized dask array
+    """
+    resize_dims = [dim in ["x", "y"] for dim in xarr.dims]
+    transform = np.diag([scale_factor if resize_dim else 1 for resize_dim in resize_dims])
+    output_shape = [
+        size // scale_factor if resize_dim else size
+        for size, resize_dim in zip(xarr.shape, resize_dims)
+    ]
+
+    return dask_image.ndinterp.affine_transform(
+        xarr.data, matrix=transform, output_shape=output_shape
+    )
+
+
+
+ +
+ + +
+ + + +

+ sopa.utils.image.resize_numpy(arr, scale_factor, dims, output_shape) + +

+ + +
+ +

Resize a numpy image

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
arr + ndarray + +
+

a numpy array

+
+
+ required +
scale_factor + float + +
+

Scale factor of resizing, e.g. 2 will decrease the width by 2

+
+
+ required +
dims + list[str] + +
+

List of dimension names. Only "x" and "y" are resized.

+
+
+ required +
output_shape + list[int] + +
+

Size of the output array

+
+
+ required +
+ + + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ ndarray + +
+

Resized array

+
+
+ +
+ Source code in sopa/utils/image.py +
29
+30
+31
+32
+33
+34
+35
+36
+37
+38
+39
+40
+41
+42
+43
+44
+45
+46
+47
+48
def resize_numpy(
+    arr: np.ndarray, scale_factor: float, dims: list[str], output_shape: list[int]
+) -> np.ndarray:
+    """Resize a numpy image
+
+    Args:
+        arr: a `numpy` array
+        scale_factor: Scale factor of resizing, e.g. `2` will decrease the width by 2
+        dims: List of dimension names. Only `"x"` and `"y"` are resized.
+        output_shape: Size of the output array
+
+    Returns:
+        Resized array
+    """
+    resize_dims = [dim in ["x", "y"] for dim in dims]
+    transform = np.diag([scale_factor if resize_dim else 1 for resize_dim in resize_dims])
+
+    return dask_image.ndinterp.affine_transform(
+        arr, matrix=transform, output_shape=output_shape
+    ).compute()
+
+
+
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/api/utils/polygon_crop/index.html b/api/utils/polygon_crop/index.html new file mode 100644 index 00000000..fee2e8bb --- /dev/null +++ b/api/utils/polygon_crop/index.html @@ -0,0 +1,1381 @@ + + + + + + + + + + + + + + + + + + + + + + + sopa.utils.polygon_crop - Sopa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + +

sopa.utils.polygon_crop

+ +
+ + + +

+ sopa.utils.polygon_crop.polygon_selection(sdata, intermediate_image=None, intermediate_polygon=None, channels=None, scale_factor=10, margin_ratio=0.1) + +

+ + +
+ +

Crop an image based on a user-defined polygon (interactive mode).

+ + + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
sdata + SpatialData + +
+

A SpatialData object

+
+
+ required +
intermediate_image + str | None + +
+

Path to the intermediate image, with a .zip extension. Use this only if the interactive mode is not available

+
+
+ None +
intermediate_polygon + str | None + +
+

Path to the intermediate polygon, with a .zip extension. Use this locally, after downloading the intermediate_image

+
+
+ None +
channels + list[str] | None + +
+

List of channel names to be displayed. Optional if there are already only 1 or 3 channels.

+
+
+ None +
scale_factor + float + +
+

Resize the image by this value (high value for a lower memory usage)

+
+
+ 10 +
margin_ratio + float + +
+

Ratio of the image margin on the display (compared to the image size)

+
+
+ 0.1 +
+ +
+ Source code in sopa/utils/polygon_crop.py +
def polygon_selection(
+    sdata: SpatialData,
+    intermediate_image: str | None = None,
+    intermediate_polygon: str | None = None,
+    channels: list[str] | None = None,
+    scale_factor: float = 10,
+    margin_ratio: float = 0.1,
+):
+    """Crop an image based on a user-defined polygon (interactive mode).
+
+    Args:
+        sdata: A `SpatialData` object
+        intermediate_image: Path to the intermediate image, with a `.zip` extension. Use this only if the interactive mode is not available
+        intermediate_polygon: Path to the intermediate polygon, with a `.zip` extension. Use this locally, after downloading the `intermediate_image`
+        channels: List of channel names to be displayed. Optional if there are already only 1 or 3 channels.
+        scale_factor: Resize the image by this value (high value for a lower memory usage)
+        margin_ratio: Ratio of the image margin on the display (compared to the image size)
+    """
+    if intermediate_polygon is None:
+        image_key, image = _prepare(sdata, channels, scale_factor)
+
+        if intermediate_image is not None:
+            log.info(f"Resized image will be saved to {intermediate_image}")
+            with zarr.ZipStore(intermediate_image, mode="w") as store:
+                g = zarr.group(store=store)
+                g.attrs.put({ROI.SCALE_FACTOR: scale_factor, ROI.IMAGE_KEY: image_key})
+                g.array(ROI.IMAGE_ARRAY_KEY, image, dtype=image.dtype, chunks=image.shape)
+            return
+
+        polygon = _draw_polygon(image, scale_factor, margin_ratio)
+    else:
+        log.info(f"Reading polygon at path {intermediate_polygon}")
+        z = zarr.open(intermediate_polygon, mode="r")
+        polygon = Polygon(z[ROI.POLYGON_ARRAY_KEY][:])
+        image_key = z.attrs[ROI.IMAGE_KEY]
+
+        image = get_spatial_image(sdata, image_key)
+
+    geo_df = gpd.GeoDataFrame(geometry=[polygon])
+
+    geo_df = ShapesModel.parse(
+        geo_df, transformations=get_transformation(sdata[image_key], get_all=True)
+    )
+    sdata.add_shapes(ROI.KEY, geo_df)
+
+    log.info(f"Polygon saved in sdata['{ROI.KEY}']")
+
+
+
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/api/utils/polygon_crop/polygon_crop.md b/api/utils/polygon_crop/polygon_crop.md new file mode 100644 index 00000000..8033eb97 --- /dev/null +++ b/api/utils/polygon_crop/polygon_crop.md @@ -0,0 +1,3 @@ +::: sopa.utils.polygon_crop.polygon_selection + options: + show_root_heading: true diff --git a/assets/_mkdocstrings.css b/assets/_mkdocstrings.css new file mode 100644 index 00000000..4b7d98b8 --- /dev/null +++ b/assets/_mkdocstrings.css @@ -0,0 +1,109 @@ + +/* Avoid breaking parameter names, etc. in table cells. */ +.doc-contents td code { + word-break: normal !important; +} + +/* No line break before first paragraph of descriptions. */ +.doc-md-description, +.doc-md-description>p:first-child { + display: inline; +} + +/* Max width for docstring sections tables. */ +.doc .md-typeset__table, +.doc .md-typeset__table table { + display: table !important; + width: 100%; +} + +.doc .md-typeset__table tr { + display: table-row; +} + +/* Defaults in Spacy table style. */ +.doc-param-default { + float: right; +} + +/* Symbols in Navigation and ToC. 
*/ +:root, +[data-md-color-scheme="default"] { + --doc-symbol-attribute-fg-color: #953800; + --doc-symbol-function-fg-color: #8250df; + --doc-symbol-method-fg-color: #8250df; + --doc-symbol-class-fg-color: #0550ae; + --doc-symbol-module-fg-color: #5cad0f; + + --doc-symbol-attribute-bg-color: #9538001a; + --doc-symbol-function-bg-color: #8250df1a; + --doc-symbol-method-bg-color: #8250df1a; + --doc-symbol-class-bg-color: #0550ae1a; + --doc-symbol-module-bg-color: #5cad0f1a; +} + +[data-md-color-scheme="slate"] { + --doc-symbol-attribute-fg-color: #ffa657; + --doc-symbol-function-fg-color: #d2a8ff; + --doc-symbol-method-fg-color: #d2a8ff; + --doc-symbol-class-fg-color: #79c0ff; + --doc-symbol-module-fg-color: #baff79; + + --doc-symbol-attribute-bg-color: #ffa6571a; + --doc-symbol-function-bg-color: #d2a8ff1a; + --doc-symbol-method-bg-color: #d2a8ff1a; + --doc-symbol-class-bg-color: #79c0ff1a; + --doc-symbol-module-bg-color: #baff791a; +} + +code.doc-symbol { + border-radius: .1rem; + font-size: .85em; + padding: 0 .3em; + font-weight: bold; +} + +code.doc-symbol-attribute { + color: var(--doc-symbol-attribute-fg-color); + background-color: var(--doc-symbol-attribute-bg-color); +} + +code.doc-symbol-attribute::after { + content: "attr"; +} + +code.doc-symbol-function { + color: var(--doc-symbol-function-fg-color); + background-color: var(--doc-symbol-function-bg-color); +} + +code.doc-symbol-function::after { + content: "func"; +} + +code.doc-symbol-method { + color: var(--doc-symbol-method-fg-color); + background-color: var(--doc-symbol-method-bg-color); +} + +code.doc-symbol-method::after { + content: "meth"; +} + +code.doc-symbol-class { + color: var(--doc-symbol-class-fg-color); + background-color: var(--doc-symbol-class-bg-color); +} + +code.doc-symbol-class::after { + content: "class"; +} + +code.doc-symbol-module { + color: var(--doc-symbol-module-fg-color); + background-color: var(--doc-symbol-module-bg-color); +} + +code.doc-symbol-module::after { + content: 
"mod"; +} \ No newline at end of file diff --git a/assets/explorer/add_image.png b/assets/explorer/add_image.png new file mode 100644 index 00000000..49f9ddd9 Binary files /dev/null and b/assets/explorer/add_image.png differ diff --git a/assets/explorer/download_alignment.png b/assets/explorer/download_alignment.png new file mode 100644 index 00000000..8cd1098c Binary files /dev/null and b/assets/explorer/download_alignment.png differ diff --git a/assets/explorer/download_transformation_file.png b/assets/explorer/download_transformation_file.png new file mode 100644 index 00000000..ce27c163 Binary files /dev/null and b/assets/explorer/download_transformation_file.png differ diff --git a/assets/images/favicon.png b/assets/images/favicon.png new file mode 100644 index 00000000..1cf13b9f Binary files /dev/null and b/assets/images/favicon.png differ diff --git a/assets/javascripts/bundle.7389ff0e.min.js b/assets/javascripts/bundle.7389ff0e.min.js new file mode 100644 index 00000000..c7df7197 --- /dev/null +++ b/assets/javascripts/bundle.7389ff0e.min.js @@ -0,0 +1,29 @@ +"use strict";(()=>{var Mi=Object.create;var gr=Object.defineProperty;var Li=Object.getOwnPropertyDescriptor;var _i=Object.getOwnPropertyNames,Ft=Object.getOwnPropertySymbols,Ai=Object.getPrototypeOf,xr=Object.prototype.hasOwnProperty,ro=Object.prototype.propertyIsEnumerable;var to=(e,t,r)=>t in e?gr(e,t,{enumerable:!0,configurable:!0,writable:!0,value:r}):e[t]=r,P=(e,t)=>{for(var r in t||(t={}))xr.call(t,r)&&to(e,r,t[r]);if(Ft)for(var r of Ft(t))ro.call(t,r)&&to(e,r,t[r]);return e};var oo=(e,t)=>{var r={};for(var o in e)xr.call(e,o)&&t.indexOf(o)<0&&(r[o]=e[o]);if(e!=null&&Ft)for(var o of Ft(e))t.indexOf(o)<0&&ro.call(e,o)&&(r[o]=e[o]);return r};var yr=(e,t)=>()=>(t||e((t={exports:{}}).exports,t),t.exports);var Ci=(e,t,r,o)=>{if(t&&typeof t=="object"||typeof t=="function")for(let n of _i(t))!xr.call(e,n)&&n!==r&&gr(e,n,{get:()=>t[n],enumerable:!(o=Li(t,n))||o.enumerable});return e};var 
jt=(e,t,r)=>(r=e!=null?Mi(Ai(e)):{},Ci(t||!e||!e.__esModule?gr(r,"default",{value:e,enumerable:!0}):r,e));var no=(e,t,r)=>new Promise((o,n)=>{var i=c=>{try{a(r.next(c))}catch(p){n(p)}},s=c=>{try{a(r.throw(c))}catch(p){n(p)}},a=c=>c.done?o(c.value):Promise.resolve(c.value).then(i,s);a((r=r.apply(e,t)).next())});var ao=yr((Er,io)=>{(function(e,t){typeof Er=="object"&&typeof io!="undefined"?t():typeof define=="function"&&define.amd?define(t):t()})(Er,function(){"use strict";function e(r){var o=!0,n=!1,i=null,s={text:!0,search:!0,url:!0,tel:!0,email:!0,password:!0,number:!0,date:!0,month:!0,week:!0,time:!0,datetime:!0,"datetime-local":!0};function a(C){return!!(C&&C!==document&&C.nodeName!=="HTML"&&C.nodeName!=="BODY"&&"classList"in C&&"contains"in C.classList)}function c(C){var ct=C.type,Ve=C.tagName;return!!(Ve==="INPUT"&&s[ct]&&!C.readOnly||Ve==="TEXTAREA"&&!C.readOnly||C.isContentEditable)}function p(C){C.classList.contains("focus-visible")||(C.classList.add("focus-visible"),C.setAttribute("data-focus-visible-added",""))}function l(C){C.hasAttribute("data-focus-visible-added")&&(C.classList.remove("focus-visible"),C.removeAttribute("data-focus-visible-added"))}function f(C){C.metaKey||C.altKey||C.ctrlKey||(a(r.activeElement)&&p(r.activeElement),o=!0)}function u(C){o=!1}function d(C){a(C.target)&&(o||c(C.target))&&p(C.target)}function y(C){a(C.target)&&(C.target.classList.contains("focus-visible")||C.target.hasAttribute("data-focus-visible-added"))&&(n=!0,window.clearTimeout(i),i=window.setTimeout(function(){n=!1},100),l(C.target))}function b(C){document.visibilityState==="hidden"&&(n&&(o=!0),D())}function 
D(){document.addEventListener("mousemove",J),document.addEventListener("mousedown",J),document.addEventListener("mouseup",J),document.addEventListener("pointermove",J),document.addEventListener("pointerdown",J),document.addEventListener("pointerup",J),document.addEventListener("touchmove",J),document.addEventListener("touchstart",J),document.addEventListener("touchend",J)}function Q(){document.removeEventListener("mousemove",J),document.removeEventListener("mousedown",J),document.removeEventListener("mouseup",J),document.removeEventListener("pointermove",J),document.removeEventListener("pointerdown",J),document.removeEventListener("pointerup",J),document.removeEventListener("touchmove",J),document.removeEventListener("touchstart",J),document.removeEventListener("touchend",J)}function J(C){C.target.nodeName&&C.target.nodeName.toLowerCase()==="html"||(o=!1,Q())}document.addEventListener("keydown",f,!0),document.addEventListener("mousedown",u,!0),document.addEventListener("pointerdown",u,!0),document.addEventListener("touchstart",u,!0),document.addEventListener("visibilitychange",b,!0),D(),r.addEventListener("focus",d,!0),r.addEventListener("blur",y,!0),r.nodeType===Node.DOCUMENT_FRAGMENT_NODE&&r.host?r.host.setAttribute("data-js-focus-visible",""):r.nodeType===Node.DOCUMENT_NODE&&(document.documentElement.classList.add("js-focus-visible"),document.documentElement.setAttribute("data-js-focus-visible",""))}if(typeof window!="undefined"&&typeof document!="undefined"){window.applyFocusVisiblePolyfill=e;var t;try{t=new CustomEvent("focus-visible-polyfill-ready")}catch(r){t=document.createEvent("CustomEvent"),t.initCustomEvent("focus-visible-polyfill-ready",!1,!1,{})}window.dispatchEvent(t)}typeof document!="undefined"&&e(document)})});var Kr=yr((kt,qr)=>{/*! 
+ * clipboard.js v2.0.11 + * https://clipboardjs.com/ + * + * Licensed MIT © Zeno Rocha + */(function(t,r){typeof kt=="object"&&typeof qr=="object"?qr.exports=r():typeof define=="function"&&define.amd?define([],r):typeof kt=="object"?kt.ClipboardJS=r():t.ClipboardJS=r()})(kt,function(){return function(){var e={686:function(o,n,i){"use strict";i.d(n,{default:function(){return Oi}});var s=i(279),a=i.n(s),c=i(370),p=i.n(c),l=i(817),f=i.n(l);function u(V){try{return document.execCommand(V)}catch(_){return!1}}var d=function(_){var O=f()(_);return u("cut"),O},y=d;function b(V){var _=document.documentElement.getAttribute("dir")==="rtl",O=document.createElement("textarea");O.style.fontSize="12pt",O.style.border="0",O.style.padding="0",O.style.margin="0",O.style.position="absolute",O.style[_?"right":"left"]="-9999px";var $=window.pageYOffset||document.documentElement.scrollTop;return O.style.top="".concat($,"px"),O.setAttribute("readonly",""),O.value=V,O}var D=function(_,O){var $=b(_);O.container.appendChild($);var N=f()($);return u("copy"),$.remove(),N},Q=function(_){var O=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{container:document.body},$="";return typeof _=="string"?$=D(_,O):_ instanceof HTMLInputElement&&!["text","search","url","tel","password"].includes(_==null?void 0:_.type)?$=D(_.value,O):($=f()(_),u("copy")),$},J=Q;function C(V){"@babel/helpers - typeof";return typeof Symbol=="function"&&typeof Symbol.iterator=="symbol"?C=function(O){return typeof O}:C=function(O){return O&&typeof Symbol=="function"&&O.constructor===Symbol&&O!==Symbol.prototype?"symbol":typeof O},C(V)}var ct=function(){var _=arguments.length>0&&arguments[0]!==void 0?arguments[0]:{},O=_.action,$=O===void 0?"copy":O,N=_.container,Y=_.target,ke=_.text;if($!=="copy"&&$!=="cut")throw new Error('Invalid "action" value, use either "copy" or "cut"');if(Y!==void 0)if(Y&&C(Y)==="object"&&Y.nodeType===1){if($==="copy"&&Y.hasAttribute("disabled"))throw new Error('Invalid "target" attribute. 
Please use "readonly" instead of "disabled" attribute');if($==="cut"&&(Y.hasAttribute("readonly")||Y.hasAttribute("disabled")))throw new Error(`Invalid "target" attribute. You can't cut text from elements with "readonly" or "disabled" attributes`)}else throw new Error('Invalid "target" value, use a valid Element');if(ke)return J(ke,{container:N});if(Y)return $==="cut"?y(Y):J(Y,{container:N})},Ve=ct;function Fe(V){"@babel/helpers - typeof";return typeof Symbol=="function"&&typeof Symbol.iterator=="symbol"?Fe=function(O){return typeof O}:Fe=function(O){return O&&typeof Symbol=="function"&&O.constructor===Symbol&&O!==Symbol.prototype?"symbol":typeof O},Fe(V)}function vi(V,_){if(!(V instanceof _))throw new TypeError("Cannot call a class as a function")}function eo(V,_){for(var O=0;O<_.length;O++){var $=_[O];$.enumerable=$.enumerable||!1,$.configurable=!0,"value"in $&&($.writable=!0),Object.defineProperty(V,$.key,$)}}function gi(V,_,O){return _&&eo(V.prototype,_),O&&eo(V,O),V}function xi(V,_){if(typeof _!="function"&&_!==null)throw new TypeError("Super expression must either be null or a function");V.prototype=Object.create(_&&_.prototype,{constructor:{value:V,writable:!0,configurable:!0}}),_&&br(V,_)}function br(V,_){return br=Object.setPrototypeOf||function($,N){return $.__proto__=N,$},br(V,_)}function yi(V){var _=Ti();return function(){var $=Rt(V),N;if(_){var Y=Rt(this).constructor;N=Reflect.construct($,arguments,Y)}else N=$.apply(this,arguments);return Ei(this,N)}}function Ei(V,_){return _&&(Fe(_)==="object"||typeof _=="function")?_:wi(V)}function wi(V){if(V===void 0)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return V}function Ti(){if(typeof Reflect=="undefined"||!Reflect.construct||Reflect.construct.sham)return!1;if(typeof Proxy=="function")return!0;try{return Date.prototype.toString.call(Reflect.construct(Date,[],function(){})),!0}catch(V){return!1}}function Rt(V){return 
Rt=Object.setPrototypeOf?Object.getPrototypeOf:function(O){return O.__proto__||Object.getPrototypeOf(O)},Rt(V)}function vr(V,_){var O="data-clipboard-".concat(V);if(_.hasAttribute(O))return _.getAttribute(O)}var Si=function(V){xi(O,V);var _=yi(O);function O($,N){var Y;return vi(this,O),Y=_.call(this),Y.resolveOptions(N),Y.listenClick($),Y}return gi(O,[{key:"resolveOptions",value:function(){var N=arguments.length>0&&arguments[0]!==void 0?arguments[0]:{};this.action=typeof N.action=="function"?N.action:this.defaultAction,this.target=typeof N.target=="function"?N.target:this.defaultTarget,this.text=typeof N.text=="function"?N.text:this.defaultText,this.container=Fe(N.container)==="object"?N.container:document.body}},{key:"listenClick",value:function(N){var Y=this;this.listener=p()(N,"click",function(ke){return Y.onClick(ke)})}},{key:"onClick",value:function(N){var Y=N.delegateTarget||N.currentTarget,ke=this.action(Y)||"copy",It=Ve({action:ke,container:this.container,target:this.target(Y),text:this.text(Y)});this.emit(It?"success":"error",{action:ke,text:It,trigger:Y,clearSelection:function(){Y&&Y.focus(),window.getSelection().removeAllRanges()}})}},{key:"defaultAction",value:function(N){return vr("action",N)}},{key:"defaultTarget",value:function(N){var Y=vr("target",N);if(Y)return document.querySelector(Y)}},{key:"defaultText",value:function(N){return vr("text",N)}},{key:"destroy",value:function(){this.listener.destroy()}}],[{key:"copy",value:function(N){var Y=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{container:document.body};return J(N,Y)}},{key:"cut",value:function(N){return y(N)}},{key:"isSupported",value:function(){var N=arguments.length>0&&arguments[0]!==void 0?arguments[0]:["copy","cut"],Y=typeof N=="string"?[N]:N,ke=!!document.queryCommandSupported;return Y.forEach(function(It){ke=ke&&!!document.queryCommandSupported(It)}),ke}}]),O}(a()),Oi=Si},828:function(o){var n=9;if(typeof Element!="undefined"&&!Element.prototype.matches){var 
i=Element.prototype;i.matches=i.matchesSelector||i.mozMatchesSelector||i.msMatchesSelector||i.oMatchesSelector||i.webkitMatchesSelector}function s(a,c){for(;a&&a.nodeType!==n;){if(typeof a.matches=="function"&&a.matches(c))return a;a=a.parentNode}}o.exports=s},438:function(o,n,i){var s=i(828);function a(l,f,u,d,y){var b=p.apply(this,arguments);return l.addEventListener(u,b,y),{destroy:function(){l.removeEventListener(u,b,y)}}}function c(l,f,u,d,y){return typeof l.addEventListener=="function"?a.apply(null,arguments):typeof u=="function"?a.bind(null,document).apply(null,arguments):(typeof l=="string"&&(l=document.querySelectorAll(l)),Array.prototype.map.call(l,function(b){return a(b,f,u,d,y)}))}function p(l,f,u,d){return function(y){y.delegateTarget=s(y.target,f),y.delegateTarget&&d.call(l,y)}}o.exports=c},879:function(o,n){n.node=function(i){return i!==void 0&&i instanceof HTMLElement&&i.nodeType===1},n.nodeList=function(i){var s=Object.prototype.toString.call(i);return i!==void 0&&(s==="[object NodeList]"||s==="[object HTMLCollection]")&&"length"in i&&(i.length===0||n.node(i[0]))},n.string=function(i){return typeof i=="string"||i instanceof String},n.fn=function(i){var s=Object.prototype.toString.call(i);return s==="[object Function]"}},370:function(o,n,i){var s=i(879),a=i(438);function c(u,d,y){if(!u&&!d&&!y)throw new Error("Missing required arguments");if(!s.string(d))throw new TypeError("Second argument must be a String");if(!s.fn(y))throw new TypeError("Third argument must be a Function");if(s.node(u))return p(u,d,y);if(s.nodeList(u))return l(u,d,y);if(s.string(u))return f(u,d,y);throw new TypeError("First argument must be a String, HTMLElement, HTMLCollection, or NodeList")}function p(u,d,y){return u.addEventListener(d,y),{destroy:function(){u.removeEventListener(d,y)}}}function l(u,d,y){return 
Array.prototype.forEach.call(u,function(b){b.addEventListener(d,y)}),{destroy:function(){Array.prototype.forEach.call(u,function(b){b.removeEventListener(d,y)})}}}function f(u,d,y){return a(document.body,u,d,y)}o.exports=c},817:function(o){function n(i){var s;if(i.nodeName==="SELECT")i.focus(),s=i.value;else if(i.nodeName==="INPUT"||i.nodeName==="TEXTAREA"){var a=i.hasAttribute("readonly");a||i.setAttribute("readonly",""),i.select(),i.setSelectionRange(0,i.value.length),a||i.removeAttribute("readonly"),s=i.value}else{i.hasAttribute("contenteditable")&&i.focus();var c=window.getSelection(),p=document.createRange();p.selectNodeContents(i),c.removeAllRanges(),c.addRange(p),s=c.toString()}return s}o.exports=n},279:function(o){function n(){}n.prototype={on:function(i,s,a){var c=this.e||(this.e={});return(c[i]||(c[i]=[])).push({fn:s,ctx:a}),this},once:function(i,s,a){var c=this;function p(){c.off(i,p),s.apply(a,arguments)}return p._=s,this.on(i,p,a)},emit:function(i){var s=[].slice.call(arguments,1),a=((this.e||(this.e={}))[i]||[]).slice(),c=0,p=a.length;for(c;c{"use strict";/*! 
+ * escape-html + * Copyright(c) 2012-2013 TJ Holowaychuk + * Copyright(c) 2015 Andreas Lubbe + * Copyright(c) 2015 Tiancheng "Timothy" Gu + * MIT Licensed + */var Wa=/["'&<>]/;Vn.exports=Ua;function Ua(e){var t=""+e,r=Wa.exec(t);if(!r)return t;var o,n="",i=0,s=0;for(i=r.index;i0&&i[i.length-1])&&(p[0]===6||p[0]===2)){r=0;continue}if(p[0]===3&&(!i||p[1]>i[0]&&p[1]=e.length&&(e=void 0),{value:e&&e[o++],done:!e}}};throw new TypeError(t?"Object is not iterable.":"Symbol.iterator is not defined.")}function z(e,t){var r=typeof Symbol=="function"&&e[Symbol.iterator];if(!r)return e;var o=r.call(e),n,i=[],s;try{for(;(t===void 0||t-- >0)&&!(n=o.next()).done;)i.push(n.value)}catch(a){s={error:a}}finally{try{n&&!n.done&&(r=o.return)&&r.call(o)}finally{if(s)throw s.error}}return i}function K(e,t,r){if(r||arguments.length===2)for(var o=0,n=t.length,i;o1||a(u,d)})})}function a(u,d){try{c(o[u](d))}catch(y){f(i[0][3],y)}}function c(u){u.value instanceof ot?Promise.resolve(u.value.v).then(p,l):f(i[0][2],u)}function p(u){a("next",u)}function l(u){a("throw",u)}function f(u,d){u(d),i.shift(),i.length&&a(i[0][0],i[0][1])}}function po(e){if(!Symbol.asyncIterator)throw new TypeError("Symbol.asyncIterator is not defined.");var t=e[Symbol.asyncIterator],r;return t?t.call(e):(e=typeof be=="function"?be(e):e[Symbol.iterator](),r={},o("next"),o("throw"),o("return"),r[Symbol.asyncIterator]=function(){return this},r);function o(i){r[i]=e[i]&&function(s){return new Promise(function(a,c){s=e[i](s),n(a,c,s.done,s.value)})}}function n(i,s,a,c){Promise.resolve(c).then(function(p){i({value:p,done:a})},s)}}function k(e){return typeof e=="function"}function pt(e){var t=function(o){Error.call(o),o.stack=new Error().stack},r=e(t);return r.prototype=Object.create(Error.prototype),r.prototype.constructor=r,r}var Ut=pt(function(e){return function(r){e(this),this.message=r?r.length+` errors occurred during unsubscription: +`+r.map(function(o,n){return n+1+") "+o.toString()}).join(` + 
`):"",this.name="UnsubscriptionError",this.errors=r}});function ze(e,t){if(e){var r=e.indexOf(t);0<=r&&e.splice(r,1)}}var je=function(){function e(t){this.initialTeardown=t,this.closed=!1,this._parentage=null,this._finalizers=null}return e.prototype.unsubscribe=function(){var t,r,o,n,i;if(!this.closed){this.closed=!0;var s=this._parentage;if(s)if(this._parentage=null,Array.isArray(s))try{for(var a=be(s),c=a.next();!c.done;c=a.next()){var p=c.value;p.remove(this)}}catch(b){t={error:b}}finally{try{c&&!c.done&&(r=a.return)&&r.call(a)}finally{if(t)throw t.error}}else s.remove(this);var l=this.initialTeardown;if(k(l))try{l()}catch(b){i=b instanceof Ut?b.errors:[b]}var f=this._finalizers;if(f){this._finalizers=null;try{for(var u=be(f),d=u.next();!d.done;d=u.next()){var y=d.value;try{lo(y)}catch(b){i=i!=null?i:[],b instanceof Ut?i=K(K([],z(i)),z(b.errors)):i.push(b)}}}catch(b){o={error:b}}finally{try{d&&!d.done&&(n=u.return)&&n.call(u)}finally{if(o)throw o.error}}}if(i)throw new Ut(i)}},e.prototype.add=function(t){var r;if(t&&t!==this)if(this.closed)lo(t);else{if(t instanceof e){if(t.closed||t._hasParent(this))return;t._addParent(this)}(this._finalizers=(r=this._finalizers)!==null&&r!==void 0?r:[]).push(t)}},e.prototype._hasParent=function(t){var r=this._parentage;return r===t||Array.isArray(r)&&r.includes(t)},e.prototype._addParent=function(t){var r=this._parentage;this._parentage=Array.isArray(r)?(r.push(t),r):r?[r,t]:t},e.prototype._removeParent=function(t){var r=this._parentage;r===t?this._parentage=null:Array.isArray(r)&&ze(r,t)},e.prototype.remove=function(t){var r=this._finalizers;r&&ze(r,t),t instanceof e&&t._removeParent(this)},e.EMPTY=function(){var t=new e;return t.closed=!0,t}(),e}();var Tr=je.EMPTY;function Nt(e){return e instanceof je||e&&"closed"in e&&k(e.remove)&&k(e.add)&&k(e.unsubscribe)}function lo(e){k(e)?e():e.unsubscribe()}var He={onUnhandledError:null,onStoppedNotification:null,Promise:void 
0,useDeprecatedSynchronousErrorHandling:!1,useDeprecatedNextContext:!1};var lt={setTimeout:function(e,t){for(var r=[],o=2;o0},enumerable:!1,configurable:!0}),t.prototype._trySubscribe=function(r){return this._throwIfClosed(),e.prototype._trySubscribe.call(this,r)},t.prototype._subscribe=function(r){return this._throwIfClosed(),this._checkFinalizedStatuses(r),this._innerSubscribe(r)},t.prototype._innerSubscribe=function(r){var o=this,n=this,i=n.hasError,s=n.isStopped,a=n.observers;return i||s?Tr:(this.currentObservers=null,a.push(r),new je(function(){o.currentObservers=null,ze(a,r)}))},t.prototype._checkFinalizedStatuses=function(r){var o=this,n=o.hasError,i=o.thrownError,s=o.isStopped;n?r.error(i):s&&r.complete()},t.prototype.asObservable=function(){var r=new I;return r.source=this,r},t.create=function(r,o){return new xo(r,o)},t}(I);var xo=function(e){se(t,e);function t(r,o){var n=e.call(this)||this;return n.destination=r,n.source=o,n}return t.prototype.next=function(r){var o,n;(n=(o=this.destination)===null||o===void 0?void 0:o.next)===null||n===void 0||n.call(o,r)},t.prototype.error=function(r){var o,n;(n=(o=this.destination)===null||o===void 0?void 0:o.error)===null||n===void 0||n.call(o,r)},t.prototype.complete=function(){var r,o;(o=(r=this.destination)===null||r===void 0?void 0:r.complete)===null||o===void 0||o.call(r)},t.prototype._subscribe=function(r){var o,n;return(n=(o=this.source)===null||o===void 0?void 0:o.subscribe(r))!==null&&n!==void 0?n:Tr},t}(x);var St={now:function(){return(St.delegate||Date).now()},delegate:void 0};var Ot=function(e){se(t,e);function t(r,o,n){r===void 0&&(r=1/0),o===void 0&&(o=1/0),n===void 0&&(n=St);var i=e.call(this)||this;return i._bufferSize=r,i._windowTime=o,i._timestampProvider=n,i._buffer=[],i._infiniteTimeWindow=!0,i._infiniteTimeWindow=o===1/0,i._bufferSize=Math.max(1,r),i._windowTime=Math.max(1,o),i}return t.prototype.next=function(r){var 
o=this,n=o.isStopped,i=o._buffer,s=o._infiniteTimeWindow,a=o._timestampProvider,c=o._windowTime;n||(i.push(r),!s&&i.push(a.now()+c)),this._trimBuffer(),e.prototype.next.call(this,r)},t.prototype._subscribe=function(r){this._throwIfClosed(),this._trimBuffer();for(var o=this._innerSubscribe(r),n=this,i=n._infiniteTimeWindow,s=n._buffer,a=s.slice(),c=0;c0?e.prototype.requestAsyncId.call(this,r,o,n):(r.actions.push(this),r._scheduled||(r._scheduled=ut.requestAnimationFrame(function(){return r.flush(void 0)})))},t.prototype.recycleAsyncId=function(r,o,n){var i;if(n===void 0&&(n=0),n!=null?n>0:this.delay>0)return e.prototype.recycleAsyncId.call(this,r,o,n);var s=r.actions;o!=null&&((i=s[s.length-1])===null||i===void 0?void 0:i.id)!==o&&(ut.cancelAnimationFrame(o),r._scheduled=void 0)},t}(zt);var wo=function(e){se(t,e);function t(){return e!==null&&e.apply(this,arguments)||this}return t.prototype.flush=function(r){this._active=!0;var o=this._scheduled;this._scheduled=void 0;var n=this.actions,i;r=r||n.shift();do if(i=r.execute(r.state,r.delay))break;while((r=n[0])&&r.id===o&&n.shift());if(this._active=!1,i){for(;(r=n[0])&&r.id===o&&n.shift();)r.unsubscribe();throw i}},t}(qt);var ge=new wo(Eo);var M=new I(function(e){return e.complete()});function Kt(e){return e&&k(e.schedule)}function Cr(e){return e[e.length-1]}function Ge(e){return k(Cr(e))?e.pop():void 0}function Ae(e){return Kt(Cr(e))?e.pop():void 0}function Qt(e,t){return typeof Cr(e)=="number"?e.pop():t}var dt=function(e){return e&&typeof e.length=="number"&&typeof e!="function"};function Yt(e){return k(e==null?void 0:e.then)}function Bt(e){return k(e[ft])}function Gt(e){return Symbol.asyncIterator&&k(e==null?void 0:e[Symbol.asyncIterator])}function Jt(e){return new TypeError("You provided "+(e!==null&&typeof e=="object"?"an invalid object":"'"+e+"'")+" where a stream was expected. 
You can provide an Observable, Promise, ReadableStream, Array, AsyncIterable, or Iterable.")}function Wi(){return typeof Symbol!="function"||!Symbol.iterator?"@@iterator":Symbol.iterator}var Xt=Wi();function Zt(e){return k(e==null?void 0:e[Xt])}function er(e){return co(this,arguments,function(){var r,o,n,i;return Wt(this,function(s){switch(s.label){case 0:r=e.getReader(),s.label=1;case 1:s.trys.push([1,,9,10]),s.label=2;case 2:return[4,ot(r.read())];case 3:return o=s.sent(),n=o.value,i=o.done,i?[4,ot(void 0)]:[3,5];case 4:return[2,s.sent()];case 5:return[4,ot(n)];case 6:return[4,s.sent()];case 7:return s.sent(),[3,2];case 8:return[3,10];case 9:return r.releaseLock(),[7];case 10:return[2]}})})}function tr(e){return k(e==null?void 0:e.getReader)}function F(e){if(e instanceof I)return e;if(e!=null){if(Bt(e))return Ui(e);if(dt(e))return Ni(e);if(Yt(e))return Di(e);if(Gt(e))return To(e);if(Zt(e))return Vi(e);if(tr(e))return zi(e)}throw Jt(e)}function Ui(e){return new I(function(t){var r=e[ft]();if(k(r.subscribe))return r.subscribe(t);throw new TypeError("Provided object does not correctly implement Symbol.observable")})}function Ni(e){return new I(function(t){for(var r=0;r=2;return function(o){return o.pipe(e?v(function(n,i){return e(n,i,o)}):pe,ue(1),r?$e(t):Uo(function(){return new or}))}}function Rr(e){return e<=0?function(){return M}:g(function(t,r){var o=[];t.subscribe(E(r,function(n){o.push(n),e=2,!0))}function de(e){e===void 0&&(e={});var t=e.connector,r=t===void 0?function(){return new x}:t,o=e.resetOnError,n=o===void 0?!0:o,i=e.resetOnComplete,s=i===void 0?!0:i,a=e.resetOnRefCountZero,c=a===void 0?!0:a;return function(p){var l,f,u,d=0,y=!1,b=!1,D=function(){f==null||f.unsubscribe(),f=void 0},Q=function(){D(),l=u=void 0,y=b=!1},J=function(){var C=l;Q(),C==null||C.unsubscribe()};return g(function(C,ct){d++,!b&&!y&&D();var Ve=u=u!=null?u:r();ct.add(function(){d--,d===0&&!b&&!y&&(f=jr(J,c))}),Ve.subscribe(ct),!l&&d>0&&(l=new it({next:function(Fe){return 
Ve.next(Fe)},error:function(Fe){b=!0,D(),f=jr(Q,n,Fe),Ve.error(Fe)},complete:function(){y=!0,D(),f=jr(Q,s),Ve.complete()}}),F(C).subscribe(l))})(p)}}function jr(e,t){for(var r=[],o=2;oe.next(document)),e}function W(e,t=document){return Array.from(t.querySelectorAll(e))}function U(e,t=document){let r=ce(e,t);if(typeof r=="undefined")throw new ReferenceError(`Missing element: expected "${e}" to be present`);return r}function ce(e,t=document){return t.querySelector(e)||void 0}function Ie(){return document.activeElement instanceof HTMLElement&&document.activeElement||void 0}var ca=L(h(document.body,"focusin"),h(document.body,"focusout")).pipe(ye(1),q(void 0),m(()=>Ie()||document.body),Z(1));function vt(e){return ca.pipe(m(t=>e.contains(t)),X())}function qo(e,t){return L(h(e,"mouseenter").pipe(m(()=>!0)),h(e,"mouseleave").pipe(m(()=>!1))).pipe(t?ye(t):pe,q(!1))}function Ue(e){return{x:e.offsetLeft,y:e.offsetTop}}function Ko(e){return L(h(window,"load"),h(window,"resize")).pipe(Le(0,ge),m(()=>Ue(e)),q(Ue(e)))}function ir(e){return{x:e.scrollLeft,y:e.scrollTop}}function et(e){return L(h(e,"scroll"),h(window,"resize")).pipe(Le(0,ge),m(()=>ir(e)),q(ir(e)))}function Qo(e,t){if(typeof t=="string"||typeof t=="number")e.innerHTML+=t.toString();else if(t instanceof Node)e.appendChild(t);else if(Array.isArray(t))for(let r of t)Qo(e,r)}function S(e,t,...r){let o=document.createElement(e);if(t)for(let n of Object.keys(t))typeof t[n]!="undefined"&&(typeof t[n]!="boolean"?o.setAttribute(n,t[n]):o.setAttribute(n,""));for(let n of r)Qo(o,n);return o}function ar(e){if(e>999){let t=+((e-950)%1e3>99);return`${((e+1e-6)/1e3).toFixed(t)}k`}else return e.toString()}function gt(e){let t=S("script",{src:e});return H(()=>(document.head.appendChild(t),L(h(t,"load"),h(t,"error").pipe(w(()=>kr(()=>new ReferenceError(`Invalid script: ${e}`))))).pipe(m(()=>{}),A(()=>document.head.removeChild(t)),ue(1))))}var Yo=new x,pa=H(()=>typeof 
ResizeObserver=="undefined"?gt("https://unpkg.com/resize-observer-polyfill"):R(void 0)).pipe(m(()=>new ResizeObserver(e=>{for(let t of e)Yo.next(t)})),w(e=>L(Ke,R(e)).pipe(A(()=>e.disconnect()))),Z(1));function le(e){return{width:e.offsetWidth,height:e.offsetHeight}}function Se(e){return pa.pipe(T(t=>t.observe(e)),w(t=>Yo.pipe(v(({target:r})=>r===e),A(()=>t.unobserve(e)),m(()=>le(e)))),q(le(e)))}function xt(e){return{width:e.scrollWidth,height:e.scrollHeight}}function sr(e){let t=e.parentElement;for(;t&&(e.scrollWidth<=t.scrollWidth&&e.scrollHeight<=t.scrollHeight);)t=(e=t).parentElement;return t?e:void 0}var Bo=new x,la=H(()=>R(new IntersectionObserver(e=>{for(let t of e)Bo.next(t)},{threshold:0}))).pipe(w(e=>L(Ke,R(e)).pipe(A(()=>e.disconnect()))),Z(1));function yt(e){return la.pipe(T(t=>t.observe(e)),w(t=>Bo.pipe(v(({target:r})=>r===e),A(()=>t.unobserve(e)),m(({isIntersecting:r})=>r))))}function Go(e,t=16){return et(e).pipe(m(({y:r})=>{let o=le(e),n=xt(e);return r>=n.height-o.height-t}),X())}var cr={drawer:U("[data-md-toggle=drawer]"),search:U("[data-md-toggle=search]")};function Jo(e){return cr[e].checked}function Ye(e,t){cr[e].checked!==t&&cr[e].click()}function Ne(e){let t=cr[e];return h(t,"change").pipe(m(()=>t.checked),q(t.checked))}function ma(e,t){switch(e.constructor){case HTMLInputElement:return e.type==="radio"?/^Arrow/.test(t):!0;case HTMLSelectElement:case HTMLTextAreaElement:return!0;default:return e.isContentEditable}}function fa(){return L(h(window,"compositionstart").pipe(m(()=>!0)),h(window,"compositionend").pipe(m(()=>!1))).pipe(q(!1))}function Xo(){let e=h(window,"keydown").pipe(v(t=>!(t.metaKey||t.ctrlKey)),m(t=>({mode:Jo("search")?"search":"global",type:t.key,claim(){t.preventDefault(),t.stopPropagation()}})),v(({mode:t,type:r})=>{if(t==="global"){let o=Ie();if(typeof o!="undefined")return!ma(o,r)}return!0}),de());return fa().pipe(w(t=>t?M:e))}function me(){return new URL(location.href)}function st(e,t=!1){if(G("navigation.instant")&&!t){let 
r=S("a",{href:e.href});document.body.appendChild(r),r.click(),r.remove()}else location.href=e.href}function Zo(){return new x}function en(){return location.hash.slice(1)}function pr(e){let t=S("a",{href:e});t.addEventListener("click",r=>r.stopPropagation()),t.click()}function ua(e){return L(h(window,"hashchange"),e).pipe(m(en),q(en()),v(t=>t.length>0),Z(1))}function tn(e){return ua(e).pipe(m(t=>ce(`[id="${t}"]`)),v(t=>typeof t!="undefined"))}function At(e){let t=matchMedia(e);return nr(r=>t.addListener(()=>r(t.matches))).pipe(q(t.matches))}function rn(){let e=matchMedia("print");return L(h(window,"beforeprint").pipe(m(()=>!0)),h(window,"afterprint").pipe(m(()=>!1))).pipe(q(e.matches))}function Dr(e,t){return e.pipe(w(r=>r?t():M))}function lr(e,t){return new I(r=>{let o=new XMLHttpRequest;o.open("GET",`${e}`),o.responseType="blob",o.addEventListener("load",()=>{o.status>=200&&o.status<300?(r.next(o.response),r.complete()):r.error(new Error(o.statusText))}),o.addEventListener("error",()=>{r.error(new Error("Network Error"))}),o.addEventListener("abort",()=>{r.error(new Error("Request aborted"))}),typeof(t==null?void 0:t.progress$)!="undefined"&&(o.addEventListener("progress",n=>{if(n.lengthComputable)t.progress$.next(n.loaded/n.total*100);else{let i=Number(o.getResponseHeader("Content-Length"))||0;t.progress$.next(n.loaded/i*100)}}),t.progress$.next(5)),o.send()})}function De(e,t){return lr(e,t).pipe(w(r=>r.text()),m(r=>JSON.parse(r)),Z(1))}function on(e,t){let r=new DOMParser;return lr(e,t).pipe(w(o=>o.text()),m(o=>r.parseFromString(o,"text/xml")),Z(1))}function nn(){return{x:Math.max(0,scrollX),y:Math.max(0,scrollY)}}function an(){return L(h(window,"scroll",{passive:!0}),h(window,"resize",{passive:!0})).pipe(m(nn),q(nn()))}function sn(){return{width:innerWidth,height:innerHeight}}function cn(){return h(window,"resize",{passive:!0}).pipe(m(sn),q(sn()))}function pn(){return B([an(),cn()]).pipe(m(([e,t])=>({offset:e,size:t})),Z(1))}function 
mr(e,{viewport$:t,header$:r}){let o=t.pipe(te("size")),n=B([o,r]).pipe(m(()=>Ue(e)));return B([r,t,n]).pipe(m(([{height:i},{offset:s,size:a},{x:c,y:p}])=>({offset:{x:s.x-c,y:s.y-p+i},size:a})))}function da(e){return h(e,"message",t=>t.data)}function ha(e){let t=new x;return t.subscribe(r=>e.postMessage(r)),t}function ln(e,t=new Worker(e)){let r=da(t),o=ha(t),n=new x;n.subscribe(o);let i=o.pipe(ee(),oe(!0));return n.pipe(ee(),Re(r.pipe(j(i))),de())}var ba=U("#__config"),Et=JSON.parse(ba.textContent);Et.base=`${new URL(Et.base,me())}`;function he(){return Et}function G(e){return Et.features.includes(e)}function we(e,t){return typeof t!="undefined"?Et.translations[e].replace("#",t.toString()):Et.translations[e]}function Oe(e,t=document){return U(`[data-md-component=${e}]`,t)}function ne(e,t=document){return W(`[data-md-component=${e}]`,t)}function va(e){let t=U(".md-typeset > :first-child",e);return h(t,"click",{once:!0}).pipe(m(()=>U(".md-typeset",e)),m(r=>({hash:__md_hash(r.innerHTML)})))}function mn(e){if(!G("announce.dismiss")||!e.childElementCount)return M;if(!e.hidden){let t=U(".md-typeset",e);__md_hash(t.innerHTML)===__md_get("__announce")&&(e.hidden=!0)}return H(()=>{let t=new x;return t.subscribe(({hash:r})=>{e.hidden=!0,__md_set("__announce",r)}),va(e).pipe(T(r=>t.next(r)),A(()=>t.complete()),m(r=>P({ref:e},r)))})}function ga(e,{target$:t}){return t.pipe(m(r=>({hidden:r!==e})))}function fn(e,t){let r=new x;return r.subscribe(({hidden:o})=>{e.hidden=o}),ga(e,t).pipe(T(o=>r.next(o)),A(()=>r.complete()),m(o=>P({ref:e},o)))}function Ct(e,t){return t==="inline"?S("div",{class:"md-tooltip md-tooltip--inline",id:e,role:"tooltip"},S("div",{class:"md-tooltip__inner md-typeset"})):S("div",{class:"md-tooltip",id:e,role:"tooltip"},S("div",{class:"md-tooltip__inner md-typeset"}))}function un(e,t){if(t=t?`${t}_annotation_${e}`:void 0,t){let r=t?`#${t}`:void 0;return 
S("aside",{class:"md-annotation",tabIndex:0},Ct(t),S("a",{href:r,class:"md-annotation__index",tabIndex:-1},S("span",{"data-md-annotation-id":e})))}else return S("aside",{class:"md-annotation",tabIndex:0},Ct(t),S("span",{class:"md-annotation__index",tabIndex:-1},S("span",{"data-md-annotation-id":e})))}function dn(e){return S("button",{class:"md-clipboard md-icon",title:we("clipboard.copy"),"data-clipboard-target":`#${e} > code`})}function Vr(e,t){let r=t&2,o=t&1,n=Object.keys(e.terms).filter(c=>!e.terms[c]).reduce((c,p)=>[...c,S("del",null,p)," "],[]).slice(0,-1),i=he(),s=new URL(e.location,i.base);G("search.highlight")&&s.searchParams.set("h",Object.entries(e.terms).filter(([,c])=>c).reduce((c,[p])=>`${c} ${p}`.trim(),""));let{tags:a}=he();return S("a",{href:`${s}`,class:"md-search-result__link",tabIndex:-1},S("article",{class:"md-search-result__article md-typeset","data-md-score":e.score.toFixed(2)},r>0&&S("div",{class:"md-search-result__icon md-icon"}),r>0&&S("h1",null,e.title),r<=0&&S("h2",null,e.title),o>0&&e.text.length>0&&e.text,e.tags&&e.tags.map(c=>{let p=a?c in a?`md-tag-icon md-tag--${a[c]}`:"md-tag-icon":"";return S("span",{class:`md-tag ${p}`},c)}),o>0&&n.length>0&&S("p",{class:"md-search-result__terms"},we("search.result.term.missing"),": ",...n)))}function hn(e){let t=e[0].score,r=[...e],o=he(),n=r.findIndex(l=>!`${new URL(l.location,o.base)}`.includes("#")),[i]=r.splice(n,1),s=r.findIndex(l=>l.scoreVr(l,1)),...c.length?[S("details",{class:"md-search-result__more"},S("summary",{tabIndex:-1},S("div",null,c.length>0&&c.length===1?we("search.result.more.one"):we("search.result.more.other",c.length))),...c.map(l=>Vr(l,1)))]:[]];return S("li",{class:"md-search-result__item"},p)}function bn(e){return S("ul",{class:"md-source__facts"},Object.entries(e).map(([t,r])=>S("li",{class:`md-source__fact md-source__fact--${t}`},typeof r=="number"?ar(r):r)))}function zr(e){let t=`tabbed-control tabbed-control--${e}`;return 
S("div",{class:t,hidden:!0},S("button",{class:"tabbed-button",tabIndex:-1,"aria-hidden":"true"}))}function vn(e){return S("div",{class:"md-typeset__scrollwrap"},S("div",{class:"md-typeset__table"},e))}function xa(e){let t=he(),r=new URL(`../${e.version}/`,t.base);return S("li",{class:"md-version__item"},S("a",{href:`${r}`,class:"md-version__link"},e.title))}function gn(e,t){return S("div",{class:"md-version"},S("button",{class:"md-version__current","aria-label":we("select.version")},t.title),S("ul",{class:"md-version__list"},e.map(xa)))}var ya=0;function Ea(e,t){document.body.append(e);let{width:r}=le(e);e.style.setProperty("--md-tooltip-width",`${r}px`),e.remove();let o=sr(t),n=typeof o!="undefined"?et(o):R({x:0,y:0}),i=L(vt(t),qo(t)).pipe(X());return B([i,n]).pipe(m(([s,a])=>{let{x:c,y:p}=Ue(t),l=le(t),f=t.closest("table");return f&&t.parentElement&&(c+=f.offsetLeft+t.parentElement.offsetLeft,p+=f.offsetTop+t.parentElement.offsetTop),{active:s,offset:{x:c-a.x+l.width/2-r/2,y:p-a.y+l.height+8}}}))}function Be(e){let t=e.title;if(!t.length)return M;let r=`__tooltip_${ya++}`,o=Ct(r,"inline"),n=U(".md-typeset",o);return n.innerHTML=t,H(()=>{let i=new x;return 
i.subscribe({next({offset:s}){o.style.setProperty("--md-tooltip-x",`${s.x}px`),o.style.setProperty("--md-tooltip-y",`${s.y}px`)},complete(){o.style.removeProperty("--md-tooltip-x"),o.style.removeProperty("--md-tooltip-y")}}),L(i.pipe(v(({active:s})=>s)),i.pipe(ye(250),v(({active:s})=>!s))).subscribe({next({active:s}){s?(e.insertAdjacentElement("afterend",o),e.setAttribute("aria-describedby",r),e.removeAttribute("title")):(o.remove(),e.removeAttribute("aria-describedby"),e.setAttribute("title",t))},complete(){o.remove(),e.removeAttribute("aria-describedby"),e.setAttribute("title",t)}}),i.pipe(Le(16,ge)).subscribe(({active:s})=>{o.classList.toggle("md-tooltip--active",s)}),i.pipe(_t(125,ge),v(()=>!!e.offsetParent),m(()=>e.offsetParent.getBoundingClientRect()),m(({x:s})=>s)).subscribe({next(s){s?o.style.setProperty("--md-tooltip-0",`${-s}px`):o.style.removeProperty("--md-tooltip-0")},complete(){o.style.removeProperty("--md-tooltip-0")}}),Ea(o,e).pipe(T(s=>i.next(s)),A(()=>i.complete()),m(s=>P({ref:e},s)))}).pipe(qe(ie))}function wa(e,t){let r=H(()=>B([Ko(e),et(t)])).pipe(m(([{x:o,y:n},i])=>{let{width:s,height:a}=le(e);return{x:o-i.x+s/2,y:n-i.y+a/2}}));return vt(e).pipe(w(o=>r.pipe(m(n=>({active:o,offset:n})),ue(+!o||1/0))))}function xn(e,t,{target$:r}){let[o,n]=Array.from(e.children);return H(()=>{let i=new x,s=i.pipe(ee(),oe(!0));return 
i.subscribe({next({offset:a}){e.style.setProperty("--md-tooltip-x",`${a.x}px`),e.style.setProperty("--md-tooltip-y",`${a.y}px`)},complete(){e.style.removeProperty("--md-tooltip-x"),e.style.removeProperty("--md-tooltip-y")}}),yt(e).pipe(j(s)).subscribe(a=>{e.toggleAttribute("data-md-visible",a)}),L(i.pipe(v(({active:a})=>a)),i.pipe(ye(250),v(({active:a})=>!a))).subscribe({next({active:a}){a?e.prepend(o):o.remove()},complete(){e.prepend(o)}}),i.pipe(Le(16,ge)).subscribe(({active:a})=>{o.classList.toggle("md-tooltip--active",a)}),i.pipe(_t(125,ge),v(()=>!!e.offsetParent),m(()=>e.offsetParent.getBoundingClientRect()),m(({x:a})=>a)).subscribe({next(a){a?e.style.setProperty("--md-tooltip-0",`${-a}px`):e.style.removeProperty("--md-tooltip-0")},complete(){e.style.removeProperty("--md-tooltip-0")}}),h(n,"click").pipe(j(s),v(a=>!(a.metaKey||a.ctrlKey))).subscribe(a=>{a.stopPropagation(),a.preventDefault()}),h(n,"mousedown").pipe(j(s),ae(i)).subscribe(([a,{active:c}])=>{var p;if(a.button!==0||a.metaKey||a.ctrlKey)a.preventDefault();else if(c){a.preventDefault();let l=e.parentElement.closest(".md-annotation");l instanceof HTMLElement?l.focus():(p=Ie())==null||p.blur()}}),r.pipe(j(s),v(a=>a===o),Qe(125)).subscribe(()=>e.focus()),wa(e,t).pipe(T(a=>i.next(a)),A(()=>i.complete()),m(a=>P({ref:e},a)))})}function Ta(e){return e.tagName==="CODE"?W(".c, .c1, .cm",e):[e]}function Sa(e){let t=[];for(let r of Ta(e)){let o=[],n=document.createNodeIterator(r,NodeFilter.SHOW_TEXT);for(let i=n.nextNode();i;i=n.nextNode())o.push(i);for(let i of o){let s;for(;s=/(\(\d+\))(!)?/.exec(i.textContent);){let[,a,c]=s;if(typeof c=="undefined"){let p=i.splitText(s.index);i=p.splitText(a.length),t.push(p)}else{i.textContent=a,t.push(i);break}}}}return t}function yn(e,t){t.append(...Array.from(e.childNodes))}function fr(e,t,{target$:r,print$:o}){let n=t.closest("[id]"),i=n==null?void 0:n.id,s=new Map;for(let a of Sa(t)){let[,c]=a.textContent.match(/\((\d+)\)/);ce(`:scope > 
li:nth-child(${c})`,e)&&(s.set(c,un(c,i)),a.replaceWith(s.get(c)))}return s.size===0?M:H(()=>{let a=new x,c=a.pipe(ee(),oe(!0)),p=[];for(let[l,f]of s)p.push([U(".md-typeset",f),U(`:scope > li:nth-child(${l})`,e)]);return o.pipe(j(c)).subscribe(l=>{e.hidden=!l,e.classList.toggle("md-annotation-list",l);for(let[f,u]of p)l?yn(f,u):yn(u,f)}),L(...[...s].map(([,l])=>xn(l,t,{target$:r}))).pipe(A(()=>a.complete()),de())})}function En(e){if(e.nextElementSibling){let t=e.nextElementSibling;if(t.tagName==="OL")return t;if(t.tagName==="P"&&!t.children.length)return En(t)}}function wn(e,t){return H(()=>{let r=En(e);return typeof r!="undefined"?fr(r,e,t):M})}var Tn=jt(Kr());var Oa=0;function Sn(e){if(e.nextElementSibling){let t=e.nextElementSibling;if(t.tagName==="OL")return t;if(t.tagName==="P"&&!t.children.length)return Sn(t)}}function Ma(e){return Se(e).pipe(m(({width:t})=>({scrollable:xt(e).width>t})),te("scrollable"))}function On(e,t){let{matches:r}=matchMedia("(hover)"),o=H(()=>{let n=new x,i=n.pipe(Rr(1));n.subscribe(({scrollable:c})=>{c&&r?e.setAttribute("tabindex","0"):e.removeAttribute("tabindex")});let s=[];if(Tn.default.isSupported()&&(e.closest(".copy")||G("content.code.copy")&&!e.closest(".no-copy"))){let c=e.closest("pre");c.id=`__code_${Oa++}`;let p=dn(c.id);c.insertBefore(p,e),G("content.tooltips")&&s.push(Be(p))}let a=e.closest(".highlight");if(a instanceof HTMLElement){let c=Sn(a);if(typeof c!="undefined"&&(a.classList.contains("annotate")||G("content.code.annotate"))){let p=fr(c,e,t);s.push(Se(a).pipe(j(i),m(({width:l,height:f})=>l&&f),X(),w(l=>l?p:M)))}}return Ma(e).pipe(T(c=>n.next(c)),A(()=>n.complete()),m(c=>P({ref:e},c)),Re(...s))});return G("content.lazy")?yt(e).pipe(v(n=>n),ue(1),w(()=>o)):o}function La(e,{target$:t,print$:r}){let o=!0;return L(t.pipe(m(n=>n.closest("details:not([open])")),v(n=>e===n),m(()=>({action:"open",reveal:!0}))),r.pipe(v(n=>n||!o),T(()=>o=e.open),m(n=>({action:n?"open":"close"}))))}function Mn(e,t){return H(()=>{let r=new 
x;return r.subscribe(({action:o,reveal:n})=>{e.toggleAttribute("open",o==="open"),n&&e.scrollIntoView()}),La(e,t).pipe(T(o=>r.next(o)),A(()=>r.complete()),m(o=>P({ref:e},o)))})}var Ln=".node circle,.node ellipse,.node path,.node polygon,.node rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}marker{fill:var(--md-mermaid-edge-color)!important}.edgeLabel .label rect{fill:#0000}.label{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.label foreignObject{line-height:normal;overflow:visible}.label div .edgeLabel{color:var(--md-mermaid-label-fg-color)}.edgeLabel,.edgeLabel rect,.label div .edgeLabel{background-color:var(--md-mermaid-label-bg-color)}.edgeLabel,.edgeLabel rect{fill:var(--md-mermaid-label-bg-color);color:var(--md-mermaid-edge-color)}.edgePath .path,.flowchart-link{stroke:var(--md-mermaid-edge-color);stroke-width:.05rem}.edgePath .arrowheadPath{fill:var(--md-mermaid-edge-color);stroke:none}.cluster rect{fill:var(--md-default-fg-color--lightest);stroke:var(--md-default-fg-color--lighter)}.cluster span{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}g #flowchart-circleEnd,g #flowchart-circleStart,g #flowchart-crossEnd,g #flowchart-crossStart,g #flowchart-pointEnd,g #flowchart-pointStart{stroke:none}g.classGroup line,g.classGroup rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}g.classGroup text{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.classLabel .box{fill:var(--md-mermaid-label-bg-color);background-color:var(--md-mermaid-label-bg-color);opacity:1}.classLabel .label{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.node .divider{stroke:var(--md-mermaid-node-fg-color)}.relation{stroke:var(--md-mermaid-edge-color)}.cardinality{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.cardinality text{fill:inherit!important}defs 
#classDiagram-compositionEnd,defs #classDiagram-compositionStart,defs #classDiagram-dependencyEnd,defs #classDiagram-dependencyStart,defs #classDiagram-extensionEnd,defs #classDiagram-extensionStart{fill:var(--md-mermaid-edge-color)!important;stroke:var(--md-mermaid-edge-color)!important}defs #classDiagram-aggregationEnd,defs #classDiagram-aggregationStart{fill:var(--md-mermaid-label-bg-color)!important;stroke:var(--md-mermaid-edge-color)!important}g.stateGroup rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}g.stateGroup .state-title{fill:var(--md-mermaid-label-fg-color)!important;font-family:var(--md-mermaid-font-family)}g.stateGroup .composit{fill:var(--md-mermaid-label-bg-color)}.nodeLabel{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.node circle.state-end,.node circle.state-start,.start-state{fill:var(--md-mermaid-edge-color);stroke:none}.end-state-inner,.end-state-outer{fill:var(--md-mermaid-edge-color)}.end-state-inner,.node circle.state-end{stroke:var(--md-mermaid-label-bg-color)}.transition{stroke:var(--md-mermaid-edge-color)}[id^=state-fork] rect,[id^=state-join] rect{fill:var(--md-mermaid-edge-color)!important;stroke:none!important}.statediagram-cluster.statediagram-cluster .inner{fill:var(--md-default-bg-color)}.statediagram-cluster rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}.statediagram-state rect.divider{fill:var(--md-default-fg-color--lightest);stroke:var(--md-default-fg-color--lighter)}defs 
#statediagram-barbEnd{stroke:var(--md-mermaid-edge-color)}.attributeBoxEven,.attributeBoxOdd{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}.entityBox{fill:var(--md-mermaid-label-bg-color);stroke:var(--md-mermaid-node-fg-color)}.entityLabel{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.relationshipLabelBox{fill:var(--md-mermaid-label-bg-color);fill-opacity:1;background-color:var(--md-mermaid-label-bg-color);opacity:1}.relationshipLabel{fill:var(--md-mermaid-label-fg-color)}.relationshipLine{stroke:var(--md-mermaid-edge-color)}defs #ONE_OR_MORE_END *,defs #ONE_OR_MORE_START *,defs #ONLY_ONE_END *,defs #ONLY_ONE_START *,defs #ZERO_OR_MORE_END *,defs #ZERO_OR_MORE_START *,defs #ZERO_OR_ONE_END *,defs #ZERO_OR_ONE_START *{stroke:var(--md-mermaid-edge-color)!important}defs #ZERO_OR_MORE_END circle,defs #ZERO_OR_MORE_START circle{fill:var(--md-mermaid-label-bg-color)}.actor{fill:var(--md-mermaid-sequence-actor-bg-color);stroke:var(--md-mermaid-sequence-actor-border-color)}text.actor>tspan{fill:var(--md-mermaid-sequence-actor-fg-color);font-family:var(--md-mermaid-font-family)}line{stroke:var(--md-mermaid-sequence-actor-line-color)}.actor-man circle,.actor-man line{fill:var(--md-mermaid-sequence-actorman-bg-color);stroke:var(--md-mermaid-sequence-actorman-line-color)}.messageLine0,.messageLine1{stroke:var(--md-mermaid-sequence-message-line-color)}.note{fill:var(--md-mermaid-sequence-note-bg-color);stroke:var(--md-mermaid-sequence-note-border-color)}.loopText,.loopText>tspan,.messageText,.noteText>tspan{stroke:none;font-family:var(--md-mermaid-font-family)!important}.messageText{fill:var(--md-mermaid-sequence-message-fg-color)}.loopText,.loopText>tspan{fill:var(--md-mermaid-sequence-loop-fg-color)}.noteText>tspan{fill:var(--md-mermaid-sequence-note-fg-color)}#arrowhead 
path{fill:var(--md-mermaid-sequence-message-line-color);stroke:none}.loopLine{fill:var(--md-mermaid-sequence-loop-bg-color);stroke:var(--md-mermaid-sequence-loop-border-color)}.labelBox{fill:var(--md-mermaid-sequence-label-bg-color);stroke:none}.labelText,.labelText>span{fill:var(--md-mermaid-sequence-label-fg-color);font-family:var(--md-mermaid-font-family)}.sequenceNumber{fill:var(--md-mermaid-sequence-number-fg-color)}rect.rect{fill:var(--md-mermaid-sequence-box-bg-color);stroke:none}rect.rect+text.text{fill:var(--md-mermaid-sequence-box-fg-color)}defs #sequencenumber{fill:var(--md-mermaid-sequence-number-bg-color)!important}";var Qr,Aa=0;function Ca(){return typeof mermaid=="undefined"||mermaid instanceof Element?gt("https://unpkg.com/mermaid@10.6.1/dist/mermaid.min.js"):R(void 0)}function _n(e){return e.classList.remove("mermaid"),Qr||(Qr=Ca().pipe(T(()=>mermaid.initialize({startOnLoad:!1,themeCSS:Ln,sequence:{actorFontSize:"16px",messageFontSize:"16px",noteFontSize:"16px"}})),m(()=>{}),Z(1))),Qr.subscribe(()=>no(this,null,function*(){e.classList.add("mermaid");let t=`__mermaid_${Aa++}`,r=S("div",{class:"mermaid"}),o=e.textContent,{svg:n,fn:i}=yield mermaid.render(t,o),s=r.attachShadow({mode:"closed"});s.innerHTML=n,e.replaceWith(r),i==null||i(s)})),Qr.pipe(m(()=>({ref:e})))}var An=S("table");function Cn(e){return e.replaceWith(An),An.replaceWith(vn(e)),R({ref:e})}function ka(e){let t=e.find(r=>r.checked)||e[0];return L(...e.map(r=>h(r,"change").pipe(m(()=>U(`label[for="${r.id}"]`))))).pipe(q(U(`label[for="${t.id}"]`)),m(r=>({active:r})))}function kn(e,{viewport$:t,target$:r}){let o=U(".tabbed-labels",e),n=W(":scope > input",e),i=zr("prev");e.append(i);let s=zr("next");return e.append(s),H(()=>{let a=new x,c=a.pipe(ee(),oe(!0));B([a,Se(e)]).pipe(j(c),Le(1,ge)).subscribe({next([{active:p},l]){let f=Ue(p),{width:u}=le(p);e.style.setProperty("--md-indicator-x",`${f.x}px`),e.style.setProperty("--md-indicator-width",`${u}px`);let 
d=ir(o);(f.xd.x+l.width)&&o.scrollTo({left:Math.max(0,f.x-16),behavior:"smooth"})},complete(){e.style.removeProperty("--md-indicator-x"),e.style.removeProperty("--md-indicator-width")}}),B([et(o),Se(o)]).pipe(j(c)).subscribe(([p,l])=>{let f=xt(o);i.hidden=p.x<16,s.hidden=p.x>f.width-l.width-16}),L(h(i,"click").pipe(m(()=>-1)),h(s,"click").pipe(m(()=>1))).pipe(j(c)).subscribe(p=>{let{width:l}=le(o);o.scrollBy({left:l*p,behavior:"smooth"})}),r.pipe(j(c),v(p=>n.includes(p))).subscribe(p=>p.click()),o.classList.add("tabbed-labels--linked");for(let p of n){let l=U(`label[for="${p.id}"]`);l.replaceChildren(S("a",{href:`#${l.htmlFor}`,tabIndex:-1},...Array.from(l.childNodes))),h(l.firstElementChild,"click").pipe(j(c),v(f=>!(f.metaKey||f.ctrlKey)),T(f=>{f.preventDefault(),f.stopPropagation()})).subscribe(()=>{history.replaceState({},"",`#${l.htmlFor}`),l.click()})}return G("content.tabs.link")&&a.pipe(Ee(1),ae(t)).subscribe(([{active:p},{offset:l}])=>{let f=p.innerText.trim();if(p.hasAttribute("data-md-switching"))p.removeAttribute("data-md-switching");else{let u=e.offsetTop-l.y;for(let y of W("[data-tabs]"))for(let b of W(":scope > input",y)){let D=U(`label[for="${b.id}"]`);if(D!==p&&D.innerText.trim()===f){D.setAttribute("data-md-switching",""),b.click();break}}window.scrollTo({top:e.offsetTop-u});let d=__md_get("__tabs")||[];__md_set("__tabs",[...new Set([f,...d])])}}),a.pipe(j(c)).subscribe(()=>{for(let p of W("audio, video",e))p.pause()}),ka(n).pipe(T(p=>a.next(p)),A(()=>a.complete()),m(p=>P({ref:e},p)))}).pipe(qe(ie))}function Hn(e,{viewport$:t,target$:r,print$:o}){return L(...W(".annotate:not(.highlight)",e).map(n=>wn(n,{target$:r,print$:o})),...W("pre:not(.mermaid) > 
code",e).map(n=>On(n,{target$:r,print$:o})),...W("pre.mermaid",e).map(n=>_n(n)),...W("table:not([class])",e).map(n=>Cn(n)),...W("details",e).map(n=>Mn(n,{target$:r,print$:o})),...W("[data-tabs]",e).map(n=>kn(n,{viewport$:t,target$:r})),...W("[title]",e).filter(()=>G("content.tooltips")).map(n=>Be(n)))}function Ha(e,{alert$:t}){return t.pipe(w(r=>L(R(!0),R(!1).pipe(Qe(2e3))).pipe(m(o=>({message:r,active:o})))))}function $n(e,t){let r=U(".md-typeset",e);return H(()=>{let o=new x;return o.subscribe(({message:n,active:i})=>{e.classList.toggle("md-dialog--active",i),r.textContent=n}),Ha(e,t).pipe(T(n=>o.next(n)),A(()=>o.complete()),m(n=>P({ref:e},n)))})}function $a({viewport$:e}){if(!G("header.autohide"))return R(!1);let t=e.pipe(m(({offset:{y:n}})=>n),Ce(2,1),m(([n,i])=>[nMath.abs(i-n.y)>100),m(([,[n]])=>n),X()),o=Ne("search");return B([e,o]).pipe(m(([{offset:n},i])=>n.y>400&&!i),X(),w(n=>n?r:R(!1)),q(!1))}function Pn(e,t){return H(()=>B([Se(e),$a(t)])).pipe(m(([{height:r},o])=>({height:r,hidden:o})),X((r,o)=>r.height===o.height&&r.hidden===o.hidden),Z(1))}function Rn(e,{header$:t,main$:r}){return H(()=>{let o=new x,n=o.pipe(ee(),oe(!0));o.pipe(te("active"),Ze(t)).subscribe(([{active:s},{hidden:a}])=>{e.classList.toggle("md-header--shadow",s&&!a),e.hidden=a});let i=fe(W("[title]",e)).pipe(v(()=>G("content.tooltips")),re(s=>Be(s)));return r.subscribe(o),t.pipe(j(n),m(s=>P({ref:e},s)),Re(i.pipe(j(n))))})}function Pa(e,{viewport$:t,header$:r}){return mr(e,{viewport$:t,header$:r}).pipe(m(({offset:{y:o}})=>{let{height:n}=le(e);return{active:o>=n}}),te("active"))}function In(e,t){return H(()=>{let r=new x;r.subscribe({next({active:n}){e.classList.toggle("md-header__title--active",n)},complete(){e.classList.remove("md-header__title--active")}});let o=ce(".md-content h1");return typeof o=="undefined"?M:Pa(o,t).pipe(T(n=>r.next(n)),A(()=>r.complete()),m(n=>P({ref:e},n)))})}function Fn(e,{viewport$:t,header$:r}){let 
o=r.pipe(m(({height:i})=>i),X()),n=o.pipe(w(()=>Se(e).pipe(m(({height:i})=>({top:e.offsetTop,bottom:e.offsetTop+i})),te("bottom"))));return B([o,n,t]).pipe(m(([i,{top:s,bottom:a},{offset:{y:c},size:{height:p}}])=>(p=Math.max(0,p-Math.max(0,s-c,i)-Math.max(0,p+c-a)),{offset:s-i,height:p,active:s-i<=c})),X((i,s)=>i.offset===s.offset&&i.height===s.height&&i.active===s.active))}function Ra(e){let t=__md_get("__palette")||{index:e.findIndex(o=>matchMedia(o.getAttribute("data-md-color-media")).matches)},r=Math.max(0,Math.min(t.index,e.length-1));return R(...e).pipe(re(o=>h(o,"change").pipe(m(()=>o))),q(e[r]),m(o=>({index:e.indexOf(o),color:{media:o.getAttribute("data-md-color-media"),scheme:o.getAttribute("data-md-color-scheme"),primary:o.getAttribute("data-md-color-primary"),accent:o.getAttribute("data-md-color-accent")}})),Z(1))}function jn(e){let t=W("input",e),r=S("meta",{name:"theme-color"});document.head.appendChild(r);let o=S("meta",{name:"color-scheme"});document.head.appendChild(o);let n=At("(prefers-color-scheme: light)");return H(()=>{let i=new x;return i.subscribe(s=>{if(document.body.setAttribute("data-md-color-switching",""),s.color.media==="(prefers-color-scheme)"){let a=matchMedia("(prefers-color-scheme: light)"),c=document.querySelector(a.matches?"[data-md-color-media='(prefers-color-scheme: light)']":"[data-md-color-media='(prefers-color-scheme: dark)']");s.color.scheme=c.getAttribute("data-md-color-scheme"),s.color.primary=c.getAttribute("data-md-color-primary"),s.color.accent=c.getAttribute("data-md-color-accent")}for(let[a,c]of Object.entries(s.color))document.body.setAttribute(`data-md-color-${a}`,c);for(let a=0;a{let s=Oe("header"),a=window.getComputedStyle(s);return 
o.content=a.colorScheme,a.backgroundColor.match(/\d+/g).map(c=>(+c).toString(16).padStart(2,"0")).join("")})).subscribe(s=>r.content=`#${s}`),i.pipe(Me(ie)).subscribe(()=>{document.body.removeAttribute("data-md-color-switching")}),Ra(t).pipe(j(n.pipe(Ee(1))),at(),T(s=>i.next(s)),A(()=>i.complete()),m(s=>P({ref:e},s)))})}function Wn(e,{progress$:t}){return H(()=>{let r=new x;return r.subscribe(({value:o})=>{e.style.setProperty("--md-progress-value",`${o}`)}),t.pipe(T(o=>r.next({value:o})),A(()=>r.complete()),m(o=>({ref:e,value:o})))})}var Yr=jt(Kr());function Ia(e){e.setAttribute("data-md-copying","");let t=e.closest("[data-copy]"),r=t?t.getAttribute("data-copy"):e.innerText;return e.removeAttribute("data-md-copying"),r.trimEnd()}function Un({alert$:e}){Yr.default.isSupported()&&new I(t=>{new Yr.default("[data-clipboard-target], [data-clipboard-text]",{text:r=>r.getAttribute("data-clipboard-text")||Ia(U(r.getAttribute("data-clipboard-target")))}).on("success",r=>t.next(r))}).pipe(T(t=>{t.trigger.focus()}),m(()=>we("clipboard.copied"))).subscribe(e)}function Fa(e){if(e.length<2)return[""];let[t,r]=[...e].sort((n,i)=>n.length-i.length).map(n=>n.replace(/[^/]+$/,"")),o=0;if(t===r)o=t.length;else for(;t.charCodeAt(o)===r.charCodeAt(o);)o++;return e.map(n=>n.replace(t.slice(0,o),""))}function ur(e){let t=__md_get("__sitemap",sessionStorage,e);if(t)return R(t);{let r=he();return on(new URL("sitemap.xml",e||r.base)).pipe(m(o=>Fa(W("loc",o).map(n=>n.textContent))),xe(()=>M),$e([]),T(o=>__md_set("__sitemap",o,sessionStorage,e)))}}function Nn(e){let t=ce("[rel=canonical]",e);typeof t!="undefined"&&(t.href=t.href.replace("//localhost:","//127.0.0.1:"));let r=new Map;for(let o of W(":scope > *",e)){let n=o.outerHTML;for(let i of["href","src"]){let s=o.getAttribute(i);if(s===null)continue;let a=new URL(s,t==null?void 0:t.href),c=o.cloneNode();c.setAttribute(i,`${a}`),n=c.outerHTML;break}r.set(n,o)}return r}function Dn({location$:e,viewport$:t,progress$:r}){let 
o=he();if(location.protocol==="file:")return M;let n=ur().pipe(m(l=>l.map(f=>`${new URL(f,o.base)}`))),i=h(document.body,"click").pipe(ae(n),w(([l,f])=>{if(!(l.target instanceof Element))return M;let u=l.target.closest("a");if(u===null)return M;if(u.target||l.metaKey||l.ctrlKey)return M;let d=new URL(u.href);return d.search=d.hash="",f.includes(`${d}`)?(l.preventDefault(),R(new URL(u.href))):M}),de());i.pipe(ue(1)).subscribe(()=>{let l=ce("link[rel=icon]");typeof l!="undefined"&&(l.href=l.href)}),h(window,"beforeunload").subscribe(()=>{history.scrollRestoration="auto"}),i.pipe(ae(t)).subscribe(([l,{offset:f}])=>{history.scrollRestoration="manual",history.replaceState(f,""),history.pushState(null,"",l)}),i.subscribe(e);let s=e.pipe(q(me()),te("pathname"),Ee(1),w(l=>lr(l,{progress$:r}).pipe(xe(()=>(st(l,!0),M))))),a=new DOMParser,c=s.pipe(w(l=>l.text()),w(l=>{let f=a.parseFromString(l,"text/html");for(let b of["[data-md-component=announce]","[data-md-component=container]","[data-md-component=header-topic]","[data-md-component=outdated]","[data-md-component=logo]","[data-md-component=skip]",...G("navigation.tabs.sticky")?["[data-md-component=tabs]"]:[]]){let D=ce(b),Q=ce(b,f);typeof D!="undefined"&&typeof Q!="undefined"&&D.replaceWith(Q)}let u=Nn(document.head),d=Nn(f.head);for(let[b,D]of d)D.getAttribute("rel")==="stylesheet"||D.hasAttribute("src")||(u.has(b)?u.delete(b):document.head.appendChild(D));for(let b of u.values())b.getAttribute("rel")==="stylesheet"||b.hasAttribute("src")||b.remove();let y=Oe("container");return We(W("script",y)).pipe(w(b=>{let D=f.createElement("script");if(b.src){for(let Q of b.getAttributeNames())D.setAttribute(Q,b.getAttribute(Q));return b.replaceWith(D),new I(Q=>{D.onload=()=>Q.complete()})}else return D.textContent=b.textContent,b.replaceWith(D),M}),ee(),oe(f))}),de());return h(window,"popstate").pipe(m(me)).subscribe(e),e.pipe(q(me()),Ce(2,1),v(([l,f])=>l.pathname===f.pathname&&l.hash!==f.hash),m(([,l])=>l)).subscribe(l=>{var 
f,u;history.state!==null||!l.hash?window.scrollTo(0,(u=(f=history.state)==null?void 0:f.y)!=null?u:0):(history.scrollRestoration="auto",pr(l.hash),history.scrollRestoration="manual")}),e.pipe(Ir(i),q(me()),Ce(2,1),v(([l,f])=>l.pathname===f.pathname&&l.hash===f.hash),m(([,l])=>l)).subscribe(l=>{history.scrollRestoration="auto",pr(l.hash),history.scrollRestoration="manual",history.back()}),c.pipe(ae(e)).subscribe(([,l])=>{var f,u;history.state!==null||!l.hash?window.scrollTo(0,(u=(f=history.state)==null?void 0:f.y)!=null?u:0):pr(l.hash)}),t.pipe(te("offset"),ye(100)).subscribe(({offset:l})=>{history.replaceState(l,"")}),c}var qn=jt(zn());function Kn(e){let t=e.separator.split("|").map(n=>n.replace(/(\(\?[!=<][^)]+\))/g,"").length===0?"\uFFFD":n).join("|"),r=new RegExp(t,"img"),o=(n,i,s)=>`${i}${s}`;return n=>{n=n.replace(/[\s*+\-:~^]+/g," ").trim();let i=new RegExp(`(^|${e.separator}|)(${n.replace(/[|\\{}()[\]^$+*?.-]/g,"\\$&").replace(r,"|")})`,"img");return s=>(0,qn.default)(s).replace(i,o).replace(/<\/mark>(\s+)]*>/img,"$1")}}function Ht(e){return e.type===1}function dr(e){return e.type===3}function Qn(e,t){let r=ln(e);return L(R(location.protocol!=="file:"),Ne("search")).pipe(Pe(o=>o),w(()=>t)).subscribe(({config:o,docs:n})=>r.next({type:0,data:{config:o,docs:n,options:{suggest:G("search.suggest")}}})),r}function Yn({document$:e}){let t=he(),r=De(new URL("../versions.json",t.base)).pipe(xe(()=>M)),o=r.pipe(m(n=>{let[,i]=t.base.match(/([^/]+)\/?$/);return n.find(({version:s,aliases:a})=>s===i||a.includes(i))||n[0]}));r.pipe(m(n=>new Map(n.map(i=>[`${new URL(`../${i.version}/`,t.base)}`,i]))),w(n=>h(document.body,"click").pipe(v(i=>!i.metaKey&&!i.ctrlKey),ae(o),w(([i,s])=>{if(i.target instanceof Element){let a=i.target.closest("a");if(a&&!a.target&&n.has(a.href)){let c=a.href;return!i.target.closest(".md-version")&&n.get(c)===s?M:(i.preventDefault(),R(c))}}return M}),w(i=>{let{version:s}=n.get(i);return ur(new URL(i)).pipe(m(a=>{let 
p=me().href.replace(t.base,"");return a.includes(p.split("#")[0])?new URL(`../${s}/${p}`,t.base):new URL(i)}))})))).subscribe(n=>st(n,!0)),B([r,o]).subscribe(([n,i])=>{U(".md-header__topic").appendChild(gn(n,i))}),e.pipe(w(()=>o)).subscribe(n=>{var s;let i=__md_get("__outdated",sessionStorage);if(i===null){i=!0;let a=((s=t.version)==null?void 0:s.default)||"latest";Array.isArray(a)||(a=[a]);e:for(let c of a)for(let p of n.aliases.concat(n.version))if(new RegExp(c,"i").test(p)){i=!1;break e}__md_set("__outdated",i,sessionStorage)}if(i)for(let a of ne("outdated"))a.hidden=!1})}function Da(e,{worker$:t}){let{searchParams:r}=me();r.has("q")&&(Ye("search",!0),e.value=r.get("q"),e.focus(),Ne("search").pipe(Pe(i=>!i)).subscribe(()=>{let i=me();i.searchParams.delete("q"),history.replaceState({},"",`${i}`)}));let o=vt(e),n=L(t.pipe(Pe(Ht)),h(e,"keyup"),o).pipe(m(()=>e.value),X());return B([n,o]).pipe(m(([i,s])=>({value:i,focus:s})),Z(1))}function Bn(e,{worker$:t}){let r=new x,o=r.pipe(ee(),oe(!0));B([t.pipe(Pe(Ht)),r],(i,s)=>s).pipe(te("value")).subscribe(({value:i})=>t.next({type:2,data:i})),r.pipe(te("focus")).subscribe(({focus:i})=>{i&&Ye("search",i)}),h(e.form,"reset").pipe(j(o)).subscribe(()=>e.focus());let n=U("header [for=__search]");return h(n,"click").subscribe(()=>e.focus()),Da(e,{worker$:t}).pipe(T(i=>r.next(i)),A(()=>r.complete()),m(i=>P({ref:e},i)),Z(1))}function Gn(e,{worker$:t,query$:r}){let o=new x,n=Go(e.parentElement).pipe(v(Boolean)),i=e.parentElement,s=U(":scope > :first-child",e),a=U(":scope > :last-child",e);Ne("search").subscribe(l=>a.setAttribute("role",l?"list":"presentation")),o.pipe(ae(r),Wr(t.pipe(Pe(Ht)))).subscribe(([{items:l},{value:f}])=>{switch(l.length){case 0:s.textContent=f.length?we("search.result.none"):we("search.result.placeholder");break;case 1:s.textContent=we("search.result.one");break;default:let u=ar(l.length);s.textContent=we("search.result.other",u)}});let 
c=o.pipe(T(()=>a.innerHTML=""),w(({items:l})=>L(R(...l.slice(0,10)),R(...l.slice(10)).pipe(Ce(4),Nr(n),w(([f])=>f)))),m(hn),de());return c.subscribe(l=>a.appendChild(l)),c.pipe(re(l=>{let f=ce("details",l);return typeof f=="undefined"?M:h(f,"toggle").pipe(j(o),m(()=>f))})).subscribe(l=>{l.open===!1&&l.offsetTop<=i.scrollTop&&i.scrollTo({top:l.offsetTop})}),t.pipe(v(dr),m(({data:l})=>l)).pipe(T(l=>o.next(l)),A(()=>o.complete()),m(l=>P({ref:e},l)))}function Va(e,{query$:t}){return t.pipe(m(({value:r})=>{let o=me();return o.hash="",r=r.replace(/\s+/g,"+").replace(/&/g,"%26").replace(/=/g,"%3D"),o.search=`q=${r}`,{url:o}}))}function Jn(e,t){let r=new x,o=r.pipe(ee(),oe(!0));return r.subscribe(({url:n})=>{e.setAttribute("data-clipboard-text",e.href),e.href=`${n}`}),h(e,"click").pipe(j(o)).subscribe(n=>n.preventDefault()),Va(e,t).pipe(T(n=>r.next(n)),A(()=>r.complete()),m(n=>P({ref:e},n)))}function Xn(e,{worker$:t,keyboard$:r}){let o=new x,n=Oe("search-query"),i=L(h(n,"keydown"),h(n,"focus")).pipe(Me(ie),m(()=>n.value),X());return o.pipe(Ze(i),m(([{suggest:a},c])=>{let p=c.split(/([\s-]+)/);if(a!=null&&a.length&&p[p.length-1]){let l=a[a.length-1];l.startsWith(p[p.length-1])&&(p[p.length-1]=l)}else p.length=0;return p})).subscribe(a=>e.innerHTML=a.join("").replace(/\s/g," ")),r.pipe(v(({mode:a})=>a==="search")).subscribe(a=>{switch(a.type){case"ArrowRight":e.innerText.length&&n.selectionStart===n.value.length&&(n.value=e.innerText);break}}),t.pipe(v(dr),m(({data:a})=>a)).pipe(T(a=>o.next(a)),A(()=>o.complete()),m(()=>({ref:e})))}function Zn(e,{index$:t,keyboard$:r}){let o=he();try{let n=Qn(o.search,t),i=Oe("search-query",e),s=Oe("search-result",e);h(e,"click").pipe(v(({target:c})=>c instanceof Element&&!!c.closest("a"))).subscribe(()=>Ye("search",!1)),r.pipe(v(({mode:c})=>c==="search")).subscribe(c=>{let p=Ie();switch(c.type){case"Enter":if(p===i){let l=new Map;for(let f of W(":first-child [href]",s)){let 
u=f.firstElementChild;l.set(f,parseFloat(u.getAttribute("data-md-score")))}if(l.size){let[[f]]=[...l].sort(([,u],[,d])=>d-u);f.click()}c.claim()}break;case"Escape":case"Tab":Ye("search",!1),i.blur();break;case"ArrowUp":case"ArrowDown":if(typeof p=="undefined")i.focus();else{let l=[i,...W(":not(details) > [href], summary, details[open] [href]",s)],f=Math.max(0,(Math.max(0,l.indexOf(p))+l.length+(c.type==="ArrowUp"?-1:1))%l.length);l[f].focus()}c.claim();break;default:i!==Ie()&&i.focus()}}),r.pipe(v(({mode:c})=>c==="global")).subscribe(c=>{switch(c.type){case"f":case"s":case"/":i.focus(),i.select(),c.claim();break}});let a=Bn(i,{worker$:n});return L(a,Gn(s,{worker$:n,query$:a})).pipe(Re(...ne("search-share",e).map(c=>Jn(c,{query$:a})),...ne("search-suggest",e).map(c=>Xn(c,{worker$:n,keyboard$:r}))))}catch(n){return e.hidden=!0,Ke}}function ei(e,{index$:t,location$:r}){return B([t,r.pipe(q(me()),v(o=>!!o.searchParams.get("h")))]).pipe(m(([o,n])=>Kn(o.config)(n.searchParams.get("h"))),m(o=>{var s;let n=new Map,i=document.createNodeIterator(e,NodeFilter.SHOW_TEXT);for(let a=i.nextNode();a;a=i.nextNode())if((s=a.parentElement)!=null&&s.offsetHeight){let c=a.textContent,p=o(c);p.length>c.length&&n.set(a,p)}for(let[a,c]of n){let{childNodes:p}=S("span",null,c);a.replaceWith(...Array.from(p))}return{ref:e,nodes:n}}))}function za(e,{viewport$:t,main$:r}){let o=e.closest(".md-grid"),n=o.offsetTop-o.parentElement.offsetTop;return B([r,t]).pipe(m(([{offset:i,height:s},{offset:{y:a}}])=>(s=s+Math.min(n,Math.max(0,a-i))-n,{height:s,locked:a>=i+n})),X((i,s)=>i.height===s.height&&i.locked===s.locked))}function Br(e,o){var n=o,{header$:t}=n,r=oo(n,["header$"]);let i=U(".md-sidebar__scrollwrap",e),{y:s}=Ue(i);return H(()=>{let a=new x,c=a.pipe(ee(),oe(!0)),p=a.pipe(Le(0,ge));return p.pipe(ae(t)).subscribe({next([{height:l},{height:f}]){i.style.height=`${l-2*s}px`,e.style.top=`${f}px`},complete(){i.style.height="",e.style.top=""}}),p.pipe(Pe()).subscribe(()=>{for(let l of 
W(".md-nav__link--active[href]",e)){if(!l.clientHeight)continue;let f=l.closest(".md-sidebar__scrollwrap");if(typeof f!="undefined"){let u=l.offsetTop-f.offsetTop,{height:d}=le(f);f.scrollTo({top:u-d/2})}}}),fe(W("label[tabindex]",e)).pipe(re(l=>h(l,"click").pipe(Me(ie),m(()=>l),j(c)))).subscribe(l=>{let f=U(`[id="${l.htmlFor}"]`);U(`[aria-labelledby="${l.id}"]`).setAttribute("aria-expanded",`${f.checked}`)}),za(e,r).pipe(T(l=>a.next(l)),A(()=>a.complete()),m(l=>P({ref:e},l)))})}function ti(e,t){if(typeof t!="undefined"){let r=`https://api.github.com/repos/${e}/${t}`;return Lt(De(`${r}/releases/latest`).pipe(xe(()=>M),m(o=>({version:o.tag_name})),$e({})),De(r).pipe(xe(()=>M),m(o=>({stars:o.stargazers_count,forks:o.forks_count})),$e({}))).pipe(m(([o,n])=>P(P({},o),n)))}else{let r=`https://api.github.com/users/${e}`;return De(r).pipe(m(o=>({repositories:o.public_repos})),$e({}))}}function ri(e,t){let r=`https://${e}/api/v4/projects/${encodeURIComponent(t)}`;return De(r).pipe(xe(()=>M),m(({star_count:o,forks_count:n})=>({stars:o,forks:n})),$e({}))}function oi(e){let t=e.match(/^.+github\.com\/([^/]+)\/?([^/]+)?/i);if(t){let[,r,o]=t;return ti(r,o)}if(t=e.match(/^.+?([^/]*gitlab[^/]+)\/(.+?)\/?$/i),t){let[,r,o]=t;return ri(r,o)}return M}var qa;function Ka(e){return qa||(qa=H(()=>{let t=__md_get("__source",sessionStorage);if(t)return R(t);if(ne("consent").length){let o=__md_get("__consent");if(!(o&&o.github))return M}return oi(e.href).pipe(T(o=>__md_set("__source",o,sessionStorage)))}).pipe(xe(()=>M),v(t=>Object.keys(t).length>0),m(t=>({facts:t})),Z(1)))}function ni(e){let t=U(":scope > :last-child",e);return H(()=>{let r=new x;return r.subscribe(({facts:o})=>{t.appendChild(bn(o)),t.classList.add("md-source__repository--active")}),Ka(e).pipe(T(o=>r.next(o)),A(()=>r.complete()),m(o=>P({ref:e},o)))})}function Qa(e,{viewport$:t,header$:r}){return Se(document.body).pipe(w(()=>mr(e,{header$:r,viewport$:t})),m(({offset:{y:o}})=>({hidden:o>=10})),te("hidden"))}function 
ii(e,t){return H(()=>{let r=new x;return r.subscribe({next({hidden:o}){e.hidden=o},complete(){e.hidden=!1}}),(G("navigation.tabs.sticky")?R({hidden:!1}):Qa(e,t)).pipe(T(o=>r.next(o)),A(()=>r.complete()),m(o=>P({ref:e},o)))})}function Ya(e,{viewport$:t,header$:r}){let o=new Map,n=W("[href^=\\#]",e);for(let a of n){let c=decodeURIComponent(a.hash.substring(1)),p=ce(`[id="${c}"]`);typeof p!="undefined"&&o.set(a,p)}let i=r.pipe(te("height"),m(({height:a})=>{let c=Oe("main"),p=U(":scope > :first-child",c);return a+.8*(p.offsetTop-c.offsetTop)}),de());return Se(document.body).pipe(te("height"),w(a=>H(()=>{let c=[];return R([...o].reduce((p,[l,f])=>{for(;c.length&&o.get(c[c.length-1]).tagName>=f.tagName;)c.pop();let u=f.offsetTop;for(;!u&&f.parentElement;)f=f.parentElement,u=f.offsetTop;let d=f.offsetParent;for(;d;d=d.offsetParent)u+=d.offsetTop;return p.set([...c=[...c,l]].reverse(),u)},new Map))}).pipe(m(c=>new Map([...c].sort(([,p],[,l])=>p-l))),Ze(i),w(([c,p])=>t.pipe(Fr(([l,f],{offset:{y:u},size:d})=>{let y=u+d.height>=Math.floor(a.height);for(;f.length;){let[,b]=f[0];if(b-p=u&&!y)f=[l.pop(),...f];else break}return[l,f]},[[],[...c]]),X((l,f)=>l[0]===f[0]&&l[1]===f[1])))))).pipe(m(([a,c])=>({prev:a.map(([p])=>p),next:c.map(([p])=>p)})),q({prev:[],next:[]}),Ce(2,1),m(([a,c])=>a.prev.length{let i=new x,s=i.pipe(ee(),oe(!0));if(i.subscribe(({prev:a,next:c})=>{for(let[p]of c)p.classList.remove("md-nav__link--passed"),p.classList.remove("md-nav__link--active");for(let[p,[l]]of a.entries())l.classList.add("md-nav__link--passed"),l.classList.toggle("md-nav__link--active",p===a.length-1)}),G("toc.follow")){let a=L(t.pipe(ye(1),m(()=>{})),t.pipe(ye(250),m(()=>"smooth")));i.pipe(v(({prev:c})=>c.length>0),Ze(o.pipe(Me(ie))),ae(a)).subscribe(([[{prev:c}],p])=>{let[l]=c[c.length-1];if(l.offsetHeight){let f=sr(l);if(typeof f!="undefined"){let u=l.offsetTop-f.offsetTop,{height:d}=le(f);f.scrollTo({top:u-d/2,behavior:p})}}})}return 
G("navigation.tracking")&&t.pipe(j(s),te("offset"),ye(250),Ee(1),j(n.pipe(Ee(1))),at({delay:250}),ae(i)).subscribe(([,{prev:a}])=>{let c=me(),p=a[a.length-1];if(p&&p.length){let[l]=p,{hash:f}=new URL(l.href);c.hash!==f&&(c.hash=f,history.replaceState({},"",`${c}`))}else c.hash="",history.replaceState({},"",`${c}`)}),Ya(e,{viewport$:t,header$:r}).pipe(T(a=>i.next(a)),A(()=>i.complete()),m(a=>P({ref:e},a)))})}function Ba(e,{viewport$:t,main$:r,target$:o}){let n=t.pipe(m(({offset:{y:s}})=>s),Ce(2,1),m(([s,a])=>s>a&&a>0),X()),i=r.pipe(m(({active:s})=>s));return B([i,n]).pipe(m(([s,a])=>!(s&&a)),X(),j(o.pipe(Ee(1))),oe(!0),at({delay:250}),m(s=>({hidden:s})))}function si(e,{viewport$:t,header$:r,main$:o,target$:n}){let i=new x,s=i.pipe(ee(),oe(!0));return i.subscribe({next({hidden:a}){e.hidden=a,a?(e.setAttribute("tabindex","-1"),e.blur()):e.removeAttribute("tabindex")},complete(){e.style.top="",e.hidden=!0,e.removeAttribute("tabindex")}}),r.pipe(j(s),te("height")).subscribe(({height:a})=>{e.style.top=`${a+16}px`}),h(e,"click").subscribe(a=>{a.preventDefault(),window.scrollTo({top:0})}),Ba(e,{viewport$:t,main$:o,target$:n}).pipe(T(a=>i.next(a)),A(()=>i.complete()),m(a=>P({ref:e},a)))}function ci({document$:e}){e.pipe(w(()=>W(".md-ellipsis")),re(t=>yt(t).pipe(j(e.pipe(Ee(1))),v(r=>r),m(()=>t),ue(1))),v(t=>t.offsetWidth{let r=t.innerText,o=t.closest("a")||t;return o.title=r,Be(o).pipe(j(e.pipe(Ee(1))),A(()=>o.removeAttribute("title")))})).subscribe(),e.pipe(w(()=>W(".md-status")),re(t=>Be(t))).subscribe()}function pi({document$:e,tablet$:t}){e.pipe(w(()=>W(".md-toggle--indeterminate")),T(r=>{r.indeterminate=!0,r.checked=!1}),re(r=>h(r,"change").pipe(Ur(()=>r.classList.contains("md-toggle--indeterminate")),m(()=>r))),ae(t)).subscribe(([r,o])=>{r.classList.remove("md-toggle--indeterminate"),o&&(r.checked=!1)})}function Ga(){return/(iPad|iPhone|iPod)/.test(navigator.userAgent)}function 
li({document$:e}){e.pipe(w(()=>W("[data-md-scrollfix]")),T(t=>t.removeAttribute("data-md-scrollfix")),v(Ga),re(t=>h(t,"touchstart").pipe(m(()=>t)))).subscribe(t=>{let r=t.scrollTop;r===0?t.scrollTop=1:r+t.offsetHeight===t.scrollHeight&&(t.scrollTop=r-1)})}function mi({viewport$:e,tablet$:t}){B([Ne("search"),t]).pipe(m(([r,o])=>r&&!o),w(r=>R(r).pipe(Qe(r?400:100))),ae(e)).subscribe(([r,{offset:{y:o}}])=>{if(r)document.body.setAttribute("data-md-scrolllock",""),document.body.style.top=`-${o}px`;else{let n=-1*parseInt(document.body.style.top,10);document.body.removeAttribute("data-md-scrolllock"),document.body.style.top="",n&&window.scrollTo(0,n)}})}Object.entries||(Object.entries=function(e){let t=[];for(let r of Object.keys(e))t.push([r,e[r]]);return t});Object.values||(Object.values=function(e){let t=[];for(let r of Object.keys(e))t.push(e[r]);return t});typeof Element!="undefined"&&(Element.prototype.scrollTo||(Element.prototype.scrollTo=function(e,t){typeof e=="object"?(this.scrollLeft=e.left,this.scrollTop=e.top):(this.scrollLeft=e,this.scrollTop=t)}),Element.prototype.replaceWith||(Element.prototype.replaceWith=function(...e){let t=this.parentNode;if(t){e.length===0&&t.removeChild(this);for(let r=e.length-1;r>=0;r--){let o=e[r];typeof o=="string"?o=document.createTextNode(o):o.parentNode&&o.parentNode.removeChild(o),r?t.insertBefore(this.previousSibling,o):t.replaceChild(o,this)}}}));function Ja(){return location.protocol==="file:"?gt(`${new URL("search/search_index.js",Gr.base)}`).pipe(m(()=>__index),Z(1)):De(new URL("search/search_index.json",Gr.base))}document.documentElement.classList.remove("no-js");document.documentElement.classList.add("js");var rt=zo(),Pt=Zo(),wt=tn(Pt),Jr=Xo(),_e=pn(),hr=At("(min-width: 960px)"),ui=At("(min-width: 1220px)"),di=rn(),Gr=he(),hi=document.forms.namedItem("search")?Ja():Ke,Xr=new x;Un({alert$:Xr});var Zr=new x;G("navigation.instant")&&Dn({location$:Pt,viewport$:_e,progress$:Zr}).subscribe(rt);var 
fi;((fi=Gr.version)==null?void 0:fi.provider)==="mike"&&Yn({document$:rt});L(Pt,wt).pipe(Qe(125)).subscribe(()=>{Ye("drawer",!1),Ye("search",!1)});Jr.pipe(v(({mode:e})=>e==="global")).subscribe(e=>{switch(e.type){case"p":case",":let t=ce("link[rel=prev]");typeof t!="undefined"&&st(t);break;case"n":case".":let r=ce("link[rel=next]");typeof r!="undefined"&&st(r);break;case"Enter":let o=Ie();o instanceof HTMLLabelElement&&o.click()}});ci({document$:rt});pi({document$:rt,tablet$:hr});li({document$:rt});mi({viewport$:_e,tablet$:hr});var tt=Pn(Oe("header"),{viewport$:_e}),$t=rt.pipe(m(()=>Oe("main")),w(e=>Fn(e,{viewport$:_e,header$:tt})),Z(1)),Xa=L(...ne("consent").map(e=>fn(e,{target$:wt})),...ne("dialog").map(e=>$n(e,{alert$:Xr})),...ne("header").map(e=>Rn(e,{viewport$:_e,header$:tt,main$:$t})),...ne("palette").map(e=>jn(e)),...ne("progress").map(e=>Wn(e,{progress$:Zr})),...ne("search").map(e=>Zn(e,{index$:hi,keyboard$:Jr})),...ne("source").map(e=>ni(e))),Za=H(()=>L(...ne("announce").map(e=>mn(e)),...ne("content").map(e=>Hn(e,{viewport$:_e,target$:wt,print$:di})),...ne("content").map(e=>G("search.highlight")?ei(e,{index$:hi,location$:Pt}):M),...ne("header-title").map(e=>In(e,{viewport$:_e,header$:tt})),...ne("sidebar").map(e=>e.getAttribute("data-md-type")==="navigation"?Dr(ui,()=>Br(e,{viewport$:_e,header$:tt,main$:$t})):Dr(hr,()=>Br(e,{viewport$:_e,header$:tt,main$:$t}))),...ne("tabs").map(e=>ii(e,{viewport$:_e,header$:tt})),...ne("toc").map(e=>ai(e,{viewport$:_e,header$:tt,main$:$t,target$:wt})),...ne("top").map(e=>si(e,{viewport$:_e,header$:tt,main$:$t,target$:wt})))),bi=rt.pipe(w(()=>Za),Re(Xa),Z(1));bi.subscribe();window.document$=rt;window.location$=Pt;window.target$=wt;window.keyboard$=Jr;window.viewport$=_e;window.tablet$=hr;window.screen$=ui;window.print$=di;window.alert$=Xr;window.progress$=Zr;window.component$=bi;})(); +//# sourceMappingURL=bundle.7389ff0e.min.js.map + diff --git a/assets/javascripts/bundle.7389ff0e.min.js.map 
b/assets/javascripts/bundle.7389ff0e.min.js.map new file mode 100644 index 00000000..dbee324c --- /dev/null +++ b/assets/javascripts/bundle.7389ff0e.min.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["node_modules/focus-visible/dist/focus-visible.js", "node_modules/clipboard/dist/clipboard.js", "node_modules/escape-html/index.js", "src/templates/assets/javascripts/bundle.ts", "node_modules/rxjs/node_modules/tslib/tslib.es6.js", "node_modules/rxjs/src/internal/util/isFunction.ts", "node_modules/rxjs/src/internal/util/createErrorClass.ts", "node_modules/rxjs/src/internal/util/UnsubscriptionError.ts", "node_modules/rxjs/src/internal/util/arrRemove.ts", "node_modules/rxjs/src/internal/Subscription.ts", "node_modules/rxjs/src/internal/config.ts", "node_modules/rxjs/src/internal/scheduler/timeoutProvider.ts", "node_modules/rxjs/src/internal/util/reportUnhandledError.ts", "node_modules/rxjs/src/internal/util/noop.ts", "node_modules/rxjs/src/internal/NotificationFactories.ts", "node_modules/rxjs/src/internal/util/errorContext.ts", "node_modules/rxjs/src/internal/Subscriber.ts", "node_modules/rxjs/src/internal/symbol/observable.ts", "node_modules/rxjs/src/internal/util/identity.ts", "node_modules/rxjs/src/internal/util/pipe.ts", "node_modules/rxjs/src/internal/Observable.ts", "node_modules/rxjs/src/internal/util/lift.ts", "node_modules/rxjs/src/internal/operators/OperatorSubscriber.ts", "node_modules/rxjs/src/internal/scheduler/animationFrameProvider.ts", "node_modules/rxjs/src/internal/util/ObjectUnsubscribedError.ts", "node_modules/rxjs/src/internal/Subject.ts", "node_modules/rxjs/src/internal/scheduler/dateTimestampProvider.ts", "node_modules/rxjs/src/internal/ReplaySubject.ts", "node_modules/rxjs/src/internal/scheduler/Action.ts", "node_modules/rxjs/src/internal/scheduler/intervalProvider.ts", "node_modules/rxjs/src/internal/scheduler/AsyncAction.ts", "node_modules/rxjs/src/internal/Scheduler.ts", "node_modules/rxjs/src/internal/scheduler/AsyncScheduler.ts", 
"node_modules/rxjs/src/internal/scheduler/async.ts", "node_modules/rxjs/src/internal/scheduler/AnimationFrameAction.ts", "node_modules/rxjs/src/internal/scheduler/AnimationFrameScheduler.ts", "node_modules/rxjs/src/internal/scheduler/animationFrame.ts", "node_modules/rxjs/src/internal/observable/empty.ts", "node_modules/rxjs/src/internal/util/isScheduler.ts", "node_modules/rxjs/src/internal/util/args.ts", "node_modules/rxjs/src/internal/util/isArrayLike.ts", "node_modules/rxjs/src/internal/util/isPromise.ts", "node_modules/rxjs/src/internal/util/isInteropObservable.ts", "node_modules/rxjs/src/internal/util/isAsyncIterable.ts", "node_modules/rxjs/src/internal/util/throwUnobservableError.ts", "node_modules/rxjs/src/internal/symbol/iterator.ts", "node_modules/rxjs/src/internal/util/isIterable.ts", "node_modules/rxjs/src/internal/util/isReadableStreamLike.ts", "node_modules/rxjs/src/internal/observable/innerFrom.ts", "node_modules/rxjs/src/internal/util/executeSchedule.ts", "node_modules/rxjs/src/internal/operators/observeOn.ts", "node_modules/rxjs/src/internal/operators/subscribeOn.ts", "node_modules/rxjs/src/internal/scheduled/scheduleObservable.ts", "node_modules/rxjs/src/internal/scheduled/schedulePromise.ts", "node_modules/rxjs/src/internal/scheduled/scheduleArray.ts", "node_modules/rxjs/src/internal/scheduled/scheduleIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleAsyncIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleReadableStreamLike.ts", "node_modules/rxjs/src/internal/scheduled/scheduled.ts", "node_modules/rxjs/src/internal/observable/from.ts", "node_modules/rxjs/src/internal/observable/of.ts", "node_modules/rxjs/src/internal/observable/throwError.ts", "node_modules/rxjs/src/internal/util/EmptyError.ts", "node_modules/rxjs/src/internal/util/isDate.ts", "node_modules/rxjs/src/internal/operators/map.ts", "node_modules/rxjs/src/internal/util/mapOneOrManyArgs.ts", "node_modules/rxjs/src/internal/util/argsArgArrayOrObject.ts", 
"node_modules/rxjs/src/internal/util/createObject.ts", "node_modules/rxjs/src/internal/observable/combineLatest.ts", "node_modules/rxjs/src/internal/operators/mergeInternals.ts", "node_modules/rxjs/src/internal/operators/mergeMap.ts", "node_modules/rxjs/src/internal/operators/mergeAll.ts", "node_modules/rxjs/src/internal/operators/concatAll.ts", "node_modules/rxjs/src/internal/observable/concat.ts", "node_modules/rxjs/src/internal/observable/defer.ts", "node_modules/rxjs/src/internal/observable/fromEvent.ts", "node_modules/rxjs/src/internal/observable/fromEventPattern.ts", "node_modules/rxjs/src/internal/observable/timer.ts", "node_modules/rxjs/src/internal/observable/merge.ts", "node_modules/rxjs/src/internal/observable/never.ts", "node_modules/rxjs/src/internal/util/argsOrArgArray.ts", "node_modules/rxjs/src/internal/operators/filter.ts", "node_modules/rxjs/src/internal/observable/zip.ts", "node_modules/rxjs/src/internal/operators/audit.ts", "node_modules/rxjs/src/internal/operators/auditTime.ts", "node_modules/rxjs/src/internal/operators/bufferCount.ts", "node_modules/rxjs/src/internal/operators/catchError.ts", "node_modules/rxjs/src/internal/operators/scanInternals.ts", "node_modules/rxjs/src/internal/operators/combineLatest.ts", "node_modules/rxjs/src/internal/operators/combineLatestWith.ts", "node_modules/rxjs/src/internal/operators/debounceTime.ts", "node_modules/rxjs/src/internal/operators/defaultIfEmpty.ts", "node_modules/rxjs/src/internal/operators/take.ts", "node_modules/rxjs/src/internal/operators/ignoreElements.ts", "node_modules/rxjs/src/internal/operators/mapTo.ts", "node_modules/rxjs/src/internal/operators/delayWhen.ts", "node_modules/rxjs/src/internal/operators/delay.ts", "node_modules/rxjs/src/internal/operators/distinctUntilChanged.ts", "node_modules/rxjs/src/internal/operators/distinctUntilKeyChanged.ts", "node_modules/rxjs/src/internal/operators/throwIfEmpty.ts", "node_modules/rxjs/src/internal/operators/endWith.ts", 
"node_modules/rxjs/src/internal/operators/finalize.ts", "node_modules/rxjs/src/internal/operators/first.ts", "node_modules/rxjs/src/internal/operators/takeLast.ts", "node_modules/rxjs/src/internal/operators/merge.ts", "node_modules/rxjs/src/internal/operators/mergeWith.ts", "node_modules/rxjs/src/internal/operators/repeat.ts", "node_modules/rxjs/src/internal/operators/sample.ts", "node_modules/rxjs/src/internal/operators/scan.ts", "node_modules/rxjs/src/internal/operators/share.ts", "node_modules/rxjs/src/internal/operators/shareReplay.ts", "node_modules/rxjs/src/internal/operators/skip.ts", "node_modules/rxjs/src/internal/operators/skipUntil.ts", "node_modules/rxjs/src/internal/operators/startWith.ts", "node_modules/rxjs/src/internal/operators/switchMap.ts", "node_modules/rxjs/src/internal/operators/takeUntil.ts", "node_modules/rxjs/src/internal/operators/takeWhile.ts", "node_modules/rxjs/src/internal/operators/tap.ts", "node_modules/rxjs/src/internal/operators/throttle.ts", "node_modules/rxjs/src/internal/operators/throttleTime.ts", "node_modules/rxjs/src/internal/operators/withLatestFrom.ts", "node_modules/rxjs/src/internal/operators/zip.ts", "node_modules/rxjs/src/internal/operators/zipWith.ts", "src/templates/assets/javascripts/browser/document/index.ts", "src/templates/assets/javascripts/browser/element/_/index.ts", "src/templates/assets/javascripts/browser/element/focus/index.ts", "src/templates/assets/javascripts/browser/element/hover/index.ts", "src/templates/assets/javascripts/browser/element/offset/_/index.ts", "src/templates/assets/javascripts/browser/element/offset/content/index.ts", "src/templates/assets/javascripts/utilities/h/index.ts", "src/templates/assets/javascripts/utilities/round/index.ts", "src/templates/assets/javascripts/browser/script/index.ts", "src/templates/assets/javascripts/browser/element/size/_/index.ts", "src/templates/assets/javascripts/browser/element/size/content/index.ts", 
"src/templates/assets/javascripts/browser/element/visibility/index.ts", "src/templates/assets/javascripts/browser/toggle/index.ts", "src/templates/assets/javascripts/browser/keyboard/index.ts", "src/templates/assets/javascripts/browser/location/_/index.ts", "src/templates/assets/javascripts/browser/location/hash/index.ts", "src/templates/assets/javascripts/browser/media/index.ts", "src/templates/assets/javascripts/browser/request/index.ts", "src/templates/assets/javascripts/browser/viewport/offset/index.ts", "src/templates/assets/javascripts/browser/viewport/size/index.ts", "src/templates/assets/javascripts/browser/viewport/_/index.ts", "src/templates/assets/javascripts/browser/viewport/at/index.ts", "src/templates/assets/javascripts/browser/worker/index.ts", "src/templates/assets/javascripts/_/index.ts", "src/templates/assets/javascripts/components/_/index.ts", "src/templates/assets/javascripts/components/announce/index.ts", "src/templates/assets/javascripts/components/consent/index.ts", "src/templates/assets/javascripts/templates/tooltip/index.tsx", "src/templates/assets/javascripts/templates/annotation/index.tsx", "src/templates/assets/javascripts/templates/clipboard/index.tsx", "src/templates/assets/javascripts/templates/search/index.tsx", "src/templates/assets/javascripts/templates/source/index.tsx", "src/templates/assets/javascripts/templates/tabbed/index.tsx", "src/templates/assets/javascripts/templates/table/index.tsx", "src/templates/assets/javascripts/templates/version/index.tsx", "src/templates/assets/javascripts/components/tooltip/index.ts", "src/templates/assets/javascripts/components/content/annotation/_/index.ts", "src/templates/assets/javascripts/components/content/annotation/list/index.ts", "src/templates/assets/javascripts/components/content/annotation/block/index.ts", "src/templates/assets/javascripts/components/content/code/_/index.ts", "src/templates/assets/javascripts/components/content/details/index.ts", 
"src/templates/assets/javascripts/components/content/mermaid/index.css", "src/templates/assets/javascripts/components/content/mermaid/index.ts", "src/templates/assets/javascripts/components/content/table/index.ts", "src/templates/assets/javascripts/components/content/tabs/index.ts", "src/templates/assets/javascripts/components/content/_/index.ts", "src/templates/assets/javascripts/components/dialog/index.ts", "src/templates/assets/javascripts/components/header/_/index.ts", "src/templates/assets/javascripts/components/header/title/index.ts", "src/templates/assets/javascripts/components/main/index.ts", "src/templates/assets/javascripts/components/palette/index.ts", "src/templates/assets/javascripts/components/progress/index.ts", "src/templates/assets/javascripts/integrations/clipboard/index.ts", "src/templates/assets/javascripts/integrations/sitemap/index.ts", "src/templates/assets/javascripts/integrations/instant/index.ts", "src/templates/assets/javascripts/integrations/search/highlighter/index.ts", "src/templates/assets/javascripts/integrations/search/worker/message/index.ts", "src/templates/assets/javascripts/integrations/search/worker/_/index.ts", "src/templates/assets/javascripts/integrations/version/index.ts", "src/templates/assets/javascripts/components/search/query/index.ts", "src/templates/assets/javascripts/components/search/result/index.ts", "src/templates/assets/javascripts/components/search/share/index.ts", "src/templates/assets/javascripts/components/search/suggest/index.ts", "src/templates/assets/javascripts/components/search/_/index.ts", "src/templates/assets/javascripts/components/search/highlight/index.ts", "src/templates/assets/javascripts/components/sidebar/index.ts", "src/templates/assets/javascripts/components/source/facts/github/index.ts", "src/templates/assets/javascripts/components/source/facts/gitlab/index.ts", "src/templates/assets/javascripts/components/source/facts/_/index.ts", 
"src/templates/assets/javascripts/components/source/_/index.ts", "src/templates/assets/javascripts/components/tabs/index.ts", "src/templates/assets/javascripts/components/toc/index.ts", "src/templates/assets/javascripts/components/top/index.ts", "src/templates/assets/javascripts/patches/ellipsis/index.ts", "src/templates/assets/javascripts/patches/indeterminate/index.ts", "src/templates/assets/javascripts/patches/scrollfix/index.ts", "src/templates/assets/javascripts/patches/scrolllock/index.ts", "src/templates/assets/javascripts/polyfills/index.ts"], + "sourcesContent": ["(function (global, factory) {\n typeof exports === 'object' && typeof module !== 'undefined' ? factory() :\n typeof define === 'function' && define.amd ? define(factory) :\n (factory());\n}(this, (function () { 'use strict';\n\n /**\n * Applies the :focus-visible polyfill at the given scope.\n * A scope in this case is either the top-level Document or a Shadow Root.\n *\n * @param {(Document|ShadowRoot)} scope\n * @see https://github.com/WICG/focus-visible\n */\n function applyFocusVisiblePolyfill(scope) {\n var hadKeyboardEvent = true;\n var hadFocusVisibleRecently = false;\n var hadFocusVisibleRecentlyTimeout = null;\n\n var inputTypesAllowlist = {\n text: true,\n search: true,\n url: true,\n tel: true,\n email: true,\n password: true,\n number: true,\n date: true,\n month: true,\n week: true,\n time: true,\n datetime: true,\n 'datetime-local': true\n };\n\n /**\n * Helper function for legacy browsers and iframes which sometimes focus\n * elements like document, body, and non-interactive SVG.\n * @param {Element} el\n */\n function isValidFocusTarget(el) {\n if (\n el &&\n el !== document &&\n el.nodeName !== 'HTML' &&\n el.nodeName !== 'BODY' &&\n 'classList' in el &&\n 'contains' in el.classList\n ) {\n return true;\n }\n return false;\n }\n\n /**\n * Computes whether the given element should automatically trigger the\n * `focus-visible` class being added, i.e. 
whether it should always match\n * `:focus-visible` when focused.\n * @param {Element} el\n * @return {boolean}\n */\n function focusTriggersKeyboardModality(el) {\n var type = el.type;\n var tagName = el.tagName;\n\n if (tagName === 'INPUT' && inputTypesAllowlist[type] && !el.readOnly) {\n return true;\n }\n\n if (tagName === 'TEXTAREA' && !el.readOnly) {\n return true;\n }\n\n if (el.isContentEditable) {\n return true;\n }\n\n return false;\n }\n\n /**\n * Add the `focus-visible` class to the given element if it was not added by\n * the author.\n * @param {Element} el\n */\n function addFocusVisibleClass(el) {\n if (el.classList.contains('focus-visible')) {\n return;\n }\n el.classList.add('focus-visible');\n el.setAttribute('data-focus-visible-added', '');\n }\n\n /**\n * Remove the `focus-visible` class from the given element if it was not\n * originally added by the author.\n * @param {Element} el\n */\n function removeFocusVisibleClass(el) {\n if (!el.hasAttribute('data-focus-visible-added')) {\n return;\n }\n el.classList.remove('focus-visible');\n el.removeAttribute('data-focus-visible-added');\n }\n\n /**\n * If the most recent user interaction was via the keyboard;\n * and the key press did not include a meta, alt/option, or control key;\n * then the modality is keyboard. 
Otherwise, the modality is not keyboard.\n * Apply `focus-visible` to any current active element and keep track\n * of our keyboard modality state with `hadKeyboardEvent`.\n * @param {KeyboardEvent} e\n */\n function onKeyDown(e) {\n if (e.metaKey || e.altKey || e.ctrlKey) {\n return;\n }\n\n if (isValidFocusTarget(scope.activeElement)) {\n addFocusVisibleClass(scope.activeElement);\n }\n\n hadKeyboardEvent = true;\n }\n\n /**\n * If at any point a user clicks with a pointing device, ensure that we change\n * the modality away from keyboard.\n * This avoids the situation where a user presses a key on an already focused\n * element, and then clicks on a different element, focusing it with a\n * pointing device, while we still think we're in keyboard modality.\n * @param {Event} e\n */\n function onPointerDown(e) {\n hadKeyboardEvent = false;\n }\n\n /**\n * On `focus`, add the `focus-visible` class to the target if:\n * - the target received focus as a result of keyboard navigation, or\n * - the event target is an element that will likely require interaction\n * via the keyboard (e.g. 
a text box)\n * @param {Event} e\n */\n function onFocus(e) {\n // Prevent IE from focusing the document or HTML element.\n if (!isValidFocusTarget(e.target)) {\n return;\n }\n\n if (hadKeyboardEvent || focusTriggersKeyboardModality(e.target)) {\n addFocusVisibleClass(e.target);\n }\n }\n\n /**\n * On `blur`, remove the `focus-visible` class from the target.\n * @param {Event} e\n */\n function onBlur(e) {\n if (!isValidFocusTarget(e.target)) {\n return;\n }\n\n if (\n e.target.classList.contains('focus-visible') ||\n e.target.hasAttribute('data-focus-visible-added')\n ) {\n // To detect a tab/window switch, we look for a blur event followed\n // rapidly by a visibility change.\n // If we don't see a visibility change within 100ms, it's probably a\n // regular focus change.\n hadFocusVisibleRecently = true;\n window.clearTimeout(hadFocusVisibleRecentlyTimeout);\n hadFocusVisibleRecentlyTimeout = window.setTimeout(function() {\n hadFocusVisibleRecently = false;\n }, 100);\n removeFocusVisibleClass(e.target);\n }\n }\n\n /**\n * If the user changes tabs, keep track of whether or not the previously\n * focused element had .focus-visible.\n * @param {Event} e\n */\n function onVisibilityChange(e) {\n if (document.visibilityState === 'hidden') {\n // If the tab becomes active again, the browser will handle calling focus\n // on the element (Safari actually calls it twice).\n // If this tab change caused a blur on an element with focus-visible,\n // re-apply the class when the user switches back to the tab.\n if (hadFocusVisibleRecently) {\n hadKeyboardEvent = true;\n }\n addInitialPointerMoveListeners();\n }\n }\n\n /**\n * Add a group of listeners to detect usage of any pointing devices.\n * These listeners will be added when the polyfill first loads, and anytime\n * the window is blurred, so that they are active when the window regains\n * focus.\n */\n function addInitialPointerMoveListeners() {\n document.addEventListener('mousemove', onInitialPointerMove);\n 
document.addEventListener('mousedown', onInitialPointerMove);\n document.addEventListener('mouseup', onInitialPointerMove);\n document.addEventListener('pointermove', onInitialPointerMove);\n document.addEventListener('pointerdown', onInitialPointerMove);\n document.addEventListener('pointerup', onInitialPointerMove);\n document.addEventListener('touchmove', onInitialPointerMove);\n document.addEventListener('touchstart', onInitialPointerMove);\n document.addEventListener('touchend', onInitialPointerMove);\n }\n\n function removeInitialPointerMoveListeners() {\n document.removeEventListener('mousemove', onInitialPointerMove);\n document.removeEventListener('mousedown', onInitialPointerMove);\n document.removeEventListener('mouseup', onInitialPointerMove);\n document.removeEventListener('pointermove', onInitialPointerMove);\n document.removeEventListener('pointerdown', onInitialPointerMove);\n document.removeEventListener('pointerup', onInitialPointerMove);\n document.removeEventListener('touchmove', onInitialPointerMove);\n document.removeEventListener('touchstart', onInitialPointerMove);\n document.removeEventListener('touchend', onInitialPointerMove);\n }\n\n /**\n * When the polfyill first loads, assume the user is in keyboard modality.\n * If any event is received from a pointing device (e.g. mouse, pointer,\n * touch), turn off keyboard modality.\n * This accounts for situations where focus enters the page from the URL bar.\n * @param {Event} e\n */\n function onInitialPointerMove(e) {\n // Work around a Safari quirk that fires a mousemove on whenever the\n // window blurs, even if you're tabbing out of the page. \u00AF\\_(\u30C4)_/\u00AF\n if (e.target.nodeName && e.target.nodeName.toLowerCase() === 'html') {\n return;\n }\n\n hadKeyboardEvent = false;\n removeInitialPointerMoveListeners();\n }\n\n // For some kinds of state, we are interested in changes at the global scope\n // only. 
For example, global pointer input, global key presses and global\n // visibility change should affect the state at every scope:\n document.addEventListener('keydown', onKeyDown, true);\n document.addEventListener('mousedown', onPointerDown, true);\n document.addEventListener('pointerdown', onPointerDown, true);\n document.addEventListener('touchstart', onPointerDown, true);\n document.addEventListener('visibilitychange', onVisibilityChange, true);\n\n addInitialPointerMoveListeners();\n\n // For focus and blur, we specifically care about state changes in the local\n // scope. This is because focus / blur events that originate from within a\n // shadow root are not re-dispatched from the host element if it was already\n // the active element in its own scope:\n scope.addEventListener('focus', onFocus, true);\n scope.addEventListener('blur', onBlur, true);\n\n // We detect that a node is a ShadowRoot by ensuring that it is a\n // DocumentFragment and also has a host property. This check covers native\n // implementation and polyfill implementation transparently. If we only cared\n // about the native implementation, we could just check if the scope was\n // an instance of a ShadowRoot.\n if (scope.nodeType === Node.DOCUMENT_FRAGMENT_NODE && scope.host) {\n // Since a ShadowRoot is a special kind of DocumentFragment, it does not\n // have a root element to add a class to. 
So, we add this attribute to the\n // host element instead:\n scope.host.setAttribute('data-js-focus-visible', '');\n } else if (scope.nodeType === Node.DOCUMENT_NODE) {\n document.documentElement.classList.add('js-focus-visible');\n document.documentElement.setAttribute('data-js-focus-visible', '');\n }\n }\n\n // It is important to wrap all references to global window and document in\n // these checks to support server-side rendering use cases\n // @see https://github.com/WICG/focus-visible/issues/199\n if (typeof window !== 'undefined' && typeof document !== 'undefined') {\n // Make the polyfill helper globally available. This can be used as a signal\n // to interested libraries that wish to coordinate with the polyfill for e.g.,\n // applying the polyfill to a shadow root:\n window.applyFocusVisiblePolyfill = applyFocusVisiblePolyfill;\n\n // Notify interested libraries of the polyfill's presence, in case the\n // polyfill was loaded lazily:\n var event;\n\n try {\n event = new CustomEvent('focus-visible-polyfill-ready');\n } catch (error) {\n // IE11 does not support using CustomEvent as a constructor directly:\n event = document.createEvent('CustomEvent');\n event.initCustomEvent('focus-visible-polyfill-ready', false, false, {});\n }\n\n window.dispatchEvent(event);\n }\n\n if (typeof document !== 'undefined') {\n // Apply the polyfill to the global document, so that no JavaScript\n // coordination is required to use the polyfill in the top-level document:\n applyFocusVisiblePolyfill(document);\n }\n\n})));\n", "/*!\n * clipboard.js v2.0.11\n * https://clipboardjs.com/\n *\n * Licensed MIT \u00A9 Zeno Rocha\n */\n(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"ClipboardJS\"] = 
factory();\n\telse\n\t\troot[\"ClipboardJS\"] = factory();\n})(this, function() {\nreturn /******/ (function() { // webpackBootstrap\n/******/ \tvar __webpack_modules__ = ({\n\n/***/ 686:\n/***/ (function(__unused_webpack_module, __webpack_exports__, __webpack_require__) {\n\n\"use strict\";\n\n// EXPORTS\n__webpack_require__.d(__webpack_exports__, {\n \"default\": function() { return /* binding */ clipboard; }\n});\n\n// EXTERNAL MODULE: ./node_modules/tiny-emitter/index.js\nvar tiny_emitter = __webpack_require__(279);\nvar tiny_emitter_default = /*#__PURE__*/__webpack_require__.n(tiny_emitter);\n// EXTERNAL MODULE: ./node_modules/good-listener/src/listen.js\nvar listen = __webpack_require__(370);\nvar listen_default = /*#__PURE__*/__webpack_require__.n(listen);\n// EXTERNAL MODULE: ./node_modules/select/src/select.js\nvar src_select = __webpack_require__(817);\nvar select_default = /*#__PURE__*/__webpack_require__.n(src_select);\n;// CONCATENATED MODULE: ./src/common/command.js\n/**\n * Executes a given operation type.\n * @param {String} type\n * @return {Boolean}\n */\nfunction command(type) {\n try {\n return document.execCommand(type);\n } catch (err) {\n return false;\n }\n}\n;// CONCATENATED MODULE: ./src/actions/cut.js\n\n\n/**\n * Cut action wrapper.\n * @param {String|HTMLElement} target\n * @return {String}\n */\n\nvar ClipboardActionCut = function ClipboardActionCut(target) {\n var selectedText = select_default()(target);\n command('cut');\n return selectedText;\n};\n\n/* harmony default export */ var actions_cut = (ClipboardActionCut);\n;// CONCATENATED MODULE: ./src/common/create-fake-element.js\n/**\n * Creates a fake textarea element with a value.\n * @param {String} value\n * @return {HTMLElement}\n */\nfunction createFakeElement(value) {\n var isRTL = document.documentElement.getAttribute('dir') === 'rtl';\n var fakeElement = document.createElement('textarea'); // Prevent zooming on iOS\n\n fakeElement.style.fontSize = '12pt'; // Reset box 
model\n\n fakeElement.style.border = '0';\n fakeElement.style.padding = '0';\n fakeElement.style.margin = '0'; // Move element out of screen horizontally\n\n fakeElement.style.position = 'absolute';\n fakeElement.style[isRTL ? 'right' : 'left'] = '-9999px'; // Move element to the same position vertically\n\n var yPosition = window.pageYOffset || document.documentElement.scrollTop;\n fakeElement.style.top = \"\".concat(yPosition, \"px\");\n fakeElement.setAttribute('readonly', '');\n fakeElement.value = value;\n return fakeElement;\n}\n;// CONCATENATED MODULE: ./src/actions/copy.js\n\n\n\n/**\n * Create fake copy action wrapper using a fake element.\n * @param {String} target\n * @param {Object} options\n * @return {String}\n */\n\nvar fakeCopyAction = function fakeCopyAction(value, options) {\n var fakeElement = createFakeElement(value);\n options.container.appendChild(fakeElement);\n var selectedText = select_default()(fakeElement);\n command('copy');\n fakeElement.remove();\n return selectedText;\n};\n/**\n * Copy action wrapper.\n * @param {String|HTMLElement} target\n * @param {Object} options\n * @return {String}\n */\n\n\nvar ClipboardActionCopy = function ClipboardActionCopy(target) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {\n container: document.body\n };\n var selectedText = '';\n\n if (typeof target === 'string') {\n selectedText = fakeCopyAction(target, options);\n } else if (target instanceof HTMLInputElement && !['text', 'search', 'url', 'tel', 'password'].includes(target === null || target === void 0 ? void 0 : target.type)) {\n // If input type doesn't support `setSelectionRange`. Simulate it. 
https://developer.mozilla.org/en-US/docs/Web/API/HTMLInputElement/setSelectionRange\n selectedText = fakeCopyAction(target.value, options);\n } else {\n selectedText = select_default()(target);\n command('copy');\n }\n\n return selectedText;\n};\n\n/* harmony default export */ var actions_copy = (ClipboardActionCopy);\n;// CONCATENATED MODULE: ./src/actions/default.js\nfunction _typeof(obj) { \"@babel/helpers - typeof\"; if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj; }; } return _typeof(obj); }\n\n\n\n/**\n * Inner function which performs selection from either `text` or `target`\n * properties and then executes copy or cut operations.\n * @param {Object} options\n */\n\nvar ClipboardActionDefault = function ClipboardActionDefault() {\n var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};\n // Defines base properties passed from constructor.\n var _options$action = options.action,\n action = _options$action === void 0 ? 'copy' : _options$action,\n container = options.container,\n target = options.target,\n text = options.text; // Sets the `action` to be performed which can be either 'copy' or 'cut'.\n\n if (action !== 'copy' && action !== 'cut') {\n throw new Error('Invalid \"action\" value, use either \"copy\" or \"cut\"');\n } // Sets the `target` property using an element that will be have its content copied.\n\n\n if (target !== undefined) {\n if (target && _typeof(target) === 'object' && target.nodeType === 1) {\n if (action === 'copy' && target.hasAttribute('disabled')) {\n throw new Error('Invalid \"target\" attribute. 
Please use \"readonly\" instead of \"disabled\" attribute');\n }\n\n if (action === 'cut' && (target.hasAttribute('readonly') || target.hasAttribute('disabled'))) {\n throw new Error('Invalid \"target\" attribute. You can\\'t cut text from elements with \"readonly\" or \"disabled\" attributes');\n }\n } else {\n throw new Error('Invalid \"target\" value, use a valid Element');\n }\n } // Define selection strategy based on `text` property.\n\n\n if (text) {\n return actions_copy(text, {\n container: container\n });\n } // Defines which selection strategy based on `target` property.\n\n\n if (target) {\n return action === 'cut' ? actions_cut(target) : actions_copy(target, {\n container: container\n });\n }\n};\n\n/* harmony default export */ var actions_default = (ClipboardActionDefault);\n;// CONCATENATED MODULE: ./src/clipboard.js\nfunction clipboard_typeof(obj) { \"@babel/helpers - typeof\"; if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { clipboard_typeof = function _typeof(obj) { return typeof obj; }; } else { clipboard_typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
\"symbol\" : typeof obj; }; } return clipboard_typeof(obj); }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }\n\nfunction _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function\"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }\n\nfunction _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }\n\nfunction _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = _getPrototypeOf(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = _getPrototypeOf(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn(this, result); }; }\n\nfunction _possibleConstructorReturn(self, call) { if (call && (clipboard_typeof(call) === \"object\" || typeof call === \"function\")) { return call; } return _assertThisInitialized(self); }\n\nfunction _assertThisInitialized(self) { if 
(self === void 0) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return self; }\n\nfunction _isNativeReflectConstruct() { if (typeof Reflect === \"undefined\" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === \"function\") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }\n\nfunction _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }\n\n\n\n\n\n\n/**\n * Helper function to retrieve attribute value.\n * @param {String} suffix\n * @param {Element} element\n */\n\nfunction getAttributeValue(suffix, element) {\n var attribute = \"data-clipboard-\".concat(suffix);\n\n if (!element.hasAttribute(attribute)) {\n return;\n }\n\n return element.getAttribute(attribute);\n}\n/**\n * Base class which takes one or more elements, adds event listeners to them,\n * and instantiates a new `ClipboardAction` on each click.\n */\n\n\nvar Clipboard = /*#__PURE__*/function (_Emitter) {\n _inherits(Clipboard, _Emitter);\n\n var _super = _createSuper(Clipboard);\n\n /**\n * @param {String|HTMLElement|HTMLCollection|NodeList} trigger\n * @param {Object} options\n */\n function Clipboard(trigger, options) {\n var _this;\n\n _classCallCheck(this, Clipboard);\n\n _this = _super.call(this);\n\n _this.resolveOptions(options);\n\n _this.listenClick(trigger);\n\n return _this;\n }\n /**\n * Defines if attributes would be resolved using internal setter functions\n * or custom functions that were passed in the constructor.\n * @param {Object} options\n */\n\n\n _createClass(Clipboard, [{\n key: \"resolveOptions\",\n value: function resolveOptions() {\n var options = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : {};\n this.action = typeof options.action === 'function' ? options.action : this.defaultAction;\n this.target = typeof options.target === 'function' ? options.target : this.defaultTarget;\n this.text = typeof options.text === 'function' ? options.text : this.defaultText;\n this.container = clipboard_typeof(options.container) === 'object' ? options.container : document.body;\n }\n /**\n * Adds a click event listener to the passed trigger.\n * @param {String|HTMLElement|HTMLCollection|NodeList} trigger\n */\n\n }, {\n key: \"listenClick\",\n value: function listenClick(trigger) {\n var _this2 = this;\n\n this.listener = listen_default()(trigger, 'click', function (e) {\n return _this2.onClick(e);\n });\n }\n /**\n * Defines a new `ClipboardAction` on each click event.\n * @param {Event} e\n */\n\n }, {\n key: \"onClick\",\n value: function onClick(e) {\n var trigger = e.delegateTarget || e.currentTarget;\n var action = this.action(trigger) || 'copy';\n var text = actions_default({\n action: action,\n container: this.container,\n target: this.target(trigger),\n text: this.text(trigger)\n }); // Fires an event based on the copy operation result.\n\n this.emit(text ? 
'success' : 'error', {\n action: action,\n text: text,\n trigger: trigger,\n clearSelection: function clearSelection() {\n if (trigger) {\n trigger.focus();\n }\n\n window.getSelection().removeAllRanges();\n }\n });\n }\n /**\n * Default `action` lookup function.\n * @param {Element} trigger\n */\n\n }, {\n key: \"defaultAction\",\n value: function defaultAction(trigger) {\n return getAttributeValue('action', trigger);\n }\n /**\n * Default `target` lookup function.\n * @param {Element} trigger\n */\n\n }, {\n key: \"defaultTarget\",\n value: function defaultTarget(trigger) {\n var selector = getAttributeValue('target', trigger);\n\n if (selector) {\n return document.querySelector(selector);\n }\n }\n /**\n * Allow fire programmatically a copy action\n * @param {String|HTMLElement} target\n * @param {Object} options\n * @returns Text copied.\n */\n\n }, {\n key: \"defaultText\",\n\n /**\n * Default `text` lookup function.\n * @param {Element} trigger\n */\n value: function defaultText(trigger) {\n return getAttributeValue('text', trigger);\n }\n /**\n * Destroy lifecycle.\n */\n\n }, {\n key: \"destroy\",\n value: function destroy() {\n this.listener.destroy();\n }\n }], [{\n key: \"copy\",\n value: function copy(target) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {\n container: document.body\n };\n return actions_copy(target, options);\n }\n /**\n * Allow fire programmatically a cut action\n * @param {String|HTMLElement} target\n * @returns Text cutted.\n */\n\n }, {\n key: \"cut\",\n value: function cut(target) {\n return actions_cut(target);\n }\n /**\n * Returns the support of the given action, or all actions if no action is\n * given.\n * @param {String} [action]\n */\n\n }, {\n key: \"isSupported\",\n value: function isSupported() {\n var action = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : ['copy', 'cut'];\n var actions = typeof action === 'string' ? 
[action] : action;\n var support = !!document.queryCommandSupported;\n actions.forEach(function (action) {\n support = support && !!document.queryCommandSupported(action);\n });\n return support;\n }\n }]);\n\n return Clipboard;\n}((tiny_emitter_default()));\n\n/* harmony default export */ var clipboard = (Clipboard);\n\n/***/ }),\n\n/***/ 828:\n/***/ (function(module) {\n\nvar DOCUMENT_NODE_TYPE = 9;\n\n/**\n * A polyfill for Element.matches()\n */\nif (typeof Element !== 'undefined' && !Element.prototype.matches) {\n var proto = Element.prototype;\n\n proto.matches = proto.matchesSelector ||\n proto.mozMatchesSelector ||\n proto.msMatchesSelector ||\n proto.oMatchesSelector ||\n proto.webkitMatchesSelector;\n}\n\n/**\n * Finds the closest parent that matches a selector.\n *\n * @param {Element} element\n * @param {String} selector\n * @return {Function}\n */\nfunction closest (element, selector) {\n while (element && element.nodeType !== DOCUMENT_NODE_TYPE) {\n if (typeof element.matches === 'function' &&\n element.matches(selector)) {\n return element;\n }\n element = element.parentNode;\n }\n}\n\nmodule.exports = closest;\n\n\n/***/ }),\n\n/***/ 438:\n/***/ (function(module, __unused_webpack_exports, __webpack_require__) {\n\nvar closest = __webpack_require__(828);\n\n/**\n * Delegates event to a selector.\n *\n * @param {Element} element\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @param {Boolean} useCapture\n * @return {Object}\n */\nfunction _delegate(element, selector, type, callback, useCapture) {\n var listenerFn = listener.apply(this, arguments);\n\n element.addEventListener(type, listenerFn, useCapture);\n\n return {\n destroy: function() {\n element.removeEventListener(type, listenerFn, useCapture);\n }\n }\n}\n\n/**\n * Delegates event to a selector.\n *\n * @param {Element|String|Array} [elements]\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @param {Boolean} 
useCapture\n * @return {Object}\n */\nfunction delegate(elements, selector, type, callback, useCapture) {\n // Handle the regular Element usage\n if (typeof elements.addEventListener === 'function') {\n return _delegate.apply(null, arguments);\n }\n\n // Handle Element-less usage, it defaults to global delegation\n if (typeof type === 'function') {\n // Use `document` as the first parameter, then apply arguments\n // This is a short way to .unshift `arguments` without running into deoptimizations\n return _delegate.bind(null, document).apply(null, arguments);\n }\n\n // Handle Selector-based usage\n if (typeof elements === 'string') {\n elements = document.querySelectorAll(elements);\n }\n\n // Handle Array-like based usage\n return Array.prototype.map.call(elements, function (element) {\n return _delegate(element, selector, type, callback, useCapture);\n });\n}\n\n/**\n * Finds closest match and invokes callback.\n *\n * @param {Element} element\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @return {Function}\n */\nfunction listener(element, selector, type, callback) {\n return function(e) {\n e.delegateTarget = closest(e.target, selector);\n\n if (e.delegateTarget) {\n callback.call(element, e);\n }\n }\n}\n\nmodule.exports = delegate;\n\n\n/***/ }),\n\n/***/ 879:\n/***/ (function(__unused_webpack_module, exports) {\n\n/**\n * Check if argument is a HTML element.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.node = function(value) {\n return value !== undefined\n && value instanceof HTMLElement\n && value.nodeType === 1;\n};\n\n/**\n * Check if argument is a list of HTML elements.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.nodeList = function(value) {\n var type = Object.prototype.toString.call(value);\n\n return value !== undefined\n && (type === '[object NodeList]' || type === '[object HTMLCollection]')\n && ('length' in value)\n && (value.length === 0 || 
exports.node(value[0]));\n};\n\n/**\n * Check if argument is a string.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.string = function(value) {\n return typeof value === 'string'\n || value instanceof String;\n};\n\n/**\n * Check if argument is a function.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.fn = function(value) {\n var type = Object.prototype.toString.call(value);\n\n return type === '[object Function]';\n};\n\n\n/***/ }),\n\n/***/ 370:\n/***/ (function(module, __unused_webpack_exports, __webpack_require__) {\n\nvar is = __webpack_require__(879);\nvar delegate = __webpack_require__(438);\n\n/**\n * Validates all params and calls the right\n * listener function based on its target type.\n *\n * @param {String|HTMLElement|HTMLCollection|NodeList} target\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listen(target, type, callback) {\n if (!target && !type && !callback) {\n throw new Error('Missing required arguments');\n }\n\n if (!is.string(type)) {\n throw new TypeError('Second argument must be a String');\n }\n\n if (!is.fn(callback)) {\n throw new TypeError('Third argument must be a Function');\n }\n\n if (is.node(target)) {\n return listenNode(target, type, callback);\n }\n else if (is.nodeList(target)) {\n return listenNodeList(target, type, callback);\n }\n else if (is.string(target)) {\n return listenSelector(target, type, callback);\n }\n else {\n throw new TypeError('First argument must be a String, HTMLElement, HTMLCollection, or NodeList');\n }\n}\n\n/**\n * Adds an event listener to a HTML element\n * and returns a remove listener function.\n *\n * @param {HTMLElement} node\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenNode(node, type, callback) {\n node.addEventListener(type, callback);\n\n return {\n destroy: function() {\n node.removeEventListener(type, callback);\n }\n }\n}\n\n/**\n * Add an event listener 
to a list of HTML elements\n * and returns a remove listener function.\n *\n * @param {NodeList|HTMLCollection} nodeList\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenNodeList(nodeList, type, callback) {\n Array.prototype.forEach.call(nodeList, function(node) {\n node.addEventListener(type, callback);\n });\n\n return {\n destroy: function() {\n Array.prototype.forEach.call(nodeList, function(node) {\n node.removeEventListener(type, callback);\n });\n }\n }\n}\n\n/**\n * Add an event listener to a selector\n * and returns a remove listener function.\n *\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenSelector(selector, type, callback) {\n return delegate(document.body, selector, type, callback);\n}\n\nmodule.exports = listen;\n\n\n/***/ }),\n\n/***/ 817:\n/***/ (function(module) {\n\nfunction select(element) {\n var selectedText;\n\n if (element.nodeName === 'SELECT') {\n element.focus();\n\n selectedText = element.value;\n }\n else if (element.nodeName === 'INPUT' || element.nodeName === 'TEXTAREA') {\n var isReadOnly = element.hasAttribute('readonly');\n\n if (!isReadOnly) {\n element.setAttribute('readonly', '');\n }\n\n element.select();\n element.setSelectionRange(0, element.value.length);\n\n if (!isReadOnly) {\n element.removeAttribute('readonly');\n }\n\n selectedText = element.value;\n }\n else {\n if (element.hasAttribute('contenteditable')) {\n element.focus();\n }\n\n var selection = window.getSelection();\n var range = document.createRange();\n\n range.selectNodeContents(element);\n selection.removeAllRanges();\n selection.addRange(range);\n\n selectedText = selection.toString();\n }\n\n return selectedText;\n}\n\nmodule.exports = select;\n\n\n/***/ }),\n\n/***/ 279:\n/***/ (function(module) {\n\nfunction E () {\n // Keep this empty so it's easier to inherit from\n // (via https://github.com/lipsmack from 
https://github.com/scottcorgan/tiny-emitter/issues/3)\n}\n\nE.prototype = {\n on: function (name, callback, ctx) {\n var e = this.e || (this.e = {});\n\n (e[name] || (e[name] = [])).push({\n fn: callback,\n ctx: ctx\n });\n\n return this;\n },\n\n once: function (name, callback, ctx) {\n var self = this;\n function listener () {\n self.off(name, listener);\n callback.apply(ctx, arguments);\n };\n\n listener._ = callback\n return this.on(name, listener, ctx);\n },\n\n emit: function (name) {\n var data = [].slice.call(arguments, 1);\n var evtArr = ((this.e || (this.e = {}))[name] || []).slice();\n var i = 0;\n var len = evtArr.length;\n\n for (i; i < len; i++) {\n evtArr[i].fn.apply(evtArr[i].ctx, data);\n }\n\n return this;\n },\n\n off: function (name, callback) {\n var e = this.e || (this.e = {});\n var evts = e[name];\n var liveEvents = [];\n\n if (evts && callback) {\n for (var i = 0, len = evts.length; i < len; i++) {\n if (evts[i].fn !== callback && evts[i].fn._ !== callback)\n liveEvents.push(evts[i]);\n }\n }\n\n // Remove event from queue to prevent memory leak\n // Suggested by https://github.com/lazd\n // Ref: https://github.com/scottcorgan/tiny-emitter/commit/c6ebfaa9bc973b33d110a84a307742b7cf94c953#commitcomment-5024910\n\n (liveEvents.length)\n ? 
e[name] = liveEvents\n : delete e[name];\n\n return this;\n }\n};\n\nmodule.exports = E;\nmodule.exports.TinyEmitter = E;\n\n\n/***/ })\n\n/******/ \t});\n/************************************************************************/\n/******/ \t// The module cache\n/******/ \tvar __webpack_module_cache__ = {};\n/******/ \t\n/******/ \t// The require function\n/******/ \tfunction __webpack_require__(moduleId) {\n/******/ \t\t// Check if module is in cache\n/******/ \t\tif(__webpack_module_cache__[moduleId]) {\n/******/ \t\t\treturn __webpack_module_cache__[moduleId].exports;\n/******/ \t\t}\n/******/ \t\t// Create a new module (and put it into the cache)\n/******/ \t\tvar module = __webpack_module_cache__[moduleId] = {\n/******/ \t\t\t// no module.id needed\n/******/ \t\t\t// no module.loaded needed\n/******/ \t\t\texports: {}\n/******/ \t\t};\n/******/ \t\n/******/ \t\t// Execute the module function\n/******/ \t\t__webpack_modules__[moduleId](module, module.exports, __webpack_require__);\n/******/ \t\n/******/ \t\t// Return the exports of the module\n/******/ \t\treturn module.exports;\n/******/ \t}\n/******/ \t\n/************************************************************************/\n/******/ \t/* webpack/runtime/compat get default export */\n/******/ \t!function() {\n/******/ \t\t// getDefaultExport function for compatibility with non-harmony modules\n/******/ \t\t__webpack_require__.n = function(module) {\n/******/ \t\t\tvar getter = module && module.__esModule ?\n/******/ \t\t\t\tfunction() { return module['default']; } :\n/******/ \t\t\t\tfunction() { return module; };\n/******/ \t\t\t__webpack_require__.d(getter, { a: getter });\n/******/ \t\t\treturn getter;\n/******/ \t\t};\n/******/ \t}();\n/******/ \t\n/******/ \t/* webpack/runtime/define property getters */\n/******/ \t!function() {\n/******/ \t\t// define getter functions for harmony exports\n/******/ \t\t__webpack_require__.d = function(exports, definition) {\n/******/ \t\t\tfor(var key in definition) 
{\n/******/ \t\t\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n/******/ \t\t\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n/******/ \t\t\t\t}\n/******/ \t\t\t}\n/******/ \t\t};\n/******/ \t}();\n/******/ \t\n/******/ \t/* webpack/runtime/hasOwnProperty shorthand */\n/******/ \t!function() {\n/******/ \t\t__webpack_require__.o = function(obj, prop) { return Object.prototype.hasOwnProperty.call(obj, prop); }\n/******/ \t}();\n/******/ \t\n/************************************************************************/\n/******/ \t// module exports must be returned from runtime so entry inlining is disabled\n/******/ \t// startup\n/******/ \t// Load entry module and return exports\n/******/ \treturn __webpack_require__(686);\n/******/ })()\n.default;\n});", "/*!\n * escape-html\n * Copyright(c) 2012-2013 TJ Holowaychuk\n * Copyright(c) 2015 Andreas Lubbe\n * Copyright(c) 2015 Tiancheng \"Timothy\" Gu\n * MIT Licensed\n */\n\n'use strict';\n\n/**\n * Module variables.\n * @private\n */\n\nvar matchHtmlRegExp = /[\"'&<>]/;\n\n/**\n * Module exports.\n * @public\n */\n\nmodule.exports = escapeHtml;\n\n/**\n * Escape special characters in the given string of html.\n *\n * @param {string} string The string to escape for inserting into HTML\n * @return {string}\n * @public\n */\n\nfunction escapeHtml(string) {\n var str = '' + string;\n var match = matchHtmlRegExp.exec(str);\n\n if (!match) {\n return str;\n }\n\n var escape;\n var html = '';\n var index = 0;\n var lastIndex = 0;\n\n for (index = match.index; index < str.length; index++) {\n switch (str.charCodeAt(index)) {\n case 34: // \"\n escape = '"';\n break;\n case 38: // &\n escape = '&';\n break;\n case 39: // '\n escape = ''';\n break;\n case 60: // <\n escape = '<';\n break;\n case 62: // >\n escape = '>';\n break;\n default:\n continue;\n }\n\n if (lastIndex !== index) {\n html += str.substring(lastIndex, index);\n }\n\n lastIndex = 
index + 1;\n html += escape;\n }\n\n return lastIndex !== index\n ? html + str.substring(lastIndex, index)\n : html;\n}\n", "/*\n * Copyright (c) 2016-2024 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport \"focus-visible\"\n\nimport {\n EMPTY,\n NEVER,\n Observable,\n Subject,\n defer,\n delay,\n filter,\n map,\n merge,\n mergeWith,\n shareReplay,\n switchMap\n} from \"rxjs\"\n\nimport { configuration, feature } from \"./_\"\nimport {\n at,\n getActiveElement,\n getOptionalElement,\n requestJSON,\n setLocation,\n setToggle,\n watchDocument,\n watchKeyboard,\n watchLocation,\n watchLocationTarget,\n watchMedia,\n watchPrint,\n watchScript,\n watchViewport\n} from \"./browser\"\nimport {\n getComponentElement,\n getComponentElements,\n mountAnnounce,\n mountBackToTop,\n mountConsent,\n mountContent,\n mountDialog,\n mountHeader,\n mountHeaderTitle,\n mountPalette,\n mountProgress,\n mountSearch,\n 
mountSearchHiglight,\n mountSidebar,\n mountSource,\n mountTableOfContents,\n mountTabs,\n watchHeader,\n watchMain\n} from \"./components\"\nimport {\n SearchIndex,\n setupClipboardJS,\n setupInstantNavigation,\n setupVersionSelector\n} from \"./integrations\"\nimport {\n patchEllipsis,\n patchIndeterminate,\n patchScrollfix,\n patchScrolllock\n} from \"./patches\"\nimport \"./polyfills\"\n\n/* ----------------------------------------------------------------------------\n * Functions - @todo refactor\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch search index\n *\n * @returns Search index observable\n */\nfunction fetchSearchIndex(): Observable {\n if (location.protocol === \"file:\") {\n return watchScript(\n `${new URL(\"search/search_index.js\", config.base)}`\n )\n .pipe(\n // @ts-ignore - @todo fix typings\n map(() => __index),\n shareReplay(1)\n )\n } else {\n return requestJSON(\n new URL(\"search/search_index.json\", config.base)\n )\n }\n}\n\n/* ----------------------------------------------------------------------------\n * Application\n * ------------------------------------------------------------------------- */\n\n/* Yay, JavaScript is available */\ndocument.documentElement.classList.remove(\"no-js\")\ndocument.documentElement.classList.add(\"js\")\n\n/* Set up navigation observables and subjects */\nconst document$ = watchDocument()\nconst location$ = watchLocation()\nconst target$ = watchLocationTarget(location$)\nconst keyboard$ = watchKeyboard()\n\n/* Set up media observables */\nconst viewport$ = watchViewport()\nconst tablet$ = watchMedia(\"(min-width: 960px)\")\nconst screen$ = watchMedia(\"(min-width: 1220px)\")\nconst print$ = watchPrint()\n\n/* Retrieve search index, if search is enabled */\nconst config = configuration()\nconst index$ = document.forms.namedItem(\"search\")\n ? 
fetchSearchIndex()\n : NEVER\n\n/* Set up Clipboard.js integration */\nconst alert$ = new Subject()\nsetupClipboardJS({ alert$ })\n\n/* Set up progress indicator */\nconst progress$ = new Subject()\n\n/* Set up instant navigation, if enabled */\nif (feature(\"navigation.instant\"))\n setupInstantNavigation({ location$, viewport$, progress$ })\n .subscribe(document$)\n\n/* Set up version selector */\nif (config.version?.provider === \"mike\")\n setupVersionSelector({ document$ })\n\n/* Always close drawer and search on navigation */\nmerge(location$, target$)\n .pipe(\n delay(125)\n )\n .subscribe(() => {\n setToggle(\"drawer\", false)\n setToggle(\"search\", false)\n })\n\n/* Set up global keyboard handlers */\nkeyboard$\n .pipe(\n filter(({ mode }) => mode === \"global\")\n )\n .subscribe(key => {\n switch (key.type) {\n\n /* Go to previous page */\n case \"p\":\n case \",\":\n const prev = getOptionalElement(\"link[rel=prev]\")\n if (typeof prev !== \"undefined\")\n setLocation(prev)\n break\n\n /* Go to next page */\n case \"n\":\n case \".\":\n const next = getOptionalElement(\"link[rel=next]\")\n if (typeof next !== \"undefined\")\n setLocation(next)\n break\n\n /* Expand navigation, see https://bit.ly/3ZjG5io */\n case \"Enter\":\n const active = getActiveElement()\n if (active instanceof HTMLLabelElement)\n active.click()\n }\n })\n\n/* Set up patches */\npatchEllipsis({ document$ })\npatchIndeterminate({ document$, tablet$ })\npatchScrollfix({ document$ })\npatchScrolllock({ viewport$, tablet$ })\n\n/* Set up header and main area observable */\nconst header$ = watchHeader(getComponentElement(\"header\"), { viewport$ })\nconst main$ = document$\n .pipe(\n map(() => getComponentElement(\"main\")),\n switchMap(el => watchMain(el, { viewport$, header$ })),\n shareReplay(1)\n )\n\n/* Set up control component observables */\nconst control$ = merge(\n\n /* Consent */\n ...getComponentElements(\"consent\")\n .map(el => mountConsent(el, { target$ })),\n\n /* Dialog 
*/\n ...getComponentElements(\"dialog\")\n .map(el => mountDialog(el, { alert$ })),\n\n /* Header */\n ...getComponentElements(\"header\")\n .map(el => mountHeader(el, { viewport$, header$, main$ })),\n\n /* Color palette */\n ...getComponentElements(\"palette\")\n .map(el => mountPalette(el)),\n\n /* Progress bar */\n ...getComponentElements(\"progress\")\n .map(el => mountProgress(el, { progress$ })),\n\n /* Search */\n ...getComponentElements(\"search\")\n .map(el => mountSearch(el, { index$, keyboard$ })),\n\n /* Repository information */\n ...getComponentElements(\"source\")\n .map(el => mountSource(el))\n)\n\n/* Set up content component observables */\nconst content$ = defer(() => merge(\n\n /* Announcement bar */\n ...getComponentElements(\"announce\")\n .map(el => mountAnnounce(el)),\n\n /* Content */\n ...getComponentElements(\"content\")\n .map(el => mountContent(el, { viewport$, target$, print$ })),\n\n /* Search highlighting */\n ...getComponentElements(\"content\")\n .map(el => feature(\"search.highlight\")\n ? mountSearchHiglight(el, { index$, location$ })\n : EMPTY\n ),\n\n /* Header title */\n ...getComponentElements(\"header-title\")\n .map(el => mountHeaderTitle(el, { viewport$, header$ })),\n\n /* Sidebar */\n ...getComponentElements(\"sidebar\")\n .map(el => el.getAttribute(\"data-md-type\") === \"navigation\"\n ? 
at(screen$, () => mountSidebar(el, { viewport$, header$, main$ }))\n : at(tablet$, () => mountSidebar(el, { viewport$, header$, main$ }))\n ),\n\n /* Navigation tabs */\n ...getComponentElements(\"tabs\")\n .map(el => mountTabs(el, { viewport$, header$ })),\n\n /* Table of contents */\n ...getComponentElements(\"toc\")\n .map(el => mountTableOfContents(el, {\n viewport$, header$, main$, target$\n })),\n\n /* Back-to-top button */\n ...getComponentElements(\"top\")\n .map(el => mountBackToTop(el, { viewport$, header$, main$, target$ }))\n))\n\n/* Set up component observables */\nconst component$ = document$\n .pipe(\n switchMap(() => content$),\n mergeWith(control$),\n shareReplay(1)\n )\n\n/* Subscribe to all components */\ncomponent$.subscribe()\n\n/* ----------------------------------------------------------------------------\n * Exports\n * ------------------------------------------------------------------------- */\n\nwindow.document$ = document$ /* Document observable */\nwindow.location$ = location$ /* Location subject */\nwindow.target$ = target$ /* Location target observable */\nwindow.keyboard$ = keyboard$ /* Keyboard observable */\nwindow.viewport$ = viewport$ /* Viewport observable */\nwindow.tablet$ = tablet$ /* Media tablet observable */\nwindow.screen$ = screen$ /* Media screen observable */\nwindow.print$ = print$ /* Media print observable */\nwindow.alert$ = alert$ /* Alert subject */\nwindow.progress$ = progress$ /* Progress indicator subject */\nwindow.component$ = component$ /* Component observable */\n", "/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise */\r\n\r\nvar extendStatics = function(d, b) {\r\n extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n return extendStatics(d, b);\r\n};\r\n\r\nexport function __extends(d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __());\r\n}\r\n\r\nexport var __assign = function() {\r\n __assign = Object.assign || function __assign(t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n }\r\n return __assign.apply(this, arguments);\r\n}\r\n\r\nexport function __rest(s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n}\r\n\r\nexport function __decorate(decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n}\r\n\r\nexport function __param(paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n}\r\n\r\nexport function __metadata(metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n}\r\n\r\nexport function __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\nexport function __generator(thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? 
y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n}\r\n\r\nexport var __createBinding = Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n});\r\n\r\nexport function __exportStar(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n}\r\n\r\nexport function __values(o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? 
\"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n}\r\n\r\nexport function __read(o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spread() {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spreadArrays() {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n}\r\n\r\nexport function __spreadArray(to, from, pack) {\r\n if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {\r\n if (ar || !(i in from)) {\r\n if (!ar) ar = Array.prototype.slice.call(from, 0, i);\r\n ar[i] = from[i];\r\n }\r\n }\r\n return to.concat(ar || Array.prototype.slice.call(from));\r\n}\r\n\r\nexport function __await(v) {\r\n return this instanceof __await ? 
(this.v = v, this) : new __await(v);\r\n}\r\n\r\nexport function __asyncGenerator(thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n}\r\n\r\nexport function __asyncDelegator(o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n}\r\n\r\nexport function __asyncValues(o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n}\r\n\r\nexport function __makeTemplateObject(cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n};\r\n\r\nvar __setModuleDefault = Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n};\r\n\r\nexport function __importStar(mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n}\r\n\r\nexport function __importDefault(mod) {\r\n return (mod && mod.__esModule) ? mod : { default: mod };\r\n}\r\n\r\nexport function __classPrivateFieldGet(receiver, state, kind, f) {\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\r\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? 
f.value : state.get(receiver);\r\n}\r\n\r\nexport function __classPrivateFieldSet(receiver, state, value, kind, f) {\r\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\r\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\r\n}\r\n", "/**\n * Returns true if the object is a function.\n * @param value The value to check\n */\nexport function isFunction(value: any): value is (...args: any[]) => any {\n return typeof value === 'function';\n}\n", "/**\n * Used to create Error subclasses until the community moves away from ES5.\n *\n * This is because compiling from TypeScript down to ES5 has issues with subclassing Errors\n * as well as other built-in types: https://github.com/Microsoft/TypeScript/issues/12123\n *\n * @param createImpl A factory function to create the actual constructor implementation. The returned\n * function should be a named function that calls `_super` internally.\n */\nexport function createErrorClass(createImpl: (_super: any) => any): T {\n const _super = (instance: any) => {\n Error.call(instance);\n instance.stack = new Error().stack;\n };\n\n const ctorFunc = createImpl(_super);\n ctorFunc.prototype = Object.create(Error.prototype);\n ctorFunc.prototype.constructor = ctorFunc;\n return ctorFunc;\n}\n", "import { createErrorClass } from './createErrorClass';\n\nexport interface UnsubscriptionError extends Error {\n readonly errors: any[];\n}\n\nexport interface UnsubscriptionErrorCtor {\n /**\n * @deprecated Internal implementation detail. 
Do not construct error instances.\n * Cannot be tagged as internal: https://github.com/ReactiveX/rxjs/issues/6269\n */\n new (errors: any[]): UnsubscriptionError;\n}\n\n/**\n * An error thrown when one or more errors have occurred during the\n * `unsubscribe` of a {@link Subscription}.\n */\nexport const UnsubscriptionError: UnsubscriptionErrorCtor = createErrorClass(\n (_super) =>\n function UnsubscriptionErrorImpl(this: any, errors: (Error | string)[]) {\n _super(this);\n this.message = errors\n ? `${errors.length} errors occurred during unsubscription:\n${errors.map((err, i) => `${i + 1}) ${err.toString()}`).join('\\n ')}`\n : '';\n this.name = 'UnsubscriptionError';\n this.errors = errors;\n }\n);\n", "/**\n * Removes an item from an array, mutating it.\n * @param arr The array to remove the item from\n * @param item The item to remove\n */\nexport function arrRemove(arr: T[] | undefined | null, item: T) {\n if (arr) {\n const index = arr.indexOf(item);\n 0 <= index && arr.splice(index, 1);\n }\n}\n", "import { isFunction } from './util/isFunction';\nimport { UnsubscriptionError } from './util/UnsubscriptionError';\nimport { SubscriptionLike, TeardownLogic, Unsubscribable } from './types';\nimport { arrRemove } from './util/arrRemove';\n\n/**\n * Represents a disposable resource, such as the execution of an Observable. 
A\n * Subscription has one important method, `unsubscribe`, that takes no argument\n * and just disposes the resource held by the subscription.\n *\n * Additionally, subscriptions may be grouped together through the `add()`\n * method, which will attach a child Subscription to the current Subscription.\n * When a Subscription is unsubscribed, all its children (and its grandchildren)\n * will be unsubscribed as well.\n *\n * @class Subscription\n */\nexport class Subscription implements SubscriptionLike {\n /** @nocollapse */\n public static EMPTY = (() => {\n const empty = new Subscription();\n empty.closed = true;\n return empty;\n })();\n\n /**\n * A flag to indicate whether this Subscription has already been unsubscribed.\n */\n public closed = false;\n\n private _parentage: Subscription[] | Subscription | null = null;\n\n /**\n * The list of registered finalizers to execute upon unsubscription. Adding and removing from this\n * list occurs in the {@link #add} and {@link #remove} methods.\n */\n private _finalizers: Exclude[] | null = null;\n\n /**\n * @param initialTeardown A function executed first as part of the finalization\n * process that is kicked off when {@link #unsubscribe} is called.\n */\n constructor(private initialTeardown?: () => void) {}\n\n /**\n * Disposes the resources held by the subscription. 
May, for instance, cancel\n * an ongoing Observable execution or cancel any other type of work that\n * started when the Subscription was created.\n * @return {void}\n */\n unsubscribe(): void {\n let errors: any[] | undefined;\n\n if (!this.closed) {\n this.closed = true;\n\n // Remove this from it's parents.\n const { _parentage } = this;\n if (_parentage) {\n this._parentage = null;\n if (Array.isArray(_parentage)) {\n for (const parent of _parentage) {\n parent.remove(this);\n }\n } else {\n _parentage.remove(this);\n }\n }\n\n const { initialTeardown: initialFinalizer } = this;\n if (isFunction(initialFinalizer)) {\n try {\n initialFinalizer();\n } catch (e) {\n errors = e instanceof UnsubscriptionError ? e.errors : [e];\n }\n }\n\n const { _finalizers } = this;\n if (_finalizers) {\n this._finalizers = null;\n for (const finalizer of _finalizers) {\n try {\n execFinalizer(finalizer);\n } catch (err) {\n errors = errors ?? [];\n if (err instanceof UnsubscriptionError) {\n errors = [...errors, ...err.errors];\n } else {\n errors.push(err);\n }\n }\n }\n }\n\n if (errors) {\n throw new UnsubscriptionError(errors);\n }\n }\n }\n\n /**\n * Adds a finalizer to this subscription, so that finalization will be unsubscribed/called\n * when this subscription is unsubscribed. If this subscription is already {@link #closed},\n * because it has already been unsubscribed, then whatever finalizer is passed to it\n * will automatically be executed (unless the finalizer itself is also a closed subscription).\n *\n * Closed Subscriptions cannot be added as finalizers to any subscription. Adding a closed\n * subscription to a any subscription will result in no operation. (A noop).\n *\n * Adding a subscription to itself, or adding `null` or `undefined` will not perform any\n * operation at all. (A noop).\n *\n * `Subscription` instances that are added to this instance will automatically remove themselves\n * if they are unsubscribed. 
Functions and {@link Unsubscribable} objects that you wish to remove\n * will need to be removed manually with {@link #remove}\n *\n * @param teardown The finalization logic to add to this subscription.\n */\n add(teardown: TeardownLogic): void {\n // Only add the finalizer if it's not undefined\n // and don't add a subscription to itself.\n if (teardown && teardown !== this) {\n if (this.closed) {\n // If this subscription is already closed,\n // execute whatever finalizer is handed to it automatically.\n execFinalizer(teardown);\n } else {\n if (teardown instanceof Subscription) {\n // We don't add closed subscriptions, and we don't add the same subscription\n // twice. Subscription unsubscribe is idempotent.\n if (teardown.closed || teardown._hasParent(this)) {\n return;\n }\n teardown._addParent(this);\n }\n (this._finalizers = this._finalizers ?? []).push(teardown);\n }\n }\n }\n\n /**\n * Checks to see if a this subscription already has a particular parent.\n * This will signal that this subscription has already been added to the parent in question.\n * @param parent the parent to check for\n */\n private _hasParent(parent: Subscription) {\n const { _parentage } = this;\n return _parentage === parent || (Array.isArray(_parentage) && _parentage.includes(parent));\n }\n\n /**\n * Adds a parent to this subscription so it can be removed from the parent if it\n * unsubscribes on it's own.\n *\n * NOTE: THIS ASSUMES THAT {@link _hasParent} HAS ALREADY BEEN CHECKED.\n * @param parent The parent subscription to add\n */\n private _addParent(parent: Subscription) {\n const { _parentage } = this;\n this._parentage = Array.isArray(_parentage) ? (_parentage.push(parent), _parentage) : _parentage ? 
[_parentage, parent] : parent;\n }\n\n /**\n * Called on a child when it is removed via {@link #remove}.\n * @param parent The parent to remove\n */\n private _removeParent(parent: Subscription) {\n const { _parentage } = this;\n if (_parentage === parent) {\n this._parentage = null;\n } else if (Array.isArray(_parentage)) {\n arrRemove(_parentage, parent);\n }\n }\n\n /**\n * Removes a finalizer from this subscription that was previously added with the {@link #add} method.\n *\n * Note that `Subscription` instances, when unsubscribed, will automatically remove themselves\n * from every other `Subscription` they have been added to. This means that using the `remove` method\n * is not a common thing and should be used thoughtfully.\n *\n * If you add the same finalizer instance of a function or an unsubscribable object to a `Subscription` instance\n * more than once, you will need to call `remove` the same number of times to remove all instances.\n *\n * All finalizer instances are removed to free up memory upon unsubscription.\n *\n * @param teardown The finalizer to remove from this subscription\n */\n remove(teardown: Exclude): void {\n const { _finalizers } = this;\n _finalizers && arrRemove(_finalizers, teardown);\n\n if (teardown instanceof Subscription) {\n teardown._removeParent(this);\n }\n }\n}\n\nexport const EMPTY_SUBSCRIPTION = Subscription.EMPTY;\n\nexport function isSubscription(value: any): value is Subscription {\n return (\n value instanceof Subscription ||\n (value && 'closed' in value && isFunction(value.remove) && isFunction(value.add) && isFunction(value.unsubscribe))\n );\n}\n\nfunction execFinalizer(finalizer: Unsubscribable | (() => void)) {\n if (isFunction(finalizer)) {\n finalizer();\n } else {\n finalizer.unsubscribe();\n }\n}\n", "import { Subscriber } from './Subscriber';\nimport { ObservableNotification } from './types';\n\n/**\n * The {@link GlobalConfig} object for RxJS. 
It is used to configure things\n * like how to react on unhandled errors.\n */\nexport const config: GlobalConfig = {\n onUnhandledError: null,\n onStoppedNotification: null,\n Promise: undefined,\n useDeprecatedSynchronousErrorHandling: false,\n useDeprecatedNextContext: false,\n};\n\n/**\n * The global configuration object for RxJS, used to configure things\n * like how to react on unhandled errors. Accessible via {@link config}\n * object.\n */\nexport interface GlobalConfig {\n /**\n * A registration point for unhandled errors from RxJS. These are errors that\n * cannot were not handled by consuming code in the usual subscription path. For\n * example, if you have this configured, and you subscribe to an observable without\n * providing an error handler, errors from that subscription will end up here. This\n * will _always_ be called asynchronously on another job in the runtime. This is because\n * we do not want errors thrown in this user-configured handler to interfere with the\n * behavior of the library.\n */\n onUnhandledError: ((err: any) => void) | null;\n\n /**\n * A registration point for notifications that cannot be sent to subscribers because they\n * have completed, errored or have been explicitly unsubscribed. By default, next, complete\n * and error notifications sent to stopped subscribers are noops. However, sometimes callers\n * might want a different behavior. For example, with sources that attempt to report errors\n * to stopped subscribers, a caller can configure RxJS to throw an unhandled error instead.\n * This will _always_ be called asynchronously on another job in the runtime. 
This is because\n * we do not want errors thrown in this user-configured handler to interfere with the\n * behavior of the library.\n */\n onStoppedNotification: ((notification: ObservableNotification, subscriber: Subscriber) => void) | null;\n\n /**\n * The promise constructor used by default for {@link Observable#toPromise toPromise} and {@link Observable#forEach forEach}\n * methods.\n *\n * @deprecated As of version 8, RxJS will no longer support this sort of injection of a\n * Promise constructor. If you need a Promise implementation other than native promises,\n * please polyfill/patch Promise as you see appropriate. Will be removed in v8.\n */\n Promise?: PromiseConstructorLike;\n\n /**\n * If true, turns on synchronous error rethrowing, which is a deprecated behavior\n * in v6 and higher. This behavior enables bad patterns like wrapping a subscribe\n * call in a try/catch block. It also enables producer interference, a nasty bug\n * where a multicast can be broken for all observers by a downstream consumer with\n * an unhandled error. DO NOT USE THIS FLAG UNLESS IT'S NEEDED TO BUY TIME\n * FOR MIGRATION REASONS.\n *\n * @deprecated As of version 8, RxJS will no longer support synchronous throwing\n * of unhandled errors. All errors will be thrown on a separate call stack to prevent bad\n * behaviors described above. 
Will be removed in v8.\n */\n useDeprecatedSynchronousErrorHandling: boolean;\n\n /**\n * If true, enables an as-of-yet undocumented feature from v5: The ability to access\n * `unsubscribe()` via `this` context in `next` functions created in observers passed\n * to `subscribe`.\n *\n * This is being removed because the performance was severely problematic, and it could also cause\n * issues when types other than POJOs are passed to subscribe as subscribers, as they will likely have\n * their `this` context overwritten.\n *\n * @deprecated As of version 8, RxJS will no longer support altering the\n * context of next functions provided as part of an observer to Subscribe. Instead,\n * you will have access to a subscription or a signal or token that will allow you to do things like\n * unsubscribe and test closed status. Will be removed in v8.\n */\n useDeprecatedNextContext: boolean;\n}\n", "import type { TimerHandle } from './timerHandle';\ntype SetTimeoutFunction = (handler: () => void, timeout?: number, ...args: any[]) => TimerHandle;\ntype ClearTimeoutFunction = (handle: TimerHandle) => void;\n\ninterface TimeoutProvider {\n setTimeout: SetTimeoutFunction;\n clearTimeout: ClearTimeoutFunction;\n delegate:\n | {\n setTimeout: SetTimeoutFunction;\n clearTimeout: ClearTimeoutFunction;\n }\n | undefined;\n}\n\nexport const timeoutProvider: TimeoutProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n setTimeout(handler: () => void, timeout?: number, ...args) {\n const { delegate } = timeoutProvider;\n if (delegate?.setTimeout) {\n return delegate.setTimeout(handler, timeout, ...args);\n }\n return setTimeout(handler, timeout, ...args);\n },\n clearTimeout(handle) {\n const { delegate } = timeoutProvider;\n return (delegate?.clearTimeout || clearTimeout)(handle as any);\n },\n delegate: undefined,\n};\n", "import { config } from '../config';\nimport { 
timeoutProvider } from '../scheduler/timeoutProvider';\n\n/**\n * Handles an error on another job either with the user-configured {@link onUnhandledError},\n * or by throwing it on that new job so it can be picked up by `window.onerror`, `process.on('error')`, etc.\n *\n * This should be called whenever there is an error that is out-of-band with the subscription\n * or when an error hits a terminal boundary of the subscription and no error handler was provided.\n *\n * @param err the error to report\n */\nexport function reportUnhandledError(err: any) {\n timeoutProvider.setTimeout(() => {\n const { onUnhandledError } = config;\n if (onUnhandledError) {\n // Execute the user-configured error handler.\n onUnhandledError(err);\n } else {\n // Throw so it is picked up by the runtime's uncaught error mechanism.\n throw err;\n }\n });\n}\n", "/* tslint:disable:no-empty */\nexport function noop() { }\n", "import { CompleteNotification, NextNotification, ErrorNotification } from './types';\n\n/**\n * A completion object optimized for memory use and created to be the\n * same \"shape\" as other notifications in v8.\n * @internal\n */\nexport const COMPLETE_NOTIFICATION = (() => createNotification('C', undefined, undefined) as CompleteNotification)();\n\n/**\n * Internal use only. Creates an optimized error notification that is the same \"shape\"\n * as other notifications.\n * @internal\n */\nexport function errorNotification(error: any): ErrorNotification {\n return createNotification('E', undefined, error) as any;\n}\n\n/**\n * Internal use only. 
Creates an optimized next notification that is the same \"shape\"\n * as other notifications.\n * @internal\n */\nexport function nextNotification(value: T) {\n return createNotification('N', value, undefined) as NextNotification;\n}\n\n/**\n * Ensures that all notifications created internally have the same \"shape\" in v8.\n *\n * TODO: This is only exported to support a crazy legacy test in `groupBy`.\n * @internal\n */\nexport function createNotification(kind: 'N' | 'E' | 'C', value: any, error: any) {\n return {\n kind,\n value,\n error,\n };\n}\n", "import { config } from '../config';\n\nlet context: { errorThrown: boolean; error: any } | null = null;\n\n/**\n * Handles dealing with errors for super-gross mode. Creates a context, in which\n * any synchronously thrown errors will be passed to {@link captureError}. Which\n * will record the error such that it will be rethrown after the call back is complete.\n * TODO: Remove in v8\n * @param cb An immediately executed function.\n */\nexport function errorContext(cb: () => void) {\n if (config.useDeprecatedSynchronousErrorHandling) {\n const isRoot = !context;\n if (isRoot) {\n context = { errorThrown: false, error: null };\n }\n cb();\n if (isRoot) {\n const { errorThrown, error } = context!;\n context = null;\n if (errorThrown) {\n throw error;\n }\n }\n } else {\n // This is the general non-deprecated path for everyone that\n // isn't crazy enough to use super-gross mode (useDeprecatedSynchronousErrorHandling)\n cb();\n }\n}\n\n/**\n * Captures errors only in super-gross mode.\n * @param err the error to capture\n */\nexport function captureError(err: any) {\n if (config.useDeprecatedSynchronousErrorHandling && context) {\n context.errorThrown = true;\n context.error = err;\n }\n}\n", "import { isFunction } from './util/isFunction';\nimport { Observer, ObservableNotification } from './types';\nimport { isSubscription, Subscription } from './Subscription';\nimport { config } from './config';\nimport { 
reportUnhandledError } from './util/reportUnhandledError';\nimport { noop } from './util/noop';\nimport { nextNotification, errorNotification, COMPLETE_NOTIFICATION } from './NotificationFactories';\nimport { timeoutProvider } from './scheduler/timeoutProvider';\nimport { captureError } from './util/errorContext';\n\n/**\n * Implements the {@link Observer} interface and extends the\n * {@link Subscription} class. While the {@link Observer} is the public API for\n * consuming the values of an {@link Observable}, all Observers get converted to\n * a Subscriber, in order to provide Subscription-like capabilities such as\n * `unsubscribe`. Subscriber is a common type in RxJS, and crucial for\n * implementing operators, but it is rarely used as a public API.\n *\n * @class Subscriber\n */\nexport class Subscriber extends Subscription implements Observer {\n /**\n * A static factory for a Subscriber, given a (potentially partial) definition\n * of an Observer.\n * @param next The `next` callback of an Observer.\n * @param error The `error` callback of an\n * Observer.\n * @param complete The `complete` callback of an\n * Observer.\n * @return A Subscriber wrapping the (partially defined)\n * Observer represented by the given arguments.\n * @nocollapse\n * @deprecated Do not use. Will be removed in v8. There is no replacement for this\n * method, and there is no reason to be creating instances of `Subscriber` directly.\n * If you have a specific use case, please file an issue.\n */\n static create(next?: (x?: T) => void, error?: (e?: any) => void, complete?: () => void): Subscriber {\n return new SafeSubscriber(next, error, complete);\n }\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n protected isStopped: boolean = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. 
*/\n protected destination: Subscriber | Observer; // this `any` is the escape hatch to erase extra type param (e.g. R)\n\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n * There is no reason to directly create an instance of Subscriber. This type is exported for typings reasons.\n */\n constructor(destination?: Subscriber | Observer) {\n super();\n if (destination) {\n this.destination = destination;\n // Automatically chain subscriptions together here.\n // if destination is a Subscription, then it is a Subscriber.\n if (isSubscription(destination)) {\n destination.add(this);\n }\n } else {\n this.destination = EMPTY_OBSERVER;\n }\n }\n\n /**\n * The {@link Observer} callback to receive notifications of type `next` from\n * the Observable, with a value. The Observable may call this method 0 or more\n * times.\n * @param {T} [value] The `next` value.\n * @return {void}\n */\n next(value?: T): void {\n if (this.isStopped) {\n handleStoppedNotification(nextNotification(value), this);\n } else {\n this._next(value!);\n }\n }\n\n /**\n * The {@link Observer} callback to receive notifications of type `error` from\n * the Observable, with an attached `Error`. Notifies the Observer that\n * the Observable has experienced an error condition.\n * @param {any} [err] The `error` exception.\n * @return {void}\n */\n error(err?: any): void {\n if (this.isStopped) {\n handleStoppedNotification(errorNotification(err), this);\n } else {\n this.isStopped = true;\n this._error(err);\n }\n }\n\n /**\n * The {@link Observer} callback to receive a valueless notification of type\n * `complete` from the Observable. 
Notifies the Observer that the Observable\n * has finished sending push-based notifications.\n * @return {void}\n */\n complete(): void {\n if (this.isStopped) {\n handleStoppedNotification(COMPLETE_NOTIFICATION, this);\n } else {\n this.isStopped = true;\n this._complete();\n }\n }\n\n unsubscribe(): void {\n if (!this.closed) {\n this.isStopped = true;\n super.unsubscribe();\n this.destination = null!;\n }\n }\n\n protected _next(value: T): void {\n this.destination.next(value);\n }\n\n protected _error(err: any): void {\n try {\n this.destination.error(err);\n } finally {\n this.unsubscribe();\n }\n }\n\n protected _complete(): void {\n try {\n this.destination.complete();\n } finally {\n this.unsubscribe();\n }\n }\n}\n\n/**\n * This bind is captured here because we want to be able to have\n * compatibility with monoid libraries that tend to use a method named\n * `bind`. In particular, a library called Monio requires this.\n */\nconst _bind = Function.prototype.bind;\n\nfunction bind any>(fn: Fn, thisArg: any): Fn {\n return _bind.call(fn, thisArg);\n}\n\n/**\n * Internal optimization only, DO NOT EXPOSE.\n * @internal\n */\nclass ConsumerObserver implements Observer {\n constructor(private partialObserver: Partial>) {}\n\n next(value: T): void {\n const { partialObserver } = this;\n if (partialObserver.next) {\n try {\n partialObserver.next(value);\n } catch (error) {\n handleUnhandledError(error);\n }\n }\n }\n\n error(err: any): void {\n const { partialObserver } = this;\n if (partialObserver.error) {\n try {\n partialObserver.error(err);\n } catch (error) {\n handleUnhandledError(error);\n }\n } else {\n handleUnhandledError(err);\n }\n }\n\n complete(): void {\n const { partialObserver } = this;\n if (partialObserver.complete) {\n try {\n partialObserver.complete();\n } catch (error) {\n handleUnhandledError(error);\n }\n }\n }\n}\n\nexport class SafeSubscriber extends Subscriber {\n constructor(\n observerOrNext?: Partial> | ((value: T) => void) | 
null,\n error?: ((e?: any) => void) | null,\n complete?: (() => void) | null\n ) {\n super();\n\n let partialObserver: Partial>;\n if (isFunction(observerOrNext) || !observerOrNext) {\n // The first argument is a function, not an observer. The next\n // two arguments *could* be observers, or they could be empty.\n partialObserver = {\n next: (observerOrNext ?? undefined) as (((value: T) => void) | undefined),\n error: error ?? undefined,\n complete: complete ?? undefined,\n };\n } else {\n // The first argument is a partial observer.\n let context: any;\n if (this && config.useDeprecatedNextContext) {\n // This is a deprecated path that made `this.unsubscribe()` available in\n // next handler functions passed to subscribe. This only exists behind a flag\n // now, as it is *very* slow.\n context = Object.create(observerOrNext);\n context.unsubscribe = () => this.unsubscribe();\n partialObserver = {\n next: observerOrNext.next && bind(observerOrNext.next, context),\n error: observerOrNext.error && bind(observerOrNext.error, context),\n complete: observerOrNext.complete && bind(observerOrNext.complete, context),\n };\n } else {\n // The \"normal\" path. 
Just use the partial observer directly.\n partialObserver = observerOrNext;\n }\n }\n\n // Wrap the partial observer to ensure it's a full observer, and\n // make sure proper error handling is accounted for.\n this.destination = new ConsumerObserver(partialObserver);\n }\n}\n\nfunction handleUnhandledError(error: any) {\n if (config.useDeprecatedSynchronousErrorHandling) {\n captureError(error);\n } else {\n // Ideal path, we report this as an unhandled error,\n // which is thrown on a new call stack.\n reportUnhandledError(error);\n }\n}\n\n/**\n * An error handler used when no error handler was supplied\n * to the SafeSubscriber -- meaning no error handler was supplied\n * do the `subscribe` call on our observable.\n * @param err The error to handle\n */\nfunction defaultErrorHandler(err: any) {\n throw err;\n}\n\n/**\n * A handler for notifications that cannot be sent to a stopped subscriber.\n * @param notification The notification being sent\n * @param subscriber The stopped subscriber\n */\nfunction handleStoppedNotification(notification: ObservableNotification, subscriber: Subscriber) {\n const { onStoppedNotification } = config;\n onStoppedNotification && timeoutProvider.setTimeout(() => onStoppedNotification(notification, subscriber));\n}\n\n/**\n * The observer used as a stub for subscriptions where the user did not\n * pass any arguments to `subscribe`. Comes with the default error handling\n * behavior.\n */\nexport const EMPTY_OBSERVER: Readonly> & { closed: true } = {\n closed: true,\n next: noop,\n error: defaultErrorHandler,\n complete: noop,\n};\n", "/**\n * Symbol.observable or a string \"@@observable\". 
Used for interop\n *\n * @deprecated We will no longer be exporting this symbol in upcoming versions of RxJS.\n * Instead polyfill and use Symbol.observable directly *or* use https://www.npmjs.com/package/symbol-observable\n */\nexport const observable: string | symbol = (() => (typeof Symbol === 'function' && Symbol.observable) || '@@observable')();\n", "/**\n * This function takes one parameter and just returns it. Simply put,\n * this is like `(x: T): T => x`.\n *\n * ## Examples\n *\n * This is useful in some cases when using things like `mergeMap`\n *\n * ```ts\n * import { interval, take, map, range, mergeMap, identity } from 'rxjs';\n *\n * const source$ = interval(1000).pipe(take(5));\n *\n * const result$ = source$.pipe(\n * map(i => range(i)),\n * mergeMap(identity) // same as mergeMap(x => x)\n * );\n *\n * result$.subscribe({\n * next: console.log\n * });\n * ```\n *\n * Or when you want to selectively apply an operator\n *\n * ```ts\n * import { interval, take, identity } from 'rxjs';\n *\n * const shouldLimit = () => Math.random() < 0.5;\n *\n * const source$ = interval(1000);\n *\n * const result$ = source$.pipe(shouldLimit() ? 
take(5) : identity);\n *\n * result$.subscribe({\n * next: console.log\n * });\n * ```\n *\n * @param x Any value that is returned by this function\n * @returns The value passed as the first parameter to this function\n */\nexport function identity(x: T): T {\n return x;\n}\n", "import { identity } from './identity';\nimport { UnaryFunction } from '../types';\n\nexport function pipe(): typeof identity;\nexport function pipe(fn1: UnaryFunction): UnaryFunction;\nexport function pipe(fn1: UnaryFunction, fn2: UnaryFunction): UnaryFunction;\nexport function pipe(fn1: UnaryFunction, fn2: UnaryFunction, fn3: UnaryFunction): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction,\n fn9: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction,\n fn9: UnaryFunction,\n ...fns: UnaryFunction[]\n): 
UnaryFunction;\n\n/**\n * pipe() can be called on one or more functions, each of which can take one argument (\"UnaryFunction\")\n * and uses it to return a value.\n * It returns a function that takes one argument, passes it to the first UnaryFunction, and then\n * passes the result to the next one, passes that result to the next one, and so on. \n */\nexport function pipe(...fns: Array>): UnaryFunction {\n return pipeFromArray(fns);\n}\n\n/** @internal */\nexport function pipeFromArray(fns: Array>): UnaryFunction {\n if (fns.length === 0) {\n return identity as UnaryFunction;\n }\n\n if (fns.length === 1) {\n return fns[0];\n }\n\n return function piped(input: T): R {\n return fns.reduce((prev: any, fn: UnaryFunction) => fn(prev), input as any);\n };\n}\n", "import { Operator } from './Operator';\nimport { SafeSubscriber, Subscriber } from './Subscriber';\nimport { isSubscription, Subscription } from './Subscription';\nimport { TeardownLogic, OperatorFunction, Subscribable, Observer } from './types';\nimport { observable as Symbol_observable } from './symbol/observable';\nimport { pipeFromArray } from './util/pipe';\nimport { config } from './config';\nimport { isFunction } from './util/isFunction';\nimport { errorContext } from './util/errorContext';\n\n/**\n * A representation of any set of values over any amount of time. This is the most basic building block\n * of RxJS.\n *\n * @class Observable\n */\nexport class Observable implements Subscribable {\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n */\n source: Observable | undefined;\n\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n */\n operator: Operator | undefined;\n\n /**\n * @constructor\n * @param {Function} subscribe the function that is called when the Observable is\n * initially subscribed to. 
This function is given a Subscriber, to which new values\n * can be `next`ed, or an `error` method can be called to raise an error, or\n * `complete` can be called to notify of a successful completion.\n */\n constructor(subscribe?: (this: Observable, subscriber: Subscriber) => TeardownLogic) {\n if (subscribe) {\n this._subscribe = subscribe;\n }\n }\n\n // HACK: Since TypeScript inherits static properties too, we have to\n // fight against TypeScript here so Subject can have a different static create signature\n /**\n * Creates a new Observable by calling the Observable constructor\n * @owner Observable\n * @method create\n * @param {Function} subscribe? the subscriber function to be passed to the Observable constructor\n * @return {Observable} a new observable\n * @nocollapse\n * @deprecated Use `new Observable()` instead. Will be removed in v8.\n */\n static create: (...args: any[]) => any = (subscribe?: (subscriber: Subscriber) => TeardownLogic) => {\n return new Observable(subscribe);\n };\n\n /**\n * Creates a new Observable, with this Observable instance as the source, and the passed\n * operator defined as the new observable's operator.\n * @method lift\n * @param operator the operator defining the operation to take on the observable\n * @return a new observable with the Operator applied\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n * If you have implemented an operator using `lift`, it is recommended that you create an\n * operator by simply returning `new Observable()` directly. 
See \"Creating new operators from\n * scratch\" section here: https://rxjs.dev/guide/operators\n */\n lift(operator?: Operator): Observable {\n const observable = new Observable();\n observable.source = this;\n observable.operator = operator;\n return observable;\n }\n\n subscribe(observerOrNext?: Partial> | ((value: T) => void)): Subscription;\n /** @deprecated Instead of passing separate callback arguments, use an observer argument. Signatures taking separate callback arguments will be removed in v8. Details: https://rxjs.dev/deprecations/subscribe-arguments */\n subscribe(next?: ((value: T) => void) | null, error?: ((error: any) => void) | null, complete?: (() => void) | null): Subscription;\n /**\n * Invokes an execution of an Observable and registers Observer handlers for notifications it will emit.\n *\n * Use it when you have all these Observables, but still nothing is happening.\n *\n * `subscribe` is not a regular operator, but a method that calls Observable's internal `subscribe` function. It\n * might be for example a function that you passed to Observable's constructor, but most of the time it is\n * a library implementation, which defines what will be emitted by an Observable, and when it be will emitted. This means\n * that calling `subscribe` is actually the moment when Observable starts its work, not when it is created, as it is often\n * the thought.\n *\n * Apart from starting the execution of an Observable, this method allows you to listen for values\n * that an Observable emits, as well as for when it completes or errors. You can achieve this in two\n * of the following ways.\n *\n * The first way is creating an object that implements {@link Observer} interface. It should have methods\n * defined by that interface, but note that it should be just a regular JavaScript object, which you can create\n * yourself in any way you want (ES6 class, classic function constructor, object literal etc.). 
In particular, do\n * not attempt to use any RxJS implementation details to create Observers - you don't need them. Remember also\n * that your object does not have to implement all methods. If you find yourself creating a method that doesn't\n * do anything, you can simply omit it. Note however, if the `error` method is not provided and an error happens,\n * it will be thrown asynchronously. Errors thrown asynchronously cannot be caught using `try`/`catch`. Instead,\n * use the {@link onUnhandledError} configuration option or use a runtime handler (like `window.onerror` or\n * `process.on('error)`) to be notified of unhandled errors. Because of this, it's recommended that you provide\n * an `error` method to avoid missing thrown errors.\n *\n * The second way is to give up on Observer object altogether and simply provide callback functions in place of its methods.\n * This means you can provide three functions as arguments to `subscribe`, where the first function is equivalent\n * of a `next` method, the second of an `error` method and the third of a `complete` method. Just as in case of an Observer,\n * if you do not need to listen for something, you can omit a function by passing `undefined` or `null`,\n * since `subscribe` recognizes these functions by where they were placed in function call. When it comes\n * to the `error` function, as with an Observer, if not provided, errors emitted by an Observable will be thrown asynchronously.\n *\n * You can, however, subscribe with no parameters at all. This may be the case where you're not interested in terminal events\n * and you also handled emissions internally by using operators (e.g. using `tap`).\n *\n * Whichever style of calling `subscribe` you use, in both cases it returns a Subscription object.\n * This object allows you to call `unsubscribe` on it, which in turn will stop the work that an Observable does and will clean\n * up all resources that an Observable used. 
Note that cancelling a subscription will not call `complete` callback\n * provided to `subscribe` function, which is reserved for a regular completion signal that comes from an Observable.\n *\n * Remember that callbacks provided to `subscribe` are not guaranteed to be called asynchronously.\n * It is an Observable itself that decides when these functions will be called. For example {@link of}\n * by default emits all its values synchronously. Always check documentation for how given Observable\n * will behave when subscribed and if its default behavior can be modified with a `scheduler`.\n *\n * #### Examples\n *\n * Subscribe with an {@link guide/observer Observer}\n *\n * ```ts\n * import { of } from 'rxjs';\n *\n * const sumObserver = {\n * sum: 0,\n * next(value) {\n * console.log('Adding: ' + value);\n * this.sum = this.sum + value;\n * },\n * error() {\n * // We actually could just remove this method,\n * // since we do not really care about errors right now.\n * },\n * complete() {\n * console.log('Sum equals: ' + this.sum);\n * }\n * };\n *\n * of(1, 2, 3) // Synchronously emits 1, 2, 3 and then completes.\n * .subscribe(sumObserver);\n *\n * // Logs:\n * // 'Adding: 1'\n * // 'Adding: 2'\n * // 'Adding: 3'\n * // 'Sum equals: 6'\n * ```\n *\n * Subscribe with functions ({@link deprecations/subscribe-arguments deprecated})\n *\n * ```ts\n * import { of } from 'rxjs'\n *\n * let sum = 0;\n *\n * of(1, 2, 3).subscribe(\n * value => {\n * console.log('Adding: ' + value);\n * sum = sum + value;\n * },\n * undefined,\n * () => console.log('Sum equals: ' + sum)\n * );\n *\n * // Logs:\n * // 'Adding: 1'\n * // 'Adding: 2'\n * // 'Adding: 3'\n * // 'Sum equals: 6'\n * ```\n *\n * Cancel a subscription\n *\n * ```ts\n * import { interval } from 'rxjs';\n *\n * const subscription = interval(1000).subscribe({\n * next(num) {\n * console.log(num)\n * },\n * complete() {\n * // Will not be called, even when cancelling subscription.\n * console.log('completed!');\n * 
}\n * });\n *\n * setTimeout(() => {\n * subscription.unsubscribe();\n * console.log('unsubscribed!');\n * }, 2500);\n *\n * // Logs:\n * // 0 after 1s\n * // 1 after 2s\n * // 'unsubscribed!' after 2.5s\n * ```\n *\n * @param {Observer|Function} observerOrNext (optional) Either an observer with methods to be called,\n * or the first of three possible handlers, which is the handler for each value emitted from the subscribed\n * Observable.\n * @param {Function} error (optional) A handler for a terminal event resulting from an error. If no error handler is provided,\n * the error will be thrown asynchronously as unhandled.\n * @param {Function} complete (optional) A handler for a terminal event resulting from successful completion.\n * @return {Subscription} a subscription reference to the registered handlers\n * @method subscribe\n */\n subscribe(\n observerOrNext?: Partial> | ((value: T) => void) | null,\n error?: ((error: any) => void) | null,\n complete?: (() => void) | null\n ): Subscription {\n const subscriber = isSubscriber(observerOrNext) ? observerOrNext : new SafeSubscriber(observerOrNext, error, complete);\n\n errorContext(() => {\n const { operator, source } = this;\n subscriber.add(\n operator\n ? // We're dealing with a subscription in the\n // operator chain to one of our lifted operators.\n operator.call(subscriber, source)\n : source\n ? // If `source` has a value, but `operator` does not, something that\n // had intimate knowledge of our API, like our `Subject`, must have\n // set it. 
We're going to just call `_subscribe` directly.\n this._subscribe(subscriber)\n : // In all other cases, we're likely wrapping a user-provided initializer\n // function, so we need to catch errors and handle them appropriately.\n this._trySubscribe(subscriber)\n );\n });\n\n return subscriber;\n }\n\n /** @internal */\n protected _trySubscribe(sink: Subscriber): TeardownLogic {\n try {\n return this._subscribe(sink);\n } catch (err) {\n // We don't need to return anything in this case,\n // because it's just going to try to `add()` to a subscription\n // above.\n sink.error(err);\n }\n }\n\n /**\n * Used as a NON-CANCELLABLE means of subscribing to an observable, for use with\n * APIs that expect promises, like `async/await`. You cannot unsubscribe from this.\n *\n * **WARNING**: Only use this with observables you *know* will complete. If the source\n * observable does not complete, you will end up with a promise that is hung up, and\n * potentially all of the state of an async function hanging out in memory. 
To avoid\n * this situation, look into adding something like {@link timeout}, {@link take},\n * {@link takeWhile}, or {@link takeUntil} amongst others.\n *\n * #### Example\n *\n * ```ts\n * import { interval, take } from 'rxjs';\n *\n * const source$ = interval(1000).pipe(take(4));\n *\n * async function getTotal() {\n * let total = 0;\n *\n * await source$.forEach(value => {\n * total += value;\n * console.log('observable -> ' + value);\n * });\n *\n * return total;\n * }\n *\n * getTotal().then(\n * total => console.log('Total: ' + total)\n * );\n *\n * // Expected:\n * // 'observable -> 0'\n * // 'observable -> 1'\n * // 'observable -> 2'\n * // 'observable -> 3'\n * // 'Total: 6'\n * ```\n *\n * @param next a handler for each value emitted by the observable\n * @return a promise that either resolves on observable completion or\n * rejects with the handled error\n */\n forEach(next: (value: T) => void): Promise;\n\n /**\n * @param next a handler for each value emitted by the observable\n * @param promiseCtor a constructor function used to instantiate the Promise\n * @return a promise that either resolves on observable completion or\n * rejects with the handled error\n * @deprecated Passing a Promise constructor will no longer be available\n * in upcoming versions of RxJS. This is because it adds weight to the library, for very\n * little benefit. If you need this functionality, it is recommended that you either\n * polyfill Promise, or you create an adapter to convert the returned native promise\n * to whatever promise implementation you wanted. 
Will be removed in v8.\n */\n forEach(next: (value: T) => void, promiseCtor: PromiseConstructorLike): Promise;\n\n forEach(next: (value: T) => void, promiseCtor?: PromiseConstructorLike): Promise {\n promiseCtor = getPromiseCtor(promiseCtor);\n\n return new promiseCtor((resolve, reject) => {\n const subscriber = new SafeSubscriber({\n next: (value) => {\n try {\n next(value);\n } catch (err) {\n reject(err);\n subscriber.unsubscribe();\n }\n },\n error: reject,\n complete: resolve,\n });\n this.subscribe(subscriber);\n }) as Promise;\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): TeardownLogic {\n return this.source?.subscribe(subscriber);\n }\n\n /**\n * An interop point defined by the es7-observable spec https://github.com/zenparsing/es-observable\n * @method Symbol.observable\n * @return {Observable} this instance of the observable\n */\n [Symbol_observable]() {\n return this;\n }\n\n /* tslint:disable:max-line-length */\n pipe(): Observable;\n pipe(op1: OperatorFunction): Observable;\n pipe(op1: OperatorFunction, op2: OperatorFunction): Observable;\n pipe(op1: OperatorFunction, op2: OperatorFunction, op3: OperatorFunction): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: 
OperatorFunction,\n op8: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction,\n op8: OperatorFunction,\n op9: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction,\n op8: OperatorFunction,\n op9: OperatorFunction,\n ...operations: OperatorFunction[]\n ): Observable;\n /* tslint:enable:max-line-length */\n\n /**\n * Used to stitch together functional operators into a chain.\n * @method pipe\n * @return {Observable} the Observable result of all of the operators having\n * been called in the order they were passed in.\n *\n * ## Example\n *\n * ```ts\n * import { interval, filter, map, scan } from 'rxjs';\n *\n * interval(1000)\n * .pipe(\n * filter(x => x % 2 === 0),\n * map(x => x + x),\n * scan((acc, x) => acc + x)\n * )\n * .subscribe(x => console.log(x));\n * ```\n */\n pipe(...operations: OperatorFunction[]): Observable {\n return pipeFromArray(operations)(this);\n }\n\n /* tslint:disable:max-line-length */\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(): Promise;\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(PromiseCtor: typeof Promise): Promise;\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. 
Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(PromiseCtor: PromiseConstructorLike): Promise;\n /* tslint:enable:max-line-length */\n\n /**\n * Subscribe to this Observable and get a Promise resolving on\n * `complete` with the last emission (if any).\n *\n * **WARNING**: Only use this with observables you *know* will complete. If the source\n * observable does not complete, you will end up with a promise that is hung up, and\n * potentially all of the state of an async function hanging out in memory. To avoid\n * this situation, look into adding something like {@link timeout}, {@link take},\n * {@link takeWhile}, or {@link takeUntil} amongst others.\n *\n * @method toPromise\n * @param [promiseCtor] a constructor function used to instantiate\n * the Promise\n * @return A Promise that resolves with the last value emit, or\n * rejects on an error. If there were no emissions, Promise\n * resolves with undefined.\n * @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise\n */\n toPromise(promiseCtor?: PromiseConstructorLike): Promise {\n promiseCtor = getPromiseCtor(promiseCtor);\n\n return new promiseCtor((resolve, reject) => {\n let value: T | undefined;\n this.subscribe(\n (x: T) => (value = x),\n (err: any) => reject(err),\n () => resolve(value)\n );\n }) as Promise;\n }\n}\n\n/**\n * Decides between a passed promise constructor from consuming code,\n * A default configured promise constructor, and the native promise\n * constructor and returns it. If nothing can be found, it will throw\n * an error.\n * @param promiseCtor The optional promise constructor to passed by consuming code\n */\nfunction getPromiseCtor(promiseCtor: PromiseConstructorLike | undefined) {\n return promiseCtor ?? config.Promise ?? 
Promise;\n}\n\nfunction isObserver(value: any): value is Observer {\n return value && isFunction(value.next) && isFunction(value.error) && isFunction(value.complete);\n}\n\nfunction isSubscriber(value: any): value is Subscriber {\n return (value && value instanceof Subscriber) || (isObserver(value) && isSubscription(value));\n}\n", "import { Observable } from '../Observable';\nimport { Subscriber } from '../Subscriber';\nimport { OperatorFunction } from '../types';\nimport { isFunction } from './isFunction';\n\n/**\n * Used to determine if an object is an Observable with a lift function.\n */\nexport function hasLift(source: any): source is { lift: InstanceType['lift'] } {\n return isFunction(source?.lift);\n}\n\n/**\n * Creates an `OperatorFunction`. Used to define operators throughout the library in a concise way.\n * @param init The logic to connect the liftedSource to the subscriber at the moment of subscription.\n */\nexport function operate(\n init: (liftedSource: Observable, subscriber: Subscriber) => (() => void) | void\n): OperatorFunction {\n return (source: Observable) => {\n if (hasLift(source)) {\n return source.lift(function (this: Subscriber, liftedSource: Observable) {\n try {\n return init(liftedSource, this);\n } catch (err) {\n this.error(err);\n }\n });\n }\n throw new TypeError('Unable to lift unknown Observable type');\n };\n}\n", "import { Subscriber } from '../Subscriber';\n\n/**\n * Creates an instance of an `OperatorSubscriber`.\n * @param destination The downstream subscriber.\n * @param onNext Handles next values, only called if this subscriber is not stopped or closed. Any\n * error that occurs in this function is caught and sent to the `error` method of this subscriber.\n * @param onError Handles errors from the subscription, any errors that occur in this handler are caught\n * and send to the `destination` error handler.\n * @param onComplete Handles completion notification from the subscription. 
Any errors that occur in\n * this handler are sent to the `destination` error handler.\n * @param onFinalize Additional teardown logic here. This will only be called on teardown if the\n * subscriber itself is not already closed. This is called after all other teardown logic is executed.\n */\nexport function createOperatorSubscriber(\n destination: Subscriber,\n onNext?: (value: T) => void,\n onComplete?: () => void,\n onError?: (err: any) => void,\n onFinalize?: () => void\n): Subscriber {\n return new OperatorSubscriber(destination, onNext, onComplete, onError, onFinalize);\n}\n\n/**\n * A generic helper for allowing operators to be created with a Subscriber and\n * use closures to capture necessary state from the operator function itself.\n */\nexport class OperatorSubscriber extends Subscriber {\n /**\n * Creates an instance of an `OperatorSubscriber`.\n * @param destination The downstream subscriber.\n * @param onNext Handles next values, only called if this subscriber is not stopped or closed. Any\n * error that occurs in this function is caught and sent to the `error` method of this subscriber.\n * @param onError Handles errors from the subscription, any errors that occur in this handler are caught\n * and send to the `destination` error handler.\n * @param onComplete Handles completion notification from the subscription. Any errors that occur in\n * this handler are sent to the `destination` error handler.\n * @param onFinalize Additional finalization logic here. This will only be called on finalization if the\n * subscriber itself is not already closed. 
This is called after all other finalization logic is executed.\n * @param shouldUnsubscribe An optional check to see if an unsubscribe call should truly unsubscribe.\n * NOTE: This currently **ONLY** exists to support the strange behavior of {@link groupBy}, where unsubscription\n * to the resulting observable does not actually disconnect from the source if there are active subscriptions\n * to any grouped observable. (DO NOT EXPOSE OR USE EXTERNALLY!!!)\n */\n constructor(\n destination: Subscriber,\n onNext?: (value: T) => void,\n onComplete?: () => void,\n onError?: (err: any) => void,\n private onFinalize?: () => void,\n private shouldUnsubscribe?: () => boolean\n ) {\n // It's important - for performance reasons - that all of this class's\n // members are initialized and that they are always initialized in the same\n // order. This will ensure that all OperatorSubscriber instances have the\n // same hidden class in V8. This, in turn, will help keep the number of\n // hidden classes involved in property accesses within the base class as\n // low as possible. If the number of hidden classes involved exceeds four,\n // the property accesses will become megamorphic and performance penalties\n // will be incurred - i.e. inline caches won't be used.\n //\n // The reasons for ensuring all instances have the same hidden class are\n // further discussed in this blog post from Benedikt Meurer:\n // https://benediktmeurer.de/2018/03/23/impact-of-polymorphism-on-component-based-frameworks-like-react/\n super(destination);\n this._next = onNext\n ? function (this: OperatorSubscriber, value: T) {\n try {\n onNext(value);\n } catch (err) {\n destination.error(err);\n }\n }\n : super._next;\n this._error = onError\n ? 
function (this: OperatorSubscriber, err: any) {\n try {\n onError(err);\n } catch (err) {\n // Send any errors that occur down stream.\n destination.error(err);\n } finally {\n // Ensure finalization.\n this.unsubscribe();\n }\n }\n : super._error;\n this._complete = onComplete\n ? function (this: OperatorSubscriber) {\n try {\n onComplete();\n } catch (err) {\n // Send any errors that occur down stream.\n destination.error(err);\n } finally {\n // Ensure finalization.\n this.unsubscribe();\n }\n }\n : super._complete;\n }\n\n unsubscribe() {\n if (!this.shouldUnsubscribe || this.shouldUnsubscribe()) {\n const { closed } = this;\n super.unsubscribe();\n // Execute additional teardown if we have any and we didn't already do so.\n !closed && this.onFinalize?.();\n }\n }\n}\n", "import { Subscription } from '../Subscription';\n\ninterface AnimationFrameProvider {\n schedule(callback: FrameRequestCallback): Subscription;\n requestAnimationFrame: typeof requestAnimationFrame;\n cancelAnimationFrame: typeof cancelAnimationFrame;\n delegate:\n | {\n requestAnimationFrame: typeof requestAnimationFrame;\n cancelAnimationFrame: typeof cancelAnimationFrame;\n }\n | undefined;\n}\n\nexport const animationFrameProvider: AnimationFrameProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n schedule(callback) {\n let request = requestAnimationFrame;\n let cancel: typeof cancelAnimationFrame | undefined = cancelAnimationFrame;\n const { delegate } = animationFrameProvider;\n if (delegate) {\n request = delegate.requestAnimationFrame;\n cancel = delegate.cancelAnimationFrame;\n }\n const handle = request((timestamp) => {\n // Clear the cancel function. 
The request has been fulfilled, so\n // attempting to cancel the request upon unsubscription would be\n // pointless.\n cancel = undefined;\n callback(timestamp);\n });\n return new Subscription(() => cancel?.(handle));\n },\n requestAnimationFrame(...args) {\n const { delegate } = animationFrameProvider;\n return (delegate?.requestAnimationFrame || requestAnimationFrame)(...args);\n },\n cancelAnimationFrame(...args) {\n const { delegate } = animationFrameProvider;\n return (delegate?.cancelAnimationFrame || cancelAnimationFrame)(...args);\n },\n delegate: undefined,\n};\n", "import { createErrorClass } from './createErrorClass';\n\nexport interface ObjectUnsubscribedError extends Error {}\n\nexport interface ObjectUnsubscribedErrorCtor {\n /**\n * @deprecated Internal implementation detail. Do not construct error instances.\n * Cannot be tagged as internal: https://github.com/ReactiveX/rxjs/issues/6269\n */\n new (): ObjectUnsubscribedError;\n}\n\n/**\n * An error thrown when an action is invalid because the object has been\n * unsubscribed.\n *\n * @see {@link Subject}\n * @see {@link BehaviorSubject}\n *\n * @class ObjectUnsubscribedError\n */\nexport const ObjectUnsubscribedError: ObjectUnsubscribedErrorCtor = createErrorClass(\n (_super) =>\n function ObjectUnsubscribedErrorImpl(this: any) {\n _super(this);\n this.name = 'ObjectUnsubscribedError';\n this.message = 'object unsubscribed';\n }\n);\n", "import { Operator } from './Operator';\nimport { Observable } from './Observable';\nimport { Subscriber } from './Subscriber';\nimport { Subscription, EMPTY_SUBSCRIPTION } from './Subscription';\nimport { Observer, SubscriptionLike, TeardownLogic } from './types';\nimport { ObjectUnsubscribedError } from './util/ObjectUnsubscribedError';\nimport { arrRemove } from './util/arrRemove';\nimport { errorContext } from './util/errorContext';\n\n/**\n * A Subject is a special type of Observable that allows values to be\n * multicasted to many Observers. 
Subjects are like EventEmitters.\n *\n * Every Subject is an Observable and an Observer. You can subscribe to a\n * Subject, and you can call next to feed values as well as error and complete.\n */\nexport class Subject extends Observable implements SubscriptionLike {\n closed = false;\n\n private currentObservers: Observer[] | null = null;\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n observers: Observer[] = [];\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n isStopped = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n hasError = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n thrownError: any = null;\n\n /**\n * Creates a \"subject\" by basically gluing an observer to an observable.\n *\n * @nocollapse\n * @deprecated Recommended you do not use. Will be removed at some point in the future. Plans for replacement still under discussion.\n */\n static create: (...args: any[]) => any = (destination: Observer, source: Observable): AnonymousSubject => {\n return new AnonymousSubject(destination, source);\n };\n\n constructor() {\n // NOTE: This must be here to obscure Observable's constructor.\n super();\n }\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. 
*/\n lift(operator: Operator): Observable {\n const subject = new AnonymousSubject(this, this);\n subject.operator = operator as any;\n return subject as any;\n }\n\n /** @internal */\n protected _throwIfClosed() {\n if (this.closed) {\n throw new ObjectUnsubscribedError();\n }\n }\n\n next(value: T) {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n if (!this.currentObservers) {\n this.currentObservers = Array.from(this.observers);\n }\n for (const observer of this.currentObservers) {\n observer.next(value);\n }\n }\n });\n }\n\n error(err: any) {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n this.hasError = this.isStopped = true;\n this.thrownError = err;\n const { observers } = this;\n while (observers.length) {\n observers.shift()!.error(err);\n }\n }\n });\n }\n\n complete() {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n this.isStopped = true;\n const { observers } = this;\n while (observers.length) {\n observers.shift()!.complete();\n }\n }\n });\n }\n\n unsubscribe() {\n this.isStopped = this.closed = true;\n this.observers = this.currentObservers = null!;\n }\n\n get observed() {\n return this.observers?.length > 0;\n }\n\n /** @internal */\n protected _trySubscribe(subscriber: Subscriber): TeardownLogic {\n this._throwIfClosed();\n return super._trySubscribe(subscriber);\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n this._throwIfClosed();\n this._checkFinalizedStatuses(subscriber);\n return this._innerSubscribe(subscriber);\n }\n\n /** @internal */\n protected _innerSubscribe(subscriber: Subscriber) {\n const { hasError, isStopped, observers } = this;\n if (hasError || isStopped) {\n return EMPTY_SUBSCRIPTION;\n }\n this.currentObservers = null;\n observers.push(subscriber);\n return new Subscription(() => {\n this.currentObservers = null;\n arrRemove(observers, subscriber);\n });\n }\n\n /** @internal */\n protected 
_checkFinalizedStatuses(subscriber: Subscriber) {\n const { hasError, thrownError, isStopped } = this;\n if (hasError) {\n subscriber.error(thrownError);\n } else if (isStopped) {\n subscriber.complete();\n }\n }\n\n /**\n * Creates a new Observable with this Subject as the source. You can do this\n * to create custom Observer-side logic of the Subject and conceal it from\n * code that uses the Observable.\n * @return {Observable} Observable that the Subject casts to\n */\n asObservable(): Observable {\n const observable: any = new Observable();\n observable.source = this;\n return observable;\n }\n}\n\n/**\n * @class AnonymousSubject\n */\nexport class AnonymousSubject extends Subject {\n constructor(\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n public destination?: Observer,\n source?: Observable\n ) {\n super();\n this.source = source;\n }\n\n next(value: T) {\n this.destination?.next?.(value);\n }\n\n error(err: any) {\n this.destination?.error?.(err);\n }\n\n complete() {\n this.destination?.complete?.();\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n return this.source?.subscribe(subscriber) ?? 
EMPTY_SUBSCRIPTION;\n }\n}\n", "import { TimestampProvider } from '../types';\n\ninterface DateTimestampProvider extends TimestampProvider {\n delegate: TimestampProvider | undefined;\n}\n\nexport const dateTimestampProvider: DateTimestampProvider = {\n now() {\n // Use the variable rather than `this` so that the function can be called\n // without being bound to the provider.\n return (dateTimestampProvider.delegate || Date).now();\n },\n delegate: undefined,\n};\n", "import { Subject } from './Subject';\nimport { TimestampProvider } from './types';\nimport { Subscriber } from './Subscriber';\nimport { Subscription } from './Subscription';\nimport { dateTimestampProvider } from './scheduler/dateTimestampProvider';\n\n/**\n * A variant of {@link Subject} that \"replays\" old values to new subscribers by emitting them when they first subscribe.\n *\n * `ReplaySubject` has an internal buffer that will store a specified number of values that it has observed. Like `Subject`,\n * `ReplaySubject` \"observes\" values by having them passed to its `next` method. When it observes a value, it will store that\n * value for a time determined by the configuration of the `ReplaySubject`, as passed to its constructor.\n *\n * When a new subscriber subscribes to the `ReplaySubject` instance, it will synchronously emit all values in its buffer in\n * a First-In-First-Out (FIFO) manner. The `ReplaySubject` will also complete, if it has observed completion; and it will\n * error if it has observed an error.\n *\n * There are two main configuration items to be concerned with:\n *\n * 1. `bufferSize` - This will determine how many items are stored in the buffer, defaults to infinite.\n * 2. `windowTime` - The amount of time to hold a value in the buffer before removing it from the buffer.\n *\n * Both configurations may exist simultaneously. 
So if you would like to buffer a maximum of 3 values, as long as the values\n * are less than 2 seconds old, you could do so with a `new ReplaySubject(3, 2000)`.\n *\n * ### Differences with BehaviorSubject\n *\n * `BehaviorSubject` is similar to `new ReplaySubject(1)`, with a couple of exceptions:\n *\n * 1. `BehaviorSubject` comes \"primed\" with a single value upon construction.\n * 2. `ReplaySubject` will replay values, even after observing an error, where `BehaviorSubject` will not.\n *\n * @see {@link Subject}\n * @see {@link BehaviorSubject}\n * @see {@link shareReplay}\n */\nexport class ReplaySubject extends Subject {\n private _buffer: (T | number)[] = [];\n private _infiniteTimeWindow = true;\n\n /**\n * @param bufferSize The size of the buffer to replay on subscription\n * @param windowTime The amount of time the buffered items will stay buffered\n * @param timestampProvider An object with a `now()` method that provides the current timestamp. This is used to\n * calculate the amount of time something has been buffered.\n */\n constructor(\n private _bufferSize = Infinity,\n private _windowTime = Infinity,\n private _timestampProvider: TimestampProvider = dateTimestampProvider\n ) {\n super();\n this._infiniteTimeWindow = _windowTime === Infinity;\n this._bufferSize = Math.max(1, _bufferSize);\n this._windowTime = Math.max(1, _windowTime);\n }\n\n next(value: T): void {\n const { isStopped, _buffer, _infiniteTimeWindow, _timestampProvider, _windowTime } = this;\n if (!isStopped) {\n _buffer.push(value);\n !_infiniteTimeWindow && _buffer.push(_timestampProvider.now() + _windowTime);\n }\n this._trimBuffer();\n super.next(value);\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n this._throwIfClosed();\n this._trimBuffer();\n\n const subscription = this._innerSubscribe(subscriber);\n\n const { _infiniteTimeWindow, _buffer } = this;\n // We use a copy here, so reentrant code does not mutate our array while we're\n // 
emitting it to a new subscriber.\n const copy = _buffer.slice();\n for (let i = 0; i < copy.length && !subscriber.closed; i += _infiniteTimeWindow ? 1 : 2) {\n subscriber.next(copy[i] as T);\n }\n\n this._checkFinalizedStatuses(subscriber);\n\n return subscription;\n }\n\n private _trimBuffer() {\n const { _bufferSize, _timestampProvider, _buffer, _infiniteTimeWindow } = this;\n // If we don't have an infinite buffer size, and we're over the length,\n // use splice to truncate the old buffer values off. Note that we have to\n // double the size for instances where we're not using an infinite time window\n // because we're storing the values and the timestamps in the same array.\n const adjustedBufferSize = (_infiniteTimeWindow ? 1 : 2) * _bufferSize;\n _bufferSize < Infinity && adjustedBufferSize < _buffer.length && _buffer.splice(0, _buffer.length - adjustedBufferSize);\n\n // Now, if we're not in an infinite time window, remove all values where the time is\n // older than what is allowed.\n if (!_infiniteTimeWindow) {\n const now = _timestampProvider.now();\n let last = 0;\n // Search the array for the first timestamp that isn't expired and\n // truncate the buffer up to that point.\n for (let i = 1; i < _buffer.length && (_buffer[i] as number) <= now; i += 2) {\n last = i;\n }\n last && _buffer.splice(0, last + 1);\n }\n }\n}\n", "import { Scheduler } from '../Scheduler';\nimport { Subscription } from '../Subscription';\nimport { SchedulerAction } from '../types';\n\n/**\n * A unit of work to be executed in a `scheduler`. 
An action is typically\n * created from within a {@link SchedulerLike} and an RxJS user does not need to concern\n * themselves about creating and manipulating an Action.\n *\n * ```ts\n * class Action extends Subscription {\n * new (scheduler: Scheduler, work: (state?: T) => void);\n * schedule(state?: T, delay: number = 0): Subscription;\n * }\n * ```\n *\n * @class Action\n */\nexport class Action extends Subscription {\n constructor(scheduler: Scheduler, work: (this: SchedulerAction, state?: T) => void) {\n super();\n }\n /**\n * Schedules this action on its parent {@link SchedulerLike} for execution. May be passed\n * some context object, `state`. May happen at some point in the future,\n * according to the `delay` parameter, if specified.\n * @param {T} [state] Some contextual data that the `work` function uses when\n * called by the Scheduler.\n * @param {number} [delay] Time to wait before executing the work, where the\n * time unit is implicit and defined by the Scheduler.\n * @return {void}\n */\n public schedule(state?: T, delay: number = 0): Subscription {\n return this;\n }\n}\n", "import type { TimerHandle } from './timerHandle';\ntype SetIntervalFunction = (handler: () => void, timeout?: number, ...args: any[]) => TimerHandle;\ntype ClearIntervalFunction = (handle: TimerHandle) => void;\n\ninterface IntervalProvider {\n setInterval: SetIntervalFunction;\n clearInterval: ClearIntervalFunction;\n delegate:\n | {\n setInterval: SetIntervalFunction;\n clearInterval: ClearIntervalFunction;\n }\n | undefined;\n}\n\nexport const intervalProvider: IntervalProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n setInterval(handler: () => void, timeout?: number, ...args) {\n const { delegate } = intervalProvider;\n if (delegate?.setInterval) {\n return delegate.setInterval(handler, timeout, ...args);\n }\n return setInterval(handler, timeout, ...args);\n 
},\n clearInterval(handle) {\n const { delegate } = intervalProvider;\n return (delegate?.clearInterval || clearInterval)(handle as any);\n },\n delegate: undefined,\n};\n", "import { Action } from './Action';\nimport { SchedulerAction } from '../types';\nimport { Subscription } from '../Subscription';\nimport { AsyncScheduler } from './AsyncScheduler';\nimport { intervalProvider } from './intervalProvider';\nimport { arrRemove } from '../util/arrRemove';\nimport { TimerHandle } from './timerHandle';\n\nexport class AsyncAction extends Action {\n public id: TimerHandle | undefined;\n public state?: T;\n // @ts-ignore: Property has no initializer and is not definitely assigned\n public delay: number;\n protected pending: boolean = false;\n\n constructor(protected scheduler: AsyncScheduler, protected work: (this: SchedulerAction, state?: T) => void) {\n super(scheduler, work);\n }\n\n public schedule(state?: T, delay: number = 0): Subscription {\n if (this.closed) {\n return this;\n }\n\n // Always replace the current state with the new state.\n this.state = state;\n\n const id = this.id;\n const scheduler = this.scheduler;\n\n //\n // Important implementation note:\n //\n // Actions only execute once by default, unless rescheduled from within the\n // scheduled callback. This allows us to implement single and repeat\n // actions via the same code path, without adding API surface area, as well\n // as mimic traditional recursion but across asynchronous boundaries.\n //\n // However, JS runtimes and timers distinguish between intervals achieved by\n // serial `setTimeout` calls vs. a single `setInterval` call. An interval of\n // serial `setTimeout` calls can be individually delayed, which delays\n // scheduling the next `setTimeout`, and so on. 
`setInterval` attempts to\n // guarantee the interval callback will be invoked more precisely to the\n // interval period, regardless of load.\n //\n // Therefore, we use `setInterval` to schedule single and repeat actions.\n // If the action reschedules itself with the same delay, the interval is not\n // canceled. If the action doesn't reschedule, or reschedules with a\n // different delay, the interval will be canceled after scheduled callback\n // execution.\n //\n if (id != null) {\n this.id = this.recycleAsyncId(scheduler, id, delay);\n }\n\n // Set the pending flag indicating that this action has been scheduled, or\n // has recursively rescheduled itself.\n this.pending = true;\n\n this.delay = delay;\n // If this action has already an async Id, don't request a new one.\n this.id = this.id ?? this.requestAsyncId(scheduler, this.id, delay);\n\n return this;\n }\n\n protected requestAsyncId(scheduler: AsyncScheduler, _id?: TimerHandle, delay: number = 0): TimerHandle {\n return intervalProvider.setInterval(scheduler.flush.bind(scheduler, this), delay);\n }\n\n protected recycleAsyncId(_scheduler: AsyncScheduler, id?: TimerHandle, delay: number | null = 0): TimerHandle | undefined {\n // If this action is rescheduled with the same delay time, don't clear the interval id.\n if (delay != null && this.delay === delay && this.pending === false) {\n return id;\n }\n // Otherwise, if the action's delay time is different from the current delay,\n // or the action has been rescheduled before it's executed, clear the interval id\n if (id != null) {\n intervalProvider.clearInterval(id);\n }\n\n return undefined;\n }\n\n /**\n * Immediately executes this action and the `work` it contains.\n * @return {any}\n */\n public execute(state: T, delay: number): any {\n if (this.closed) {\n return new Error('executing a cancelled action');\n }\n\n this.pending = false;\n const error = this._execute(state, delay);\n if (error) {\n return error;\n } else if (this.pending === false 
&& this.id != null) {\n // Dequeue if the action didn't reschedule itself. Don't call\n // unsubscribe(), because the action could reschedule later.\n // For example:\n // ```\n // scheduler.schedule(function doWork(counter) {\n // /* ... I'm a busy worker bee ... */\n // var originalAction = this;\n // /* wait 100ms before rescheduling the action */\n // setTimeout(function () {\n // originalAction.schedule(counter + 1);\n // }, 100);\n // }, 1000);\n // ```\n this.id = this.recycleAsyncId(this.scheduler, this.id, null);\n }\n }\n\n protected _execute(state: T, _delay: number): any {\n let errored: boolean = false;\n let errorValue: any;\n try {\n this.work(state);\n } catch (e) {\n errored = true;\n // HACK: Since code elsewhere is relying on the \"truthiness\" of the\n // return here, we can't have it return \"\" or 0 or false.\n // TODO: Clean this up when we refactor schedulers mid-version-8 or so.\n errorValue = e ? e : new Error('Scheduled action threw falsy error');\n }\n if (errored) {\n this.unsubscribe();\n return errorValue;\n }\n }\n\n unsubscribe() {\n if (!this.closed) {\n const { id, scheduler } = this;\n const { actions } = scheduler;\n\n this.work = this.state = this.scheduler = null!;\n this.pending = false;\n\n arrRemove(actions, this);\n if (id != null) {\n this.id = this.recycleAsyncId(scheduler, id, null);\n }\n\n this.delay = null!;\n super.unsubscribe();\n }\n }\n}\n", "import { Action } from './scheduler/Action';\nimport { Subscription } from './Subscription';\nimport { SchedulerLike, SchedulerAction } from './types';\nimport { dateTimestampProvider } from './scheduler/dateTimestampProvider';\n\n/**\n * An execution context and a data structure to order tasks and schedule their\n * execution. 
Provides a notion of (potentially virtual) time, through the\n * `now()` getter method.\n *\n * Each unit of work in a Scheduler is called an `Action`.\n *\n * ```ts\n * class Scheduler {\n * now(): number;\n * schedule(work, delay?, state?): Subscription;\n * }\n * ```\n *\n * @class Scheduler\n * @deprecated Scheduler is an internal implementation detail of RxJS, and\n * should not be used directly. Rather, create your own class and implement\n * {@link SchedulerLike}. Will be made internal in v8.\n */\nexport class Scheduler implements SchedulerLike {\n public static now: () => number = dateTimestampProvider.now;\n\n constructor(private schedulerActionCtor: typeof Action, now: () => number = Scheduler.now) {\n this.now = now;\n }\n\n /**\n * A getter method that returns a number representing the current time\n * (at the time this function was called) according to the scheduler's own\n * internal clock.\n * @return {number} A number that represents the current time. May or may not\n * have a relation to wall-clock time. May or may not refer to a time unit\n * (e.g. milliseconds).\n */\n public now: () => number;\n\n /**\n * Schedules a function, `work`, for execution. May happen at some point in\n * the future, according to the `delay` parameter, if specified. 
May be passed\n * some context object, `state`, which will be passed to the `work` function.\n *\n * The given arguments will be processed an stored as an Action object in a\n * queue of actions.\n *\n * @param {function(state: ?T): ?Subscription} work A function representing a\n * task, or some unit of work to be executed by the Scheduler.\n * @param {number} [delay] Time to wait before executing the work, where the\n * time unit is implicit and defined by the Scheduler itself.\n * @param {T} [state] Some contextual data that the `work` function uses when\n * called by the Scheduler.\n * @return {Subscription} A subscription in order to be able to unsubscribe\n * the scheduled work.\n */\n public schedule(work: (this: SchedulerAction, state?: T) => void, delay: number = 0, state?: T): Subscription {\n return new this.schedulerActionCtor(this, work).schedule(state, delay);\n }\n}\n", "import { Scheduler } from '../Scheduler';\nimport { Action } from './Action';\nimport { AsyncAction } from './AsyncAction';\nimport { TimerHandle } from './timerHandle';\n\nexport class AsyncScheduler extends Scheduler {\n public actions: Array> = [];\n /**\n * A flag to indicate whether the Scheduler is currently executing a batch of\n * queued actions.\n * @type {boolean}\n * @internal\n */\n public _active: boolean = false;\n /**\n * An internal ID used to track the latest asynchronous task such as those\n * coming from `setTimeout`, `setInterval`, `requestAnimationFrame`, and\n * others.\n * @type {any}\n * @internal\n */\n public _scheduled: TimerHandle | undefined;\n\n constructor(SchedulerAction: typeof Action, now: () => number = Scheduler.now) {\n super(SchedulerAction, now);\n }\n\n public flush(action: AsyncAction): void {\n const { actions } = this;\n\n if (this._active) {\n actions.push(action);\n return;\n }\n\n let error: any;\n this._active = true;\n\n do {\n if ((error = action.execute(action.state, action.delay))) {\n break;\n }\n } while ((action = 
actions.shift()!)); // exhaust the scheduler queue\n\n this._active = false;\n\n if (error) {\n while ((action = actions.shift()!)) {\n action.unsubscribe();\n }\n throw error;\n }\n }\n}\n", "import { AsyncAction } from './AsyncAction';\nimport { AsyncScheduler } from './AsyncScheduler';\n\n/**\n *\n * Async Scheduler\n *\n * Schedule task as if you used setTimeout(task, duration)\n *\n * `async` scheduler schedules tasks asynchronously, by putting them on the JavaScript\n * event loop queue. It is best used to delay tasks in time or to schedule tasks repeating\n * in intervals.\n *\n * If you just want to \"defer\" task, that is to perform it right after currently\n * executing synchronous code ends (commonly achieved by `setTimeout(deferredTask, 0)`),\n * better choice will be the {@link asapScheduler} scheduler.\n *\n * ## Examples\n * Use async scheduler to delay task\n * ```ts\n * import { asyncScheduler } from 'rxjs';\n *\n * const task = () => console.log('it works!');\n *\n * asyncScheduler.schedule(task, 2000);\n *\n * // After 2 seconds logs:\n * // \"it works!\"\n * ```\n *\n * Use async scheduler to repeat task in intervals\n * ```ts\n * import { asyncScheduler } from 'rxjs';\n *\n * function task(state) {\n * console.log(state);\n * this.schedule(state + 1, 1000); // `this` references currently executing Action,\n * // which we reschedule with new state and delay\n * }\n *\n * asyncScheduler.schedule(task, 3000, 0);\n *\n * // Logs:\n * // 0 after 3s\n * // 1 after 4s\n * // 2 after 5s\n * // 3 after 6s\n * ```\n */\n\nexport const asyncScheduler = new AsyncScheduler(AsyncAction);\n\n/**\n * @deprecated Renamed to {@link asyncScheduler}. 
Will be removed in v8.\n */\nexport const async = asyncScheduler;\n", "import { AsyncAction } from './AsyncAction';\nimport { AnimationFrameScheduler } from './AnimationFrameScheduler';\nimport { SchedulerAction } from '../types';\nimport { animationFrameProvider } from './animationFrameProvider';\nimport { TimerHandle } from './timerHandle';\n\nexport class AnimationFrameAction extends AsyncAction {\n constructor(protected scheduler: AnimationFrameScheduler, protected work: (this: SchedulerAction, state?: T) => void) {\n super(scheduler, work);\n }\n\n protected requestAsyncId(scheduler: AnimationFrameScheduler, id?: TimerHandle, delay: number = 0): TimerHandle {\n // If delay is greater than 0, request as an async action.\n if (delay !== null && delay > 0) {\n return super.requestAsyncId(scheduler, id, delay);\n }\n // Push the action to the end of the scheduler queue.\n scheduler.actions.push(this);\n // If an animation frame has already been requested, don't request another\n // one. If an animation frame hasn't been requested yet, request one. Return\n // the current animation frame request id.\n return scheduler._scheduled || (scheduler._scheduled = animationFrameProvider.requestAnimationFrame(() => scheduler.flush(undefined)));\n }\n\n protected recycleAsyncId(scheduler: AnimationFrameScheduler, id?: TimerHandle, delay: number = 0): TimerHandle | undefined {\n // If delay exists and is greater than 0, or if the delay is null (the\n // action wasn't rescheduled) but was originally scheduled as an async\n // action, then recycle as an async action.\n if (delay != null ? 
delay > 0 : this.delay > 0) {\n return super.recycleAsyncId(scheduler, id, delay);\n }\n // If the scheduler queue has no remaining actions with the same async id,\n // cancel the requested animation frame and set the scheduled flag to\n // undefined so the next AnimationFrameAction will request its own.\n const { actions } = scheduler;\n if (id != null && actions[actions.length - 1]?.id !== id) {\n animationFrameProvider.cancelAnimationFrame(id as number);\n scheduler._scheduled = undefined;\n }\n // Return undefined so the action knows to request a new async id if it's rescheduled.\n return undefined;\n }\n}\n", "import { AsyncAction } from './AsyncAction';\nimport { AsyncScheduler } from './AsyncScheduler';\n\nexport class AnimationFrameScheduler extends AsyncScheduler {\n public flush(action?: AsyncAction): void {\n this._active = true;\n // The async id that effects a call to flush is stored in _scheduled.\n // Before executing an action, it's necessary to check the action's async\n // id to determine whether it's supposed to be executed in the current\n // flush.\n // Previous implementations of this method used a count to determine this,\n // but that was unsound, as actions that are unsubscribed - i.e. 
cancelled -\n // are removed from the actions array and that can shift actions that are\n // scheduled to be executed in a subsequent flush into positions at which\n // they are executed within the current flush.\n const flushId = this._scheduled;\n this._scheduled = undefined;\n\n const { actions } = this;\n let error: any;\n action = action || actions.shift()!;\n\n do {\n if ((error = action.execute(action.state, action.delay))) {\n break;\n }\n } while ((action = actions[0]) && action.id === flushId && actions.shift());\n\n this._active = false;\n\n if (error) {\n while ((action = actions[0]) && action.id === flushId && actions.shift()) {\n action.unsubscribe();\n }\n throw error;\n }\n }\n}\n", "import { AnimationFrameAction } from './AnimationFrameAction';\nimport { AnimationFrameScheduler } from './AnimationFrameScheduler';\n\n/**\n *\n * Animation Frame Scheduler\n *\n * Perform task when `window.requestAnimationFrame` would fire\n *\n * When `animationFrame` scheduler is used with delay, it will fall back to {@link asyncScheduler} scheduler\n * behaviour.\n *\n * Without delay, `animationFrame` scheduler can be used to create smooth browser animations.\n * It makes sure scheduled task will happen just before next browser content repaint,\n * thus performing animations as efficiently as possible.\n *\n * ## Example\n * Schedule div height animation\n * ```ts\n * // html:
\n * import { animationFrameScheduler } from 'rxjs';\n *\n * const div = document.querySelector('div');\n *\n * animationFrameScheduler.schedule(function(height) {\n * div.style.height = height + \"px\";\n *\n * this.schedule(height + 1); // `this` references currently executing Action,\n * // which we reschedule with new state\n * }, 0, 0);\n *\n * // You will see a div element growing in height\n * ```\n */\n\nexport const animationFrameScheduler = new AnimationFrameScheduler(AnimationFrameAction);\n\n/**\n * @deprecated Renamed to {@link animationFrameScheduler}. Will be removed in v8.\n */\nexport const animationFrame = animationFrameScheduler;\n", "import { Observable } from '../Observable';\nimport { SchedulerLike } from '../types';\n\n/**\n * A simple Observable that emits no items to the Observer and immediately\n * emits a complete notification.\n *\n * Just emits 'complete', and nothing else.\n *\n * ![](empty.png)\n *\n * A simple Observable that only emits the complete notification. It can be used\n * for composing with other Observables, such as in a {@link mergeMap}.\n *\n * ## Examples\n *\n * Log complete notification\n *\n * ```ts\n * import { EMPTY } from 'rxjs';\n *\n * EMPTY.subscribe({\n * next: () => console.log('Next'),\n * complete: () => console.log('Complete!')\n * });\n *\n * // Outputs\n * // Complete!\n * ```\n *\n * Emit the number 7, then complete\n *\n * ```ts\n * import { EMPTY, startWith } from 'rxjs';\n *\n * const result = EMPTY.pipe(startWith(7));\n * result.subscribe(x => console.log(x));\n *\n * // Outputs\n * // 7\n * ```\n *\n * Map and flatten only odd numbers to the sequence `'a'`, `'b'`, `'c'`\n *\n * ```ts\n * import { interval, mergeMap, of, EMPTY } from 'rxjs';\n *\n * const interval$ = interval(1000);\n * const result = interval$.pipe(\n * mergeMap(x => x % 2 === 1 ? 
of('a', 'b', 'c') : EMPTY),\n * );\n * result.subscribe(x => console.log(x));\n *\n * // Results in the following to the console:\n * // x is equal to the count on the interval, e.g. (0, 1, 2, 3, ...)\n * // x will occur every 1000ms\n * // if x % 2 is equal to 1, print a, b, c (each on its own)\n * // if x % 2 is not equal to 1, nothing will be output\n * ```\n *\n * @see {@link Observable}\n * @see {@link NEVER}\n * @see {@link of}\n * @see {@link throwError}\n */\nexport const EMPTY = new Observable((subscriber) => subscriber.complete());\n\n/**\n * @param scheduler A {@link SchedulerLike} to use for scheduling\n * the emission of the complete notification.\n * @deprecated Replaced with the {@link EMPTY} constant or {@link scheduled} (e.g. `scheduled([], scheduler)`). Will be removed in v8.\n */\nexport function empty(scheduler?: SchedulerLike) {\n return scheduler ? emptyScheduled(scheduler) : EMPTY;\n}\n\nfunction emptyScheduled(scheduler: SchedulerLike) {\n return new Observable((subscriber) => scheduler.schedule(() => subscriber.complete()));\n}\n", "import { SchedulerLike } from '../types';\nimport { isFunction } from './isFunction';\n\nexport function isScheduler(value: any): value is SchedulerLike {\n return value && isFunction(value.schedule);\n}\n", "import { SchedulerLike } from '../types';\nimport { isFunction } from './isFunction';\nimport { isScheduler } from './isScheduler';\n\nfunction last(arr: T[]): T | undefined {\n return arr[arr.length - 1];\n}\n\nexport function popResultSelector(args: any[]): ((...args: unknown[]) => unknown) | undefined {\n return isFunction(last(args)) ? args.pop() : undefined;\n}\n\nexport function popScheduler(args: any[]): SchedulerLike | undefined {\n return isScheduler(last(args)) ? args.pop() : undefined;\n}\n\nexport function popNumber(args: any[], defaultValue: number): number {\n return typeof last(args) === 'number' ? args.pop()! 
: defaultValue;\n}\n", "export const isArrayLike = ((x: any): x is ArrayLike => x && typeof x.length === 'number' && typeof x !== 'function');", "import { isFunction } from \"./isFunction\";\n\n/**\n * Tests to see if the object is \"thennable\".\n * @param value the object to test\n */\nexport function isPromise(value: any): value is PromiseLike {\n return isFunction(value?.then);\n}\n", "import { InteropObservable } from '../types';\nimport { observable as Symbol_observable } from '../symbol/observable';\nimport { isFunction } from './isFunction';\n\n/** Identifies an input as being Observable (but not necessary an Rx Observable) */\nexport function isInteropObservable(input: any): input is InteropObservable {\n return isFunction(input[Symbol_observable]);\n}\n", "import { isFunction } from './isFunction';\n\nexport function isAsyncIterable(obj: any): obj is AsyncIterable {\n return Symbol.asyncIterator && isFunction(obj?.[Symbol.asyncIterator]);\n}\n", "/**\n * Creates the TypeError to throw if an invalid object is passed to `from` or `scheduled`.\n * @param input The object that was passed.\n */\nexport function createInvalidObservableTypeError(input: any) {\n // TODO: We should create error codes that can be looked up, so this can be less verbose.\n return new TypeError(\n `You provided ${\n input !== null && typeof input === 'object' ? 'an invalid object' : `'${input}'`\n } where a stream was expected. 
You can provide an Observable, Promise, ReadableStream, Array, AsyncIterable, or Iterable.`\n );\n}\n", "export function getSymbolIterator(): symbol {\n if (typeof Symbol !== 'function' || !Symbol.iterator) {\n return '@@iterator' as any;\n }\n\n return Symbol.iterator;\n}\n\nexport const iterator = getSymbolIterator();\n", "import { iterator as Symbol_iterator } from '../symbol/iterator';\nimport { isFunction } from './isFunction';\n\n/** Identifies an input as being an Iterable */\nexport function isIterable(input: any): input is Iterable {\n return isFunction(input?.[Symbol_iterator]);\n}\n", "import { ReadableStreamLike } from '../types';\nimport { isFunction } from './isFunction';\n\nexport async function* readableStreamLikeToAsyncGenerator(readableStream: ReadableStreamLike): AsyncGenerator {\n const reader = readableStream.getReader();\n try {\n while (true) {\n const { value, done } = await reader.read();\n if (done) {\n return;\n }\n yield value!;\n }\n } finally {\n reader.releaseLock();\n }\n}\n\nexport function isReadableStreamLike(obj: any): obj is ReadableStreamLike {\n // We don't want to use instanceof checks because they would return\n // false for instances from another Realm, like an