diff --git a/.nojekyll b/.nojekyll
new file mode 100644
index 0000000..e69de29
diff --git a/404.html b/404.html
new file mode 100644
index 0000000..116346a
--- /dev/null
+++ b/404.html
@@ -0,0 +1,1320 @@
[Generated MkDocs 404 page for "ESIIL Innovation Summit 2024"; body text: "404 - Not found". Full HTML omitted.]
+ + + + + + + + + + \ No newline at end of file diff --git a/NSF reporting figures.qmd b/NSF reporting figures.qmd new file mode 100644 index 0000000..2c7873b --- /dev/null +++ b/NSF reporting figures.qmd @@ -0,0 +1,168 @@ +--- +title: "nsf report 2024" +format: + html: + theme: default + toc: true + number-sections: true +--- + +```{r} +# Load necessary libraries +library(ggplot2) +library(rnaturalearth) +library(rnaturalearthdata) +library(dplyr) + +# Get world data +world <- ne_countries(scale = "medium", returnclass = "sf") + +# Data frame with country names and counts +data <- data.frame( + name = c("United States of America", "Brazil", "Germany", "Canada", "Nigeria", + "Australia", "Peru", "Israel", "United Kingdom", "Panama", + "Saudi Arabia", "Kenya", "Japan", "Nepal", "Spain", + "Sweden", "Czech Republic", "Vietnam"), + count = c(252, 6, 5, 5, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1) +) + +# Join this data with the world map data +world_data <- left_join(world, data, by = "name") + +# Plot +# Plot with adjusted scale +countries_plot <- ggplot(data = world_data) + + geom_sf(aes(fill = count), color = "white", size = 0.25) + + scale_fill_gradient(low = "lightblue", high = "darkblue", + limits = c(0, 252), + breaks = c(1, 50, 100, 252), + na.value = "grey90", name = "Users", + labels = scales::comma) + + #labs(title = "ESIIL Cyverse users per country") + + theme_minimal() + + theme(legend.position = "right", + plot.title = element_text(hjust = 0.5)) + +ggsave(countries_plot, file="countries_plot.png", dpi=600) + +``` + + +```{r} +# Load necessary libraries +library(ggplot2) +library(dplyr) +library(sf) +library(rnaturalearth) +library(rnaturalearthdata) + +# Get U.S. states and Canadian provinces data +states <- ne_states(country = "united states of america", returnclass = "sf") +provinces <- ne_states(country = "canada", returnclass = "sf") + +# Combine U.S. 
states and Canadian provinces +north_america_map <- rbind(states, provinces) + +# Data frame with regions and counts +data <- data.frame( + region = c("colorado", "california", "florida", "south dakota", "arizona", + "louisiana", "new york", "south carolina", "new mexico", "north carolina", + "minnesota", "massachusetts", "connecticut", "oregon", "wisconsin", + "maryland", "virginia", "pennsylvania", "texas", "michigan", "illinois", + "ontario", "north dakota", "georgia", "new jersey", "utah", + "missouri", "idaho", "montana", "maine", "new hampshire", "ohio", "nevada", + "hawaii", "arkansas", "wyoming", "oklahoma", "tennessee", "washington", + "alabama", "district of columbia", "kentucky", "indiana", "rhode island", "iowa", + "quebec", "british columbia"), + count = c(76, 20, 16, 13, 8, 8, 6, 6, 5, 5, 5, 5, 5, 5, 5, 5, 4, 4, 4, 4, 4, + 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1) +) + +# Map data to region names +north_america_map$region <- tolower(north_america_map$name) +north_america_map <- left_join(north_america_map, data, by = "region") + +# Filter out regions with no data +filled_regions <- north_america_map[!is.na(north_america_map$count), ] + +# Calculate the bounding box of the filled regions +bbox <- st_bbox(filled_regions) + +# Crop the original map based on the bounding box +cropped_map <- st_crop(north_america_map, bbox) + +# Plot the map, focusing only on regions with data +states_plot <- ggplot(data = cropped_map) + + geom_sf(aes(fill = count), color = "white", size = 0.25) + + scale_fill_gradient(low = "lightblue", high = "darkblue", na.value = "grey90", name = "Users") + + #labs(title = "ESIIL Cyverse users per state") + + theme_minimal() + + theme(legend.position = "right") + +ggsave(states_plot, file="states_plot.png", dpi=600) + + +``` + + + +```{r} +library(plotly) + +# Define tasks and their assumed start and end dates +tasks <- data.frame( + Task = c("CI User Needs Assessment", "Write new draft", "IRB approval", "Send to community", + "analyze survey results", "Respond to User Needs Assessment", "CyVerse Workbench Integration", + "Requirements & UI / UX design", "Code free large JupyterHub deployment", "Docker Registry", + "Data library", "reorganize sections after summit", "write guidelines for community contribution", + "guide a prototype community contribution into the library", "recruit community contributions", + "write ESIIL contributions to the library", "Analytics library - Integrated workflows", + "ESIIL community-driven high-level design", "Write code of conduct, authorship credits", + "Write guidelines for community contribution", "Create ESIIL codes template", "Bring codes from Earth Lab's GitHub", + "CI library", "Push-button terraform template", "WG-generated value-added information products", + "Cycle ESIIL personnel through FOSS class", "Unified branding", "CI for Analytics / Data library", + "ESIIL User Tracking Site", "Jim's Data Cube Pilot Project", "gdal set up on Jim's laptop", + "Planning and Data Acquisition", "Data Cube Design and Setup", "Storage and Management", + "Analysis and Visualization", "Security and Quality Assurance", "Scalability and Maintenance"), + Start = seq(as.Date("2023-06-01"), length.out = 37, by = "15 days"), + End = seq(as.Date("2023-07-01"), length.out = 37, by = "15 days"), + Owner = rep(c("Ty", "Tyson, Ty, Cibele", "Tyson", "Erick", "Cibele", "Jim"), length.out = 37), + Color = ifelse(seq(as.Date("2023-06-01"), length.out = 37, by = "15 days") < as.Date("2024-06-01"), 
'rgb(0,123,255)', 'rgb(255,0,0)') +) + +# Create a Gantt chart using Plotly +fig <- plot_ly() +fig <- fig %>% add_trace( + type = 'bar', + x = as.numeric(difftime(tasks$End, tasks$Start, units = "days")), + y = tasks$Task, + base = as.numeric(difftime(tasks$Start, as.Date("2023-06-01"), units = "days")), + orientation = 'h', + marker = list(color = tasks$Color, line = list(color = 'rgb(255,255,255)', width = 2)) +) + +fig <- fig %>% layout( + title = "Gantt Chart for ESIIL Year 2 Projects", + paper_bgcolor='rgba(0,0,0,0)', # transparent background + plot_bgcolor='rgba(0,0,0,0)', # transparent background + xaxis = list( + title = "Days from Start", + showgrid = TRUE, + tickvals = seq(0, 760, by = 30), + ticktext = seq(as.Date("2023-06-01"), length.out = 26, by = "month") %>% format("%b %Y") + ), + yaxis = list(title = "") +) + +# Show the plot +fig + +# Save Plotly plot to HTML +htmlwidgets::saveWidget(as_widget(fig), "temp_plot.html", selfcontained = TRUE) + +# Use webshot to convert the HTML to PNG +webshot::webshot("temp_plot.html", "gantt_chart.png", delay = 5) # delay may need adjustment + + +``` + diff --git a/additional-resources/bilingualism_md/bilingualism_md.md b/additional-resources/bilingualism_md/bilingualism_md.md new file mode 100644 index 0000000..5483ba7 --- /dev/null +++ b/additional-resources/bilingualism_md/bilingualism_md.md @@ -0,0 +1,2112 @@ +# R and Python bilingualism + +Welcome to the R and Python bilingualism reference guide! If you’re +fluent in one of these languages but hesitant to learn the other, you’re +in the right place. The good news is that there are many similarities +between R and Python that make it easy to switch between the two. + +Both R and Python are widely used in data science and are open-source, +meaning that they are free to use and constantly being improved by the +community. They both have extensive libraries for data analysis, +visualization, and machine learning. In fact, many of the libraries in +both languages have similar names and functions, such as Pandas in +Python and data.table in R. + +While there are differences between the two languages, they can +complement each other well. Python is versatile and scalable, making it +ideal for large and complex projects such as web development and +artificial intelligence. R, on the other hand, is known for its +exceptional statistical capabilities and is often used in data analysis +and modeling. Visualization is also easier in R, making it a popular +choice for creating graphs and charts. + +By learning both R and Python, you’ll be able to take advantage of the +strengths of each language and create more efficient and robust data +analysis workflows. Don’t let the differences between the two languages +intimidate you - once you become familiar with one, learning the other +will be much easier. + +So, whether you’re a Python enthusiast looking to expand your +statistical analysis capabilities, or an R user interested in exploring +the world of web development and artificial intelligence, this guide +will help you become bilingual in R and Python. 
+ +## Install packages + +In R, packages can be installed from CRAN repository by using the +install.packages() function: + +R code: + +``` r +# Install the dplyr package from CRAN +install.packages("dplyr") +``` + +In Python, packages can be installed from the Anaconda repository by +using the conda install command: + +Python code: + +``` python +# Install the pandas package from Anaconda +!conda install pandas +``` + +Loading libraries in R and Python + +In R, libraries can be loaded in the same way as before, using the +library() function: + +R code: + +``` r +# Load the dplyr library +library(dplyr) +``` + +In Python, libraries can be loaded in the same way as before, using the +import statement. Here’s an example: + +Python code: + +``` python +# Load the pandas library +import pandas as pd +``` + +Note that the package or library must be installed from the respective +repository before it can be loaded. Also, make sure you have the correct +repository specified in your system before installing packages. By +default, R uses CRAN as its primary repository, whereas Anaconda uses +its own repository by default. + +## reticulate + +The reticulate package lets you run both R and Python together in the R +environment. + +R libraries are stored and managed in a repository called CRAN. You can +download R packages with the install.packages() function + +``` r +install.packages("reticulate") +``` + +You only need to install packages once, but you need to mount those +packages with the library() function each time you open R. + +``` r +library(reticulate) +``` + +Python libraries are stored and managed in a few different libraries and +their dependencies are not regulated as strictly as R libraries are in +CRAN. It’s easier to publish a python package but it can also be more +cumbersome for users because you need to manage dependencies yourself. +You can download python packages using both R and Python code + +``` r +py_install("laspy") +``` + + ## + '/Users/ty/opt/miniconda3/bin/conda' 'install' '--yes' '--prefix' '/Users/ty/opt/miniconda3/envs/earth-analytics-python' '-c' 'conda-forge' 'laspy' + +Now, let’s create a Python list and assign it to a variable py_list: + +R code: + +``` r +py_list <- r_to_py(list(1, 2, 3)) +``` + +We can now print out the py_list variable in Python using the +py_run_string() function: + +R code: + +``` r +py_run_string("print(r.py_list)") +``` + +This will output \[1, 2, 3\] in the Python console. + +Now, let’s create an R vector and assign it to a variable r_vec: + +R code: + +``` r +r_vec <- c(4, 5, 6) +``` + +We can now print out the r_vec variable in R using the py$ syntax to +access Python variables: + +R code: + +``` r +print(py$py_list) +``` + +This will output \[1, 2, 3\] in the R console. + +We can also call Python functions from R using the py_call() function. +For example, let’s call the Python sum() function on the py_list +variable and assign the result to an R variable r_sum: + +R code: + +``` r +r_sum <- py_call("sum", args = list(py_list)) +``` + +We can now print out the r_sum variable in R: + +R code: + +``` r +print(r_sum) +``` + +This will output 6 in the R console. 
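As one more illustration of how the two languages interoperate, reticulate can also import Python modules and call their functions directly from R, with arguments and return values converted automatically. The following is a minimal sketch, assuming numpy is installed in the Python environment that reticulate is using.

``` r
library(reticulate)

# Import a Python module and call one of its functions from R;
# reticulate converts the R vector to a Python object automatically
np <- import("numpy")
r_vec <- c(4, 5, 6)
print(np$mean(r_vec))

# Define a Python function with py_run_string() and call it through py$
py_run_string("def double(x): return [v * 2 for v in x]")
print(py$double(r_vec))
```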
+ +## Load packages and change settings + +``` r +options(java.parameters = "-Xmx5G") + +library(r5r) +library(sf) +library(data.table) +library(ggplot2) +library(interp) +library(dplyr) +library(osmdata) +library(ggthemes) +library(sf) +library(data.table) +library(ggplot2) +library(akima) +library(dplyr) +library(raster) +library(osmdata) +library(mapview) +library(cowplot) +library(here) +library(testthat) +``` + +``` python +import sys +sys.argv.append(["--max-memory", "5G"]) + +import pandas as pd +import geopandas +import matplotlib.pyplot as plt +import numpy as np +import plotnine +import contextily as cx +import r5py +import seaborn as sns +``` + +R and Python are two popular programming languages used for data +analysis, statistics, and machine learning. Although they share some +similarities, there are some fundamental differences between them. +Here’s an example code snippet in R and Python to illustrate some of the +differences: + +R Code: + +``` r +# Create a vector of numbers from 1 to 10 +x <- 1:10 + +# Compute the mean of the vector +mean_x <- mean(x) + +# Print the result +print(mean_x) +``` + + ## [1] 5.5 + +Python Code: + +``` python +# Import the numpy library for numerical operations +import numpy as np + +# Create a numpy array of numbers from 1 to 10 +x = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) + +# Compute the mean of the array +mean_x = np.mean(x) + +# Print the result +print(mean_x) +``` + + ## 5.5 + +In this example, we can see that there are several differences between R +and Python: + +Syntax: R uses the assignment operator \<- while Python uses the equals +sign = for variable assignment. + +Libraries: Python relies heavily on external libraries such as numpy, +pandas, and matplotlib for data analysis, while R has built-in functions +for many data analysis tasks. + +Data types: R is designed to work with vectors and matrices, while +Python uses lists and arrays. In the example above, we used the numpy +library to create a numerical array in Python. + +Function names: Function names in R and Python can differ significantly. +In the example above, we used the mean() function in R and the np.mean() +function in Python to calculate the mean of the vector/array. + +These are just a few of the many differences between R and Python. +Ultimately, the choice between the two languages will depend on your +specific needs and preferences. + +## Load saved data + +R Code: + +``` r +data("iris") +here() +load(file=here("2_R_and_Py_bilingualism", "data", "iris_example_data.rdata")) +objects() +``` + +Python code: + +## Save data + +R Code: + +``` r +save(iris, file=here("2_R_and_Py_bilingualism", "data", "iris_example_data.rdata")) + +write.csv(iris, file=here("2_R_and_Py_bilingualism", "data", "iris_example_data.csv")) +``` + +Python code: + +## functions + +Both R and Python are powerful languages for writing functions that can +take input, perform a specific task, and return output. R Code: + +``` r +# Define a function that takes two arguments and returns their sum +sum_r <- function(a, b) { + return(a + b) +} + +# Call the function with two arguments and print the result +result_r <- sum_r(3, 5) +print(result_r) +``` + + ## [1] 8 + +Python code: + +``` python +# Define a function that takes two arguments and returns their sum +def sum_py(a, b): + return a + b + +# Call the function with two arguments and print the result +result_py = sum_py(3, 5) +print(result_py) +``` + + ## 8 + +In both cases, we define a function that takes two arguments and returns +their sum. 
In R, we use the function keyword to define a function, while +in Python, we use the def keyword. The function body in R is enclosed in +curly braces, while in Python it is indented. + +There are a few differences in the syntax and functionality between the +two approaches: + +Function arguments: In R, function arguments are separated by commas, +while in Python they are enclosed in parentheses. The syntax for +specifying default arguments and variable-length argument lists can also +differ between the two languages. Return statement: In R, we use the +return keyword to specify the return value of a function, while in +Python, we simply use the return statement. Function names: Function +names in R and Python can differ significantly. In the example above, we +used the sum_r() function in R and the sum_py() function in Python to +calculate the sum of two numbers. + +## Data Plots + +R Code: + +``` r +# Load the "ggplot2" package for plotting +library(ggplot2) + +# Generate some sample data +x <- seq(1, 10, 1) +y <- x + rnorm(10) + +# Create a scatter plot +ggplot(data.frame(x, y), aes(x = x, y = y)) + + geom_point() +``` + +![](bilingualism_md_files/figure-gfm/unnamed-chunk-25-1.pdf) +Python code: + +``` python +# Load the "matplotlib" library +import matplotlib.pyplot as plt + +# Generate some sample data +import numpy as np +x = np.arange(1, 11) +y = x + np.random.normal(0, 1, 10) + +#clear last plot +plt.clf() + +# Create a scatter plot +plt.scatter(x, y) +plt.show() +``` + +![](bilingualism_md_files/figure-gfm/unnamed-chunk-26-1.pdf) + +In both cases, we generate some sample data and create a scatter plot to +visualize the relationship between the variables. + +There are a few differences in the syntax and functionality between the +two approaches: + +Library and package names: In R, we use the ggplot2 package for +plotting, while in Python, we use the matplotlib library. Data format: +In R, we use a data frame to store the input data, while in Python, we +use numpy arrays. Plotting functions: In R, we use the ggplot() function +to create a new plot object, and then use the geom_point() function to +create a scatter plot layer. In Python, we use the scatter() function +from the matplotlib.pyplot module to create a scatter plot directly. + +## Linear regression + +R Code: + +``` r +# Load the "ggplot2" package for plotting +library(ggplot2) + +# Generate some sample data +x <- seq(1, 10, 1) +y <- x + rnorm(10) + +# Perform linear regression +model_r <- lm(y ~ x) + +# Print the model summary +summary(model_r) +``` + + ## + ## Call: + ## lm(formula = y ~ x) + ## + ## Residuals: + ## Min 1Q Median 3Q Max + ## -1.69344 -0.42336 0.08961 0.34778 1.56728 + ## + ## Coefficients: + ## Estimate Std. Error t value Pr(>|t|) + ## (Intercept) -0.1676 0.6781 -0.247 0.811 + ## x 0.9750 0.1093 8.921 1.98e-05 *** + ## --- + ## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 
0.1 ' ' 1 + ## + ## Residual standard error: 0.9926 on 8 degrees of freedom + ## Multiple R-squared: 0.9087, Adjusted R-squared: 0.8972 + ## F-statistic: 79.59 on 1 and 8 DF, p-value: 1.976e-05 + +``` r +# Plot the data and regression line +ggplot(data.frame(x, y), aes(x = x, y = y)) + + geom_point() + + geom_smooth(method = "lm", se = FALSE) +``` + + ## `geom_smooth()` using formula = 'y ~ x' + +![](bilingualism_md_files/figure-gfm/unnamed-chunk-27-3.pdf) + +Python code: + +``` python +# Load the "matplotlib" and "scikit-learn" libraries +import matplotlib.pyplot as plt +from sklearn.linear_model import LinearRegression + +# Generate some sample data +import numpy as np +x = np.arange(1, 11) +y = x + np.random.normal(0, 1, 10) + +# Perform linear regression +model_py = LinearRegression().fit(x.reshape(-1, 1), y) + +# Print the model coefficients +print("Coefficients: ", model_py.coef_) +``` + + ## Coefficients: [1.15539692] + +``` python +print("Intercept: ", model_py.intercept_) + +#clear last plot +``` + + ## Intercept: -1.1291396173221218 + +``` python +plt.clf() + +# Plot the data and regression line +plt.scatter(x, y) +plt.plot(x, model_py.predict(x.reshape(-1, 1)), color='red') +plt.show() +``` + +![](bilingualism_md_files/figure-gfm/unnamed-chunk-28-1.pdf) + +In both cases, we generate some sample data with a linear relationship +between x and y, and then perform a simple linear regression to estimate +the slope and intercept of the line. We then plot the data and +regression line to visualize the fit. + +There are a few differences in the syntax and functionality between the +two approaches: + +Library and package names: In R, we use the lm() function from the base +package to perform linear regression, while in Python, we use the +LinearRegression() class from the scikit-learn library. Additionally, we +use the ggplot2 package in R for plotting, while we use the matplotlib +library in Python. Data format: In R, we can specify the dependent and +independent variables in the formula used for regression. In Python, we +need to reshape the input data to a two-dimensional array before fitting +the model. Model summary: In R, we can use the summary() function to +print a summary of the model, including the estimated coefficients, +standard errors, and p-values. In Python, we need to print the +coefficients and intercept separately. 
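If you want an R-style regression table in Python rather than printing the coefficients one by one, the statsmodels library produces one. This is a minimal sketch, assuming statsmodels is installed; it is an alternative to scikit-learn rather than part of it.

``` python
import numpy as np
import statsmodels.api as sm

# Generate some sample data
x = np.arange(1, 11)
y = x + np.random.normal(0, 1, 10)

# Add an intercept column and fit an ordinary least squares model
X = sm.add_constant(x)
model = sm.OLS(y, X).fit()

# Print a summary table with coefficients, standard errors, and p-values,
# similar to summary(lm(y ~ x)) in R
print(model.summary())
```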
+ +## Random Forest + +R Code: + +``` r +# Load the "randomForest" package +library(randomForest) + +# Load the "iris" dataset +data(iris) + +# Split the data into training and testing sets +set.seed(123) +train_idx <- sample(1:nrow(iris), nrow(iris) * 0.7, replace = FALSE) +train_data <- iris[train_idx, ] +test_data <- iris[-train_idx, ] + +# Build a random forest model +rf_model <- randomForest(Species ~ ., data = train_data, ntree = 500) + +# Make predictions on the testing set +predictions <- predict(rf_model, test_data) + +# Calculate accuracy of the model +accuracy <- sum(predictions == test_data$Species) / nrow(test_data) +print(paste("Accuracy:", accuracy)) +``` + + ## [1] "Accuracy: 0.977777777777778" + +Python code: + +``` python +# Load the "pandas", "numpy", and "sklearn" libraries +import pandas as pd +import numpy as np +from sklearn.ensemble import RandomForestClassifier +from sklearn.datasets import load_iris +from sklearn.model_selection import train_test_split + +# Load the "iris" dataset +iris = load_iris() + +# Split the data into training and testing sets +X_train, X_test, y_train, y_test = train_test_split(iris.data, iris.target, test_size=0.3, random_state=123) + +# Build a random forest model +rf_model = RandomForestClassifier(n_estimators=500, random_state=123) +rf_model.fit(X_train, y_train) + +# Make predictions on the testing set +``` + + ## RandomForestClassifier(n_estimators=500, random_state=123) + +``` python +predictions = rf_model.predict(X_test) + +# Calculate accuracy of the model +accuracy = sum(predictions == y_test) / len(y_test) +print("Accuracy:", accuracy) +``` + + ## Accuracy: 0.9555555555555556 + +In both cases, we load the iris dataset and split it into training and +testing sets. We then build a random forest model using the training +data and evaluate its accuracy on the testing data. + +There are a few differences in the syntax and functionality between the +two approaches: + +Library and package names: In R, we use the randomForest package to +build random forest models, while in Python, we use the +RandomForestClassifier class from the sklearn.ensemble module. We also +use different libraries for loading and manipulating data (pandas and +numpy in Python, and built-in datasets in R). Model parameters: The +syntax for setting model parameters is slightly different in R and +Python. For example, in R, we specify the number of trees using the +ntree parameter, while in Python, we use the n_estimators parameter. +Data format: In R, we use a data frame to store the input data, while in +Python, we use numpy arrays. + +## Basic streetmap from Open Street Map + +R Code: + +``` r +# Load the "osmdata" package for mapping +library(osmdata) +library(tmap) + +# Define the map location and zoom level +bbox <- c(left = -0.16, bottom = 51.49, right = -0.13, top = 51.51) + +# Get the OpenStreetMap data +osm_data <- opq(bbox) %>% + add_osm_feature(key = "highway") %>% + osmdata_sf() + +# Plot the map using tmap +tm_shape(osm_data$osm_lines) + + tm_lines() +``` + +![](bilingualism_md_files/figure-gfm/unnamed-chunk-31-1.pdf) +Python code: + +``` python +# Load the "osmnx" package for mapping +import osmnx as ox + +# Define the map location and zoom level +bbox = (51.49, -0.16, 51.51, -0.13) + +# Get the OpenStreetMap data +osm_data = ox.graph_from_bbox(north=bbox[2], south=bbox[0], east=bbox[3], west=bbox[1], network_type='all') + +# Plot the map using osmnx +ox.plot_graph(osm_data) +``` + + ## (
, ) + +![](bilingualism_md_files/figure-gfm/unnamed-chunk-32-1.pdf) + +In both cases, we define the map location and zoom level, retrieve the +OpenStreetMap data using the specified bounding box, and plot the map. + +The main differences between the two approaches are: + +Package names and syntax: In R, we use the osmdata package and its +syntax to download and process the OpenStreetMap data, while in Python, +we use the osmnx package and its syntax. Mapping libraries: In R, we use +the tmap package to create a static map of the OpenStreetMap data, while +in Python, we use the built-in ox.plot_graph function from the osmnx +package to plot the map. + +## CNN on Raster data + +R Code: + +``` r +# Load the "keras" package for building the CNN +library(tensorflow) +library(keras) + +# Load the "raster" package for working with raster data +library(raster) + +# Load the "magrittr" package for pipe operator +library(magrittr) + +# Load the data as a raster brick +raster_data <- brick("raster_data.tif") + +# Split the data into training and testing sets +split_data <- sample(1:nlayers(raster_data), size = nlayers(raster_data)*0.8, replace = FALSE) +train_data <- raster_data[[split_data]] +test_data <- raster_data[[setdiff(1:nlayers(raster_data), split_data)]] + +# Define the CNN model +model <- keras_model_sequential() %>% + layer_conv_2d(filters = 32, kernel_size = c(3, 3), activation = "relu", input_shape = c(ncol(train_data), nrow(train_data), ncell(train_data))) %>% + layer_max_pooling_2d(pool_size = c(2, 2)) %>% + layer_dropout(rate = 0.25) %>% + layer_flatten() %>% + layer_dense(units = 128, activation = "relu") %>% + layer_dropout(rate = 0.5) %>% + layer_dense(units = nlayers(train_data), activation = "softmax") + +# Compile the model +model %>% compile(loss = "categorical_crossentropy", optimizer = "adam", metrics = "accuracy") + +# Train the model +history <- model %>% fit(x = array(train_data), y = to_categorical(1:nlayers(train_data)), epochs = 10, validation_split = 0.2) + +# Evaluate the model +model %>% evaluate(x = array(test_data), y = to_categorical(1:nlayers(test_data))) + +# Plot the model accuracy over time +plot(history) +``` + +## Piping + +Piping is a powerful feature in both R and Python that allows for a more +streamlined and readable code. However, the syntax for piping is +slightly different between the two languages. + +In R, piping is done using the %\>% operator from the magrittr package, +while in Python, it is done using the \| operator from the pandas +package. + +Let’s compare and contrast piping in R and Python with some examples: + +Piping in R In R, we can use the %\>% operator to pipe output from one +function to another, which can make our code more readable and easier to +follow. Here’s an example: + +R code: + +``` r +library(dplyr) + +# create a data frame +df <- data.frame(x = c(1,2,3), y = c(4,5,6)) + +# calculate the sum of column x and y +df %>% + mutate(z = x + y) %>% + summarize(sum_z = sum(z)) +``` + + ## sum_z + ## 1 21 + +In this example, we first create a data frame df with two columns x and +y. We then pipe the output of df to mutate, which adds a new column z to +the data frame that is the sum of x and y. Finally, we pipe the output +to summarize, which calculates the sum of z and returns the result. + +Piping in Python In Python, we can use the \| operator to pipe output +from one function to another. However, instead of piping output from one +function to another, we pipe a DataFrame to a method of the DataFrame. 
+Here’s an example: + +Python code: + +``` python +import pandas as pd + +# create a DataFrame +df = pd.DataFrame({'x': [1,2,3], 'y': [4,5,6]}) + +# calculate the sum of column x and y +(df.assign(z = df['x'] + df['y']) + .agg(sum_z = ('z', 'sum'))) +``` + + ## z + ## sum_z 21 + +In this example, we first create a DataFrame df with two columns x and +y. We then use the assign() method to add a new column z to the +DataFrame that is the sum of x and y. Finally, we use the agg() method +to calculate the sum of z and return the result. + +As we can see, the syntax for piping is slightly different between R and +Python, but the concept remains the same. Piping can make our code more +readable and easier to follow, which is an important aspect of creating +efficient and effective code. + +R code: + +``` r +library(dplyr) +library(ggplot2) + +iris %>% + filter(Species == "setosa") %>% + group_by(Sepal.Width) %>% + summarise(mean.Petal.Length = mean(Petal.Length)) %>% + mutate(Sepal.Width = as.factor(Sepal.Width)) %>% + ggplot(aes(x = Sepal.Width, y = mean.Petal.Length)) + + geom_bar(stat = "identity", fill = "dodgerblue") + + labs(title = "Mean Petal Length of Setosa by Sepal Width", + x = "Sepal Width", + y = "Mean Petal Length") +``` + +![](bilingualism_md_files/figure-gfm/unnamed-chunk-37-1.pdf) + +In this example, we start with the iris dataset and filter it to only +include rows where the Species column is “setosa”. We then group the +remaining rows by the Sepal.Width column and calculate the mean +Petal.Length for each group. Next, we convert Sepal.Width to a factor +variable to ensure that it is treated as a categorical variable in the +visualization. Finally, we create a bar plot using ggplot2, with +Sepal.Width on the x-axis and mean.Petal.Length on the y-axis. The +resulting plot shows the mean petal length of setosa flowers for each +sepal width category. + +Python code: + +``` python +import pandas as pd + +# Load the iris dataset and pipe it into the next function +( pd.read_csv("https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data", header=None, names=['sepal_length', 'sepal_width', 'petal_length', 'petal_width', 'class']) + + # Select columns and pivot the dataset + .loc[:, ['sepal_length', 'sepal_width', 'petal_length']] + .melt(var_name='variable', value_name='value') + + # Group by variable and calculate mean + .groupby('variable', as_index=False) + .mean() + + # Filter for mean greater than 3.5 and sort by descending mean + .query('value > 3.5') + .sort_values('value', ascending=False) +) +``` + + ## variable value + ## 1 sepal_length 5.843333 + ## 0 petal_length 3.758667 + +## for loops + +Here is an example of a for loop in R: + +R code + +``` r +# Create a vector of numbers +numbers <- c(1, 2, 3, 4, 5) + +# Use a for loop to print out each number in the vector +for (i in numbers) { + print(i) +} +``` + + ## [1] 1 + ## [1] 2 + ## [1] 3 + ## [1] 4 + ## [1] 5 + +In this example, the for loop iterates over each element in the numbers +vector, assigning the current element to the variable i. The print(i) +statement is then executed for each iteration, outputting the value of +i. + +Here is the equivalent example in Python: + +Python code + +``` python +# Create a list of numbers +numbers = [1, 2, 3, 4, 5] + +# Use a for loop to print out each number in the list +for i in numbers: + print(i) +``` + + ## 1 + ## 2 + ## 3 + ## 4 + ## 5 + +In Python, the for loop iterates over each element in the numbers list, +assigning the current element to the variable i. 
The print(i) statement +is then executed for each iteration, outputting the value of i. + +Both languages also support nested for loops, which can be used to +perform iterations over multiple dimensions, such as looping through a +2D array. + +## Parallel + +Parallel computing is a technique used to execute multiple computational +tasks simultaneously, which can significantly reduce the time required +to complete a task. Both R and Python have built-in support for parallel +computing, although the approaches are slightly different. In this +answer, we will compare and contrast the parallel computing capabilities +of R and Python, and provide working examples in code. + +Parallel computing in R In R, there are several packages that support +parallel computing, such as parallel, foreach, and doParallel. The +parallel package provides basic functionality for parallel computing, +while foreach and doParallel provide higher-level abstractions that make +it easier to write parallel code. + +Here is an example of using the foreach package to execute a loop in +parallel: + +R code: + +``` r +library(foreach) +library(doParallel) + +# Set up a parallel backend with 4 workers +cl <- makeCluster(4) +registerDoParallel(cl) + +# Define a function to apply in parallel +myfunc <- function(x) { + # some computation here + return(x^2) +} + +# Generate some data +mydata <- 1:1000 + +# Apply the function to the data in parallel +result <- foreach(i = mydata) %dopar% { + myfunc(i) +} + +# Stop the cluster +stopCluster(cl) +``` + +In this example, we use the makeCluster() function to set up a cluster +with 4 workers, and the registerDoParallel() function to register the +cluster as the parallel backend for foreach. We then define a function +myfunc() that takes an input x and returns x^2. We generate some data +mydata and use foreach to apply myfunc() to each element of mydata in +parallel, using the %dopar% operator. + +R Tidyverse parallel + +In R Tidyverse, we can use the furrr package for parallel computing. +Here’s an example of using furrr to parallelize a map function: + +R Tidy code: + +``` r +library(tidyverse) +library(furrr) + +# Generate a list of numbers +numbers <- 1:10 + +# Use the future_map function from furrr to parallelize the map function +plan(multisession) +squares <- future_map(numbers, function(x) x^2) +``` + +In this example, we first load the Tidyverse and furrr libraries. We +then generate a list of numbers from 1 to 10. We then use the plan +function to set the parallelization strategy to “multisession”, which +will use multiple CPU cores to execute the code. Finally, we use the +future_map function from furrr to apply the function x^2 to each number +in the list in parallel. + +Parallel computing in Python In Python, the standard library includes +the multiprocessing module, which provides basic support for parallel +computing. Additionally, there are several third-party packages that +provide higher-level abstractions, such as joblib and dask. + +Here is an example of using the multiprocessing module to execute a loop +in parallel: + +Python code: + +``` python +def square(x): + return x**2 + +from multiprocessing import Pool + +# Generate a list of numbers +numbers = list(range(1, 11)) + +# Use the map function and a pool of workers to parallelize the square function +with Pool() as pool: + squares = pool.map(square, numbers) + +print(squares) +``` + +In this example, we define a function myfunc() that takes an input x and +returns x^2. 
We generate some data mydata and use the Pool class from +the multiprocessing module to set up a pool of 4 workers. We then use +the map() method of the Pool class to apply myfunc() to each element of +mydata in parallel. + +Comparison and contrast Both R and Python have built-in support for +parallel computing, with similar basic functionality for creating and +managing parallel processes. However, the higher-level abstractions +differ between the two languages. In R, the foreach package provides a +high-level interface that makes it easy to write parallel code, while in +Python, the multiprocessing module provides a basic interface that can +be extended using third-party packages like joblib and dask. + +Additionally, Python has better support for distributed computing using +frameworks like Apache Spark, while R has better support for +shared-memory parallelism using tools like data.table and ff. + +## Data wrangling + +Data wrangling is an important part of any data analysis project, and +both R and Python provide tools and libraries for performing this task. +In this answer, we will compare and contrast data wrangling in R’s +tidyverse and Python’s pandas library, with working examples in code. + +Data Wrangling in R Tidyverse + +The tidyverse is a collection of R packages designed for data science, +and it includes several packages that are useful for data wrangling. One +of the most popular packages is dplyr, which provides a grammar of data +manipulation for data frames. + +Here is an example of using dplyr to filter, mutate, and summarize a +data frame: + +R code + +``` r +library(dplyr) + +# Load data +data(mtcars) + +# Filter for cars with more than 100 horsepower +mtcars %>% + filter(hp > 100) %>% + # Add a new column with fuel efficiency in km per liter + mutate(kmpl = 0.425 * mpg) %>% + # Group by number of cylinders and summarize + group_by(cyl) %>% + summarize(mean_hp = mean(hp), + mean_kmpl = mean(kmpl)) +``` + + ## # A tibble: 3 × 3 + ## cyl mean_hp mean_kmpl + ## + ## 1 4 111 11.0 + ## 2 6 122. 8.39 + ## 3 8 209. 6.42 + +In this example, we first filter the mtcars data frame to only include +cars with more than 100 horsepower. We then use mutate to create a new +column with fuel efficiency in kilometers per liter. Finally, we group +the data by the number of cylinders and calculate the mean horsepower +and fuel efficiency. + +Data Wrangling in Python Pandas + +Pandas is a popular library for data manipulation in Python. It provides +a data frame object similar to R’s data frames, along with a wide range +of functions for data wrangling. + +Here is an example of using pandas to filter, transform, and group a +data frame: + +Python code: + +``` python +import pandas as pd + +# Load data +mtcars = pd.read_csv('https://raw.githubusercontent.com/mwaskom/seaborn-data/master/mtcars.csv') + +# Filter for cars with more than 100 horsepower +filtered_mtcars = mtcars[mtcars['hp'] > 100] + +# Add a new column with fuel efficiency in km per liter +filtered_mtcars['kmpl'] = 0.425 * filtered_mtcars['mpg'] + +# Group by number of cylinders and calculate mean horsepower and fuel efficiency +grouped_mtcars = filtered_mtcars.groupby('cyl').agg({'hp': 'mean', + 'kmpl': 'mean'}) +``` + +In this example, we first load the mtcars data from a CSV file. We then +filter the data to only include cars with more than 100 horsepower, +using boolean indexing. We use the assign function to create a new +column with fuel efficiency in kilometers per liter. 
Finally, we group +the data by the number of cylinders and calculate the mean horsepower +and fuel efficiency. + +Comparison + +Overall, both R’s tidyverse and Python’s pandas provide similar +functionality for data wrangling. Both allow for filtering, +transforming, and aggregating data frames. The syntax for performing +these operations is slightly different between the two languages, with R +using the %\>% operator for chaining operations and Python using method +chaining or the apply family of functions. + +One key difference between the two languages is that R’s tidyverse +provides a consistent grammar for data manipulation across its various +packages, making it easier to learn and use. However, Python’s pandas +library has a larger developer community and is more versatile for use +in other applications, such as web development or machine learning. + +In conclusion, both R and Python provide powerful tools for data +wrangling, and the choice between the two ultimately depends on the +specific needs of the user and their familiarity + +## Data from API + +Retrieving data from an API is a common task in both R and Python. Here +are examples of how to retrieve data from an API in both languages: + +Python + +To retrieve data from an API in Python, we can use the requests library. +Here’s an example of how to retrieve weather data from the +OpenWeatherMap API: + +Python code: + +``` python +import requests + +url = 'https://api.openweathermap.org/data/2.5/weather?q=London,uk&appid=API_KEY' + +response = requests.get(url) + +data = response.json() + +print(data) +``` + +This code retrieves the current weather data for London from the +OpenWeatherMap API. We first construct the API URL with the location and +API key, then use the requests.get() function to make a request to the +API. We then extract the JSON data from the response using the .json() +method and print the resulting data. + +R + +In R, we can use the httr package to retrieve data from an API. Here’s +an example of how to retrieve weather data from the OpenWeatherMap API +in R: + +R code: + +``` r +library(httr) + +url <- 'https://api.openweathermap.org/data/2.5/weather?q=London,uk&appid=API_KEY' + +response <- GET(url) + +data <- content(response, 'text') + +print(data) +``` + +This code is similar to the Python code above. We first load the httr +library, then construct the API URL and use the GET() function to make a +request to the API. We then extract the data from the response using the +content() function and print the resulting data. + +Retrieving Data from an API in R Tidyverse In R Tidyverse, we can use +the httr and jsonlite packages to retrieve and process data from an API. + +R code: + +``` r +# Load required packages +library(httr) +library(jsonlite) + +# Define API endpoint +endpoint <- "https://jsonplaceholder.typicode.com/posts" + +# Retrieve data from API +response <- GET(endpoint) + +# Extract content from response +content <- content(response, "text") + +# Convert content to JSON +json <- fromJSON(content) + +# Convert JSON to a data frame +df <- as.data.frame(json) +``` + +In the above example, we use the GET() function from the httr package to +retrieve data from an API endpoint, and the content() function to +extract the content of the response. We then use the fromJSON() function +from the jsonlite package to convert the JSON content to a list, and the +as.data.frame() function to convert the list to a data frame. 
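If you prefer a shorter path in R, fromJSON() also accepts a URL directly and will download and parse the response in one step; with its default simplifyDataFrame = TRUE, the result for this endpoint is already a data frame. A minimal sketch:

``` r
library(jsonlite)

# Download and parse the JSON in a single call
df <- fromJSON("https://jsonplaceholder.typicode.com/posts")
head(df)
```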
+ +Retrieving Data from an API in Python In Python, we can use the requests +library to retrieve data from an API, and the json library to process +the JSON data. + +Python code: + +``` python +# Load required libraries +import requests +import json + +# Define API endpoint +endpoint = "https://jsonplaceholder.typicode.com/posts" + +# Retrieve data from API +response = requests.get(endpoint) + +# Extract content from response +content = response.content + +# Convert content to JSON +json_data = json.loads(content) + +# Convert JSON to a list of dictionaries +data = [dict(row) for row in json_data] +``` + +In the above example, we use the get() function from the requests +library to retrieve data from an API endpoint, and the content attribute +to extract the content of the response. We then use the loads() function +from the json library to convert the JSON content to a list of +dictionaries. + +Comparison Both R Tidyverse and Python provide powerful tools for +retrieving and processing data from an API. In terms of syntax, the two +languages are somewhat similar. In both cases, we use a library to +retrieve data from the API, extract the content of the response, and +then process the JSON data. However, there are some differences in the +specific functions and methods used. For example, in R Tidyverse, we use +the content() function to extract the content of the response, whereas +in Python, we use the content attribute. Additionally, in R Tidyverse, +we use the fromJSON() function to convert the JSON data to a list, +whereas in Python, we use the loads() function. + +## Census data + +Retrieving USA census data in R, R Tidy, and Python can be done using +different packages and libraries. Here are some working examples in code +for each language: + +R: + +To retrieve census data in R, we can use the tidycensus package. Here’s +an example of how to retrieve the total population for the state of +California: + +R code: + +``` r +library(tidycensus) +library(tidyverse) + +# Set your Census API key +census_api_key("your_api_key") + +# Get the total population for the state of California +ca_pop <- get_acs( + geography = "state", + variables = "B01003_001", + state = "CA" +) %>% + rename(total_population = estimate) %>% + select(total_population) + +# View the result +ca_pop +``` + +R Tidy: + +To retrieve census data in R Tidy, we can also use the tidycensus +package. Here’s an example of how to retrieve the total population for +the state of California using pipes and dplyr functions: + +R tidy code: + +``` r +library(tidycensus) +library(tidyverse) + +# Set your Census API key +census_api_key("your_api_key") + +# Get the total population for the state of California +ca_pop <- get_acs( + geography = "state", + variables = "B01003_001", + state = "CA" +) %>% + rename(total_population = estimate) %>% + select(total_population) + +# View the result +ca_pop +``` + +Python: + +To retrieve census data in Python, we can use the census library. 
Here’s +an example of how to retrieve the total population for the state of +California: + +Python code: + +``` python +from census import Census +from us import states +import pandas as pd + +# Set your Census API key +c = Census("your_api_key") + +# Get the total population for the state of California +ca_pop = c.acs5.state(("B01003_001"), states.CA.fips, year=2019) + +# Convert the result to a Pandas DataFrame +ca_pop_df = pd.DataFrame(ca_pop) + +# Rename the column +ca_pop_df = ca_pop_df.rename(columns={"B01003_001E": "total_population"}) + +# Select only the total population column +ca_pop_df = ca_pop_df[["total_population"]] + +# View the result +ca_pop_df +``` + +## Lidar data + +To find Lidar data in R and Python, you typically need to start by +identifying sources of Lidar data and then accessing them using +appropriate packages and functions. Here are some examples of how to +find Lidar data in R and Python: + +R: + +Identify sources of Lidar data: The USGS National Map Viewer provides +access to Lidar data for the United States. You can also find Lidar data +on state and local government websites, as well as on commercial data +providers’ websites. Access the data: You can use the lidR package in R +to download and read Lidar data in the LAS format. For example, the +following code downloads and reads Lidar data for a specific area: + +R code: + +``` r +library(lidR) + +# Download Lidar data +LASfile <- system.file("extdata", "Megaplot.laz", package="lidR") +lidar <- readLAS(LASfile) + +# Visualize the data +plot(lidar) +``` + +Python: + +Identify sources of Lidar data: The USGS 3DEP program provides access to +Lidar data for the United States. You can also find Lidar data on state +and local government websites, as well as on commercial data providers’ +websites. Access the data: You can use the pylastools package in Python +to download and read Lidar data in the LAS format. For example, the +following code downloads and reads Lidar data for a specific area: + +Python code: + +``` r +py_install("requests") +py_install("pylas") +py_install("laspy") +``` + +``` python +import requests +from pylas import read +import laspy +import numpy as np + +# Download Lidar data +url = "https://s3-us-west-2.amazonaws.com/usgs-lidar-public/USGS_LPC_CA_SanFrancisco_2016_LAS_2018.zip" +lasfile = "USGS_LPC_CA_SanFrancisco_2016_LAS_2018.las" +r = requests.get(url, allow_redirects=True) +open(lasfile, 'wb').write(r.content) + +# Read the data +lidar = read(lasfile) + +# Visualize the data +laspy.plot.plot(lidar) +``` + +## Data for black lives + +Data for Black Lives () is a movement that uses data +science to create measurable change in the lives of Black people. While +the Data for Black Lives website provides resources, reports, articles, +and datasets related to racial equity, it doesn’t provide a direct API +for downloading data. + +Instead, you can access the Data for Black Lives GitHub repository +() to find datasets and resources to +work with. In this example, we’ll use a sample dataset available at +. The +dataset “COVID19_race_data.csv” contains COVID-19 race-related data. + +R: In R, we’ll use the ‘readr’ and ‘dplyr’ packages to read, process, +and analyze the dataset. 
+ +R code: + +``` r +# Install and load necessary libraries + +library(readr) +library(dplyr) + +# Read the CSV file +url <- "https://raw.githubusercontent.com/Data4BlackLives/covid-19/master/data/COVID19_race_data.csv" +data <- read_csv(url) + +# Basic information about the dataset +print(dim(data)) +print(head(data)) + +# Example analysis: calculate the mean of 'cases_total' by 'state' +data %>% + group_by(state) %>% + summarize(mean_cases_total = mean(cases_total, na.rm = TRUE)) %>% + arrange(desc(mean_cases_total)) +``` + +Python: In Python, we’ll use the ‘pandas’ library to read, process, and +analyze the dataset. + +Python code: + +``` python +import pandas as pd + +# Read the CSV file +url = "https://raw.githubusercontent.com/Data4BlackLives/covid-19/master/data/COVID19_race_data.csv" +data = pd.read_csv(url) + +# Basic information about the dataset +print(data.shape) +print(data.head()) + +# Example analysis: calculate the mean of 'cases_total' by 'state' +mean_cases_total = data.groupby("state")["cases_total"].mean().sort_values(ascending=False) +print(mean_cases_total) +``` + +In conclusion, both R and Python provide powerful libraries and tools +for downloading, processing, and analyzing datasets, such as those found +in the Data for Black Lives repository. The ‘readr’ and ‘dplyr’ +libraries in R offer a simple and intuitive way to read and manipulate +data, while the ‘pandas’ library in Python offers similar functionality +with a different syntax. Depending on your preferred programming +language and environment, both options can be effective in working with +social justice datasets. + +## Propublica Congress API + +The ProPublica Congress API provides information about the U.S. Congress +members and their voting records. In this example, we’ll fetch data +about the current Senate members and calculate the number of members in +each party. + +R: In R, we’ll use the ‘httr’ and ‘jsonlite’ packages to fetch and +process data from the ProPublica Congress API. + +R code: + +``` r +# load necessary libraries +library(httr) +library(jsonlite) + +# Replace 'your_api_key' with your ProPublica API key + +# + +# Fetch data about the current Senate members +url <- "https://api.propublica.org/congress/v1/117/senate/members.json" +response <- GET(url, add_headers(`X-API-Key` = api_key)) + +# Check if the request was successful +if (http_status(response)$category == "Success") { + data <- content(response, "parsed") + members <- data$results[[1]]$members + + # Calculate the number of members in each party + party_counts <- table(sapply(members, function(x) x$party)) + print(party_counts) +} else { + print(http_status(response)$message) +} +``` + + ## + ## D I ID R + ## 49 1 2 51 + +Python: In Python, we’ll use the ‘requests’ library to fetch data from +the ProPublica Congress API and ‘pandas’ library to process the data. 
+ +python code: + +``` python +# Install necessary libraries + +import requests +import pandas as pd + +# Replace 'your_api_key' with your ProPublica API key +api_key = "your_api_key" +headers = {"X-API-Key": api_key} + +# Fetch data about the current Senate members +url = "https://api.propublica.org/congress/v1/117/senate/members.json" +response = requests.get(url, headers=headers) + +# Check if the request was successful +if response.status_code == 200: + data = response.json() + members = data["results"][0]["members"] + + # Calculate the number of members in each party + party_counts = pd.DataFrame(members)["party"].value_counts() + print(party_counts) +else: + print(f"Error: {response.status_code}") +``` + +In conclusion, both R and Python offer efficient ways to fetch and +process data from APIs like the ProPublica Congress API. The ‘httr’ and +‘jsonlite’ libraries in R provide a straightforward way to make HTTP +requests and parse JSON data, while the ‘requests’ library in Python +offers similar functionality. The ‘pandas’ library in Python can be used +for data manipulation and analysis, and R provides built-in functions +like table() for aggregating data. Depending on your preferred +programming language and environment, both options can be effective for +working with the ProPublica Congress API. + +## Nonprofit Explorer API by ProPublica + +The Nonprofit Explorer API by ProPublica provides data on tax-exempt +organizations in the United States. In this example, we’ll search for +organizations with the keyword “education” and analyze the results. + +R: In R, we’ll use the ‘httr’ and ‘jsonlite’ packages to fetch and +process data from the Nonprofit Explorer API. + +R code: + +``` r +# Install and load necessary libraries +library(httr) +library(jsonlite) + +# Fetch data for organizations with the keyword "education" +url <- "https://projects.propublica.org/nonprofits/api/v2/search.json?q=education" +response <- GET(url) + +# Check if the request was successful +if (http_status(response)$category == "Success") { + data <- content(response, "parsed") + organizations <- data$organizations + + # Count the number of organizations per state + state_counts <- table(sapply(organizations, function(x) x$state)) + print(state_counts) +} else { + print(http_status(response)$message) +} +``` + + ## + ## AZ CA CO DC FL GA HI IL Indiana LA + ## 3 22 6 5 3 2 1 2 1 1 + ## MD MI MN MO MP MS NC NE NJ NM + ## 1 2 5 3 1 1 2 2 2 1 + ## NY OH OK Oregon PA TX UT VA WA WV + ## 1 5 1 2 2 12 1 4 3 1 + ## ZZ + ## 2 + +Python: In Python, we’ll use the ‘requests’ library to fetch data from +the Nonprofit Explorer API and ‘pandas’ library to process the data. 
+ +Python code: + +``` python +# Install necessary libraries +import requests +import pandas as pd + +# Fetch data for organizations with the keyword "education" +url = "https://projects.propublica.org/nonprofits/api/v2/search.json?q=education" +response = requests.get(url) + +# Check if the request was successful +if response.status_code == 200: + data = response.json() + organizations = data["organizations"] + + # Count the number of organizations per state + state_counts = pd.DataFrame(organizations)["state"].value_counts() + print(state_counts) +else: + print(f"Error: {response.status_code}") +``` + + ## CA 22 + ## TX 12 + ## CO 6 + ## MN 5 + ## OH 5 + ## DC 5 + ## VA 4 + ## AZ 3 + ## WA 3 + ## MO 3 + ## FL 3 + ## IL 2 + ## GA 2 + ## NC 2 + ## MI 2 + ## Oregon 2 + ## NE 2 + ## ZZ 2 + ## PA 2 + ## NJ 2 + ## HI 1 + ## MS 1 + ## NY 1 + ## Indiana 1 + ## NM 1 + ## LA 1 + ## UT 1 + ## MD 1 + ## MP 1 + ## WV 1 + ## OK 1 + ## Name: state, dtype: int64 + +In conclusion, both R and Python offer efficient ways to fetch and +process data from APIs like the Nonprofit Explorer API. The ‘httr’ and +‘jsonlite’ libraries in R provide a straightforward way to make HTTP +requests and parse JSON data, while the ‘requests’ library in Python +offers similar functionality. The ‘pandas’ library in Python can be used +for data manipulation and analysis, and R provides built-in functions +like table() for aggregating data. Depending on your preferred +programming language and environment, both options can be effective for +working with the Nonprofit Explorer API. + +## Campaign Finance API by ProPublica + +The Campaign Finance API by the Federal Election Commission (FEC) +provides data on campaign finance in U.S. federal elections. In this +example, we’ll fetch data about individual contributions for the 2020 +election cycle and analyze the results. + +R: In R, we’ll use the ‘httr’ and ‘jsonlite’ packages to fetch and +process data from the Campaign Finance API. + +R code: + +``` r +# Install and load necessary libraries +library(httr) +library(jsonlite) + +# Fetch data about individual contributions for the 2020 election cycle +url <- "https://api.open.fec.gov/v1/schedules/schedule_a/?api_key='OGwpkX7tH5Jihs1qQcisKfVAMddJzmzouWKtKoby'&two_year_transaction_period=2020&sort_hide_null=false&sort_null_only=false&per_page=20&page=1" +response <- GET(url) + +# Check if the request was successful +if (http_status(response)$category == "Success") { + data <- content(response, "parsed") + contributions <- data$results + + # Calculate the total contributions per state + state_totals <- aggregate(contributions$contributor_state, by = list(contributions$contributor_state), FUN = sum) + colnames(state_totals) <- c("State", "Total_Contributions") + print(state_totals) +} else { + print(http_status(response)$message) +} +``` + + ## [1] "Client error: (403) Forbidden" + +Python: In Python, we’ll use the ‘requests’ library to fetch data from +the Campaign Finance API and ‘pandas’ library to process the data. 
+ +Python code: + +``` python +# Install necessary libraries + +import requests +import pandas as pd + +# Fetch data about individual contributions for the 2020 election cycle +url = "https://api.open.fec.gov/v1/schedules/schedule_a/?api_key=your_api_key&two_year_transaction_period=2020&sort_hide_null=false&sort_null_only=false&per_page=20&page=1" +response = requests.get(url) + +# Check if the request was successful +if response.status_code == 200: + data = response.json() + contributions = data["results"] + + # Calculate the total contributions per state + df = pd.DataFrame(contributions) + state_totals = df.groupby("contributor_state")["contribution_receipt_amount"].sum() + print(state_totals) +else: + print(f"Error: {response.status_code}") +``` + + ## Error: 403 + +In conclusion, both R and Python offer efficient ways to fetch and +process data from APIs like the Campaign Finance API. The ‘httr’ and +‘jsonlite’ libraries in R provide a straightforward way to make HTTP +requests and parse JSON data, while the ‘requests’ library in Python +offers similar functionality. The ‘pandas’ library in Python can be used +for data manipulation and analysis, and R provides built-in functions +like aggregate() for aggregating data. Depending on your preferred +programming language and environment, both options can be effective for +working with the Campaign Finance API. + +Note: Remember to replace your_api_key with your actual FEC API key in +the code examples above. + +## Historic Redlining + +Historic redlining data refers to data from the Home Owners’ Loan +Corporation (HOLC) that created residential security maps in the 1930s, +which contributed to racial segregation and disinvestment in minority +neighborhoods. One popular source for this data is the Mapping +Inequality project (). + +In this example, we’ll download historic redlining data for Philadelphia +in the form of a GeoJSON file and analyze the data in R and Python. + +R: In R, we’ll use the ‘sf’ and ‘dplyr’ packages to read and process the +GeoJSON data. + +R code: + +``` r +# Install and load necessary libraries +library(sf) +library(dplyr) + +# Download historic redlining data for Philadelphia +url <- "https://dsl.richmond.edu/panorama/redlining/static/downloads/geojson/PAPhiladelphia1937.geojson" +philly_geojson <- read_sf(url) + +# Count the number of areas per HOLC grade +grade_counts <- philly_geojson %>% + group_by(holc_grade) %>% + summarize(count = n()) + +plot(grade_counts) +``` + +![](bilingualism_md_files/figure-gfm/unnamed-chunk-65-1.pdf) + +Python: In Python, we’ll use the ‘geopandas’ library to read and process +the GeoJSON data. + +Python code: + +``` python +# Install necessary libraries + + +import geopandas as gpd + +# Download historic redlining data for Philadelphia +url = "https://dsl.richmond.edu/panorama/redlining/static/downloads/geojson/PAPhiladelphia1937.geojson" +philly_geojson = gpd.read_file(url) + +# Count the number of areas per HOLC grade +grade_counts = philly_geojson["holc_grade"].value_counts() +print(grade_counts) +``` + + ## B 28 + ## D 26 + ## C 18 + ## A 10 + ## Name: holc_grade, dtype: int64 + +In conclusion, both R and Python offer efficient ways to download and +process historic redlining data in the form of GeoJSON files. The ‘sf’ +package in R provides a simple way to read and manipulate spatial data, +while the ‘geopandas’ library in Python offers similar functionality. 
+The ‘dplyr’ package in R can be used for data manipulation and analysis, +and Python’s built-in functions like value_counts() can be used for +aggregating data. Depending on your preferred programming language and +environment, both options can be effective for working with historic +redlining data. + +## American Indian and Alaska Native Areas (AIANNH) + +In this example, we’ll download and analyze the American Indian and +Alaska Native Areas (AIANNH) TIGER/Line Shapefile from the U.S. Census +Bureau. We’ll download the data for the year 2020, and analyze the +number of AIANNH per congressional district + +R: In R, we’ll use the ‘sf’ and ‘dplyr’ packages to read and process the +Shapefile data. + +R code: + +``` r +# Install and load necessary libraries +library(sf) +library(dplyr) + +# Download historic redlining data for Philadelphia +url <- "https://www2.census.gov/geo/tiger/TIGER2020/AIANNH/tl_2020_us_aiannh.zip" +temp_file <- tempfile(fileext = ".zip") +download.file(url, temp_file, mode = "wb") +unzip(temp_file, exdir = tempdir()) + +# Read the Shapefile +shapefile_path <- file.path(tempdir(), "tl_2020_us_aiannh.shp") +aiannh <- read_sf(shapefile_path) + +# Count the number of AIANNH per congressional district +state_counts <- aiannh %>% + group_by(LSAD) %>% + summarize(count = n()) + +print(state_counts[order(-state_counts$count),]) +``` + + ## Simple feature collection with 26 features and 2 fields + ## Geometry type: GEOMETRY + ## Dimension: XY + ## Bounding box: xmin: -174.236 ymin: 18.91069 xmax: -67.03552 ymax: 71.34019 + ## Geodetic CRS: NAD83 + ## # A tibble: 26 × 3 + ## LSAD count geometry + ## + ## 1 79 221 (((-166.5331 65.33918, -166.5331 65.33906, -166.533 65.33699, -1… + ## 2 86 206 (((-83.38811 35.46645, -83.38342 35.46596, -83.38316 35.46593, -… + ## 3 OT 155 (((-92.32972 47.81374, -92.3297 47.81305, -92.32967 47.81196, -9… + ## 4 78 75 (((-155.729 20.02457, -155.7288 20.02428, -155.7288 20.02427, -1… + ## 5 85 46 (((-122.3355 37.95215, -122.3354 37.95206, -122.3352 37.95199, -… + ## 6 92 35 (((-93.01356 31.56287, -93.01354 31.56251, -93.01316 31.56019, -… + ## 7 88 25 (((-97.35299 36.908, -97.35291 36.90801, -97.35287 36.908, -97.3… + ## 8 96 19 (((-116.48 32.63814, -116.48 32.63718, -116.4794 32.63716, -116.… + ## 9 84 16 (((-105.5937 36.40379, -105.5937 36.40324, -105.5937 36.40251, -… + ## 10 89 11 (((-95.91705 41.28037, -95.91653 41.28036, -95.91653 41.28125, -… + ## # ℹ 16 more rows + +Python: In Python, we’ll use the ‘geopandas’ library to read and process +the Shapefile data. 
+ +Python code: + +``` python +import geopandas as gpd +import pandas as pd +import requests +import zipfile +import os +from io import BytesIO + +# Download historic redlining data for Philadelphia +url = "https://www2.census.gov/geo/tiger/TIGER2020/AIANNH/tl_2020_us_aiannh.zip" +response = requests.get(url) +zip_file = zipfile.ZipFile(BytesIO(response.content)) + +# Extract Shapefile +temp_dir = "temp" +if not os.path.exists(temp_dir): + os.makedirs(temp_dir) + +zip_file.extractall(path=temp_dir) +shapefile_path = os.path.join(temp_dir, "tl_2020_us_aiannh.shp") + +# Read the Shapefile +aiannh = gpd.read_file(shapefile_path) + +# Count the number of AIANNH per congressional district +state_counts = aiannh.groupby("LSAD").size().reset_index(name="count") + +# Sort by descending count +state_counts_sorted = state_counts.sort_values(by="count", ascending=False) + +print(state_counts_sorted) +``` + + ## LSAD count + ## 2 79 221 + ## 9 86 206 + ## 25 OT 155 + ## 1 78 75 + ## 8 85 46 + ## 15 92 35 + ## 11 88 25 + ## 19 96 19 + ## 7 84 16 + ## 12 89 11 + ## 5 82 8 + ## 3 80 7 + ## 4 81 6 + ## 21 98 5 + ## 20 97 5 + ## 13 90 4 + ## 18 95 3 + ## 6 83 3 + ## 17 94 2 + ## 16 93 1 + ## 14 91 1 + ## 10 87 1 + ## 22 99 1 + ## 23 9C 1 + ## 24 9D 1 + ## 0 00 1 + +In conclusion, both R and Python offer efficient ways to download and +process AIANNH TIGER/Line Shapefile data from the U.S. Census Bureau. +The ‘sf’ package in R provides a simple way to read and manipulate +spatial data, while the ‘geopandas’ library in Python offers similar +functionality. The ‘dplyr’ package in R can be used for data +manipulation and analysis, and Python’s built-in functions like +value_counts() can be used for aggregating data. Depending on your +preferred programming language and environment, both options can be +effective for working with AIANNH data. + +## Indian Entities Recognized and Eligible To Receive Services by BIA + +The Bureau of Indian Affairs (BIA) provides a PDF document containing a +list of Indian Entities Recognized and Eligible To Receive Services. To +analyze the data, we’ll first need to extract the information from the +PDF. In this example, we’ll extract the names of the recognized tribes +and count the number of tribes per state. + +R: In R, we’ll use the ‘pdftools’ package to extract text from the PDF +and the ‘stringr’ package to process the text data. 
+ +R code: + +``` r +# Install and load necessary libraries +library(pdftools) +library(stringr) +library(dplyr) + +# Download the BIA PDF +url <- "https://www.govinfo.gov/content/pkg/FR-2022-01-28/pdf/2022-01789.pdf" +temp_file <- tempfile(fileext = ".pdf") +download.file(url, temp_file, mode = "wb") + +# Extract text from the PDF +pdf_text <- pdf_text(temp_file) +tribe_text <- pdf_text[4:length(pdf_text)] + +# Define helper functions +tribe_state_extractor <- function(text_line) { + regex_pattern <- "(.*),\\s+([A-Z]{2})$" + tribe_state <- str_match(text_line, regex_pattern) + return(tribe_state) +} + +is_valid_tribe_line <- function(text_line) { + regex_pattern <- "^\\d+\\s+" + return(!is.na(str_match(text_line, regex_pattern))) +} + +# Process text data to extract tribes and states +tribe_states <- sapply(tribe_text, tribe_state_extractor) +valid_lines <- sapply(tribe_text, is_valid_tribe_line) +tribe_states <- tribe_states[valid_lines, 2:3] + +# Count the number of tribes per state +tribe_data <- as.data.frame(tribe_states) +colnames(tribe_data) <- c("Tribe", "State") +state_counts <- tribe_data %>% + group_by(State) %>% + summarise(Count = n()) + +print(state_counts) +``` + + ## # A tibble: 0 × 2 + ## # ℹ 2 variables: State , Count + +Python: In Python, we’ll use the ‘PyPDF2’ library to extract text from +the PDF and the ‘re’ module to process the text data. + +Python code: + +``` python +# Install necessary libraries +import requests +import PyPDF2 +import io +import re +from collections import Counter + +# Download the BIA PDF +url = "https://www.bia.gov/sites/bia.gov/files/assets/public/raca/online-tribal-leaders-directory/tribal_leaders_2021-12-27.pdf" +response = requests.get(url) + +# Extract text from the PDF +pdf_reader = PyPDF2.PdfFileReader(io.BytesIO(response.content)) +tribe_text = [pdf_reader.getPage(i).extractText() for i in range(3, pdf_reader.numPages)] + +# Process text data to extract tribes and states +tribes = [re.findall(r'^\d+\s+(.+),\s+([A-Z]{2})', line) for text in tribe_text for line in text.split('\n') if line] +tribe_states = [state for tribe, state in tribes] + +# Count the number of tribes per state +state_counts = Counter(tribe_states) +print(state_counts) +``` + +In conclusion, both R and Python offer efficient ways to download and +process the list of Indian Entities Recognized and Eligible To Receive +Services from the BIA. The ‘pdftools’ package in R provides a simple way +to extract text from PDF files, while the ‘PyPDF2’ library in Python +offers similar functionality. The ‘stringr’ package in R and the ‘re’ +module in Python can be used to process and analyze text data. Depending +on your preferred programming language and environment, both options can +be effective for working with BIA data. + +## National Atlas - Indian Lands of the United States dataset + +In this example, we will download and analyze the National Atlas - +Indian Lands of the United States dataset in both R and Python. We will +read the dataset and count the number of Indian lands per state. + +R: In R, we’ll use the ‘sf’ package to read the Shapefile and the +‘dplyr’ package to process the data. 
+ +R code: + +``` r +# Install and load necessary libraries + +library(sf) +library(dplyr) + +# Download the Indian Lands dataset +url <- "https://prd-tnm.s3.amazonaws.com/StagedProducts/Small-scale/data/Boundaries/indlanp010g.shp_nt00968.tar.gz" +temp_file <- tempfile(fileext = ".tar.gz") +download.file(url, temp_file, mode = "wb") +untar(temp_file, exdir = tempdir()) + +# Read the Shapefile +shapefile_path <- file.path(tempdir(), "indlanp010g.shp") +indian_lands <- read_sf(shapefile_path) + +# Count the number of Indian lands per state +# state_counts <- indian_lands %>% +# group_by(STATE) %>% +# summarize(count = n()) + +plot(indian_lands) +``` + + ## Warning: plotting the first 9 out of 23 attributes; use max.plot = 23 to plot + ## all + +![](bilingualism_md_files/figure-gfm/unnamed-chunk-71-1.pdf) + +Python: In Python, we’ll use the ‘geopandas’ and ‘pandas’ libraries to +read the Shapefile and process the data. + +Python code: + +``` python +import geopandas as gpd +import pandas as pd +import requests +import tarfile +import os +from io import BytesIO + +# Download the Indian Lands dataset +url = "https://prd-tnm.s3.amazonaws.com/StagedProducts/Small-scale/data/Boundaries/indlanp010g.shp_nt00966.tar.gz" +response = requests.get(url) +tar_file = tarfile.open(fileobj=BytesIO(response.content), mode='r:gz') + +# Extract Shapefile +temp_dir = "temp" +if not os.path.exists(temp_dir): + os.makedirs(temp_dir) + +tar_file.extractall(path=temp_dir) +shapefile_path = os.path.join(temp_dir, "indlanp010g.shp") + +# Read the Shapefile +indian_lands = gpd.read_file(shapefile_path) + +# Count the number of Indian lands per state +state_counts = indian_lands.groupby("STATE").size().reset_index(name="count") + +print(state_counts) +``` + +Both R and Python codes download the dataset and read the Shapefile +using the respective packages. They then group the data by the ‘STATE’ +attribute and calculate the count of Indian lands per state. diff --git a/additional-resources/bilingualism_md/index.html b/additional-resources/bilingualism_md/index.html new file mode 100644 index 0000000..a5abacd --- /dev/null +++ b/additional-resources/bilingualism_md/index.html @@ -0,0 +1,3079 @@ + + + + + + + + + + + + + + + + + + + + + + R and Python bilingualism - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

R and Python bilingualism

+

Welcome to the R and Python bilingualism reference guide! If you’re +fluent in one of these languages but hesitant to learn the other, you’re +in the right place. The good news is that there are many similarities +between R and Python that make it easy to switch between the two.

+

Both R and Python are widely used in data science and are open source, meaning that they are free to use and constantly improved by the community. Both have extensive libraries for data analysis, visualization, and machine learning, and many libraries in the two languages offer similar functionality, such as pandas in Python and data.table or dplyr in R.

+

While there are differences between the two languages, they can +complement each other well. Python is versatile and scalable, making it +ideal for large and complex projects such as web development and +artificial intelligence. R, on the other hand, is known for its +exceptional statistical capabilities and is often used in data analysis +and modeling. Visualization is also easier in R, making it a popular +choice for creating graphs and charts.

+

By learning both R and Python, you’ll be able to take advantage of the +strengths of each language and create more efficient and robust data +analysis workflows. Don’t let the differences between the two languages +intimidate you - once you become familiar with one, learning the other +will be much easier.

+

So, whether you’re a Python enthusiast looking to expand your +statistical analysis capabilities, or an R user interested in exploring +the world of web development and artificial intelligence, this guide +will help you become bilingual in R and Python.

+

Install packages

+

In R, packages can be installed from the CRAN repository using the install.packages() function:

+

R code:

+
# Install the dplyr package from CRAN
+install.packages("dplyr")
+
+

In Python, packages can be installed from the Anaconda repository by +using the conda install command:

+

Python code:

+
# Install the pandas package from Anaconda
+!conda install pandas
+
+

Loading libraries in R and Python

+

In R, libraries can be loaded in the same way as before, using the +library() function:

+

R code:

+
# Load the dplyr library
+library(dplyr)
+
+

In Python, libraries can be loaded in the same way as before, using the +import statement. Here’s an example:

+

Python code:

+
# Load the pandas library
+import pandas as pd
+
+

Note that the package or library must be installed from the respective +repository before it can be loaded. Also, make sure you have the correct +repository specified in your system before installing packages. By +default, R uses CRAN as its primary repository, whereas Anaconda uses +its own repository by default.

+

reticulate

+

The reticulate package lets you run both R and Python together in the R +environment.

+

R packages are stored and managed in a repository called CRAN. You can download and install R packages with the install.packages() function.

+
install.packages("reticulate")
+
+

You only need to install a package once, but you need to load it with the library() function each time you start a new R session.

+
library(reticulate)
+
+

Python packages are stored and managed in several different repositories, and their dependencies are not regulated as strictly as R packages are on CRAN. It is easier to publish a Python package, but it can also be more cumbersome for users because you often need to manage dependencies yourself. You can install Python packages using both R and Python code, as shown below.

+
py_install("laspy")
+
+
## + '/Users/ty/opt/miniconda3/bin/conda' 'install' '--yes' '--prefix' '/Users/ty/opt/miniconda3/envs/earth-analytics-python' '-c' 'conda-forge' 'laspy'
+
+
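For the Python side of that statement, a minimal sketch (an illustration, assuming pip is available for the active interpreter) installs the same package directly from Python code:

``` python
# Install laspy using the pip that belongs to the running Python interpreter
import subprocess
import sys

subprocess.check_call([sys.executable, "-m", "pip", "install", "laspy"])
```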

Now, let’s create a Python list and assign it to a variable py_list:

+

R code:

+
py_list <- r_to_py(list(1, 2, 3))
+
+

We can now print out the py_list variable in Python using the +py_run_string() function:

+

R code:

+
py_run_string("print(r.py_list)")
+
+

This will output [1, 2, 3] in the Python console.

+

Now, let’s create an R vector and assign it to a variable r_vec:

+

R code:

+
r_vec <- c(4, 5, 6)
+
+

We can also access Python variables from R using the py$ syntax. For example, to print the py_list variable we created earlier:

+

R code:

+
print(py$py_list)
+
+

This will output [1, 2, 3] in the R console.

+

We can also call Python functions from R using the py_call() function. +For example, let’s call the Python sum() function on the py_list +variable and assign the result to an R variable r_sum:

+

R code:

+
r_sum <- py_call("sum", args = list(py_list))
+
+

We can now print out the r_sum variable in R:

+

R code:

+
print(r_sum)
+
+

This will output 6 in the R console.

+

Load packages and change settings

+
options(java.parameters = "-Xmx5G")
+
+library(r5r)
+library(sf)
+library(data.table)
+library(ggplot2)
+library(interp)
+library(dplyr)
+library(osmdata)
+library(ggthemes)
+library(akima)
+library(raster)
+library(mapview)
+library(cowplot)
+library(here)
+library(testthat)
+
+
import sys
+sys.argv.append(["--max-memory", "5G"])
+
+import pandas as pd
+import geopandas
+import matplotlib.pyplot as plt
+import numpy as np
+import plotnine
+import contextily as cx
+import r5py
+import seaborn as sns
+
+

R and Python are two popular programming languages used for data +analysis, statistics, and machine learning. Although they share some +similarities, there are some fundamental differences between them. +Here’s an example code snippet in R and Python to illustrate some of the +differences:

+

R Code:

+
# Create a vector of numbers from 1 to 10
+x <- 1:10
+
+# Compute the mean of the vector
+mean_x <- mean(x)
+
+# Print the result
+print(mean_x)
+
+
## [1] 5.5
+
+

Python Code:

+
# Import the numpy library for numerical operations
+import numpy as np
+
+# Create a numpy array of numbers from 1 to 10
+x = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
+
+# Compute the mean of the array
+mean_x = np.mean(x)
+
+# Print the result
+print(mean_x)
+
+
## 5.5
+
+

In this example, we can see that there are several differences between R +and Python:

+

Syntax: R uses the assignment operator <- while Python uses the equals sign = for variable assignment.

+

Libraries: Python relies heavily on external libraries such as numpy, +pandas, and matplotlib for data analysis, while R has built-in functions +for many data analysis tasks.

+

Data types: R is designed to work with vectors and matrices, while +Python uses lists and arrays. In the example above, we used the numpy +library to create a numerical array in Python.

+

Function names: Function names in R and Python can differ significantly. +In the example above, we used the mean() function in R and the np.mean() +function in Python to calculate the mean of the vector/array.

+

These are just a few of the many differences between R and Python. +Ultimately, the choice between the two languages will depend on your +specific needs and preferences.

+

Load saved data

+

R Code:

+
data("iris")
+here()
+load(file=here("2_R_and_Py_bilingualism", "data", "iris_example_data.rdata"))
+objects()
+
+

Python code:

+
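The Python example is left blank in the original. A minimal sketch with pandas, assuming the same folder layout and file name used in the R examples, could be:

``` python
# Load previously saved data into a pandas DataFrame
import pandas as pd

# Path mirrors the R example's here("2_R_and_Py_bilingualism", "data", ...) layout (an assumption)
iris = pd.read_csv("2_R_and_Py_bilingualism/data/iris_example_data.csv")

# Quick look at what was loaded, loosely analogous to calling objects() and head() in R
print(iris.head())
```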

Save data

+

R Code:

+
save(iris, file=here("2_R_and_Py_bilingualism", "data", "iris_example_data.rdata"))
+
+write.csv(iris, file=here("2_R_and_Py_bilingualism", "data", "iris_example_data.csv"))
+
+

Python code:

+
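The Python example is blank here as well. A minimal sketch that saves the same iris data from pandas (the file paths again mirror the R example and are assumptions) could be:

``` python
# Save a pandas DataFrame to CSV and to a pickle file (a rough analogue of an .rdata file)
import pandas as pd
from sklearn.datasets import load_iris

iris = load_iris(as_frame=True).frame

iris.to_csv("2_R_and_Py_bilingualism/data/iris_example_data.csv", index=False)
iris.to_pickle("2_R_and_Py_bilingualism/data/iris_example_data.pkl")
```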

functions

+

Both R and Python are powerful languages for writing functions that can take input, perform a specific task, and return output.

R Code:

+
# Define a function that takes two arguments and returns their sum
+sum_r <- function(a, b) {
+  return(a + b)
+}
+
+# Call the function with two arguments and print the result
+result_r <- sum_r(3, 5)
+print(result_r)
+
+
## [1] 8
+
+

Python code:

+
# Define a function that takes two arguments and returns their sum
+def sum_py(a, b):
+    return a + b
+
+# Call the function with two arguments and print the result
+result_py = sum_py(3, 5)
+print(result_py)
+
+
## 8
+
+

In both cases, we define a function that takes two arguments and returns +their sum. In R, we use the function keyword to define a function, while +in Python, we use the def keyword. The function body in R is enclosed in +curly braces, while in Python it is indented.

+

There are a few differences in the syntax and functionality between the +two approaches:

+

Function arguments: in both languages, arguments are listed in parentheses and separated by commas, but the syntax for specifying default arguments and variable-length argument lists differs (a short sketch follows below).

Return statement: in R, we use the return() function to specify the return value of a function, while in Python, we use the return statement.

Function names: function names in R and Python can differ significantly. In the example above, we used the sum_r() function in R and the sum_py() function in Python to calculate the sum of two numbers.

+

Data Plots

+

R Code:

+
# Load the "ggplot2" package for plotting
+library(ggplot2)
+
+# Generate some sample data
+x <- seq(1, 10, 1)
+y <- x + rnorm(10)
+
+# Create a scatter plot
+ggplot(data.frame(x, y), aes(x = x, y = y)) +
+  geom_point()
+
+

+Python code:

+
# Load the "matplotlib" library
+import matplotlib.pyplot as plt
+
+# Generate some sample data
+import numpy as np
+x = np.arange(1, 11)
+y = x + np.random.normal(0, 1, 10)
+
+#clear last plot
+plt.clf()
+
+# Create a scatter plot
+plt.scatter(x, y)
+plt.show()
+
+

+

In both cases, we generate some sample data and create a scatter plot to +visualize the relationship between the variables.

+

There are a few differences in the syntax and functionality between the +two approaches:

+

Library and package names: In R, we use the ggplot2 package for plotting, while in Python, we use the matplotlib library.

Data format: In R, we use a data frame to store the input data, while in Python, we use numpy arrays.

Plotting functions: In R, we use the ggplot() function to create a new plot object and then the geom_point() function to add a scatter plot layer. In Python, we use the scatter() function from the matplotlib.pyplot module to create a scatter plot directly.

+

Linear regression

+

R Code:

+
# Load the "ggplot2" package for plotting
+library(ggplot2)
+
+# Generate some sample data
+x <- seq(1, 10, 1)
+y <- x + rnorm(10)
+
+# Perform linear regression
+model_r <- lm(y ~ x)
+
+# Print the model summary
+summary(model_r)
+
+
## 
+## Call:
+## lm(formula = y ~ x)
+## 
+## Residuals:
+##      Min       1Q   Median       3Q      Max 
+## -1.69344 -0.42336  0.08961  0.34778  1.56728 
+## 
+## Coefficients:
+##             Estimate Std. Error t value Pr(>|t|)    
+## (Intercept)  -0.1676     0.6781  -0.247    0.811    
+## x             0.9750     0.1093   8.921 1.98e-05 ***
+## ---
+## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
+## 
+## Residual standard error: 0.9926 on 8 degrees of freedom
+## Multiple R-squared:  0.9087, Adjusted R-squared:  0.8972 
+## F-statistic: 79.59 on 1 and 8 DF,  p-value: 1.976e-05
+
+
# Plot the data and regression line
+ggplot(data.frame(x, y), aes(x = x, y = y)) +
+  geom_point() +
+  geom_smooth(method = "lm", se = FALSE)
+
+
## `geom_smooth()` using formula = 'y ~ x'
+
+

+

Python code:

+
# Load the "matplotlib" and "scikit-learn" libraries
+import matplotlib.pyplot as plt
+from sklearn.linear_model import LinearRegression
+
+# Generate some sample data
+import numpy as np
+x = np.arange(1, 11)
+y = x + np.random.normal(0, 1, 10)
+
+# Perform linear regression
+model_py = LinearRegression().fit(x.reshape(-1, 1), y)
+
+# Print the model coefficients
+print("Coefficients: ", model_py.coef_)
+
+
## Coefficients:  [1.15539692]
+
+
print("Intercept: ", model_py.intercept_)
+
+#clear last plot
+
+
## Intercept:  -1.1291396173221218
+
+
plt.clf()
+
+# Plot the data and regression line
+plt.scatter(x, y)
+plt.plot(x, model_py.predict(x.reshape(-1, 1)), color='red')
+plt.show()
+
+

+

In both cases, we generate some sample data with a linear relationship +between x and y, and then perform a simple linear regression to estimate +the slope and intercept of the line. We then plot the data and +regression line to visualize the fit.

+

There are a few differences in the syntax and functionality between the +two approaches:

+

Library and package names: In R, we use the lm() function from the base package to perform linear regression, while in Python, we use the LinearRegression() class from the scikit-learn library. Additionally, we use the ggplot2 package in R for plotting, while we use the matplotlib library in Python.

Data format: In R, we can specify the dependent and independent variables in the formula used for regression. In Python, we need to reshape the input data to a two-dimensional array before fitting the model.

Model summary: In R, we can use the summary() function to print a summary of the model, including the estimated coefficients, standard errors, and p-values. In Python, we need to print the coefficients and intercept separately.

+

Random Forest

+

R Code:

+
# Load the "randomForest" package
+library(randomForest)
+
+# Load the "iris" dataset
+data(iris)
+
+# Split the data into training and testing sets
+set.seed(123)
+train_idx <- sample(1:nrow(iris), nrow(iris) * 0.7, replace = FALSE)
+train_data <- iris[train_idx, ]
+test_data <- iris[-train_idx, ]
+
+# Build a random forest model
+rf_model <- randomForest(Species ~ ., data = train_data, ntree = 500)
+
+# Make predictions on the testing set
+predictions <- predict(rf_model, test_data)
+
+# Calculate accuracy of the model
+accuracy <- sum(predictions == test_data$Species) / nrow(test_data)
+print(paste("Accuracy:", accuracy))
+
+
## [1] "Accuracy: 0.977777777777778"
+
+

Python code:

+
# Load the "pandas", "numpy", and "sklearn" libraries
+import pandas as pd
+import numpy as np
+from sklearn.ensemble import RandomForestClassifier
+from sklearn.datasets import load_iris
+from sklearn.model_selection import train_test_split
+
+# Load the "iris" dataset
+iris = load_iris()
+
+# Split the data into training and testing sets
+X_train, X_test, y_train, y_test = train_test_split(iris.data, iris.target, test_size=0.3, random_state=123)
+
+# Build a random forest model
+rf_model = RandomForestClassifier(n_estimators=500, random_state=123)
+rf_model.fit(X_train, y_train)
+
+# Make predictions on the testing set
+
+
## RandomForestClassifier(n_estimators=500, random_state=123)
+
+
predictions = rf_model.predict(X_test)
+
+# Calculate accuracy of the model
+accuracy = sum(predictions == y_test) / len(y_test)
+print("Accuracy:", accuracy)
+
+
## Accuracy: 0.9555555555555556
+
+

In both cases, we load the iris dataset and split it into training and +testing sets. We then build a random forest model using the training +data and evaluate its accuracy on the testing data.

+

There are a few differences in the syntax and functionality between the +two approaches:

+

Library and package names: In R, we use the randomForest package to build random forest models, while in Python, we use the RandomForestClassifier class from the sklearn.ensemble module. We also use different libraries for loading and manipulating data (pandas and numpy in Python, and built-in datasets in R).

Model parameters: The syntax for setting model parameters is slightly different in R and Python. For example, in R, we specify the number of trees using the ntree parameter, while in Python, we use the n_estimators parameter.

Data format: In R, we use a data frame to store the input data, while in Python, we use numpy arrays.

+

Basic streetmap from Open Street Map

+

R Code:

+
# Load the "osmdata" package for mapping
+library(osmdata)
+library(tmap)
+
+# Define the map location and zoom level
+bbox <- c(left = -0.16, bottom = 51.49, right = -0.13, top = 51.51)
+
+# Get the OpenStreetMap data
+osm_data <- opq(bbox) %>% 
+  add_osm_feature(key = "highway") %>% 
+  osmdata_sf()
+
+# Plot the map using tmap
+tm_shape(osm_data$osm_lines) + 
+  tm_lines()
+
+

+Python code:

+
# Load the "osmnx" package for mapping
+import osmnx as ox
+
+# Define the map location and zoom level
+bbox = (51.49, -0.16, 51.51, -0.13)
+
+# Get the OpenStreetMap data
+osm_data = ox.graph_from_bbox(north=bbox[2], south=bbox[0], east=bbox[3], west=bbox[1], network_type='all')
+
+# Plot the map using osmnx
+ox.plot_graph(osm_data)
+
+
## (<Figure size 1600x1600 with 0 Axes>, <AxesSubplot:>)
+
+

+

In both cases, we define the map location and zoom level, retrieve the +OpenStreetMap data using the specified bounding box, and plot the map.

+

The main differences between the two approaches are:

+

Package names and syntax: In R, we use the osmdata package and its syntax to download and process the OpenStreetMap data, while in Python, we use the osmnx package and its syntax.

Mapping libraries: In R, we use the tmap package to create a static map of the OpenStreetMap data, while in Python, we use the built-in ox.plot_graph function from the osmnx package to plot the map.

+

CNN on Raster data

+

R Code:

+
# Load the "keras" package for building the CNN
+library(tensorflow)
+library(keras)
+
+# Load the "raster" package for working with raster data
+library(raster)
+
+# Load the "magrittr" package for pipe operator
+library(magrittr)
+
+# Load the data as a raster brick
+raster_data <- brick("raster_data.tif")
+
+# Split the data into training and testing sets
+split_data <- sample(1:nlayers(raster_data), size = nlayers(raster_data)*0.8, replace = FALSE)
+train_data <- raster_data[[split_data]]
+test_data <- raster_data[[setdiff(1:nlayers(raster_data), split_data)]]
+
+# Define the CNN model
+model <- keras_model_sequential() %>% 
+  layer_conv_2d(filters = 32, kernel_size = c(3, 3), activation = "relu", input_shape = c(ncol(train_data), nrow(train_data), ncell(train_data))) %>% 
+  layer_max_pooling_2d(pool_size = c(2, 2)) %>% 
+  layer_dropout(rate = 0.25) %>% 
+  layer_flatten() %>% 
+  layer_dense(units = 128, activation = "relu") %>% 
+  layer_dropout(rate = 0.5) %>% 
+  layer_dense(units = nlayers(train_data), activation = "softmax")
+
+# Compile the model
+model %>% compile(loss = "categorical_crossentropy", optimizer = "adam", metrics = "accuracy")
+
+# Train the model
+history <- model %>% fit(x = array(train_data), y = to_categorical(1:nlayers(train_data)), epochs = 10, validation_split = 0.2)
+
+# Evaluate the model
+model %>% evaluate(x = array(test_data), y = to_categorical(1:nlayers(test_data)))
+
+# Plot the model accuracy over time
+plot(history)
+
+
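The original gives only the R version for this section. A comparable Python sketch with Keras is shown below; because raster_data.tif is not provided, it uses small randomly generated arrays as a stand-in for the raster patches, so the input shape and the four classes are assumptions for illustration only.

``` python
# A minimal CNN sketch with Keras, using synthetic arrays in place of raster data
import numpy as np
from tensorflow import keras
from tensorflow.keras import layers

# Synthetic stand-in: 100 patches of 32x32 pixels with 3 bands and 4 classes
x = np.random.rand(100, 32, 32, 3).astype("float32")
y = keras.utils.to_categorical(np.random.randint(0, 4, size=100), num_classes=4)

# Define the CNN model (same layer sequence as the R example)
model = keras.Sequential([
    layers.Conv2D(32, (3, 3), activation="relu", input_shape=(32, 32, 3)),
    layers.MaxPooling2D((2, 2)),
    layers.Dropout(0.25),
    layers.Flatten(),
    layers.Dense(128, activation="relu"),
    layers.Dropout(0.5),
    layers.Dense(4, activation="softmax"),
])

# Compile, train, and evaluate the model
model.compile(loss="categorical_crossentropy", optimizer="adam", metrics=["accuracy"])
history = model.fit(x, y, epochs=10, validation_split=0.2)
model.evaluate(x, y)
```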

Piping

+

Piping is a powerful feature in both R and Python that allows for a more +streamlined and readable code. However, the syntax for piping is +slightly different between the two languages.

+

In R, piping is done using the %>% operator from the magrittr package, while in Python there is no dedicated pipe operator; a similar effect is achieved by chaining pandas DataFrame methods or by using the DataFrame.pipe() method.

+

Let’s compare and contrast piping in R and Python with some examples:

+

Piping in R

In R, we can use the %>% operator to pipe output from one function to another, which can make our code more readable and easier to follow. Here's an example:

+

R code:

+
library(dplyr)
+
+# create a data frame
+df <- data.frame(x = c(1,2,3), y = c(4,5,6))
+
+# calculate the sum of column x and y
+df %>%
+  mutate(z = x + y) %>%
+  summarize(sum_z = sum(z))
+
+
##   sum_z
+## 1    21
+
+

In this example, we first create a data frame df with two columns x and +y. We then pipe the output of df to mutate, which adds a new column z to +the data frame that is the sum of x and y. Finally, we pipe the output +to summarize, which calculates the sum of z and returns the result.

+

Piping in Python

In Python, there is no pipe operator; instead, we chain methods on a DataFrame, with each method returning a DataFrame that the next method operates on. Here's an example:

+

Python code:

+
import pandas as pd
+
+# create a DataFrame
+df = pd.DataFrame({'x': [1,2,3], 'y': [4,5,6]})
+
+# calculate the sum of column x and y
+(df.assign(z = df['x'] + df['y'])
+   .agg(sum_z = ('z', 'sum')))
+
+
##         z
+## sum_z  21
+
+

In this example, we first create a DataFrame df with two columns x and +y. We then use the assign() method to add a new column z to the +DataFrame that is the sum of x and y. Finally, we use the agg() method +to calculate the sum of z and return the result.

+

As we can see, the syntax for piping is slightly different between R and +Python, but the concept remains the same. Piping can make our code more +readable and easier to follow, which is an important aspect of creating +efficient and effective code.

+

R code:

+
library(dplyr)
+library(ggplot2)
+
+iris %>%
+  filter(Species == "setosa") %>%
+  group_by(Sepal.Width) %>%
+  summarise(mean.Petal.Length = mean(Petal.Length)) %>%
+  mutate(Sepal.Width = as.factor(Sepal.Width)) %>%
+  ggplot(aes(x = Sepal.Width, y = mean.Petal.Length)) +
+  geom_bar(stat = "identity", fill = "dodgerblue") +
+  labs(title = "Mean Petal Length of Setosa by Sepal Width",
+       x = "Sepal Width",
+       y = "Mean Petal Length")
+
+

+

In this example, we start with the iris dataset and filter it to only +include rows where the Species column is “setosa”. We then group the +remaining rows by the Sepal.Width column and calculate the mean +Petal.Length for each group. Next, we convert Sepal.Width to a factor +variable to ensure that it is treated as a categorical variable in the +visualization. Finally, we create a bar plot using ggplot2, with +Sepal.Width on the x-axis and mean.Petal.Length on the y-axis. The +resulting plot shows the mean petal length of setosa flowers for each +sepal width category.

+

Python code:

+
import pandas as pd
+
+# Load the iris dataset and pipe it into the next function
+( pd.read_csv("https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data", header=None, names=['sepal_length', 'sepal_width', 'petal_length', 'petal_width', 'class'])
+
+  # Select columns and pivot the dataset
+  .loc[:, ['sepal_length', 'sepal_width', 'petal_length']]
+  .melt(var_name='variable', value_name='value')
+
+  # Group by variable and calculate mean
+  .groupby('variable', as_index=False)
+  .mean()
+
+  # Filter for mean greater than 3.5 and sort by descending mean
+  .query('value > 3.5')
+  .sort_values('value', ascending=False)
+)
+
+
##        variable     value
+## 1  sepal_length  5.843333
+## 0  petal_length  3.758667
+
+

for loops

+

Here is an example of a for loop in R:

+

R code

+
# Create a vector of numbers
+numbers <- c(1, 2, 3, 4, 5)
+
+# Use a for loop to print out each number in the vector
+for (i in numbers) {
+  print(i)
+}
+
+
## [1] 1
+## [1] 2
+## [1] 3
+## [1] 4
+## [1] 5
+
+

In this example, the for loop iterates over each element in the numbers +vector, assigning the current element to the variable i. The print(i) +statement is then executed for each iteration, outputting the value of +i.

+

Here is the equivalent example in Python:

+

Python code

+
# Create a list of numbers
+numbers = [1, 2, 3, 4, 5]
+
+# Use a for loop to print out each number in the list
+for i in numbers:
+  print(i)
+
+
## 1
+## 2
+## 3
+## 4
+## 5
+
+

In Python, the for loop iterates over each element in the numbers list, +assigning the current element to the variable i. The print(i) statement +is then executed for each iteration, outputting the value of i.

+

Both languages also support nested for loops, which can be used to +perform iterations over multiple dimensions, such as looping through a +2D array.

+

Parallel

+

Parallel computing is a technique used to execute multiple computational +tasks simultaneously, which can significantly reduce the time required +to complete a task. Both R and Python have built-in support for parallel +computing, although the approaches are slightly different. In this +answer, we will compare and contrast the parallel computing capabilities +of R and Python, and provide working examples in code.

+

Parallel computing in R

In R, there are several packages that support parallel computing, such as parallel, foreach, and doParallel. The parallel package provides basic functionality for parallel computing, while foreach and doParallel provide higher-level abstractions that make it easier to write parallel code.

+

Here is an example of using the foreach package to execute a loop in +parallel:

+

R code:

+
library(foreach)
+library(doParallel)
+
+# Set up a parallel backend with 4 workers
+cl <- makeCluster(4)
+registerDoParallel(cl)
+
+# Define a function to apply in parallel
+myfunc <- function(x) {
+  # some computation here
+  return(x^2)
+}
+
+# Generate some data
+mydata <- 1:1000
+
+# Apply the function to the data in parallel
+result <- foreach(i = mydata) %dopar% {
+  myfunc(i)
+}
+
+# Stop the cluster
+stopCluster(cl)
+
+

In this example, we use the makeCluster() function to set up a cluster +with 4 workers, and the registerDoParallel() function to register the +cluster as the parallel backend for foreach. We then define a function +myfunc() that takes an input x and returns x^2. We generate some data +mydata and use foreach to apply myfunc() to each element of mydata in +parallel, using the %dopar% operator.

+

R Tidyverse parallel

+

In R Tidyverse, we can use the furrr package for parallel computing. +Here’s an example of using furrr to parallelize a map function:

+

R Tidy code:

+
library(tidyverse)
+library(furrr)
+
+# Generate a list of numbers
+numbers <- 1:10
+
+# Use the future_map function from furrr to parallelize the map function
+plan(multisession)
+squares <- future_map(numbers, function(x) x^2)
+
+

In this example, we first load the Tidyverse and furrr libraries. We +then generate a list of numbers from 1 to 10. We then use the plan +function to set the parallelization strategy to “multisession”, which +will use multiple CPU cores to execute the code. Finally, we use the +future_map function from furrr to apply the function x^2 to each number +in the list in parallel.

+

Parallel computing in Python

In Python, the standard library includes the multiprocessing module, which provides basic support for parallel computing. Additionally, there are several third-party packages that provide higher-level abstractions, such as joblib and dask.

+

Here is an example of using the multiprocessing module to execute a loop +in parallel:

+

Python code:

+
def square(x):
+    return x**2
+
+from multiprocessing import Pool
+
+# Generate a list of numbers
+numbers = list(range(1, 11))
+
+# Use the map function and a pool of workers to parallelize the square function
+with Pool() as pool:
+    squares = pool.map(square, numbers)
+
+print(squares)
+
+

In this example, we define a function square() that takes an input x and returns x**2. We generate a list of numbers and use the Pool class from the multiprocessing module to set up a pool of worker processes (by default, one per available CPU core). We then use the map() method of the Pool class to apply square() to each element of numbers in parallel.

+

Comparison and contrast

Both R and Python have built-in support for parallel computing, with similar basic functionality for creating and managing parallel processes. However, the higher-level abstractions differ between the two languages. In R, the foreach package provides a high-level interface that makes it easy to write parallel code, while in Python, the multiprocessing module provides a basic interface that can be extended using third-party packages like joblib and dask.

+

Additionally, Python has better support for distributed computing using +frameworks like Apache Spark, while R has better support for +shared-memory parallelism using tools like data.table and ff.

+

Data wrangling

+

Data wrangling is an important part of any data analysis project, and +both R and Python provide tools and libraries for performing this task. +In this answer, we will compare and contrast data wrangling in R’s +tidyverse and Python’s pandas library, with working examples in code.

+

Data Wrangling in R Tidyverse

+

The tidyverse is a collection of R packages designed for data science, +and it includes several packages that are useful for data wrangling. One +of the most popular packages is dplyr, which provides a grammar of data +manipulation for data frames.

+

Here is an example of using dplyr to filter, mutate, and summarize a +data frame:

+

R code

+
library(dplyr)
+
+# Load data
+data(mtcars)
+
+# Filter for cars with more than 100 horsepower
+mtcars %>%
+  filter(hp > 100) %>%
+  # Add a new column with fuel efficiency in km per liter
+  mutate(kmpl = 0.425 * mpg) %>%
+  # Group by number of cylinders and summarize
+  group_by(cyl) %>%
+  summarize(mean_hp = mean(hp),
+            mean_kmpl = mean(kmpl))
+
+
## # A tibble: 3 × 3
+##     cyl mean_hp mean_kmpl
+##   <dbl>   <dbl>     <dbl>
+## 1     4    111      11.0 
+## 2     6    122.      8.39
+## 3     8    209.      6.42
+
+

In this example, we first filter the mtcars data frame to only include +cars with more than 100 horsepower. We then use mutate to create a new +column with fuel efficiency in kilometers per liter. Finally, we group +the data by the number of cylinders and calculate the mean horsepower +and fuel efficiency.

+

Data Wrangling in Python Pandas

+

Pandas is a popular library for data manipulation in Python. It provides +a data frame object similar to R’s data frames, along with a wide range +of functions for data wrangling.

+

Here is an example of using pandas to filter, transform, and group a +data frame:

+

Python code:

+
import pandas as pd
+
+# Load data
+mtcars = pd.read_csv('https://raw.githubusercontent.com/mwaskom/seaborn-data/master/mtcars.csv')
+
+# Filter for cars with more than 100 horsepower
+filtered_mtcars = mtcars[mtcars['hp'] > 100]
+
+# Add a new column with fuel efficiency in km per liter
+filtered_mtcars['kmpl'] = 0.425 * filtered_mtcars['mpg']
+
+# Group by number of cylinders and calculate mean horsepower and fuel efficiency
+grouped_mtcars = filtered_mtcars.groupby('cyl').agg({'hp': 'mean',
+                                                     'kmpl': 'mean'})
+
+

In this example, we first load the mtcars data from a CSV file. We then filter the data to only include cars with more than 100 horsepower, using boolean indexing. Next, we create a new kmpl column with fuel efficiency in kilometers per liter. Finally, we group the data by the number of cylinders and calculate the mean horsepower and fuel efficiency.

+

Comparison

+

Overall, both R’s tidyverse and Python’s pandas provide similar +functionality for data wrangling. Both allow for filtering, +transforming, and aggregating data frames. The syntax for performing +these operations is slightly different between the two languages, with R +using the %>% operator for chaining operations and Python using method +chaining or the apply family of functions.

+

One key difference between the two languages is that R’s tidyverse +provides a consistent grammar for data manipulation across its various +packages, making it easier to learn and use. However, Python’s pandas +library has a larger developer community and is more versatile for use +in other applications, such as web development or machine learning.

+

In conclusion, both R and Python provide powerful tools for data wrangling, and the choice between the two ultimately depends on the specific needs of the user and their familiarity with each language.

+

Data from API

+

Retrieving data from an API is a common task in both R and Python. Here +are examples of how to retrieve data from an API in both languages:

+

Python

+

To retrieve data from an API in Python, we can use the requests library. +Here’s an example of how to retrieve weather data from the +OpenWeatherMap API:

+

Python code:

+
import requests
+
+url = 'https://api.openweathermap.org/data/2.5/weather?q=London,uk&appid=API_KEY'
+
+response = requests.get(url)
+
+data = response.json()
+
+print(data)
+
+

This code retrieves the current weather data for London from the +OpenWeatherMap API. We first construct the API URL with the location and +API key, then use the requests.get() function to make a request to the +API. We then extract the JSON data from the response using the .json() +method and print the resulting data.

+

R

+

In R, we can use the httr package to retrieve data from an API. Here’s +an example of how to retrieve weather data from the OpenWeatherMap API +in R:

+

R code:

+
library(httr)
+
+url <- 'https://api.openweathermap.org/data/2.5/weather?q=London,uk&appid=API_KEY'
+
+response <- GET(url)
+
+data <- content(response, 'text')
+
+print(data)
+
+

This code is similar to the Python code above. We first load the httr +library, then construct the API URL and use the GET() function to make a +request to the API. We then extract the data from the response using the +content() function and print the resulting data.

+

Retrieving Data from an API in R Tidyverse

In R Tidyverse, we can use the httr and jsonlite packages to retrieve and process data from an API.

+

R code:

+
# Load required packages
+library(httr)
+library(jsonlite)
+
+# Define API endpoint
+endpoint <- "https://jsonplaceholder.typicode.com/posts"
+
+# Retrieve data from API
+response <- GET(endpoint)
+
+# Extract content from response
+content <- content(response, "text")
+
+# Convert content to JSON
+json <- fromJSON(content)
+
+# Convert JSON to a data frame
+df <- as.data.frame(json)
+
+

In the above example, we use the GET() function from the httr package to +retrieve data from an API endpoint, and the content() function to +extract the content of the response. We then use the fromJSON() function +from the jsonlite package to convert the JSON content to a list, and the +as.data.frame() function to convert the list to a data frame.

+

Retrieving Data from an API in Python

In Python, we can use the requests library to retrieve data from an API, and the json library to process the JSON data.

+

Python code:

+
# Load required libraries
+import requests
+import json
+
+# Define API endpoint
+endpoint = "https://jsonplaceholder.typicode.com/posts"
+
+# Retrieve data from API
+response = requests.get(endpoint)
+
+# Extract content from response
+content = response.content
+
+# Convert content to JSON
+json_data = json.loads(content)
+
+# Convert JSON to a list of dictionaries
+data = [dict(row) for row in json_data]
+
+

In the above example, we use the get() function from the requests +library to retrieve data from an API endpoint, and the content attribute +to extract the content of the response. We then use the loads() function +from the json library to convert the JSON content to a list of +dictionaries.

+

Comparison

Both R Tidyverse and Python provide powerful tools for retrieving and processing data from an API. In terms of syntax, the two languages are somewhat similar: in both cases, we use a library to retrieve data from the API, extract the content of the response, and then process the JSON data. However, there are some differences in the specific functions and methods used. For example, in R Tidyverse, we use the content() function to extract the content of the response, whereas in Python, we use the content attribute. Additionally, in R Tidyverse, we use the fromJSON() function to convert the JSON data to a list, whereas in Python, we use the loads() function.

+

Census data

+

Retrieving USA census data in R, R Tidy, and Python can be done using +different packages and libraries. Here are some working examples in code +for each language:

+

R:

+

To retrieve census data in R, we can use the tidycensus package. Here’s +an example of how to retrieve the total population for the state of +California:

+

R code:

+
library(tidycensus)
+library(tidyverse)
+
+# Set your Census API key
+census_api_key("your_api_key")
+
+# Get the total population for the state of California
+ca_pop <- get_acs(
+  geography = "state",
+  variables = "B01003_001",
+  state = "CA"
+) %>% 
+  rename(total_population = estimate) %>% 
+  select(total_population)
+
+# View the result
+ca_pop
+
+

R Tidy:

+

To retrieve census data in R Tidy, we can also use the tidycensus +package. Here’s an example of how to retrieve the total population for +the state of California using pipes and dplyr functions:

+

R tidy code:

+
library(tidycensus)
+library(tidyverse)
+
+# Set your Census API key
+census_api_key("your_api_key")
+
+# Get the total population for the state of California
+ca_pop <- get_acs(
+  geography = "state",
+  variables = "B01003_001",
+  state = "CA"
+) %>% 
+  rename(total_population = estimate) %>% 
+  select(total_population)
+
+# View the result
+ca_pop
+
+

Python:

+

To retrieve census data in Python, we can use the census library. Here’s +an example of how to retrieve the total population for the state of +California:

+

Python code:

+
from census import Census
+from us import states
+import pandas as pd
+
+# Set your Census API key
+c = Census("your_api_key")
+
+# Get the total population for the state of California
+ca_pop = c.acs5.state(("B01003_001"), states.CA.fips, year=2019)
+
+# Convert the result to a Pandas DataFrame
+ca_pop_df = pd.DataFrame(ca_pop)
+
+# Rename the column
+ca_pop_df = ca_pop_df.rename(columns={"B01003_001E": "total_population"})
+
+# Select only the total population column
+ca_pop_df = ca_pop_df[["total_population"]]
+
+# View the result
+ca_pop_df
+
+

Lidar data

+

To find Lidar data in R and Python, you typically need to start by +identifying sources of Lidar data and then accessing them using +appropriate packages and functions. Here are some examples of how to +find Lidar data in R and Python:

+

R:

+

Identify sources of Lidar data: the USGS National Map Viewer provides access to Lidar data for the United States. You can also find Lidar data on state and local government websites, as well as on commercial data providers' websites.

Access the data: you can use the lidR package in R to read Lidar data in the LAS format. For example, the following code reads and plots a sample LAS file that ships with the lidR package:

+

R code:

+
library(lidR)
+
+# Download Lidar data
+LASfile <- system.file("extdata", "Megaplot.laz", package="lidR")
+lidar <- readLAS(LASfile)
+
+# Visualize the data
+plot(lidar)
+
+

Python:

+

Identify sources of Lidar data: the USGS 3DEP program provides access to Lidar data for the United States. You can also find Lidar data on state and local government websites, as well as on commercial data providers' websites.

Access the data: you can use the laspy and pylas packages in Python to download and read Lidar data in the LAS format. For example, the following code downloads and reads Lidar data for a specific area:

+

Python code:

+
py_install("requests")
+py_install("pylas")
+py_install("laspy")
+
+
import requests
+from pylas import read
+import laspy
+import numpy as np
+
+# Download Lidar data
+url = "https://s3-us-west-2.amazonaws.com/usgs-lidar-public/USGS_LPC_CA_SanFrancisco_2016_LAS_2018.zip"
+lasfile = "USGS_LPC_CA_SanFrancisco_2016_LAS_2018.las"
+r = requests.get(url, allow_redirects=True)
+open(lasfile, 'wb').write(r.content)
+
+# Read the data
+lidar = read(lasfile)
+
+# Visualize the data
+laspy.plot.plot(lidar)
+
+

Data for black lives

+

Data for Black Lives (https://d4bl.org/) is a movement that uses data +science to create measurable change in the lives of Black people. While +the Data for Black Lives website provides resources, reports, articles, +and datasets related to racial equity, it doesn’t provide a direct API +for downloading data.

+

Instead, you can access the Data for Black Lives GitHub repository +(https://github.com/Data4BlackLives) to find datasets and resources to +work with. In this example, we’ll use a sample dataset available at +https://github.com/Data4BlackLives/covid-19/tree/master/data. The +dataset “COVID19_race_data.csv” contains COVID-19 race-related data.

+

R: In R, we’ll use the ‘readr’ and ‘dplyr’ packages to read, process, +and analyze the dataset.

+

R code:

+
# Install and load necessary libraries
+
+library(readr)
+library(dplyr)
+
+# Read the CSV file
+url <- "https://raw.githubusercontent.com/Data4BlackLives/covid-19/master/data/COVID19_race_data.csv"
+data <- read_csv(url)
+
+# Basic information about the dataset
+print(dim(data))
+print(head(data))
+
+# Example analysis: calculate the mean of 'cases_total' by 'state'
+data %>%
+  group_by(state) %>%
+  summarize(mean_cases_total = mean(cases_total, na.rm = TRUE)) %>%
+  arrange(desc(mean_cases_total))
+
+

Python: In Python, we’ll use the ‘pandas’ library to read, process, and +analyze the dataset.

+

Python code:

+
import pandas as pd
+
+# Read the CSV file
+url = "https://raw.githubusercontent.com/Data4BlackLives/covid-19/master/data/COVID19_race_data.csv"
+data = pd.read_csv(url)
+
+# Basic information about the dataset
+print(data.shape)
+print(data.head())
+
+# Example analysis: calculate the mean of 'cases_total' by 'state'
+mean_cases_total = data.groupby("state")["cases_total"].mean().sort_values(ascending=False)
+print(mean_cases_total)
+
+

In conclusion, both R and Python provide powerful libraries and tools +for downloading, processing, and analyzing datasets, such as those found +in the Data for Black Lives repository. The ‘readr’ and ‘dplyr’ +libraries in R offer a simple and intuitive way to read and manipulate +data, while the ‘pandas’ library in Python offers similar functionality +with a different syntax. Depending on your preferred programming +language and environment, both options can be effective in working with +social justice datasets.

+

Propublica Congress API

+

The ProPublica Congress API provides information about the U.S. Congress +members and their voting records. In this example, we’ll fetch data +about the current Senate members and calculate the number of members in +each party.

+

R: In R, we’ll use the ‘httr’ and ‘jsonlite’ packages to fetch and +process data from the ProPublica Congress API.

+

R code:

+
# load necessary libraries
+library(httr)
+library(jsonlite)
+
+# Replace 'your_api_key' with your ProPublica API key
+
+api_key <- "your_api_key"
+
+# Fetch data about the current Senate members
+url <- "https://api.propublica.org/congress/v1/117/senate/members.json"
+response <- GET(url, add_headers(`X-API-Key` = api_key))
+
+# Check if the request was successful
+if (http_status(response)$category == "Success") {
+  data <- content(response, "parsed")
+  members <- data$results[[1]]$members
+
+  # Calculate the number of members in each party
+  party_counts <- table(sapply(members, function(x) x$party))
+  print(party_counts)
+} else {
+  print(http_status(response)$message)
+}
+
+
## 
+##  D  I ID  R 
+## 49  1  2 51
+
+

Python: In Python, we’ll use the ‘requests’ library to fetch data from +the ProPublica Congress API and ‘pandas’ library to process the data.

+

Python code:

+
# Install necessary libraries
+
+import requests
+import pandas as pd
+
+# Replace 'your_api_key' with your ProPublica API key
+api_key = "your_api_key"
+headers = {"X-API-Key": api_key}
+
+# Fetch data about the current Senate members
+url = "https://api.propublica.org/congress/v1/117/senate/members.json"
+response = requests.get(url, headers=headers)
+
+# Check if the request was successful
+if response.status_code == 200:
+    data = response.json()
+    members = data["results"][0]["members"]
+
+    # Calculate the number of members in each party
+    party_counts = pd.DataFrame(members)["party"].value_counts()
+    print(party_counts)
+else:
+    print(f"Error: {response.status_code}")
+
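The members list also converts cleanly to a DataFrame if you want more than the party tallies. A minimal sketch, assuming the members list fetched above and that each member record includes state and party fields (as the ProPublica member objects normally do):

+# Cross-tabulate senators by state and party
+members_df = pd.DataFrame(members)
+print(pd.crosstab(members_df["state"], members_df["party"]).head(10))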
+

In conclusion, both R and Python offer efficient ways to fetch and +process data from APIs like the ProPublica Congress API. The ‘httr’ and +‘jsonlite’ libraries in R provide a straightforward way to make HTTP +requests and parse JSON data, while the ‘requests’ library in Python +offers similar functionality. The ‘pandas’ library in Python can be used +for data manipulation and analysis, and R provides built-in functions +like table() for aggregating data. Depending on your preferred +programming language and environment, both options can be effective for +working with the ProPublica Congress API.

+

Nonprofit Explorer API by ProPublica

+

The Nonprofit Explorer API by ProPublica provides data on tax-exempt +organizations in the United States. In this example, we’ll search for +organizations with the keyword “education” and analyze the results.

+

R: In R, we’ll use the ‘httr’ and ‘jsonlite’ packages to fetch and +process data from the Nonprofit Explorer API.

+

R code:

+
# Install and load necessary libraries
+library(httr)
+library(jsonlite)
+
+# Fetch data for organizations with the keyword "education"
+url <- "https://projects.propublica.org/nonprofits/api/v2/search.json?q=education"
+response <- GET(url)
+
+# Check if the request was successful
+if (http_status(response)$category == "Success") {
+  data <- content(response, "parsed")
+  organizations <- data$organizations
+
+  # Count the number of organizations per state
+  state_counts <- table(sapply(organizations, function(x) x$state))
+  print(state_counts)
+} else {
+  print(http_status(response)$message)
+}
+
+
## 
+##      AZ      CA      CO      DC      FL      GA      HI      IL Indiana      LA 
+##       3      22       6       5       3       2       1       2       1       1 
+##      MD      MI      MN      MO      MP      MS      NC      NE      NJ      NM 
+##       1       2       5       3       1       1       2       2       2       1 
+##      NY      OH      OK  Oregon      PA      TX      UT      VA      WA      WV 
+##       1       5       1       2       2      12       1       4       3       1 
+##      ZZ 
+##       2
+
+

Python: In Python, we’ll use the ‘requests’ library to fetch data from +the Nonprofit Explorer API and ‘pandas’ library to process the data.

+

Python code:

+
# Install necessary libraries
+import requests
+import pandas as pd
+
+# Fetch data for organizations with the keyword "education"
+url = "https://projects.propublica.org/nonprofits/api/v2/search.json?q=education"
+response = requests.get(url)
+
+# Check if the request was successful
+if response.status_code == 200:
+    data = response.json()
+    organizations = data["organizations"]
+
+    # Count the number of organizations per state
+    state_counts = pd.DataFrame(organizations)["state"].value_counts()
+    print(state_counts)
+else:
+    print(f"Error: {response.status_code}")
+
+
## CA         22
+## TX         12
+## CO          6
+## MN          5
+## OH          5
+## DC          5
+## VA          4
+## AZ          3
+## WA          3
+## MO          3
+## FL          3
+## IL          2
+## GA          2
+## NC          2
+## MI          2
+## Oregon      2
+## NE          2
+## ZZ          2
+## PA          2
+## NJ          2
+## HI          1
+## MS          1
+## NY          1
+## Indiana     1
+## NM          1
+## LA          1
+## UT          1
+## MD          1
+## MP          1
+## WV          1
+## OK          1
+## Name: state, dtype: int64
+
+
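Note that the state field in the sample output mixes USPS abbreviations with a few full names (“Indiana”, “Oregon”). If you want consistent keys, you can normalize them before counting. A minimal sketch, assuming the organizations list fetched above (the mapping shown covers only the names that appear in this result set):

+# Normalize the few full state names returned by the API to USPS abbreviations
+name_to_abbrev = {"Indiana": "IN", "Oregon": "OR"}
+
+df = pd.DataFrame(organizations)
+df["state"] = df["state"].replace(name_to_abbrev)
+print(df["state"].value_counts())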

In conclusion, both R and Python offer efficient ways to fetch and +process data from APIs like the Nonprofit Explorer API. The ‘httr’ and +‘jsonlite’ libraries in R provide a straightforward way to make HTTP +requests and parse JSON data, while the ‘requests’ library in Python +offers similar functionality. The ‘pandas’ library in Python can be used +for data manipulation and analysis, and R provides built-in functions +like table() for aggregating data. Depending on your preferred +programming language and environment, both options can be effective for +working with the Nonprofit Explorer API.

+

Campaign Finance API by the FEC

+

The Campaign Finance API by the Federal Election Commission (FEC) +provides data on campaign finance in U.S. federal elections. In this +example, we’ll fetch data about individual contributions for the 2020 +election cycle and analyze the results.

+

R: In R, we’ll use the ‘httr’ and ‘jsonlite’ packages to fetch and +process data from the Campaign Finance API.

+

R code:

+
# Install and load necessary libraries
+library(httr)
+library(jsonlite)
+
+# Fetch data about individual contributions for the 2020 election cycle
+# (replace 'your_api_key' with your actual FEC API key)
+api_key <- "your_api_key"
+url <- paste0("https://api.open.fec.gov/v1/schedules/schedule_a/?api_key=", api_key,
+              "&two_year_transaction_period=2020&sort_hide_null=false&sort_null_only=false&per_page=20&page=1")
+response <- GET(url)
+
+# Check if the request was successful
+if (http_status(response)$category == "Success") {
+  data <- fromJSON(content(response, "text"))
+  contributions <- data$results
+
+  # Calculate the total contributions per state (sum of contribution_receipt_amount)
+  state_totals <- aggregate(contribution_receipt_amount ~ contributor_state,
+                            data = contributions, FUN = sum)
+  colnames(state_totals) <- c("State", "Total_Contributions")
+  print(state_totals)
+} else {
+  print(http_status(response)$message)
+}
+
+
## [1] "Client error: (403) Forbidden"
+
+

Python: In Python, we’ll use the ‘requests’ library to fetch data from +the Campaign Finance API and ‘pandas’ library to process the data.

+

Python code:

+
# Install necessary libraries
+
+import requests
+import pandas as pd
+
+# Fetch data about individual contributions for the 2020 election cycle
+url = "https://api.open.fec.gov/v1/schedules/schedule_a/?api_key=your_api_key&two_year_transaction_period=2020&sort_hide_null=false&sort_null_only=false&per_page=20&page=1"
+response = requests.get(url)
+
+# Check if the request was successful
+if response.status_code == 200:
+    data = response.json()
+    contributions = data["results"]
+
+    # Calculate the total contributions per state
+    df = pd.DataFrame(contributions)
+    state_totals = df.groupby("contributor_state")["contribution_receipt_amount"].sum()
+    print(state_totals)
+else:
+    print(f"Error: {response.status_code}")
+
+
## Error: 403
+
+

In conclusion, both R and Python offer efficient ways to fetch and +process data from APIs like the Campaign Finance API. The ‘httr’ and +‘jsonlite’ libraries in R provide a straightforward way to make HTTP +requests and parse JSON data, while the ‘requests’ library in Python +offers similar functionality. The ‘pandas’ library in Python can be used +for data manipulation and analysis, and R provides built-in functions +like aggregate() for aggregating data. Depending on your preferred +programming language and environment, both options can be effective for +working with the Campaign Finance API.

+

Note: Remember to replace your_api_key with your actual FEC API key in +the code examples above.

+
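Rather than pasting the key into the URL or the script, you can keep it out of your code entirely by reading it from an environment variable. A minimal Python sketch, assuming you have exported a variable named FEC_API_KEY (the variable name is just an example):

+import os
+import requests
+
+# Read the key from the environment instead of hard-coding it
+api_key = os.environ["FEC_API_KEY"]
+
+url = "https://api.open.fec.gov/v1/schedules/schedule_a/"
+params = {"api_key": api_key, "two_year_transaction_period": 2020, "per_page": 20}
+response = requests.get(url, params=params)
+print(response.status_code)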

Historic Redlining

+

Historic redlining data refers to data from the Home Owners’ Loan +Corporation (HOLC) that created residential security maps in the 1930s, +which contributed to racial segregation and disinvestment in minority +neighborhoods. One popular source for this data is the Mapping +Inequality project (https://dsl.richmond.edu/panorama/redlining/).

+

In this example, we’ll download historic redlining data for Philadelphia +in the form of a GeoJSON file and analyze the data in R and Python.

+

R: In R, we’ll use the ‘sf’ and ‘dplyr’ packages to read and process the +GeoJSON data.

+

R code:

+
# Install and load necessary libraries
+library(sf)
+library(dplyr)
+
+# Download historic redlining data for Philadelphia
+url <- "https://dsl.richmond.edu/panorama/redlining/static/downloads/geojson/PAPhiladelphia1937.geojson"
+philly_geojson <- read_sf(url)
+
+# Count the number of areas per HOLC grade
+grade_counts <- philly_geojson %>%
+  group_by(holc_grade) %>%
+  summarize(count = n())
+
+plot(grade_counts)
+
+

+

Python: In Python, we’ll use the ‘geopandas’ library to read and process +the GeoJSON data.

+

Python code:

+
# Install necessary libraries
+
+
+import geopandas as gpd
+
+# Download historic redlining data for Philadelphia
+url = "https://dsl.richmond.edu/panorama/redlining/static/downloads/geojson/PAPhiladelphia1937.geojson"
+philly_geojson = gpd.read_file(url)
+
+# Count the number of areas per HOLC grade
+grade_counts = philly_geojson["holc_grade"].value_counts()
+print(grade_counts)
+
+
## B    28
+## D    26
+## C    18
+## A    10
+## Name: holc_grade, dtype: int64
+
+
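For a quick visual check of the same GeoJSON, geopandas can map the polygons by grade directly. A minimal sketch, assuming the philly_geojson GeoDataFrame loaded above and matplotlib installed:

+import matplotlib.pyplot as plt
+
+# Color each HOLC polygon by its grade (A-D)
+philly_geojson.plot(column="holc_grade", categorical=True, legend=True, cmap="RdYlGn_r")
+plt.title("HOLC grades, Philadelphia (1937)")
+plt.axis("off")
+plt.show()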

In conclusion, both R and Python offer efficient ways to download and process historic redlining data in the form of GeoJSON files. The ‘sf’ package in R provides a simple way to read and manipulate spatial data, while the ‘geopandas’ library in Python offers similar functionality. The ‘dplyr’ package in R can be used for data manipulation and analysis, and the pandas value_counts() method in Python can be used for aggregating data. Depending on your preferred programming language and environment, both options can be effective for working with historic redlining data.

+

American Indian and Alaska Native Areas (AIANNH)

+

In this example, we’ll download and analyze the American Indian and Alaska Native Areas (AIANNH) TIGER/Line Shapefile from the U.S. Census Bureau. We’ll download the data for the year 2020 and count the number of AIANNH areas in each legal/statistical area description (LSAD) category.

+

R: In R, we’ll use the ‘sf’ and ‘dplyr’ packages to read and process the +Shapefile data.

+

R code:

+
# Install and load necessary libraries
+library(sf)
+library(dplyr)
+
+# Download the 2020 AIANNH TIGER/Line Shapefile from the U.S. Census Bureau
+url <- "https://www2.census.gov/geo/tiger/TIGER2020/AIANNH/tl_2020_us_aiannh.zip"
+temp_file <- tempfile(fileext = ".zip")
+download.file(url, temp_file, mode = "wb")
+unzip(temp_file, exdir = tempdir())
+
+# Read the Shapefile
+shapefile_path <- file.path(tempdir(), "tl_2020_us_aiannh.shp")
+aiannh <- read_sf(shapefile_path)
+
+# Count the number of AIANNH areas per legal/statistical area description (LSAD) code
+lsad_counts <- aiannh %>%
+  group_by(LSAD) %>%
+  summarize(count = n())
+
+print(lsad_counts[order(-lsad_counts$count),])
+
+
## Simple feature collection with 26 features and 2 fields
+## Geometry type: GEOMETRY
+## Dimension:     XY
+## Bounding box:  xmin: -174.236 ymin: 18.91069 xmax: -67.03552 ymax: 71.34019
+## Geodetic CRS:  NAD83
+## # A tibble: 26 × 3
+##    LSAD  count                                                          geometry
+##    <chr> <int>                                                <MULTIPOLYGON [°]>
+##  1 79      221 (((-166.5331 65.33918, -166.5331 65.33906, -166.533 65.33699, -1…
+##  2 86      206 (((-83.38811 35.46645, -83.38342 35.46596, -83.38316 35.46593, -…
+##  3 OT      155 (((-92.32972 47.81374, -92.3297 47.81305, -92.32967 47.81196, -9…
+##  4 78       75 (((-155.729 20.02457, -155.7288 20.02428, -155.7288 20.02427, -1…
+##  5 85       46 (((-122.3355 37.95215, -122.3354 37.95206, -122.3352 37.95199, -…
+##  6 92       35 (((-93.01356 31.56287, -93.01354 31.56251, -93.01316 31.56019, -…
+##  7 88       25 (((-97.35299 36.908, -97.35291 36.90801, -97.35287 36.908, -97.3…
+##  8 96       19 (((-116.48 32.63814, -116.48 32.63718, -116.4794 32.63716, -116.…
+##  9 84       16 (((-105.5937 36.40379, -105.5937 36.40324, -105.5937 36.40251, -…
+## 10 89       11 (((-95.91705 41.28037, -95.91653 41.28036, -95.91653 41.28125, -…
+## # ℹ 16 more rows
+
+

Python: In Python, we’ll use the ‘geopandas’ library to read and process +the Shapefile data.

+

Python code:

+
import geopandas as gpd
+import pandas as pd
+import requests
+import zipfile
+import os
+from io import BytesIO
+
+# Download the 2020 AIANNH TIGER/Line Shapefile from the U.S. Census Bureau
+url = "https://www2.census.gov/geo/tiger/TIGER2020/AIANNH/tl_2020_us_aiannh.zip"
+response = requests.get(url)
+zip_file = zipfile.ZipFile(BytesIO(response.content))
+
+# Extract Shapefile
+temp_dir = "temp"
+if not os.path.exists(temp_dir):
+    os.makedirs(temp_dir)
+
+zip_file.extractall(path=temp_dir)
+shapefile_path = os.path.join(temp_dir, "tl_2020_us_aiannh.shp")
+
+# Read the Shapefile
+aiannh = gpd.read_file(shapefile_path)
+
+# Count the number of AIANNH areas per legal/statistical area description (LSAD) code
+lsad_counts = aiannh.groupby("LSAD").size().reset_index(name="count")
+
+# Sort by descending count
+lsad_counts_sorted = lsad_counts.sort_values(by="count", ascending=False)
+
+print(lsad_counts_sorted)
+
+
##    LSAD  count
+## 2    79    221
+## 9    86    206
+## 25   OT    155
+## 1    78     75
+## 8    85     46
+## 15   92     35
+## 11   88     25
+## 19   96     19
+## 7    84     16
+## 12   89     11
+## 5    82      8
+## 3    80      7
+## 4    81      6
+## 21   98      5
+## 20   97      5
+## 13   90      4
+## 18   95      3
+## 6    83      3
+## 17   94      2
+## 16   93      1
+## 14   91      1
+## 10   87      1
+## 22   99      1
+## 23   9C      1
+## 24   9D      1
+## 0    00      1
+
+
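For a quick look at where these areas fall, the same GeoDataFrame can be mapped directly. A minimal sketch, assuming the aiannh GeoDataFrame read above and matplotlib installed (the national extent includes Alaska and island areas, so expect a stretched default view):

+import matplotlib.pyplot as plt
+
+# Draw all AIANNH boundaries, colored by LSAD code
+aiannh.plot(column="LSAD", categorical=True, legend=False, figsize=(10, 6))
+plt.title("AIANNH areas (TIGER/Line 2020) by LSAD code")
+plt.axis("off")
+plt.show()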

In conclusion, both R and Python offer efficient ways to download and process AIANNH TIGER/Line Shapefile data from the U.S. Census Bureau. The ‘sf’ package in R provides a simple way to read and manipulate spatial data, while the ‘geopandas’ library in Python offers similar functionality. The ‘dplyr’ package in R and the pandas groupby()/size() methods in Python can be used for aggregating the data. Depending on your preferred programming language and environment, both options can be effective for working with AIANNH data.

+

Indian Entities Recognized and Eligible To Receive Services by BIA

+

The Bureau of Indian Affairs (BIA) provides a PDF document containing a +list of Indian Entities Recognized and Eligible To Receive Services. To +analyze the data, we’ll first need to extract the information from the +PDF. In this example, we’ll extract the names of the recognized tribes +and count the number of tribes per state.

+

R: In R, we’ll use the ‘pdftools’ package to extract text from the PDF +and the ‘stringr’ package to process the text data.

+

R code:

+
# Install and load necessary libraries
+library(pdftools)
+library(stringr)
+library(dplyr)
+
+# Download the BIA PDF
+url <- "https://www.govinfo.gov/content/pkg/FR-2022-01-28/pdf/2022-01789.pdf"
+temp_file <- tempfile(fileext = ".pdf")
+download.file(url, temp_file, mode = "wb")
+
+# Extract text from the PDF
+pdf_text <- pdf_text(temp_file)
+tribe_text <- pdf_text[4:length(pdf_text)]
+
+# Define helper functions
+tribe_state_extractor <- function(text_line) {
+  regex_pattern <- "(.*),\\s+([A-Z]{2})$"
+  tribe_state <- str_match(text_line, regex_pattern)
+  return(tribe_state)
+}
+
+is_valid_tribe_line <- function(text_line) {
+  regex_pattern <- "^\\d+\\s+"
+  return(!is.na(str_match(text_line, regex_pattern)))
+}
+
+# Process text data to extract tribes and states
+# (pdf_text() returns one string per page, so split each page into lines first)
+tribe_lines <- unlist(str_split(tribe_text, "\n"))
+tribe_states <- t(sapply(tribe_lines, tribe_state_extractor))
+valid_lines <- sapply(tribe_lines, is_valid_tribe_line)
+tribe_states <- tribe_states[valid_lines, 2:3]
+
+# Count the number of tribes per state
+tribe_data <- as.data.frame(tribe_states)
+colnames(tribe_data) <- c("Tribe", "State")
+state_counts <- tribe_data %>%
+  group_by(State) %>%
+  summarise(Count = n())
+
+print(state_counts)
+
+
## # A tibble: 0 × 2
+## # ℹ 2 variables: State <chr>, Count <int>
+
+

Python: In Python, we’ll use the ‘PyPDF2’ library to extract text from +the PDF and the ‘re’ module to process the text data.

+

Python code:

+
# Install necessary libraries
+import requests
+import PyPDF2
+import io
+import re
+from collections import Counter
+
+# Download the BIA PDF
+url = "https://www.bia.gov/sites/bia.gov/files/assets/public/raca/online-tribal-leaders-directory/tribal_leaders_2021-12-27.pdf"
+response = requests.get(url)
+
+# Extract text from the PDF
+pdf_reader = PyPDF2.PdfFileReader(io.BytesIO(response.content))
+tribe_text = [pdf_reader.getPage(i).extractText() for i in range(3, pdf_reader.numPages)]
+
+# Process text data to extract (tribe, state) pairs line by line,
+# flattening the per-line regex matches into a single list of tuples
+matches = [m for text in tribe_text
+           for line in text.split('\n') if line
+           for m in re.findall(r'^\d+\s+(.+),\s+([A-Z]{2})', line)]
+tribe_states = [state for tribe, state in matches]
+
+# Count the number of tribes per state
+state_counts = Counter(tribe_states)
+print(state_counts)
+
+
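If the regular expression matches the PDF you point it at, the Counter makes it easy to see which states appear most often. A short sketch, assuming the state_counts Counter built above:

+# Print the ten most common states among the extracted entries
+for state, n in state_counts.most_common(10):
+    print(state, n)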

In conclusion, both R and Python offer efficient ways to download and +process the list of Indian Entities Recognized and Eligible To Receive +Services from the BIA. The ‘pdftools’ package in R provides a simple way +to extract text from PDF files, while the ‘PyPDF2’ library in Python +offers similar functionality. The ‘stringr’ package in R and the ‘re’ +module in Python can be used to process and analyze text data. Depending +on your preferred programming language and environment, both options can +be effective for working with BIA data.

+

National Atlas - Indian Lands of the United States dataset

+

In this example, we will download and analyze the National Atlas - +Indian Lands of the United States dataset in both R and Python. We will +read the dataset and count the number of Indian lands per state.

+

R: In R, we’ll use the ‘sf’ package to read the Shapefile and the +‘dplyr’ package to process the data.

+

R code:

+
# Install and load necessary libraries
+
+library(sf)
+library(dplyr)
+
+# Download the Indian Lands dataset
+url <- "https://prd-tnm.s3.amazonaws.com/StagedProducts/Small-scale/data/Boundaries/indlanp010g.shp_nt00968.tar.gz"
+temp_file <- tempfile(fileext = ".tar.gz")
+download.file(url, temp_file, mode = "wb")
+untar(temp_file, exdir = tempdir())
+
+# Read the Shapefile
+shapefile_path <- file.path(tempdir(), "indlanp010g.shp")
+indian_lands <- read_sf(shapefile_path)
+
+# Count the number of Indian lands per state
+# state_counts <- indian_lands %>%
+#   group_by(STATE) %>%
+#   summarize(count = n())
+
+plot(indian_lands)
+
+
## Warning: plotting the first 9 out of 23 attributes; use max.plot = 23 to plot
+## all
+
+

+

Python: In Python, we’ll use the ‘geopandas’ and ‘pandas’ libraries to +read the Shapefile and process the data.

+

Python code:

+
import geopandas as gpd
+import pandas as pd
+import requests
+import tarfile
+import os
+from io import BytesIO
+
+# Download the Indian Lands dataset
+url = "https://prd-tnm.s3.amazonaws.com/StagedProducts/Small-scale/data/Boundaries/indlanp010g.shp_nt00966.tar.gz"
+response = requests.get(url)
+tar_file = tarfile.open(fileobj=BytesIO(response.content), mode='r:gz')
+
+# Extract Shapefile
+temp_dir = "temp"
+if not os.path.exists(temp_dir):
+    os.makedirs(temp_dir)
+
+tar_file.extractall(path=temp_dir)
+shapefile_path = os.path.join(temp_dir, "indlanp010g.shp")
+
+# Read the Shapefile
+indian_lands = gpd.read_file(shapefile_path)
+
+# Count the number of Indian lands per state
+state_counts = indian_lands.groupby("STATE").size().reset_index(name="count")
+
+print(state_counts)
+
+
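If you also want a quick map from the Python side, mirroring the plot() call in the R example, here is a minimal sketch, assuming the indian_lands GeoDataFrame read above and matplotlib installed:

+import matplotlib.pyplot as plt
+
+# Draw the Indian lands polygons, colored by the STATE attribute used above
+indian_lands.plot(column="STATE", categorical=True, legend=False, figsize=(10, 6))
+plt.title("National Atlas - Indian Lands of the United States")
+plt.axis("off")
+plt.show()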

Both the R and Python examples download the dataset and read the Shapefile using their respective packages. The Python example then groups the data by the ‘STATE’ attribute to count the number of Indian lands per state; the R example maps the dataset instead, with the equivalent group_by()/summarize() step left commented out for reference.


Code of Conduct and Respectful Inclusive Collaboration Guidelines

+

Environmental Data Science Innovation & Inclusion Lab (ESIIL) is committed to building, maintaining, and fostering an inclusive, kind, collaborative, and diverse transdisciplinary environmental data science community, whose members feel welcome, supported, and safe to contribute ideas and knowledge.

+

The 2024 ESIIL Innovation Summit will follow all aspects of the ESIIL Code of Conduct (below).

+

All community members are responsible for creating this culture, embodying our values, welcoming diverse perspectives and ways of knowing, creating safe inclusive spaces, and conducting ethical science as guided by FAIR (Findable, Accessible, Interoperable, Reusable) and CARE (Collective Benefit, Authority to Control, Responsibility, and Ethics) principles for scientific and Indigenous data management, governance, and stewardship.

+

Our values

+

ESIIL’s vision is grounded in the conviction that innovation and breakthroughs in environmental data science will be precipitated by a diverse, collaborative, curious, and inclusive research community empowered by open data and infrastructure, cross-sector and community partnerships, team science, and engaged learning.

+

As such, our core values center people through inclusion, kindness, respect, collaboration, and genuine relationships. They also center innovation, driven by collaborative, cross-sector science and synthesis, open, accessible data and tools, and fun, diverse teams. Finally, they center learning, propelled by curiosity and accessible, inclusive training, and education opportunities.

+

When and how to use these guidelines

+

These guidelines outline behavior expectations for ESIIL community members. Your participation in the ESIIL network is contingent upon following these guidelines in all ESIIL activities, including, but not limited to, participating in meetings, webinars, hackathons, and working groups hosted or funded by ESIIL, as well as email lists and online forums such as GitHub, Slack, and Twitter. These guidelines have been adapted from those of the International Arctic Research Policy Committee, the Geological Society of America, the American Geophysical Union, the University Corporation for Atmospheric Research, The Carpentries, and others. We encourage other organizations to adapt these guidelines for use in their own meetings.

+

Note: Working groups and hackathon/codefest teams are encouraged to discuss these guidelines and what they mean to them, and will have the opportunity to add to them to specifically support and empower their team. Collaborative and behavior commitments complement data use, management, authorship, and access plans that commit to CARE and FAIR principles.

+

Behavior Agreements

+

ESIIL community members are expected to act professionally and respectfully in all activities, such that each person, regardless of gender, gender identity or expression, sexual orientation, disability, physical appearance, age, body size, race, religion, national origin, ethnicity, level of experience, language fluency, political affiliation, veteran status, pregnancy, country of origin, and any other characteristic protected under state or federal law, feels safe and welcome in our activities and community. We gain strength from diversity and actively seek participation from those who enhance it.

+

In order to garner the benefits of a diverse community and to reach the full potential of our mission and charge, ESIIL participants must be allowed to develop a sense of belonging and trust within a respectful, inclusive, and collaborative culture. Guiding behaviors that contribute to this culture include, but are not limited to:

+

Showing Respect

+
  • Listen carefully – we each bring our own styles of communication, language, and ideas, and we must do our best to accept and accommodate differences. Do not interrupt when someone is speaking and maintain an open mind when others have different ideas than yours.
  • Be present – when engaging with others, give them your full attention. If you need to respond to outside needs, please step away from the group quietly.
  • Be kind – offer positive, supportive comments and constructive feedback. Critique ideas, not people. Harassment, discrimination, bullying, aggression, including offensive comments, jokes, and imagery, are unacceptable, regardless of intent, and will not be tolerated.
  • Be punctual - adhere to the schedule provided by the organizers and avoid disruptive behavior during presentations, trainings, or working sessions.
  • Respect privacy - be mindful of the confidentiality of others. Always obtain explicit consent before recording, sharing, or using someone else’s personal information, photos, or recordings.
  • Practice good digital etiquette (netiquette) when communicating online, whether in emails, messages, or social media - think before posting online and consider the potential impact on others. Do not share or distribute content generated by or involving others without their explicit consent.

Being Inclusive

+
  • Create space for everyone to participate – be thoughtful about who is at the table; openly address accessibility needs, and provide multiple ways to contribute.
  • Be welcoming – ESIIL participants come from a wide range of skill levels and career stages, backgrounds, and cultures. Demonstrate that you value these different perspectives and identities through your words and actions, including through correct use of names, titles, and pronouns.
  • Be self-aware – recognize that positionality, identity, unconscious biases, and upbringing can all affect how words and behaviors are perceived. Ensure that your words and behavior make others feel welcome.
  • Commit to ongoing learning – the move toward inclusive, equitable, and just environmental data science is a collective journey. Continue to learn about and apply practices of inclusion, anti-racism, bystander intervention, and cultural sensitivity. None of us is perfect; all of us will, from time to time, fail to live up to our own high standards. Being perfect is not what matters; owning our mistakes and committing to clear and persistent efforts to grow and improve is.

Being Curious

+
  • Check your presumptions – we each bring our own ideas and assumptions about how the world should and does work – what are yours, and how do they affect how you interact with others? How do they shape your perception of new ideas?
  • Ask questions – one of the strengths of interdisciplinary and diverse teams is that we all bring different knowledge and viewpoints; no one person is expected to know everything. So don’t be afraid to ask, to learn, and to share.
  • Be bold – significant innovations don’t come from incremental efforts. Be brave in proposing and testing new ideas. When things don’t work, learn from the experience.
  • Invite feedback – new ideas and improvements can emerge from many places when we’re open to hearing them. Check your defensiveness and listen; accept feedback as a gift toward improving our work and ourselves.

Being Collaborative

+
  • Recognize that everyone is bringing something different to the table – take the time to get to know each other. Keep an open mind, encourage ideas that are different from yours, and learn from each other’s expertise and experience.
  • Be accountable - great team science depends on trust, communication, respect, and delivering on your commitments. Be clear about your needs, as both a requester and a responder, realistic about your time and capacity commitments, and communicate timelines and standards in advance.
  • Make assumptions explicit and provide context wherever possible - misunderstandings are common on transdisciplinary and cross-cultural teams and can best be managed with intentionality. Check in about assumptions, and be willing to share and correct misunderstandings or mistakes when they happen. Make use of collaboration agreements, communicate clearly and avoid jargon wherever possible.
  • Respect intellectual property and Indigenous data sovereignty – ESIIL recognizes the extractive and abusive history of scientific engagement with Native peoples, and is committed to doing better. Indigenous knowledge holders are under no obligation to share their data, stories or knowledge. Their work should always be credited, and only shared with permission. Follow guidelines for authorship, Indigenous data sovereignty, and CARE principles. Acknowledge and credit the ideas and work of others.
  • Use the resources that we provide - take advantage of the cyberinfrastructure and data cube at your disposal, but do not use them for unrelated tasks, as it could disrupt the event, introduce security risks, undermine the spirit of collaboration and fair play, and erode trust within the event community.
  • Be safe - never share sensitive personal information; use strong passwords for your Cyverse and GitHub accounts and do not share them with other participants; be cautious of unsolicited emails, messages, or links; and verify online contacts. If you encounter any illegal or harmful activities online related to this event, report them to Tyler McIntosh or Susan Sullivan.

Finally, speak up if you experience or notice a dangerous situation, or someone in distress!

+

Code of Conduct: Unacceptable behaviors

+

We adopt the full Code of Conduct of our home institution, the University of Colorado, details of which are found here. To summarize, examples of unacceptable and reportable behaviors include, but are not limited to:

+
  • Harassment, intimidation, or discrimination in any form
  • Physical or verbal abuse by anyone to anyone, including but not limited to a participant, member of the public, guest, member of any institution or sponsor
  • Unwelcome sexual attention or advances
  • Personal attacks directed at other guests, members, participants, etc.
  • Alarming, intimidating, threatening, or hostile comments or conduct
  • Inappropriate use of nudity and/or sexual images in public spaces or in presentations
  • Threatening or stalking anyone
  • Unauthorized use or sharing of personal or confidential information or private communication
  • Continuing interactions, including but not limited to conversations, photographs, recordings, instant messages, and emails, after being asked to stop
  • Ethical and scientific misconduct, including failing to credit contributions or respect intellectual property
  • Engaging in any illegal activities, including hacking, cheating, or unauthorized access to systems or data
  • Using the cyberinfrastructure provided by the organizers for activities unrelated to this event
  • Other conduct which could reasonably be considered inappropriate in a professional setting

The University of Colorado recognizes all Federal and State protected classes, which include the following: race, color, national origin, sex, pregnancy, age, marital status, disability, creed, religion, sexual orientation, gender identity, gender expression, veteran status, political affiliation or political philosophy. Mistreatment or harassment not related to protected class also has a negative impact and will be addressed by the ESIIL team.

+

Anyone requested to stop unacceptable behavior is expected to comply immediately.

+

If there is a clear violation of the code of conduct during an ESIIL event—for example, a meeting is Zoom bombed or a team member is verbally abusing another participant during a workshop— ESIIL leaders, facilitators (or their designee) or campus/local police may take any action deemed necessary and appropriate, including expelling the violator, or immediate removal of the violator from any online or in-person event or platform without warning or refund. If such actions are necessary, there will be follow up with the ESIIL Diversity Equity and Inclusion (DEI) team to determine what further action is needed (see Reporting Process and Consequences below).

+

Addressing Behavior Directly

+

For smaller incidents that might be settled with a brief conversation, you may choose to contact the person in question or set up a (video) conversation to discuss how the behavior affected you. Please use this approach only if you feel comfortable; you do not have to carry the weight of addressing these issues yourself. If you are interested in this option but unsure how to go about it, please contact the ESIIL DEI lead, Susan Sullivan, first—she will have advice on how to make the conversation happen and is available to join you in a conversation as requested.

+

Reporting Process and Consequences

+

We take any reports of Code of Conduct violations seriously, and aim to support those who are impacted and ensure that problematic behavior doesn’t happen again.

+

Making a Report

+

If you believe you’re experiencing or have experienced unacceptable behavior that is counter to this code of conduct, or you are witness to this behavior happening to someone else, we encourage you to contact our DEI lead:

  • Susan Sullivan, CIRES – Email: susan.sullivan@colorado.edu

You may also choose to anonymously report behavior to ESIIL using this form.

+

The DEI team will keep reports as confidential as possible. However, as mandatory reporters, we have an obligation to report alleged protected class violations to our home institution or to law enforcement.

+

Specifically:

+
  • Cases of potential protected-class harassment will be reported to the CU Office of Institutional Equity and Compliance.
  • If the violation is made by a member of another institution, that information may also be shared with that member’s home institution by the CU Office of Institutional Equity and Compliance under Title IX.
  • In some instances, harassment information may be shared with the National Science Foundation, which is the funding organization of ESIIL.

When we discuss incidents with people who are accused of misconduct (the respondent), we will anonymize details as much as possible to protect the privacy of the reporter and the person who was impacted (the complainant). In some cases, even when the details are anonymized, the respondent may guess at the identities of the reporter and complainants. If you have concerns about retaliation or your personal safety, please let us know (or note that in your report). We encourage you to report in any case, so that we can support you while keeping ESIIL members safe. In some cases, we are able to compile several anonymized reports into a pattern of behavior, and take action based on that pattern.

+

If you prefer to speak with someone who is not on the ESIIL leadership team, or who can maintain confidentiality, you may contact:

+
  • CU Ombuds – Phone: 303-492-5077 (for guidance and support navigating difficult conversations)
  • CU Office of Victim Assistance – Phone: 303-492-8855

If you want more information about when to report, or how to help someone who needs to report, please review the resources at Don’t Ignore It.

+

Note: The reporting party does not need to be directly involved in a code of conduct violation incident. Please make a bystander report if you observe a potentially dangerous situation, someone in distress, or violations of these guidelines, even if the situation is not happening to you.


What Happens After a Report Is Filed


After a member of the ESIIL DEI team takes your report, they will (if necessary) consult with the appropriate support people at CU. The ESIIL DEI team will respond with a status update within 5 business days.


During this time, they, or members of the CU Office of Institutional Equity and Compliance, will:

  • Meet with you or review report documentation to determine what happened
  • Consult documentation of past incidents for patterns of behavior
  • Discuss appropriate response(s) to the incident
  • Connect with the appropriate offices and/or make those response(s)
  • Determine the follow-up actions for any impacted people and/or the reporter
  • Follow up with the impacted people, including connecting them with support and resources.

As a result of this process, in minor cases ESIIL DEI may communicate with the respondent to:

  • Explain what happened and the impact of their behavior
  • Offer concrete examples of how to improve their behavior
  • Explain consequences of their behavior, or future consequences if the behavior is repeated.

For significant infractions, follow up to the report may be turned over to the CU Office of Institutional Equity and Compliance and/or campus police.


Possible Consequences to Code of Conduct Violations


What follows are examples of possible responses to an incident report. This list is not exhaustive, and ESIIL reserves the right to take any action it deems necessary. Generally speaking, the strongest response ESIIL may take is to completely ban a person from further engagement with ESIIL activities and, as required, report the person to the CU Office of Institutional Equity and Compliance and/or their home institution and NSF. If law enforcement should be involved, ESIIL will recommend that the complainant make that contact. Employees of CU Boulder may also be subject to consequences as determined by the institution.


In addition to the responses above, ESIIL responses may include but are not limited to the following:

  • A verbal discussion in person or via phone/Zoom followed by documentation of the conversation via email
  • Not publishing the video or slides of a talk that violated the code of conduct
  • Not allowing a speaker who violated the code of conduct to give (further) talks
  • Immediately ending any team leadership, membership, or other responsibilities and privileges that a person holds
  • Temporarily banning a person from ESIIL activities
  • Permanently banning a person from ESIIL activities
  • Nothing, if the behavior is determined to not be a code of conduct violation

Do you need more resources?


Please don’t hesitate to contact the ESIIL DEI lead, Susan Sullivan, if you have questions or concerns.


The CU Office of Institutional Equity and Compliance is a resource for all of us in navigating this space. They also offer resource materials that can assist you in exploring various topics and skills here.


If you have questions about what, when, or how to report, or how to help someone else with concerns, see Don’t Ignore It.


CU Ombuds Office: confidential support to navigate university situations. (Most universities have these resources.)


The CU Office of Victim Assistance: counseling is limited to CU students, staff, and faculty, though advocacy is open to everyone engaged with a CU-sponsored activity. Please look for a similar resource on your campus if you are from another institution.


National Crisis Hotlines


How are we doing?


Despite our best intentions, in some cases we may not be living up to our ideals of a positive, supportive, inclusive, respectful and collaborative community. If you feel we could do better, we welcome your feedback. Comments, suggestions and praise are also very welcome!

Acknowledgment

By participating in this event, you agree to abide by this code of conduct and understand the consequences of violating it. We believe that a respectful and inclusive environment benefits all participants and leads to more creative and successful outcomes. Thank you for your cooperation in making this event welcoming for all. Have fun!

+ + + + + + + + + + \ No newline at end of file diff --git a/additional-resources/cyverse_hacks/cyverse_hacks.md b/additional-resources/cyverse_hacks/cyverse_hacks.md new file mode 100644 index 0000000..2ee58d4 --- /dev/null +++ b/additional-resources/cyverse_hacks/cyverse_hacks.md @@ -0,0 +1,37 @@ +# Transitioning Workflows to CyVerse: Tips & Tricks + +## Forest Carbon Codefest Data Storage +- **Path:** `~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/` +- Your team has a subdirectory within the Team_outputs directory. + +## Setup +1. **CyVerse Account:** + - Create an account if not already owned. + - Contact Tyson for account upgrades after maximizing current limits. + +## GitHub Connection +- Follow [the guide for connecting GitHub to CyVerse](https://cu-esiil.github.io/forest-carbon-codefest/collaborating-on-the-cloud/cyverse-instructions/) +- Select “JupyterLab ESIIL” and choose “macrosystems” in the version dropdown. +- Clone into `/home/jovyan/data-store`. +- Clone `innovation-summit-utils` for SSH connection to GitHub. +- Run `conda install -c conda-forge openssh` in the terminal if encountering errors. +- GitHub authentication is session-specific. + +## RStudio in Discovery Environment +1. Copy your instance ID. It can be found in your analyis URL in form https://.cyverse.run/lab. +2. Use your ID in these links and run them each, in sequence, in the same browser window: + - `https://.cyverse.run/rstudio/auth-sign-in` + - `https://.cyverse.run/rstudio/` + +## Data Transfer to CyVerse +- Use GoCommands for HPC/CyVerse transfers. +- **Installation:** + - **Linux:** GOCMD_VER=$(curl -L -s https://raw.githubusercontent.com/cyverse/gocommands/main/VERSION.txt); \ +curl -L -s https://github.com/cyverse/gocommands/releases/download/${GOCMD_VER}/gocmd-${GOCMD_VER}-linux-amd64.tar.gz | tar zxvf - + - **Windows Powershell:** curl -o gocmdv.txt https://raw.githubusercontent.com/cyverse/gocommands/main/VERSION.txt ; $env:GOCMD_VER = (Get-Content gocmdv.txt) +curl -o gocmd.zip https://github.com/cyverse/gocommands/releases/download/$env:GOCMD_VER/gocmd-$env:GOCMD_VER-windows-amd64.zip ; tar zxvf gocmd.zip ; del gocmd.zip ; del gocmdv.txt +- **Usage:** + - ./gocmd init + - Hit enter until you are asked for your iRODS Username (which is your cyverse username) + - Use `put` for upload and `get` for download. + - Ensure correct CyVerse directory path. Note that the CyVerse directory path should start from “/iplant/home/…” (i.e. if you start from ‘/home/jovyan/…’ GoCommands will not find the directory and throw an error) diff --git a/additional-resources/cyverse_hacks/index.html b/additional-resources/cyverse_hacks/index.html new file mode 100644 index 0000000..6a930fc --- /dev/null +++ b/additional-resources/cyverse_hacks/index.html @@ -0,0 +1,1500 @@ + + + + + + + + + + + + + + + + + + + + + + Cyverse hacks - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Transitioning Workflows to CyVerse: Tips & Tricks


Forest Carbon Codefest Data Storage

  • Path: ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/ (see the listing sketch below)
  • Your team has a subdirectory within the Team_outputs directory.
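A quick sanity check is to list this data store from the JupyterLab terminal inside a CyVerse analysis. This is only a sketch; your team's folder name under Team_outputs will differ.

```bash
# List the shared Forest Carbon Codefest data store (path given above)
ls ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/

# Team subdirectories live under Team_outputs; find yours in this listing
ls ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Team_outputs/
```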

Setup

  1. CyVerse Account:
      • Create an account if you do not already have one.
      • Contact Tyson for an account upgrade after maximizing your current limits.

GitHub Connection

  • Follow the guide for connecting GitHub to CyVerse.
  • Select “JupyterLab ESIIL” and choose “macrosystems” in the version dropdown.
  • Clone into /home/jovyan/data-store (see the sketch below).
  • Clone innovation-summit-utils for SSH connection to GitHub.
  • Run conda install -c conda-forge openssh in the terminal if you encounter errors.
  • GitHub authentication is session-specific, so expect to re-authenticate in each new session.
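A minimal terminal sketch of the cloning steps above. The clone URL is an assumption (the repository is referred to only by name here, and the CU-ESIIL organization is inferred from the guide link), so substitute the address given in the linked instructions if it differs.

```bash
# Run inside the JupyterLab terminal of your CyVerse analysis
cd /home/jovyan/data-store

# Clone the helper repository used to set up an SSH connection to GitHub
# (URL assumed; check the guide linked above for the exact address)
git clone https://github.com/CU-ESIIL/innovation-summit-utils.git

# If SSH setup fails with an openssh-related error, install it with conda
conda install -c conda-forge openssh
```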

RStudio in Discovery Environment

  1. Copy your instance ID. It can be found in your analysis URL, which has the form https://<id>.cyverse.run/lab.
  2. Substitute your ID into these links and open them, in sequence, in the same browser window (see the sketch below):
      • https://<id>.cyverse.run/rstudio/auth-sign-in
      • https://<id>.cyverse.run/rstudio/
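For example, if your analysis URL were https://abc123.cyverse.run/lab (abc123 is a made-up instance ID), the two links would be constructed as follows:

```bash
# Replace abc123 with the instance ID copied from your own analysis URL
ID=abc123

# Open these two URLs, in this order, in the same browser window
echo "https://${ID}.cyverse.run/rstudio/auth-sign-in"
echo "https://${ID}.cyverse.run/rstudio/"
```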

Data Transfer to CyVerse

  • Use GoCommands for HPC/CyVerse transfers.
  • Installation:
      • Linux: GOCMD_VER=$(curl -L -s https://raw.githubusercontent.com/cyverse/gocommands/main/VERSION.txt); curl -L -s https://github.com/cyverse/gocommands/releases/download/${GOCMD_VER}/gocmd-${GOCMD_VER}-linux-amd64.tar.gz | tar zxvf -
      • Windows PowerShell: curl -o gocmdv.txt https://raw.githubusercontent.com/cyverse/gocommands/main/VERSION.txt ; $env:GOCMD_VER = (Get-Content gocmdv.txt) ; curl -o gocmd.zip https://github.com/cyverse/gocommands/releases/download/$env:GOCMD_VER/gocmd-$env:GOCMD_VER-windows-amd64.zip ; tar zxvf gocmd.zip ; del gocmd.zip ; del gocmdv.txt
  • Usage (see the usage sketch below):
      • ./gocmd init
      • Hit enter until you are asked for your iRODS username (which is your CyVerse username).
      • Use put for upload and get for download.
      • Ensure the correct CyVerse directory path. Note that the CyVerse directory path should start from /iplant/home/… (i.e. if you start from /home/jovyan/…, GoCommands will not find the directory and will throw an error).
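A minimal GoCommands usage sketch tying the notes above to the Forest Carbon Codefest data store. The team folder and file names are illustrative placeholders; only the /iplant/home/... prefix and the Team_outputs directory come from these notes.

```bash
# One-time configuration: press Enter to accept defaults until prompted for
# your iRODS username (your CyVerse username), then enter your password
./gocmd init

# Upload a local result into your team's folder under Team_outputs
# ("my_team" and "results.csv" are placeholders)
./gocmd put results.csv /iplant/home/shared/earthlab/forest_carbon_codefest/Team_outputs/my_team/

# Download a file from the data store into the current directory
./gocmd get /iplant/home/shared/earthlab/forest_carbon_codefest/Team_outputs/my_team/results.csv .
```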
+ + + + + + + + + + \ No newline at end of file diff --git a/additional-resources/intellectual-contributions b/additional-resources/intellectual-contributions new file mode 100644 index 0000000..8e5c3a0 --- /dev/null +++ b/additional-resources/intellectual-contributions @@ -0,0 +1,53 @@ +# ESIIL's Guideline for Intellectual Contributions and Credit +ESIIL's guidelines for intellectual contributions and credit take a comprehensive and inclusive +approach to contributorship, as per Allen et al. 2019 and the CRediT taxonomy (Brand et al. +2015). We advocate for contributions that adhere to the principles of open science while also +respecting data sovereignty (Carroll et al. 2020). We urge all ESIIL participants to implement +these guidelines in all our endorsed research and educational ventures. We acknowledge that +different disciplines, sectors, and institutions may have unique approaches to contributions, +credit, and authorship. However, we strongly advise teams to develop an agreement around +contributions and credit, which should be regularly revisited and updated throughout the project. +When in doubt, lean towards giving credit rather than withholding it. + +**Guidelines:** +1. Initiate early and ongoing conversations among teams about expectations and roles, +acknowledging that these may change over time. Teams should document these discussions +and formalize their decisions (e.g., such as through an authorship agreement form and +contributions table). +2. Honor various forms of contribution, for example, the categories from the CRediT taxonomy: +Conceptualization, Data curation, Formal analysis, Funding acquisition, Investigation, +Methodology, Project administration, Resources, Software, Supervision, Validation, +Visualization, Writing – original draft, and Writing – review & editing (and there may be other +forms of contribution not adequately addressed here); +3. Consider including author contributions in publications, even if it is not a requirement of the +journal or other outlet; +4. Clarify how credit is attributed to early-career scientists, and ensure that mechanisms are in +place to actively involve them in the contribution process; +5. Create leadership opportunities for, and promote the contributions of, members of +underrepresented communities in work outputs; +6. Where appropriate, provide open access publication of products throughout the entire +scientific process, including pre-prints (Hoy 2020) and for data, tools, code, models, educational +materials, manuscripts, and other intellectual contributions; +7. Consider alternative author listings that provide better recognition of contributions, such as +shared and indicated lead author roles, team author names for very large author groups, and/or +tiered authorship based on efforts; +8. Consider open source licenses when publishing; +9. Explore ways to track success beyond traditional publication citations, for example, altmetrics +that capture attention and engagement on digital platforms, patents and inventions, policy +impact, among others. +These guidelines on intellectual contribution and credit are intended to create a safe intellectual +space for idea exchange, acknowledgment of individual contributions, and facilitation of +large-scale collaborations. + +**References:** +Allen, L., A. O’Connell, and V. Kiermer. 2019. How can we ensure visibility and diversity in +research contributions? How the Contributor Role Taxonomy (CRediT) is helping the shift from +authorship to contributorship. 
Learned Publishing 32:71–74. +Brand, A., L. Allen, M. Altman, M. Hlava, and J. Scott. 2015. Beyond authorship: attribution, +contribution, collaboration, and credit. Learned Publishing 28:151–155. +Carroll, S. R., I. Garba, O. L. Figueroa-Rodríguez, J. Holbrook, R. Lovett, S. Materechera, M. +Parsons, K. Raseroka, D. Rodriguez-Lonebear, R. Rowe, R. Sara, J. D. Walker, J. Anderson, +and M. Hudson. 2020. The CARE Principles for Indigenous data governance. Data Science +Journal 19:43. +Hoy, M. B. 2020. Rise of the Rxivs: How preprint servers are changing the publishing process. +Medical Reference Services Quarterly 39:84–89. diff --git a/additional-resources/participant_agreement/index.html b/additional-resources/participant_agreement/index.html new file mode 100644 index 0000000..92305e0 --- /dev/null +++ b/additional-resources/participant_agreement/index.html @@ -0,0 +1,1498 @@ + + + + + + + + + + + + + + + + + + + + + + Participant Agreement - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Participant Agreement

+

This Participant Agreement (“Agreement”) is a contract between you (“You/Your” or “Participant”) and THE REGENTS OF THE UNIVERSITY OF COLORADO, a body corporate, acting on behalf of the University of Colorado Boulder, a public institution of higher education created under the Constitution and the Law of the State of Colorado (the “University”), having offices located at 3100 Marine Street, Boulder, CO 80309.

+

In consideration of Your participation in the 2024 ESIIL Innovation Summit, the sufficiency of which is hereby acknowledged, You agree as follows:

+

Environmental Data Science Innovation & Inclusion Lab (“ESIIL”) is a National Science Foundation (“NSF”) funded data synthesis center led by the University. Earth Lab is part of the Cooperative Institute for Research in Environmental Sciences (CIRES) specializing in data-intensive open, reproducible environmental science. ESIIL will host the Summit in person from May 13 through May 16, 2024.

+

Innovation Summit Description

+

ESIIL's 2024 Innovation Summit will offer an opportunity to use big data to understand resilience across genes, species, ecosystems and societies, advance ecological forecasting with solutions in mind, and inform adaptive management and natural climate solutions. The Summit will support attendees to advance data-informed courses of action for resilience and adaptation in the face of our changing environment. It will be an in-person ‘unconference’, enabling participants to dynamically work on themes that most inspire them, with inclusive physical and intellectual spaces for working together. Over two and a half days participants will work in teams to explore research questions using open science approaches, including: data infrastructure, artificial intelligence (AI) and novel analytics, and cloud computing. Participants will be encouraged to work across and respect different perspectives, with the aim of co-developing resilience solutions. ESIIL will provide participants with opportunities to learn more about cultural intelligence, ethical and open science practices, and leadership in the rapidly evolving field of environmental data science. Overall, the Summit will capitalize on the combination of open data and analytics opportunities to develop innovative or impactful approaches that improve environmental resilience and adaptation.

+

How to Participate

+

You will join a team of environmental scientists, data experts, and coders to explore curated data, consider the objectivity of the data, propose a scientific question that can be addressed with all or some of the data sets, and analyze the data in an attempt to answer your scientific question. You will present your Work to the event community. ESIIL will provide environmental data, cyberinfrastructure, training in cyberinfrastructure and data analytics, and technical support.

+

Representations and Warranties

+

By and through Your participation in the Summit, You represent and warrant the following:

  • You have read, understand, and agree to abide by the Code of Conduct and Respectful Inclusive Collaboration Guidelines for the 2024 ESIIL Innovation Summit (“Code of Conduct”).
  • Any decisions concerning the Code of Conduct, Official Rules, or any other matter relating to this Summit by the University are final and binding on all Participants.

Summit Assets

+

5.1 Access and Use

+

By participating in the Innovation Summit, You may receive access to certain datasets, webinars, and/or other copyrighted materials (collectively, the “Summit Assets”). You agree to follow all licenses, restrictions, and other instructions provided to You with the Summit Assets.

+

5.2 Disclaimer

+

The Summit Assets are provided “as is” without warranty of any kind, either express or implied, including, without limitation, any implied warranties of merchantability and fitness for a particular purpose. Without limiting the foregoing, the University does not warrant that the Materials will be suitable for Your Solution or that the operation or supply of the Summit Assets will be uninterrupted or error free.

+

5.3 Restrictions

+

You agree not to access or use the Summit Assets in a manner that may interfere with any other participants’ or users’ use of such assets, unless provided with express written consent by the University. Your access to and use of the Summit Assets may be limited, throttled, or terminated at any time at the sole discretion of the University.

+

5.4 Originality and Third-Party Materials

+

You represent that Your Work is Your original creation. If you obtain permission to include third-party materials, You represent that Your Work includes complete details of any third-party license or other restriction (including, but not limited to, related patents and trademarks) of which You are aware and which are associated with any part of Your Work. You represent and warrant that You will not submit any materials to the University that You know or believe to have components that are malicious or harmful. You represent that You will perform a reasonable amount of due diligence in order to be properly informed of third-party licenses, infringing materials, or harmful content associated with any part of Your Work.

+

5.5 Work Publication

+

You agree to make Your Work publicly available in GitHub under the MIT open-source license within five (5) months from the end of the Summit.

+

Limitation of Liability

+

TO THE EXTENT ALLOWED BY LAW, IN NO EVENT SHALL THE UNIVERSITY, ITS PARTNERS, LICENSORS, SERVICE PROVIDERS, OR ANY OF THEIR RESPECTIVE OFFICERS, DIRECTORS, AGENTS, EMPLOYEES OR REPRESENTATIVES, BE LIABLE FOR DIRECT, INCIDENTAL, CONSEQUENTIAL, EXEMPLARY OR PUNITIVE DAMAGES ARISING OUT OF OR IN CONNECTION WITH THE SUMMIT OR THIS AGREEMENT (HOWEVER ARISING, INCLUDING NEGLIGENCE). IF YOU HAVE A DISPUTE WITH ANY PARTICIPANT OR ANY OTHER THIRD PARTY, YOU RELEASE THE UNIVERSITY, ITS, PARTNERS, LICENSORS, AND SERVICE PROVIDERS, AND EACH OF THEIR RESPECTIVE OFFICERS, DIRECTORS, AGENTS, EMPLOYEES AND REPRESENTATIVES FROM ANY AND ALL CLAIMS, DEMANDS AND DAMAGES (ACTUAL AND CONSEQUENTIAL) OF EVERY KIND AND NATURE ARISING OUT OF OR IN ANY WAY CONNECTED WITH SUCH DISPUTES. YOU AGREE THAT ANY CLAIMS AGAINST UNIVERSITY ARISING OUT OF THE SUMMIT OR THIS AGREEMENT MUST BE FILED WITHIN ONE YEAR AFTER SUCH CLAIM AROSE; OTHERWISE, YOUR CLAIM IS PERMANENTLY BARRED.

+

Not an Offer or Contract of Employment

+

Under no circumstances will Your participation in the Summit or anything in this Agreement be construed as an offer or contract of employment with the University.

+

Additional Terms

  • You must be at least eighteen (18) years of age to participate in the Summit. The Summit is subject to applicable federal, state, and local laws.
  • The University reserves the right to permanently disqualify any person from the Summit that it reasonably believes has violated this Agreement, the Code of Conduct, and/or the Official Rules.
  • Any attempt to deliberately damage the Summit or the operation thereof is unlawful and subject to legal action by the University, which may seek damages to the fullest extent permitted by law.
  • The University assumes no responsibility for any injury or damage to Your or any other person’s computer relating to or resulting from entering or downloading materials or software in connection with the Summit.
  • The University is not responsible for telecommunications, network, electronic, technical, or computer failures of any kind; for inaccurate transcription of entry information; for any human or electronic error; or for Solutions that are stolen, misdirected, garbled, delayed, lost, late, damaged, or returned.
  • The University reserves the right to cancel, modify, or suspend the Summit or any element thereof (including, without limitation, this Agreement) without notice in any manner and for any reason (including, without limitation, in the event of any unanticipated occurrence that is not fully addressed in this Agreement).
  • The University may prohibit any person from participating in the Summit, if such person shows a disregard for this Agreement; acts with an intent to annoy, abuse, threaten, or harass any other entrant or any agents or representatives of the University (or any associated, partners, licensors, or service providers for the University); or behaves in any other disruptive manner (as determined by the University in its sole discretion).
  • Nothing contained in this Agreement shall be construed as an express or implied waiver by University of its governmental immunity or of the governmental immunity of the State of Colorado.
  • Your Work shall not contain any item(s) that are either export-controlled under the International Traffic in Arms Regulations, or that appear on the Commerce Control List (except as EAR99) of the Export Administration Regulations.

Dispute Resolution

+

This Agreement and the Summit shall be governed and construed in accordance with and governed by the laws of the state of Colorado without giving effect to conflict of law provisions.

+

Entire Agreement

+

This Agreement and the Event Code of Conduct constitute the entire agreement between the University and You with respect to the Summit and supersede all previous or contemporaneous oral or written agreements concerning the Summit. In the event of a conflict between this Agreement and the Event Code of Conduct, the conflict shall be resolved with the following order of precedence:

  1. This Agreement
  2. The Event Code of Conduct

Severability

+

The invalidity, illegality, or unenforceability of any one or more phrases, sentences, clauses, or sections in this Agreement does not affect the remaining portions of this Agreement.

+

If you have questions about the Summit, please contact ESIIL at esiil@colorado.edu.

+

Guidelines for Intellectual Contributions and Credit

+

ESIIL Guidelines for Intellectual Contributions and Credit

+ + + + + + + + + + \ No newline at end of file diff --git a/additional-resources/participant_agreement/participant_agreement.md b/additional-resources/participant_agreement/participant_agreement.md new file mode 100644 index 0000000..4442b02 --- /dev/null +++ b/additional-resources/participant_agreement/participant_agreement.md @@ -0,0 +1,80 @@ +# Participant Agreement + +This Participant Agreement (“Agreement”) is a contract between you (“You/Your” or “Participant”) and THE REGENTS OF THE UNIVERSITY OF COLORADO, a body corporate, acting on behalf of the University of Colorado Boulder, a public institution of higher education created under the Constitution and the Law of the State of Colorado (the “University”), having offices located at 3100 Marine Street, Boulder, CO 80309. + +In consideration of Your participation in the 2024 ESIIL Innovation Summit, the sufficiency of which is hereby acknowledged, You agree as follows: + +Environmental Data Science Innovation & Inclusion Lab (“ESIIL”) is a National Science Foundation (“NSF”) funded data synthesis center led by the University. Earth Lab is part of the Cooperative Institute for Research in Environmental Sciences (CIRES) specializing in data-intensive open, reproducible environmental science. ESIIL will host the Summit in person from May 13 through May 16, 2024. + +## Innovation Summit Description + +ESIIL's 2024 Innovation Summit will offer an opportunity to use big data to understand resilience across genes, species, ecosystems and societies, advance ecological forecasting with solutions in mind, and inform adaptive management and natural climate solutions. The Summit will support attendees to advance data-informed courses of action for resilience and adaptation in the face of our changing environment. It will be an in-person ‘unconference’, enabling participants to dynamically work on themes that most inspire them, with inclusive physical and intellectual spaces for working together. Over two and a half days participants will work in teams to explore research questions using open science approaches, including: data infrastructure, artificial intelligence (AI) and novel analytics, and cloud computing. Participants will be encouraged to work across and respect different perspectives, with the aim of co-developing resilience solutions. ESIIL will provide participants with opportunities to learn more about cultural intelligence, ethical and open science practices, and leadership in the rapidly evolving field of environmental data science. Overall, the Summit will capitalize on the combination of open data and analytics opportunities to develop innovative or impactful approaches that improve environmental resilience and adaptation. + +## How to Participate + +You will join a team of environmental scientists, data experts, and coders to explore curated data, consider the objectivity of the data, propose a scientific question that can be addressed with all or some of the data sets, and analyze the data in an attempt to answer your scientific question. You will present your Work to the event community. ESIIL will provide environmental data, cyberinfrastructure, cyberinfrastructure and data analytics training, and technical support. + +## Representations and Warranties + +By and through Your participation in the Summit, You represent and warrant the following: + +- You have read, understand, and agree to abide by the Code of Conduct and Respectful Inclusive Collaboration Guidelines for the 2024 ESIIL Innovation Summit (“Code of Conduct”). 
+- Any decisions concerning the Code of Conduct, Official Rules, or any other matter relating to this Summit by the University is final and binding on all Participants. + +# Summit Assets + +## 5.1 Access and Use +By participating in the Innovation Summit, You may receive access to certain datasets, webinars, and/or other copyrighted materials (collectively, the “Summit Assets”). You agree to follow all licenses, restrictions, and other instructions provided to You with the Summit Assets. + +## 5.2 Disclaimer +The Summit Assets are provided “as is” without warranty of any kind, either express or implied, including, without limitation, any implied warranties of merchantability and fitness for a particular purpose. Without limiting the foregoing, the University does not warrant that the Materials will be suitable for Your Solution or that the operation or supply of the Summit Assets will be uninterrupted or error free. + +## 5.3 Restrictions +You agree not to access or use the Summit Assets in a manner that may interfere with any other participants’ or users’ use of such assets, unless provided with express written consent by the University. Your access to and use of the Summit Assets may be limited, throttled, or terminated at any time at the sole discretion of the University. + +## 5.4 Originality and Third-Party Materials +You represent that Your Work is Your original creation. If you obtain permission to include third-party materials, You represent that Your Work includes complete details of any third-party license or other restriction (including, but not limited to, related patents and trademarks) of which You are aware and which are associated with any part of Your Work. You represent and warrant that You will not submit any materials to the University that You know or believe to have components that are malicious or harmful. You represent that You will perform a reasonable amount of due diligence in order to be properly informed of third-party licenses, infringing materials, or harmful content associated with any part of Your Work. + +## 5.5 Work Publication +You agree to make Your Work publicly available in GitHub under the MIT open-source license within five (5) months from the end of the Summit. + +# Limitation of Liability + +TO THE EXTENT ALLOWED BY LAW, IN NO EVENT SHALL THE UNIVERSITY, ITS PARTNERS, LICENSORS, SERVICE PROVIDERS, OR ANY OF THEIR RESPECTIVE OFFICERS, DIRECTORS, AGENTS, EMPLOYEES OR REPRESENTATIVES, BE LIABLE FOR DIRECT, INCIDENTAL, CONSEQUENTIAL, EXEMPLARY OR PUNITIVE DAMAGES ARISING OUT OF OR IN CONNECTION WITH THE SUMMIT OR THIS AGREEMENT (HOWEVER ARISING, INCLUDING NEGLIGENCE). IF YOU HAVE A DISPUTE WITH ANY PARTICIPANT OR ANY OTHER THIRD PARTY, YOU RELEASE THE UNIVERSITY, ITS, PARTNERS, LICENSORS, AND SERVICE PROVIDERS, AND EACH OF THEIR RESPECTIVE OFFICERS, DIRECTORS, AGENTS, EMPLOYEES AND REPRESENTATIVES FROM ANY AND ALL CLAIMS, DEMANDS AND DAMAGES (ACTUAL AND CONSEQUENTIAL) OF EVERY KIND AND NATURE ARISING OUT OF OR IN ANY WAY CONNECTED WITH SUCH DISPUTES. YOU AGREE THAT ANY CLAIMS AGAINST UNIVERSITY ARISING OUT OF THE SUMMIT OR THIS AGREEMENT MUST BE FILED WITHIN ONE YEAR AFTER SUCH CLAIM AROSE; OTHERWISE, YOUR CLAIM IS PERMANENTLY BARRED. + +# Not an Offer or Contract of Employment + +Under no circumstances will Your participation in the Summit or anything in this Agreement be construed as an offer or contract of employment with the University. + +# Additional Terms + +- You must be at least eighteen (18) years of age to participate in the Summit. 
The Summit is subject to applicable federal, state, and local laws. +- The University reserves the right to permanently disqualify any person from the Summit that it reasonably believes has violated this Agreement, the Code of Conduct, and/or the Official Rules. +- Any attempt to deliberately damage the Summit or the operation thereof is unlawful and subject to legal action by the University, which may seek damages to the fullest extent permitted by law. +- The University assumes no responsibility for any injury or damage to Your or any other person’s computer relating to or resulting from entering or downloading materials or software in connection with the Summit. +- The University is not responsible for telecommunications, network, electronic, technical, or computer failures of any kind; for inaccurate transcription of entry information; for any human or electronic error; or for Solutions that are stolen, misdirected, garbled, delayed, lost, late, damaged, or returned. +- The University reserves the right to cancel, modify, or suspend the Summit or any element thereof (including, without limitation, this Agreement) without notice in any manner and for any reason (including, without limitation, in the event of any unanticipated occurrence that is not fully addressed in this Agreement). +- The University may prohibit any person from participating in the Summit, if such person shows a disregard for this Agreement; acts with an intent to annoy, abuse, threaten, or harass any other entrant or any agents or representatives of the University (or any associated, partners, licensors, or service providers for the University); or behaves in any other disruptive manner (as determined by the University in its sole discretion). +- Nothing contained in this Agreement shall be construed as an express or implied waiver by University of its governmental immunity or of the governmental immunity of the State of Colorado. +- Your Work shall not contain any item(s) that are either export-controlled under the International Traffic in Arms Regulations, or that appear on the Commerce Control List (except as EAR99) of the Export Administration Regulations. + +# Dispute Resolution + +This Agreement and the Summit shall be governed and construed in accordance with and governed by the laws of the state of Colorado without giving effect to conflict of law provisions. + +# Entire Agreement + +This Agreement and the Event Code of Conduct, constitutes the entire agreement between the University and You with respect to the Summit and supersedes all previous or contemporaneous oral or written agreements concerning the Summit. In the event of a conflict between this Agreement and/or the Event Code of Conduct, the conflict shall be resolved with the following order of precedence: + +1. This Agreement +2. The Event Code of Conduct + +# Severability + +The invalidity, illegality, or unenforceability of any one or more phrases, sentences, clauses, or sections in this Agreement does not affect the remaining portions of this Agreement. + +If you have questions about the Summit, please contact ESIIL at [esiil@colorado.edu](mailto:esiil@colorado.edu). 
+ +## Guidelines for Intellectual Contributions and Credit + +[ESIIL Guidelines for Intellectual Contributions and Credit](https://drive.google.com/file/d/1WcjV412EzCxohNtjFtca-o2Gpf36ISAX/view) diff --git a/additional-resources/useful_links/index.html b/additional-resources/useful_links/index.html new file mode 100644 index 0000000..5d49f73 --- /dev/null +++ b/additional-resources/useful_links/index.html @@ -0,0 +1,1389 @@ + + + + + + + + + + + + + + + + + + + + + + Useful links - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + \ No newline at end of file diff --git a/additional-resources/useful_links/useful_links.md b/additional-resources/useful_links/useful_links.md new file mode 100644 index 0000000..1b91301 --- /dev/null +++ b/additional-resources/useful_links/useful_links.md @@ -0,0 +1,9 @@ +# Useful links + +[CyVerse User Portal](https://user.cyverse.org/services) + +[GitHub](https://github.com/) + +[ESIIL Website](https://esiil.org/) + +[2024 Summit Slack](https://join.slack.com/t/slack-ekh3745/shared_invite/zt-2ffdma1ck-rdDxOfWeNIk29vhbCwpHzQ) diff --git a/agenda/agenda.md b/agenda/agenda.md new file mode 100644 index 0000000..ca3766b --- /dev/null +++ b/agenda/agenda.md @@ -0,0 +1,74 @@ +# ESIIL Innovation Summit - Agenda +**Big Data for Environmental Resilience and Adaptation** +**Date:** May 13-16, 2024 +**Location:** SEEC Auditorium, University of Colorado Boulder +**[Summit Website](https://cu-esiil.github.io/Innovation-Summit-2024/)** + +## Goals of the 2024 ESIIL Innovation Summit +- Explore big data for environmental resilience and adaptation by identifying data synthesis opportunities and utilizing ESIIL cloud-compute capabilities. +- Promote best practices in ethical, open science, by supporting accessibility and usability of environmental data by all stakeholders. +- Champion ethical and equitable practices in environmental science, honoring data sovereignty and encouraging the responsible use of AI. +- Support diverse and inclusive teams by establishing collaborations around data-inspired themes across different disciplines, sectors, career stages, and backgrounds. +- Encourage the co-production of environmental knowledge with communities that are experiencing significant environmental challenges. + +### Day Zero - May 13th +| Time | Event | Location | +|------------------|------------------------------|--------------| +| 9:00 AM - 12:00 PM MDT | Leadership Program | S372 (Viz Studio) | +| 9:00 AM - 12:00 PM MDT | Auditorium Set Up: Tables, Questions, Handouts, etc. | SEEC Auditorium | +| 12:00 - 1:00 PM MDT | Facilitators Lunch | | +| 1:00 or 1:30 PM MDT | Concurrent Optional Activities | NEON Tour, HIKE | +| 3:00 - 5:00 PM MDT | Early Registration opens | SEEC Atrium | +| 3:00 - 4:00 PM MDT | Technical Help Desk | SEEC Auditorium | +| 4:00 - 6:00 PM MDT | Social Mixer | SEEC Cafe | + +### Day One - May 14th +| Time | Event | Location | +|------------------|------------------------------|--------------| +| 8:30 AM MDT | Registration | SEEC Atrium | +| 9:00 AM MDT | Welcome & Opening Ceremony | SEEC Auditorium | +| 9:35 AM MDT | Logistics and Planning Team Introductions | SEEC Auditorium | +| 9:45 AM MDT | Positive Polarities | SEEC Auditorium | +| 10:00 AM MDT | Navigating Miscommunications | SEEC Auditorium | +| 10:15 AM MDT | Creating a shared language | SEEC Auditorium | +| 10:30 AM MDT | Break | SEEC Atrium | +| 10:45 AM MDT | Science of Team Science | SEEC Auditorium | +| 11:05 AM MDT | Big Data for Resilience | SEEC Auditorium | +| 11:45 AM MDT | Q&A | SEEC Auditorium | +| 12:15 PM MDT | Group Photo | SEEC Atrium | +| 12:30 PM MDT | Lunch | SEEC Atrium | +| 1:30 PM MDT | Leveraging NEON to Understand Ecosystem Resilience Across Scales | SEEC Auditorium | +| 1:45 PM MDT | Explore Topics in Resilience and Adaptation | SEEC Auditorium | +| 3:15 PM MDT | Break | SEEC Atrium | +| 3:30 PM MDT | Team Breakouts: Innovation Time | Rooms available: S124, S127, S221, etc. 
| +| 4:20 PM MDT | Report Back | SEEC Auditorium | +| 4:50 PM MDT | Whole Group Reflection | SEEC Auditorium | +| 4:55 PM MDT | Day 1 Evaluation | SEEC Auditorium | +| 5:00 PM MDT | Day 1 Close | SEEC Auditorium | + +### Day Two - May 15th +| Time | Event | Location | +|------------------|------------------------------|--------------| +| 8:30 AM MDT | Coffee & Tea | SEEC Atrium | +| 9:00 AM MDT | Welcome Back | SEEC Auditorium | +| 9:20 AM MDT | AI Research for Climate Change and Environmental Sustainability | SEEC Auditorium | +| 9:35 PM MDT | Prepare for the day | SEEC Auditorium | +| 9:50 AM MDT | Team Breakouts: Innovation Time | Breakout Spaces with your Team | +| 12:30 PM MDT | Lunch | SEEC Atrium | +| 1:30 PM MDT | Working Through the Groan Zone | SEEC Auditorium | +| 1:50 PM MDT | Team Breakouts: Innovation Time | Breakout Spaces with your Team | +| 4:10 PM MDT | Report Back | SEEC Auditorium | +| 4:50 PM MDT | Whole Group Reflection | SEEC Auditorium | +| 5:00 PM MDT | Day 2 Close | | + +### Day Three - May 16th +| Time | Event | Location | +|------------------|------------------------------|--------------| +| 8:30 AM MDT | Coffee & Tea | SEEC Atrium | +| 9:00 AM MDT | Welcome Back | SEEC Auditorium | +| 9:15 AM MDT | Final Team Breakout: Prepare for the Final Report Back | Breakout Spaces with your Team | +| 9:45 AM MDT | Final Break | SEEC Atrium | +| 10:00 AM MDT | Final Report back | SEEC Auditorium | +| 11:20 AM MDT | What’s Next? | SEEC Auditorium | +| 11:35 AM MDT | Final Reflection | SEEC Auditorium | +| 11:50 PM MDT | Closing | SEEC Auditorium | diff --git a/agenda/index.html b/agenda/index.html new file mode 100644 index 0000000..d055165 --- /dev/null +++ b/agenda/index.html @@ -0,0 +1,1755 @@ + + + + + + + + + + + + + + + + + + + + + + Agenda - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

ESIIL Innovation Summit - Agenda


Big Data for Environmental Resilience and Adaptation
Date: May 13-16, 2024
Location: SEEC Auditorium, University of Colorado Boulder
Summit Website


Goals of the 2024 ESIIL Innovation Summit

  • Explore big data for environmental resilience and adaptation by identifying data synthesis opportunities and utilizing ESIIL cloud-compute capabilities.
  • Promote best practices in ethical, open science, by supporting accessibility and usability of environmental data by all stakeholders.
  • Champion ethical and equitable practices in environmental science, honoring data sovereignty and encouraging the responsible use of AI.
  • Support diverse and inclusive teams by establishing collaborations around data-inspired themes across different disciplines, sectors, career stages, and backgrounds.
  • Encourage the co-production of environmental knowledge with communities that are experiencing significant environmental challenges.

Day Zero - May 13th

| Time | Event | Location |
|------|-------|----------|
| 9:00 AM - 12:00 PM MDT | Leadership Program | S372 (Viz Studio) |
| 9:00 AM - 12:00 PM MDT | Auditorium Set Up: Tables, Questions, Handouts, etc. | SEEC Auditorium |
| 12:00 - 1:00 PM MDT | Facilitators Lunch | |
| 1:00 or 1:30 PM MDT | Concurrent Optional Activities | NEON Tour, HIKE |
| 3:00 - 5:00 PM MDT | Early Registration opens | SEEC Atrium |
| 3:00 - 4:00 PM MDT | Technical Help Desk | SEEC Auditorium |
| 4:00 - 6:00 PM MDT | Social Mixer | SEEC Cafe |

Day One - May 14th

| Time | Event | Location |
|------|-------|----------|
| 8:30 AM MDT | Registration | SEEC Atrium |
| 9:00 AM MDT | Welcome & Opening Ceremony | SEEC Auditorium |
| 9:35 AM MDT | Logistics and Planning Team Introductions | SEEC Auditorium |
| 9:45 AM MDT | Positive Polarities | SEEC Auditorium |
| 10:00 AM MDT | Navigating Miscommunications | SEEC Auditorium |
| 10:15 AM MDT | Creating a shared language | SEEC Auditorium |
| 10:30 AM MDT | Break | SEEC Atrium |
| 10:45 AM MDT | Science of Team Science | SEEC Auditorium |
| 11:05 AM MDT | Big Data for Resilience | SEEC Auditorium |
| 11:45 AM MDT | Q&A | SEEC Auditorium |
| 12:15 PM MDT | Group Photo | SEEC Atrium |
| 12:30 PM MDT | Lunch | SEEC Atrium |
| 1:30 PM MDT | Leveraging NEON to Understand Ecosystem Resilience Across Scales | SEEC Auditorium |
| 1:45 PM MDT | Explore Topics in Resilience and Adaptation | SEEC Auditorium |
| 3:15 PM MDT | Break | SEEC Atrium |
| 3:30 PM MDT | Team Breakouts: Innovation Time | Rooms available: S124, S127, S221, etc. |
| 4:20 PM MDT | Report Back | SEEC Auditorium |
| 4:50 PM MDT | Whole Group Reflection | SEEC Auditorium |
| 4:55 PM MDT | Day 1 Evaluation | SEEC Auditorium |
| 5:00 PM MDT | Day 1 Close | SEEC Auditorium |

Day Two - May 15th

| Time | Event | Location |
|------|-------|----------|
| 8:30 AM MDT | Coffee & Tea | SEEC Atrium |
| 9:00 AM MDT | Welcome Back | SEEC Auditorium |
| 9:20 AM MDT | AI Research for Climate Change and Environmental Sustainability | SEEC Auditorium |
| 9:35 AM MDT | Prepare for the day | SEEC Auditorium |
| 9:50 AM MDT | Team Breakouts: Innovation Time | Breakout Spaces with your Team |
| 12:30 PM MDT | Lunch | SEEC Atrium |
| 1:30 PM MDT | Working Through the Groan Zone | SEEC Auditorium |
| 1:50 PM MDT | Team Breakouts: Innovation Time | Breakout Spaces with your Team |
| 4:10 PM MDT | Report Back | SEEC Auditorium |
| 4:50 PM MDT | Whole Group Reflection | SEEC Auditorium |
| 5:00 PM MDT | Day 2 Close | |

Day Three - May 16th

| Time | Event | Location |
|------|-------|----------|
| 8:30 AM MDT | Coffee & Tea | SEEC Atrium |
| 9:00 AM MDT | Welcome Back | SEEC Auditorium |
| 9:15 AM MDT | Final Team Breakout: Prepare for the Final Report Back | Breakout Spaces with your Team |
| 9:45 AM MDT | Final Break | SEEC Atrium |
| 10:00 AM MDT | Final Report back | SEEC Auditorium |
| 11:20 AM MDT | What’s Next? | SEEC Auditorium |
| 11:35 AM MDT | Final Reflection | SEEC Auditorium |
| 11:50 AM MDT | Closing | SEEC Auditorium |
+ + + + + + + + + + \ No newline at end of file diff --git a/assets/ForestBanner_NASA_Imagery.png b/assets/ForestBanner_NASA_Imagery.png new file mode 100644 index 0000000..49ac839 Binary files /dev/null and b/assets/ForestBanner_NASA_Imagery.png differ diff --git a/assets/TreeMap2016_Data_Dictionary.pdf b/assets/TreeMap2016_Data_Dictionary.pdf new file mode 100644 index 0000000..95160b5 Binary files /dev/null and b/assets/TreeMap2016_Data_Dictionary.pdf differ diff --git a/assets/_mkdocstrings.css b/assets/_mkdocstrings.css new file mode 100644 index 0000000..e69de29 diff --git a/assets/codefest_hero.png b/assets/codefest_hero.png new file mode 100644 index 0000000..c03dfb6 Binary files /dev/null and b/assets/codefest_hero.png differ diff --git a/assets/create-github-keypair.png b/assets/create-github-keypair.png new file mode 100644 index 0000000..668aaa1 Binary files /dev/null and b/assets/create-github-keypair.png differ diff --git a/assets/cyverse-user-portal.png b/assets/cyverse-user-portal.png new file mode 100644 index 0000000..7a682b4 Binary files /dev/null and b/assets/cyverse-user-portal.png differ diff --git a/assets/cyverse_basics/app_launch.png b/assets/cyverse_basics/app_launch.png new file mode 100644 index 0000000..30fc759 Binary files /dev/null and b/assets/cyverse_basics/app_launch.png differ diff --git a/assets/cyverse_basics/app_settings.png b/assets/cyverse_basics/app_settings.png new file mode 100644 index 0000000..dbf7179 Binary files /dev/null and b/assets/cyverse_basics/app_settings.png differ diff --git a/assets/cyverse_basics/apps.png b/assets/cyverse_basics/apps.png new file mode 100644 index 0000000..ad242a7 Binary files /dev/null and b/assets/cyverse_basics/apps.png differ diff --git a/assets/cyverse_basics/click_cyverse_utils.png b/assets/cyverse_basics/click_cyverse_utils.png new file mode 100644 index 0000000..ddf7445 Binary files /dev/null and b/assets/cyverse_basics/click_cyverse_utils.png differ diff --git a/assets/cyverse_basics/clone.png b/assets/cyverse_basics/clone.png new file mode 100644 index 0000000..875860b Binary files /dev/null and b/assets/cyverse_basics/clone.png differ diff --git a/assets/cyverse_basics/cyverse-utils.png b/assets/cyverse_basics/cyverse-utils.png new file mode 100644 index 0000000..de9c188 Binary files /dev/null and b/assets/cyverse_basics/cyverse-utils.png differ diff --git a/assets/cyverse_basics/email.png b/assets/cyverse_basics/email.png new file mode 100644 index 0000000..6d4bcfa Binary files /dev/null and b/assets/cyverse_basics/email.png differ diff --git a/assets/cyverse_basics/final.png b/assets/cyverse_basics/final.png new file mode 100644 index 0000000..3d92723 Binary files /dev/null and b/assets/cyverse_basics/final.png differ diff --git a/assets/cyverse_basics/go_to_analysis.png b/assets/cyverse_basics/go_to_analysis.png new file mode 100644 index 0000000..802ef3f Binary files /dev/null and b/assets/cyverse_basics/go_to_analysis.png differ diff --git a/assets/cyverse_basics/jupyterlab.png b/assets/cyverse_basics/jupyterlab.png new file mode 100644 index 0000000..f4b4540 Binary files /dev/null and b/assets/cyverse_basics/jupyterlab.png differ diff --git a/assets/cyverse_basics/key.png b/assets/cyverse_basics/key.png new file mode 100644 index 0000000..1670fea Binary files /dev/null and b/assets/cyverse_basics/key.png differ diff --git a/assets/cyverse_basics/launch.png b/assets/cyverse_basics/launch.png new file mode 100644 index 0000000..49e7044 Binary files /dev/null and b/assets/cyverse_basics/launch.png 
differ diff --git a/assets/cyverse_basics/new_key.png b/assets/cyverse_basics/new_key.png new file mode 100644 index 0000000..5306330 Binary files /dev/null and b/assets/cyverse_basics/new_key.png differ diff --git a/assets/cyverse_basics/open_cyverse_utils.png b/assets/cyverse_basics/open_cyverse_utils.png new file mode 100644 index 0000000..d542da4 Binary files /dev/null and b/assets/cyverse_basics/open_cyverse_utils.png differ diff --git a/assets/cyverse_basics/paste_key.png b/assets/cyverse_basics/paste_key.png new file mode 100644 index 0000000..f515a1b Binary files /dev/null and b/assets/cyverse_basics/paste_key.png differ diff --git a/assets/cyverse_basics/script_1.png b/assets/cyverse_basics/script_1.png new file mode 100644 index 0000000..f316c2c Binary files /dev/null and b/assets/cyverse_basics/script_1.png differ diff --git a/assets/cyverse_basics/settings.png b/assets/cyverse_basics/settings.png new file mode 100644 index 0000000..874200e Binary files /dev/null and b/assets/cyverse_basics/settings.png differ diff --git a/assets/cyverse_basics/ssh.png b/assets/cyverse_basics/ssh.png new file mode 100644 index 0000000..17dc6b8 Binary files /dev/null and b/assets/cyverse_basics/ssh.png differ diff --git a/assets/cyverse_basics/use_this_app.png b/assets/cyverse_basics/use_this_app.png new file mode 100644 index 0000000..c4876cb Binary files /dev/null and b/assets/cyverse_basics/use_this_app.png differ diff --git a/assets/cyverse_basics/username.png b/assets/cyverse_basics/username.png new file mode 100644 index 0000000..fdd0cbc Binary files /dev/null and b/assets/cyverse_basics/username.png differ diff --git a/assets/defaults1.png b/assets/defaults1.png new file mode 100644 index 0000000..995b9ed Binary files /dev/null and b/assets/defaults1.png differ diff --git a/assets/defaults3.png b/assets/defaults3.png new file mode 100644 index 0000000..ea57807 Binary files /dev/null and b/assets/defaults3.png differ diff --git a/assets/esiil_art/antenna_girl.png b/assets/esiil_art/antenna_girl.png new file mode 100644 index 0000000..e59ce9a Binary files /dev/null and b/assets/esiil_art/antenna_girl.png differ diff --git a/assets/esiil_art/child_plant_interaction.png b/assets/esiil_art/child_plant_interaction.png new file mode 100644 index 0000000..a9c0682 Binary files /dev/null and b/assets/esiil_art/child_plant_interaction.png differ diff --git a/assets/esiil_art/diver.png b/assets/esiil_art/diver.png new file mode 100644 index 0000000..1e161fe Binary files /dev/null and b/assets/esiil_art/diver.png differ diff --git a/assets/esiil_art/fancy_dandilion.png b/assets/esiil_art/fancy_dandilion.png new file mode 100644 index 0000000..7bfd2d8 Binary files /dev/null and b/assets/esiil_art/fancy_dandilion.png differ diff --git a/assets/esiil_art/gull_trash.png b/assets/esiil_art/gull_trash.png new file mode 100644 index 0000000..0e0e0bb Binary files /dev/null and b/assets/esiil_art/gull_trash.png differ diff --git a/assets/esiil_art/hen_harrier.png b/assets/esiil_art/hen_harrier.png new file mode 100644 index 0000000..235f06c Binary files /dev/null and b/assets/esiil_art/hen_harrier.png differ diff --git a/assets/esiil_art/looker.png b/assets/esiil_art/looker.png new file mode 100644 index 0000000..725bd87 Binary files /dev/null and b/assets/esiil_art/looker.png differ diff --git a/assets/esiil_art/monolith-alpha.png b/assets/esiil_art/monolith-alpha.png new file mode 100644 index 0000000..a8c9cf4 Binary files /dev/null and b/assets/esiil_art/monolith-alpha.png differ diff --git 
a/assets/esiil_art/paperclip.png b/assets/esiil_art/paperclip.png new file mode 100644 index 0000000..fe5aca5 Binary files /dev/null and b/assets/esiil_art/paperclip.png differ diff --git a/assets/esiil_art/peel_back.png b/assets/esiil_art/peel_back.png new file mode 100644 index 0000000..75e2778 Binary files /dev/null and b/assets/esiil_art/peel_back.png differ diff --git a/assets/esiil_art/sculpture_toothpaste.png b/assets/esiil_art/sculpture_toothpaste.png new file mode 100644 index 0000000..85e989b Binary files /dev/null and b/assets/esiil_art/sculpture_toothpaste.png differ diff --git a/assets/esiil_art/swans.png b/assets/esiil_art/swans.png new file mode 100644 index 0000000..15bf83d Binary files /dev/null and b/assets/esiil_art/swans.png differ diff --git a/assets/esiil_art/tree hands.png b/assets/esiil_art/tree hands.png new file mode 100644 index 0000000..aa03415 Binary files /dev/null and b/assets/esiil_art/tree hands.png differ diff --git a/assets/esiil_art/tree_eyes.png b/assets/esiil_art/tree_eyes.png new file mode 100644 index 0000000..74cde3a Binary files /dev/null and b/assets/esiil_art/tree_eyes.png differ diff --git a/assets/esiil_art/tree_hair.png b/assets/esiil_art/tree_hair.png new file mode 100644 index 0000000..73ac371 Binary files /dev/null and b/assets/esiil_art/tree_hair.png differ diff --git a/assets/esiil_art/tree_palm.png b/assets/esiil_art/tree_palm.png new file mode 100644 index 0000000..86161a2 Binary files /dev/null and b/assets/esiil_art/tree_palm.png differ diff --git a/assets/esiil_art/turtle.png b/assets/esiil_art/turtle.png new file mode 100644 index 0000000..4d6cff6 Binary files /dev/null and b/assets/esiil_art/turtle.png differ diff --git a/assets/esiil_art/veg_as_hair.png b/assets/esiil_art/veg_as_hair.png new file mode 100644 index 0000000..5d940a6 Binary files /dev/null and b/assets/esiil_art/veg_as_hair.png differ diff --git a/assets/esiil_art/veg_from_box.png b/assets/esiil_art/veg_from_box.png new file mode 100644 index 0000000..c50c81d Binary files /dev/null and b/assets/esiil_art/veg_from_box.png differ diff --git a/assets/esiil_content/ESIIL log white letteringAsset 4@2x.png b/assets/esiil_content/ESIIL log white letteringAsset 4@2x.png new file mode 100644 index 0000000..b4f154f Binary files /dev/null and b/assets/esiil_content/ESIIL log white letteringAsset 4@2x.png differ diff --git a/assets/esiil_content/ESIIL_logo.png b/assets/esiil_content/ESIIL_logo.png new file mode 100644 index 0000000..0f7d2f3 Binary files /dev/null and b/assets/esiil_content/ESIIL_logo.png differ diff --git a/assets/esiil_content/Summit_Header.png b/assets/esiil_content/Summit_Header.png new file mode 100644 index 0000000..c47aef2 Binary files /dev/null and b/assets/esiil_content/Summit_Header.png differ diff --git a/assets/esiil_content/favicon.ico b/assets/esiil_content/favicon.ico new file mode 100644 index 0000000..ca12695 Binary files /dev/null and b/assets/esiil_content/favicon.ico differ diff --git a/assets/esiil_content/stac_mount_save/Ansel_Adams_datacube.png b/assets/esiil_content/stac_mount_save/Ansel_Adams_datacube.png new file mode 100644 index 0000000..b0ec189 Binary files /dev/null and b/assets/esiil_content/stac_mount_save/Ansel_Adams_datacube.png differ diff --git a/assets/esiil_content/stac_mount_save/Ansel_adams_Jackson_hole.png b/assets/esiil_content/stac_mount_save/Ansel_adams_Jackson_hole.png new file mode 100644 index 0000000..77f5014 Binary files /dev/null and b/assets/esiil_content/stac_mount_save/Ansel_adams_Jackson_hole.png differ diff 
--git a/assets/esiil_content/stac_mount_save/David-Yarrow-sorrel-sky-gallery-Photographic-Print-Cindys-Shotgun-Wedding.png b/assets/esiil_content/stac_mount_save/David-Yarrow-sorrel-sky-gallery-Photographic-Print-Cindys-Shotgun-Wedding.png new file mode 100644 index 0000000..d1dde79 Binary files /dev/null and b/assets/esiil_content/stac_mount_save/David-Yarrow-sorrel-sky-gallery-Photographic-Print-Cindys-Shotgun-Wedding.png differ diff --git a/assets/esiil_content/stac_mount_save/View_Ansel-Adams_Camera.png b/assets/esiil_content/stac_mount_save/View_Ansel-Adams_Camera.png new file mode 100644 index 0000000..0ee44fb Binary files /dev/null and b/assets/esiil_content/stac_mount_save/View_Ansel-Adams_Camera.png differ diff --git a/assets/esiil_content/stac_mount_save/anim.gif b/assets/esiil_content/stac_mount_save/anim.gif new file mode 100644 index 0000000..a751b2c Binary files /dev/null and b/assets/esiil_content/stac_mount_save/anim.gif differ diff --git a/assets/esiil_content/stac_mount_save/antelopeinpython.png b/assets/esiil_content/stac_mount_save/antelopeinpython.png new file mode 100644 index 0000000..a9bfef4 Binary files /dev/null and b/assets/esiil_content/stac_mount_save/antelopeinpython.png differ diff --git a/assets/esiil_content/stac_mount_save/bison.png b/assets/esiil_content/stac_mount_save/bison.png new file mode 100644 index 0000000..e9bbf03 Binary files /dev/null and b/assets/esiil_content/stac_mount_save/bison.png differ diff --git a/assets/esiil_content/stac_mount_save/drink-firehose.png b/assets/esiil_content/stac_mount_save/drink-firehose.png new file mode 100644 index 0000000..9299c74 Binary files /dev/null and b/assets/esiil_content/stac_mount_save/drink-firehose.png differ diff --git a/assets/esiil_content/stac_mount_save/mouseinsnake.png b/assets/esiil_content/stac_mount_save/mouseinsnake.png new file mode 100644 index 0000000..14ee5cf Binary files /dev/null and b/assets/esiil_content/stac_mount_save/mouseinsnake.png differ diff --git a/assets/fcc-workshop-tile.png b/assets/fcc-workshop-tile.png new file mode 100644 index 0000000..4cd2be0 Binary files /dev/null and b/assets/fcc-workshop-tile.png differ diff --git a/assets/images/favicon.png b/assets/images/favicon.png new file mode 100644 index 0000000..1cf13b9 Binary files /dev/null and b/assets/images/favicon.png differ diff --git a/assets/javascripts/bundle.5a2dcb6a.min.js b/assets/javascripts/bundle.5a2dcb6a.min.js new file mode 100644 index 0000000..6f9720b --- /dev/null +++ b/assets/javascripts/bundle.5a2dcb6a.min.js @@ -0,0 +1,29 @@ +"use strict";(()=>{var aa=Object.create;var wr=Object.defineProperty;var sa=Object.getOwnPropertyDescriptor;var ca=Object.getOwnPropertyNames,kt=Object.getOwnPropertySymbols,fa=Object.getPrototypeOf,Er=Object.prototype.hasOwnProperty,fn=Object.prototype.propertyIsEnumerable;var cn=(e,t,r)=>t in e?wr(e,t,{enumerable:!0,configurable:!0,writable:!0,value:r}):e[t]=r,H=(e,t)=>{for(var r in t||(t={}))Er.call(t,r)&&cn(e,r,t[r]);if(kt)for(var r of kt(t))fn.call(t,r)&&cn(e,r,t[r]);return e};var un=(e,t)=>{var r={};for(var n in e)Er.call(e,n)&&t.indexOf(n)<0&&(r[n]=e[n]);if(e!=null&&kt)for(var n of kt(e))t.indexOf(n)<0&&fn.call(e,n)&&(r[n]=e[n]);return r};var yt=(e,t)=>()=>(t||e((t={exports:{}}).exports,t),t.exports);var ua=(e,t,r,n)=>{if(t&&typeof t=="object"||typeof t=="function")for(let o of ca(t))!Er.call(e,o)&&o!==r&&wr(e,o,{get:()=>t[o],enumerable:!(n=sa(t,o))||n.enumerable});return e};var 
Ye=(e,t,r)=>(r=e!=null?aa(fa(e)):{},ua(t||!e||!e.__esModule?wr(r,"default",{value:e,enumerable:!0}):r,e));var ln=yt((Sr,pn)=>{(function(e,t){typeof Sr=="object"&&typeof pn!="undefined"?t():typeof define=="function"&&define.amd?define(t):t()})(Sr,function(){"use strict";function e(r){var n=!0,o=!1,i=null,s={text:!0,search:!0,url:!0,tel:!0,email:!0,password:!0,number:!0,date:!0,month:!0,week:!0,time:!0,datetime:!0,"datetime-local":!0};function a(_){return!!(_&&_!==document&&_.nodeName!=="HTML"&&_.nodeName!=="BODY"&&"classList"in _&&"contains"in _.classList)}function c(_){var We=_.type,Fe=_.tagName;return!!(Fe==="INPUT"&&s[We]&&!_.readOnly||Fe==="TEXTAREA"&&!_.readOnly||_.isContentEditable)}function f(_){_.classList.contains("focus-visible")||(_.classList.add("focus-visible"),_.setAttribute("data-focus-visible-added",""))}function u(_){!_.hasAttribute("data-focus-visible-added")||(_.classList.remove("focus-visible"),_.removeAttribute("data-focus-visible-added"))}function p(_){_.metaKey||_.altKey||_.ctrlKey||(a(r.activeElement)&&f(r.activeElement),n=!0)}function l(_){n=!1}function d(_){!a(_.target)||(n||c(_.target))&&f(_.target)}function h(_){!a(_.target)||(_.target.classList.contains("focus-visible")||_.target.hasAttribute("data-focus-visible-added"))&&(o=!0,window.clearTimeout(i),i=window.setTimeout(function(){o=!1},100),u(_.target))}function b(_){document.visibilityState==="hidden"&&(o&&(n=!0),U())}function U(){document.addEventListener("mousemove",W),document.addEventListener("mousedown",W),document.addEventListener("mouseup",W),document.addEventListener("pointermove",W),document.addEventListener("pointerdown",W),document.addEventListener("pointerup",W),document.addEventListener("touchmove",W),document.addEventListener("touchstart",W),document.addEventListener("touchend",W)}function G(){document.removeEventListener("mousemove",W),document.removeEventListener("mousedown",W),document.removeEventListener("mouseup",W),document.removeEventListener("pointermove",W),document.removeEventListener("pointerdown",W),document.removeEventListener("pointerup",W),document.removeEventListener("touchmove",W),document.removeEventListener("touchstart",W),document.removeEventListener("touchend",W)}function W(_){_.target.nodeName&&_.target.nodeName.toLowerCase()==="html"||(n=!1,G())}document.addEventListener("keydown",p,!0),document.addEventListener("mousedown",l,!0),document.addEventListener("pointerdown",l,!0),document.addEventListener("touchstart",l,!0),document.addEventListener("visibilitychange",b,!0),U(),r.addEventListener("focus",d,!0),r.addEventListener("blur",h,!0),r.nodeType===Node.DOCUMENT_FRAGMENT_NODE&&r.host?r.host.setAttribute("data-js-focus-visible",""):r.nodeType===Node.DOCUMENT_NODE&&(document.documentElement.classList.add("js-focus-visible"),document.documentElement.setAttribute("data-js-focus-visible",""))}if(typeof window!="undefined"&&typeof document!="undefined"){window.applyFocusVisiblePolyfill=e;var t;try{t=new CustomEvent("focus-visible-polyfill-ready")}catch(r){t=document.createEvent("CustomEvent"),t.initCustomEvent("focus-visible-polyfill-ready",!1,!1,{})}window.dispatchEvent(t)}typeof document!="undefined"&&e(document)})});var mn=yt(Or=>{(function(e){var t=function(){try{return!!Symbol.iterator}catch(f){return!1}},r=t(),n=function(f){var u={next:function(){var p=f.shift();return{done:p===void 0,value:p}}};return r&&(u[Symbol.iterator]=function(){return u}),u},o=function(f){return encodeURIComponent(f).replace(/%20/g,"+")},i=function(f){return 
decodeURIComponent(String(f).replace(/\+/g," "))},s=function(){var f=function(p){Object.defineProperty(this,"_entries",{writable:!0,value:{}});var l=typeof p;if(l!=="undefined")if(l==="string")p!==""&&this._fromString(p);else if(p instanceof f){var d=this;p.forEach(function(G,W){d.append(W,G)})}else if(p!==null&&l==="object")if(Object.prototype.toString.call(p)==="[object Array]")for(var h=0;hd[0]?1:0}),f._entries&&(f._entries={});for(var p=0;p1?i(d[1]):"")}})})(typeof global!="undefined"?global:typeof window!="undefined"?window:typeof self!="undefined"?self:Or);(function(e){var t=function(){try{var o=new e.URL("b","http://a");return o.pathname="c d",o.href==="http://a/c%20d"&&o.searchParams}catch(i){return!1}},r=function(){var o=e.URL,i=function(c,f){typeof c!="string"&&(c=String(c)),f&&typeof f!="string"&&(f=String(f));var u=document,p;if(f&&(e.location===void 0||f!==e.location.href)){f=f.toLowerCase(),u=document.implementation.createHTMLDocument(""),p=u.createElement("base"),p.href=f,u.head.appendChild(p);try{if(p.href.indexOf(f)!==0)throw new Error(p.href)}catch(_){throw new Error("URL unable to set base "+f+" due to "+_)}}var l=u.createElement("a");l.href=c,p&&(u.body.appendChild(l),l.href=l.href);var d=u.createElement("input");if(d.type="url",d.value=c,l.protocol===":"||!/:/.test(l.href)||!d.checkValidity()&&!f)throw new TypeError("Invalid URL");Object.defineProperty(this,"_anchorElement",{value:l});var h=new e.URLSearchParams(this.search),b=!0,U=!0,G=this;["append","delete","set"].forEach(function(_){var We=h[_];h[_]=function(){We.apply(h,arguments),b&&(U=!1,G.search=h.toString(),U=!0)}}),Object.defineProperty(this,"searchParams",{value:h,enumerable:!0});var W=void 0;Object.defineProperty(this,"_updateSearchParams",{enumerable:!1,configurable:!1,writable:!1,value:function(){this.search!==W&&(W=this.search,U&&(b=!1,this.searchParams._fromString(this.search),b=!0))}})},s=i.prototype,a=function(c){Object.defineProperty(s,c,{get:function(){return this._anchorElement[c]},set:function(f){this._anchorElement[c]=f},enumerable:!0})};["hash","host","hostname","port","protocol"].forEach(function(c){a(c)}),Object.defineProperty(s,"search",{get:function(){return this._anchorElement.search},set:function(c){this._anchorElement.search=c,this._updateSearchParams()},enumerable:!0}),Object.defineProperties(s,{toString:{get:function(){var c=this;return function(){return c.href}}},href:{get:function(){return this._anchorElement.href.replace(/\?$/,"")},set:function(c){this._anchorElement.href=c,this._updateSearchParams()},enumerable:!0},pathname:{get:function(){return this._anchorElement.pathname.replace(/(^\/?)/,"/")},set:function(c){this._anchorElement.pathname=c},enumerable:!0},origin:{get:function(){var c={"http:":80,"https:":443,"ftp:":21}[this._anchorElement.protocol],f=this._anchorElement.port!=c&&this._anchorElement.port!=="";return this._anchorElement.protocol+"//"+this._anchorElement.hostname+(f?":"+this._anchorElement.port:"")},enumerable:!0},password:{get:function(){return""},set:function(c){},enumerable:!0},username:{get:function(){return""},set:function(c){},enumerable:!0}}),i.createObjectURL=function(c){return o.createObjectURL.apply(o,arguments)},i.revokeObjectURL=function(c){return o.revokeObjectURL.apply(o,arguments)},e.URL=i};if(t()||r(),e.location!==void 0&&!("origin"in e.location)){var n=function(){return 
e.location.protocol+"//"+e.location.hostname+(e.location.port?":"+e.location.port:"")};try{Object.defineProperty(e.location,"origin",{get:n,enumerable:!0})}catch(o){setInterval(function(){e.location.origin=n()},100)}}})(typeof global!="undefined"?global:typeof window!="undefined"?window:typeof self!="undefined"?self:Or)});var Pn=yt((Ks,$t)=>{/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */var dn,hn,bn,vn,gn,yn,xn,wn,En,Ht,_r,Sn,On,_n,rt,Tn,Mn,Ln,An,Cn,Rn,kn,Hn,Pt;(function(e){var t=typeof global=="object"?global:typeof self=="object"?self:typeof this=="object"?this:{};typeof define=="function"&&define.amd?define("tslib",["exports"],function(n){e(r(t,r(n)))}):typeof $t=="object"&&typeof $t.exports=="object"?e(r(t,r($t.exports))):e(r(t));function r(n,o){return n!==t&&(typeof Object.create=="function"?Object.defineProperty(n,"__esModule",{value:!0}):n.__esModule=!0),function(i,s){return n[i]=o?o(i,s):s}}})(function(e){var t=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(n,o){n.__proto__=o}||function(n,o){for(var i in o)Object.prototype.hasOwnProperty.call(o,i)&&(n[i]=o[i])};dn=function(n,o){if(typeof o!="function"&&o!==null)throw new TypeError("Class extends value "+String(o)+" is not a constructor or null");t(n,o);function i(){this.constructor=n}n.prototype=o===null?Object.create(o):(i.prototype=o.prototype,new i)},hn=Object.assign||function(n){for(var o,i=1,s=arguments.length;i=0;u--)(f=n[u])&&(c=(a<3?f(c):a>3?f(o,i,c):f(o,i))||c);return a>3&&c&&Object.defineProperty(o,i,c),c},gn=function(n,o){return function(i,s){o(i,s,n)}},yn=function(n,o){if(typeof Reflect=="object"&&typeof Reflect.metadata=="function")return Reflect.metadata(n,o)},xn=function(n,o,i,s){function a(c){return c instanceof i?c:new i(function(f){f(c)})}return new(i||(i=Promise))(function(c,f){function u(d){try{l(s.next(d))}catch(h){f(h)}}function p(d){try{l(s.throw(d))}catch(h){f(h)}}function l(d){d.done?c(d.value):a(d.value).then(u,p)}l((s=s.apply(n,o||[])).next())})},wn=function(n,o){var i={label:0,sent:function(){if(c[0]&1)throw c[1];return c[1]},trys:[],ops:[]},s,a,c,f;return f={next:u(0),throw:u(1),return:u(2)},typeof Symbol=="function"&&(f[Symbol.iterator]=function(){return this}),f;function u(l){return function(d){return p([l,d])}}function p(l){if(s)throw new TypeError("Generator is already executing.");for(;i;)try{if(s=1,a&&(c=l[0]&2?a.return:l[0]?a.throw||((c=a.return)&&c.call(a),0):a.next)&&!(c=c.call(a,l[1])).done)return c;switch(a=0,c&&(l=[l[0]&2,c.value]),l[0]){case 0:case 1:c=l;break;case 4:return i.label++,{value:l[1],done:!1};case 5:i.label++,a=l[1],l=[0];continue;case 
7:l=i.ops.pop(),i.trys.pop();continue;default:if(c=i.trys,!(c=c.length>0&&c[c.length-1])&&(l[0]===6||l[0]===2)){i=0;continue}if(l[0]===3&&(!c||l[1]>c[0]&&l[1]=n.length&&(n=void 0),{value:n&&n[s++],done:!n}}};throw new TypeError(o?"Object is not iterable.":"Symbol.iterator is not defined.")},_r=function(n,o){var i=typeof Symbol=="function"&&n[Symbol.iterator];if(!i)return n;var s=i.call(n),a,c=[],f;try{for(;(o===void 0||o-- >0)&&!(a=s.next()).done;)c.push(a.value)}catch(u){f={error:u}}finally{try{a&&!a.done&&(i=s.return)&&i.call(s)}finally{if(f)throw f.error}}return c},Sn=function(){for(var n=[],o=0;o1||u(b,U)})})}function u(b,U){try{p(s[b](U))}catch(G){h(c[0][3],G)}}function p(b){b.value instanceof rt?Promise.resolve(b.value.v).then(l,d):h(c[0][2],b)}function l(b){u("next",b)}function d(b){u("throw",b)}function h(b,U){b(U),c.shift(),c.length&&u(c[0][0],c[0][1])}},Mn=function(n){var o,i;return o={},s("next"),s("throw",function(a){throw a}),s("return"),o[Symbol.iterator]=function(){return this},o;function s(a,c){o[a]=n[a]?function(f){return(i=!i)?{value:rt(n[a](f)),done:a==="return"}:c?c(f):f}:c}},Ln=function(n){if(!Symbol.asyncIterator)throw new TypeError("Symbol.asyncIterator is not defined.");var o=n[Symbol.asyncIterator],i;return o?o.call(n):(n=typeof Ht=="function"?Ht(n):n[Symbol.iterator](),i={},s("next"),s("throw"),s("return"),i[Symbol.asyncIterator]=function(){return this},i);function s(c){i[c]=n[c]&&function(f){return new Promise(function(u,p){f=n[c](f),a(u,p,f.done,f.value)})}}function a(c,f,u,p){Promise.resolve(p).then(function(l){c({value:l,done:u})},f)}},An=function(n,o){return Object.defineProperty?Object.defineProperty(n,"raw",{value:o}):n.raw=o,n};var r=Object.create?function(n,o){Object.defineProperty(n,"default",{enumerable:!0,value:o})}:function(n,o){n.default=o};Cn=function(n){if(n&&n.__esModule)return n;var o={};if(n!=null)for(var i in n)i!=="default"&&Object.prototype.hasOwnProperty.call(n,i)&&Pt(o,n,i);return r(o,n),o},Rn=function(n){return n&&n.__esModule?n:{default:n}},kn=function(n,o,i,s){if(i==="a"&&!s)throw new TypeError("Private accessor was defined without a getter");if(typeof o=="function"?n!==o||!s:!o.has(n))throw new TypeError("Cannot read private member from an object whose class did not declare it");return i==="m"?s:i==="a"?s.call(n):s?s.value:o.get(n)},Hn=function(n,o,i,s,a){if(s==="m")throw new TypeError("Private method is not writable");if(s==="a"&&!a)throw new TypeError("Private accessor was defined without a setter");if(typeof o=="function"?n!==o||!a:!o.has(n))throw new TypeError("Cannot write private member to an object whose class did not declare it");return s==="a"?a.call(n,i):a?a.value=i:o.set(n,i),i},e("__extends",dn),e("__assign",hn),e("__rest",bn),e("__decorate",vn),e("__param",gn),e("__metadata",yn),e("__awaiter",xn),e("__generator",wn),e("__exportStar",En),e("__createBinding",Pt),e("__values",Ht),e("__read",_r),e("__spread",Sn),e("__spreadArrays",On),e("__spreadArray",_n),e("__await",rt),e("__asyncGenerator",Tn),e("__asyncDelegator",Mn),e("__asyncValues",Ln),e("__makeTemplateObject",An),e("__importStar",Cn),e("__importDefault",Rn),e("__classPrivateFieldGet",kn),e("__classPrivateFieldSet",Hn)})});var Br=yt((At,Yr)=>{/*! 
+ * clipboard.js v2.0.11 + * https://clipboardjs.com/ + * + * Licensed MIT © Zeno Rocha + */(function(t,r){typeof At=="object"&&typeof Yr=="object"?Yr.exports=r():typeof define=="function"&&define.amd?define([],r):typeof At=="object"?At.ClipboardJS=r():t.ClipboardJS=r()})(At,function(){return function(){var e={686:function(n,o,i){"use strict";i.d(o,{default:function(){return ia}});var s=i(279),a=i.n(s),c=i(370),f=i.n(c),u=i(817),p=i.n(u);function l(j){try{return document.execCommand(j)}catch(T){return!1}}var d=function(T){var O=p()(T);return l("cut"),O},h=d;function b(j){var T=document.documentElement.getAttribute("dir")==="rtl",O=document.createElement("textarea");O.style.fontSize="12pt",O.style.border="0",O.style.padding="0",O.style.margin="0",O.style.position="absolute",O.style[T?"right":"left"]="-9999px";var k=window.pageYOffset||document.documentElement.scrollTop;return O.style.top="".concat(k,"px"),O.setAttribute("readonly",""),O.value=j,O}var U=function(T,O){var k=b(T);O.container.appendChild(k);var $=p()(k);return l("copy"),k.remove(),$},G=function(T){var O=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{container:document.body},k="";return typeof T=="string"?k=U(T,O):T instanceof HTMLInputElement&&!["text","search","url","tel","password"].includes(T==null?void 0:T.type)?k=U(T.value,O):(k=p()(T),l("copy")),k},W=G;function _(j){return typeof Symbol=="function"&&typeof Symbol.iterator=="symbol"?_=function(O){return typeof O}:_=function(O){return O&&typeof Symbol=="function"&&O.constructor===Symbol&&O!==Symbol.prototype?"symbol":typeof O},_(j)}var We=function(){var T=arguments.length>0&&arguments[0]!==void 0?arguments[0]:{},O=T.action,k=O===void 0?"copy":O,$=T.container,q=T.target,Te=T.text;if(k!=="copy"&&k!=="cut")throw new Error('Invalid "action" value, use either "copy" or "cut"');if(q!==void 0)if(q&&_(q)==="object"&&q.nodeType===1){if(k==="copy"&&q.hasAttribute("disabled"))throw new Error('Invalid "target" attribute. Please use "readonly" instead of "disabled" attribute');if(k==="cut"&&(q.hasAttribute("readonly")||q.hasAttribute("disabled")))throw new Error(`Invalid "target" attribute. 
You can't cut text from elements with "readonly" or "disabled" attributes`)}else throw new Error('Invalid "target" value, use a valid Element');if(Te)return W(Te,{container:$});if(q)return k==="cut"?h(q):W(q,{container:$})},Fe=We;function Pe(j){return typeof Symbol=="function"&&typeof Symbol.iterator=="symbol"?Pe=function(O){return typeof O}:Pe=function(O){return O&&typeof Symbol=="function"&&O.constructor===Symbol&&O!==Symbol.prototype?"symbol":typeof O},Pe(j)}function Ji(j,T){if(!(j instanceof T))throw new TypeError("Cannot call a class as a function")}function sn(j,T){for(var O=0;O0&&arguments[0]!==void 0?arguments[0]:{};this.action=typeof $.action=="function"?$.action:this.defaultAction,this.target=typeof $.target=="function"?$.target:this.defaultTarget,this.text=typeof $.text=="function"?$.text:this.defaultText,this.container=Pe($.container)==="object"?$.container:document.body}},{key:"listenClick",value:function($){var q=this;this.listener=f()($,"click",function(Te){return q.onClick(Te)})}},{key:"onClick",value:function($){var q=$.delegateTarget||$.currentTarget,Te=this.action(q)||"copy",Rt=Fe({action:Te,container:this.container,target:this.target(q),text:this.text(q)});this.emit(Rt?"success":"error",{action:Te,text:Rt,trigger:q,clearSelection:function(){q&&q.focus(),window.getSelection().removeAllRanges()}})}},{key:"defaultAction",value:function($){return xr("action",$)}},{key:"defaultTarget",value:function($){var q=xr("target",$);if(q)return document.querySelector(q)}},{key:"defaultText",value:function($){return xr("text",$)}},{key:"destroy",value:function(){this.listener.destroy()}}],[{key:"copy",value:function($){var q=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{container:document.body};return W($,q)}},{key:"cut",value:function($){return h($)}},{key:"isSupported",value:function(){var $=arguments.length>0&&arguments[0]!==void 0?arguments[0]:["copy","cut"],q=typeof $=="string"?[$]:$,Te=!!document.queryCommandSupported;return q.forEach(function(Rt){Te=Te&&!!document.queryCommandSupported(Rt)}),Te}}]),O}(a()),ia=oa},828:function(n){var o=9;if(typeof Element!="undefined"&&!Element.prototype.matches){var i=Element.prototype;i.matches=i.matchesSelector||i.mozMatchesSelector||i.msMatchesSelector||i.oMatchesSelector||i.webkitMatchesSelector}function s(a,c){for(;a&&a.nodeType!==o;){if(typeof a.matches=="function"&&a.matches(c))return a;a=a.parentNode}}n.exports=s},438:function(n,o,i){var s=i(828);function a(u,p,l,d,h){var b=f.apply(this,arguments);return u.addEventListener(l,b,h),{destroy:function(){u.removeEventListener(l,b,h)}}}function c(u,p,l,d,h){return typeof u.addEventListener=="function"?a.apply(null,arguments):typeof l=="function"?a.bind(null,document).apply(null,arguments):(typeof u=="string"&&(u=document.querySelectorAll(u)),Array.prototype.map.call(u,function(b){return a(b,p,l,d,h)}))}function f(u,p,l,d){return function(h){h.delegateTarget=s(h.target,p),h.delegateTarget&&d.call(u,h)}}n.exports=c},879:function(n,o){o.node=function(i){return i!==void 0&&i instanceof HTMLElement&&i.nodeType===1},o.nodeList=function(i){var s=Object.prototype.toString.call(i);return i!==void 0&&(s==="[object NodeList]"||s==="[object HTMLCollection]")&&"length"in i&&(i.length===0||o.node(i[0]))},o.string=function(i){return typeof i=="string"||i instanceof String},o.fn=function(i){var s=Object.prototype.toString.call(i);return s==="[object Function]"}},370:function(n,o,i){var s=i(879),a=i(438);function c(l,d,h){if(!l&&!d&&!h)throw new Error("Missing required 
arguments");if(!s.string(d))throw new TypeError("Second argument must be a String");if(!s.fn(h))throw new TypeError("Third argument must be a Function");if(s.node(l))return f(l,d,h);if(s.nodeList(l))return u(l,d,h);if(s.string(l))return p(l,d,h);throw new TypeError("First argument must be a String, HTMLElement, HTMLCollection, or NodeList")}function f(l,d,h){return l.addEventListener(d,h),{destroy:function(){l.removeEventListener(d,h)}}}function u(l,d,h){return Array.prototype.forEach.call(l,function(b){b.addEventListener(d,h)}),{destroy:function(){Array.prototype.forEach.call(l,function(b){b.removeEventListener(d,h)})}}}function p(l,d,h){return a(document.body,l,d,h)}n.exports=c},817:function(n){function o(i){var s;if(i.nodeName==="SELECT")i.focus(),s=i.value;else if(i.nodeName==="INPUT"||i.nodeName==="TEXTAREA"){var a=i.hasAttribute("readonly");a||i.setAttribute("readonly",""),i.select(),i.setSelectionRange(0,i.value.length),a||i.removeAttribute("readonly"),s=i.value}else{i.hasAttribute("contenteditable")&&i.focus();var c=window.getSelection(),f=document.createRange();f.selectNodeContents(i),c.removeAllRanges(),c.addRange(f),s=c.toString()}return s}n.exports=o},279:function(n){function o(){}o.prototype={on:function(i,s,a){var c=this.e||(this.e={});return(c[i]||(c[i]=[])).push({fn:s,ctx:a}),this},once:function(i,s,a){var c=this;function f(){c.off(i,f),s.apply(a,arguments)}return f._=s,this.on(i,f,a)},emit:function(i){var s=[].slice.call(arguments,1),a=((this.e||(this.e={}))[i]||[]).slice(),c=0,f=a.length;for(c;c{"use strict";/*! + * escape-html + * Copyright(c) 2012-2013 TJ Holowaychuk + * Copyright(c) 2015 Andreas Lubbe + * Copyright(c) 2015 Tiancheng "Timothy" Gu + * MIT Licensed + */var Ms=/["'&<>]/;Si.exports=Ls;function Ls(e){var t=""+e,r=Ms.exec(t);if(!r)return t;var n,o="",i=0,s=0;for(i=r.index;i0},enumerable:!1,configurable:!0}),t.prototype._trySubscribe=function(r){return this._throwIfClosed(),e.prototype._trySubscribe.call(this,r)},t.prototype._subscribe=function(r){return this._throwIfClosed(),this._checkFinalizedStatuses(r),this._innerSubscribe(r)},t.prototype._innerSubscribe=function(r){var n=this,o=this,i=o.hasError,s=o.isStopped,a=o.observers;return i||s?Tr:(this.currentObservers=null,a.push(r),new $e(function(){n.currentObservers=null,Ue(a,r)}))},t.prototype._checkFinalizedStatuses=function(r){var n=this,o=n.hasError,i=n.thrownError,s=n.isStopped;o?r.error(i):s&&r.complete()},t.prototype.asObservable=function(){var r=new F;return r.source=this,r},t.create=function(r,n){return new Qn(r,n)},t}(F);var Qn=function(e){ne(t,e);function t(r,n){var o=e.call(this)||this;return o.destination=r,o.source=n,o}return t.prototype.next=function(r){var n,o;(o=(n=this.destination)===null||n===void 0?void 0:n.next)===null||o===void 0||o.call(n,r)},t.prototype.error=function(r){var n,o;(o=(n=this.destination)===null||n===void 0?void 0:n.error)===null||o===void 0||o.call(n,r)},t.prototype.complete=function(){var r,n;(n=(r=this.destination)===null||r===void 0?void 0:r.complete)===null||n===void 0||n.call(r)},t.prototype._subscribe=function(r){var n,o;return(o=(n=this.source)===null||n===void 0?void 0:n.subscribe(r))!==null&&o!==void 0?o:Tr},t}(E);var wt={now:function(){return(wt.delegate||Date).now()},delegate:void 0};var Et=function(e){ne(t,e);function t(r,n,o){r===void 0&&(r=1/0),n===void 0&&(n=1/0),o===void 0&&(o=wt);var i=e.call(this)||this;return 
i._bufferSize=r,i._windowTime=n,i._timestampProvider=o,i._buffer=[],i._infiniteTimeWindow=!0,i._infiniteTimeWindow=n===1/0,i._bufferSize=Math.max(1,r),i._windowTime=Math.max(1,n),i}return t.prototype.next=function(r){var n=this,o=n.isStopped,i=n._buffer,s=n._infiniteTimeWindow,a=n._timestampProvider,c=n._windowTime;o||(i.push(r),!s&&i.push(a.now()+c)),this._trimBuffer(),e.prototype.next.call(this,r)},t.prototype._subscribe=function(r){this._throwIfClosed(),this._trimBuffer();for(var n=this._innerSubscribe(r),o=this,i=o._infiniteTimeWindow,s=o._buffer,a=s.slice(),c=0;c0?e.prototype.requestAsyncId.call(this,r,n,o):(r.actions.push(this),r._scheduled||(r._scheduled=at.requestAnimationFrame(function(){return r.flush(void 0)})))},t.prototype.recycleAsyncId=function(r,n,o){var i;if(o===void 0&&(o=0),o!=null?o>0:this.delay>0)return e.prototype.recycleAsyncId.call(this,r,n,o);var s=r.actions;n!=null&&((i=s[s.length-1])===null||i===void 0?void 0:i.id)!==n&&(at.cancelAnimationFrame(n),r._scheduled=void 0)},t}(zt);var Gn=function(e){ne(t,e);function t(){return e!==null&&e.apply(this,arguments)||this}return t.prototype.flush=function(r){this._active=!0;var n=this._scheduled;this._scheduled=void 0;var o=this.actions,i;r=r||o.shift();do if(i=r.execute(r.state,r.delay))break;while((r=o[0])&&r.id===n&&o.shift());if(this._active=!1,i){for(;(r=o[0])&&r.id===n&&o.shift();)r.unsubscribe();throw i}},t}(Nt);var xe=new Gn(Bn);var R=new F(function(e){return e.complete()});function qt(e){return e&&L(e.schedule)}function Hr(e){return e[e.length-1]}function Ve(e){return L(Hr(e))?e.pop():void 0}function Ee(e){return qt(Hr(e))?e.pop():void 0}function Kt(e,t){return typeof Hr(e)=="number"?e.pop():t}var st=function(e){return e&&typeof e.length=="number"&&typeof e!="function"};function Qt(e){return L(e==null?void 0:e.then)}function Yt(e){return L(e[it])}function Bt(e){return Symbol.asyncIterator&&L(e==null?void 0:e[Symbol.asyncIterator])}function Gt(e){return new TypeError("You provided "+(e!==null&&typeof e=="object"?"an invalid object":"'"+e+"'")+" where a stream was expected. 
You can provide an Observable, Promise, ReadableStream, Array, AsyncIterable, or Iterable.")}function ya(){return typeof Symbol!="function"||!Symbol.iterator?"@@iterator":Symbol.iterator}var Jt=ya();function Xt(e){return L(e==null?void 0:e[Jt])}function Zt(e){return jn(this,arguments,function(){var r,n,o,i;return It(this,function(s){switch(s.label){case 0:r=e.getReader(),s.label=1;case 1:s.trys.push([1,,9,10]),s.label=2;case 2:return[4,jt(r.read())];case 3:return n=s.sent(),o=n.value,i=n.done,i?[4,jt(void 0)]:[3,5];case 4:return[2,s.sent()];case 5:return[4,jt(o)];case 6:return[4,s.sent()];case 7:return s.sent(),[3,2];case 8:return[3,10];case 9:return r.releaseLock(),[7];case 10:return[2]}})})}function er(e){return L(e==null?void 0:e.getReader)}function z(e){if(e instanceof F)return e;if(e!=null){if(Yt(e))return xa(e);if(st(e))return wa(e);if(Qt(e))return Ea(e);if(Bt(e))return Jn(e);if(Xt(e))return Sa(e);if(er(e))return Oa(e)}throw Gt(e)}function xa(e){return new F(function(t){var r=e[it]();if(L(r.subscribe))return r.subscribe(t);throw new TypeError("Provided object does not correctly implement Symbol.observable")})}function wa(e){return new F(function(t){for(var r=0;r=2,!0))}function ie(e){e===void 0&&(e={});var t=e.connector,r=t===void 0?function(){return new E}:t,n=e.resetOnError,o=n===void 0?!0:n,i=e.resetOnComplete,s=i===void 0?!0:i,a=e.resetOnRefCountZero,c=a===void 0?!0:a;return function(f){var u,p,l,d=0,h=!1,b=!1,U=function(){p==null||p.unsubscribe(),p=void 0},G=function(){U(),u=l=void 0,h=b=!1},W=function(){var _=u;G(),_==null||_.unsubscribe()};return g(function(_,We){d++,!b&&!h&&U();var Fe=l=l!=null?l:r();We.add(function(){d--,d===0&&!b&&!h&&(p=Dr(W,c))}),Fe.subscribe(We),!u&&d>0&&(u=new Ge({next:function(Pe){return Fe.next(Pe)},error:function(Pe){b=!0,U(),p=Dr(G,o,Pe),Fe.error(Pe)},complete:function(){h=!0,U(),p=Dr(G,s),Fe.complete()}}),z(_).subscribe(u))})(f)}}function Dr(e,t){for(var r=[],n=2;ne.next(document)),e}function Q(e,t=document){return Array.from(t.querySelectorAll(e))}function K(e,t=document){let r=pe(e,t);if(typeof r=="undefined")throw new ReferenceError(`Missing element: expected "${e}" to be present`);return r}function pe(e,t=document){return t.querySelector(e)||void 0}function Ie(){return document.activeElement instanceof HTMLElement&&document.activeElement||void 0}function nr(e){return A(v(document.body,"focusin"),v(document.body,"focusout")).pipe(Re(1),m(()=>{let t=Ie();return typeof t!="undefined"?e.contains(t):!1}),N(e===Ie()),B())}function qe(e){return{x:e.offsetLeft,y:e.offsetTop}}function yo(e){return A(v(window,"load"),v(window,"resize")).pipe(Ae(0,xe),m(()=>qe(e)),N(qe(e)))}function or(e){return{x:e.scrollLeft,y:e.scrollTop}}function pt(e){return A(v(e,"scroll"),v(window,"resize")).pipe(Ae(0,xe),m(()=>or(e)),N(or(e)))}var wo=function(){if(typeof Map!="undefined")return Map;function e(t,r){var n=-1;return t.some(function(o,i){return o[0]===r?(n=i,!0):!1}),n}return function(){function t(){this.__entries__=[]}return Object.defineProperty(t.prototype,"size",{get:function(){return this.__entries__.length},enumerable:!0,configurable:!0}),t.prototype.get=function(r){var n=e(this.__entries__,r),o=this.__entries__[n];return o&&o[1]},t.prototype.set=function(r,n){var o=e(this.__entries__,r);~o?this.__entries__[o][1]=n:this.__entries__.push([r,n])},t.prototype.delete=function(r){var 
n=this.__entries__,o=e(n,r);~o&&n.splice(o,1)},t.prototype.has=function(r){return!!~e(this.__entries__,r)},t.prototype.clear=function(){this.__entries__.splice(0)},t.prototype.forEach=function(r,n){n===void 0&&(n=null);for(var o=0,i=this.__entries__;o0},e.prototype.connect_=function(){!qr||this.connected_||(document.addEventListener("transitionend",this.onTransitionEnd_),window.addEventListener("resize",this.refresh),Ka?(this.mutationsObserver_=new MutationObserver(this.refresh),this.mutationsObserver_.observe(document,{attributes:!0,childList:!0,characterData:!0,subtree:!0})):(document.addEventListener("DOMSubtreeModified",this.refresh),this.mutationEventsAdded_=!0),this.connected_=!0)},e.prototype.disconnect_=function(){!qr||!this.connected_||(document.removeEventListener("transitionend",this.onTransitionEnd_),window.removeEventListener("resize",this.refresh),this.mutationsObserver_&&this.mutationsObserver_.disconnect(),this.mutationEventsAdded_&&document.removeEventListener("DOMSubtreeModified",this.refresh),this.mutationsObserver_=null,this.mutationEventsAdded_=!1,this.connected_=!1)},e.prototype.onTransitionEnd_=function(t){var r=t.propertyName,n=r===void 0?"":r,o=qa.some(function(i){return!!~n.indexOf(i)});o&&this.refresh()},e.getInstance=function(){return this.instance_||(this.instance_=new e),this.instance_},e.instance_=null,e}(),Eo=function(e,t){for(var r=0,n=Object.keys(t);r0},e}(),Oo=typeof WeakMap!="undefined"?new WeakMap:new wo,_o=function(){function e(t){if(!(this instanceof e))throw new TypeError("Cannot call a class as a function.");if(!arguments.length)throw new TypeError("1 argument required, but only 0 present.");var r=Qa.getInstance(),n=new ns(t,r,this);Oo.set(this,n)}return e}();["observe","unobserve","disconnect"].forEach(function(e){_o.prototype[e]=function(){var t;return(t=Oo.get(this))[e].apply(t,arguments)}});var os=function(){return typeof ir.ResizeObserver!="undefined"?ir.ResizeObserver:_o}(),To=os;var Mo=new E,is=P(()=>I(new To(e=>{for(let t of e)Mo.next(t)}))).pipe(S(e=>A(Se,I(e)).pipe(C(()=>e.disconnect()))),X(1));function he(e){return{width:e.offsetWidth,height:e.offsetHeight}}function ve(e){return is.pipe(w(t=>t.observe(e)),S(t=>Mo.pipe(x(({target:r})=>r===e),C(()=>t.unobserve(e)),m(()=>he(e)))),N(he(e)))}function mt(e){return{width:e.scrollWidth,height:e.scrollHeight}}function cr(e){let t=e.parentElement;for(;t&&(e.scrollWidth<=t.scrollWidth&&e.scrollHeight<=t.scrollHeight);)t=(e=t).parentElement;return t?e:void 0}var Lo=new E,as=P(()=>I(new IntersectionObserver(e=>{for(let t of e)Lo.next(t)},{threshold:0}))).pipe(S(e=>A(Se,I(e)).pipe(C(()=>e.disconnect()))),X(1));function fr(e){return as.pipe(w(t=>t.observe(e)),S(t=>Lo.pipe(x(({target:r})=>r===e),C(()=>t.unobserve(e)),m(({isIntersecting:r})=>r))))}function Ao(e,t=16){return pt(e).pipe(m(({y:r})=>{let n=he(e),o=mt(e);return r>=o.height-n.height-t}),B())}var ur={drawer:K("[data-md-toggle=drawer]"),search:K("[data-md-toggle=search]")};function Co(e){return ur[e].checked}function Ke(e,t){ur[e].checked!==t&&ur[e].click()}function dt(e){let t=ur[e];return v(t,"change").pipe(m(()=>t.checked),N(t.checked))}function ss(e,t){switch(e.constructor){case HTMLInputElement:return e.type==="radio"?/^Arrow/.test(t):!0;case HTMLSelectElement:case HTMLTextAreaElement:return!0;default:return e.isContentEditable}}function Ro(){return 
v(window,"keydown").pipe(x(e=>!(e.metaKey||e.ctrlKey)),m(e=>({mode:Co("search")?"search":"global",type:e.key,claim(){e.preventDefault(),e.stopPropagation()}})),x(({mode:e,type:t})=>{if(e==="global"){let r=Ie();if(typeof r!="undefined")return!ss(r,t)}return!0}),ie())}function Oe(){return new URL(location.href)}function pr(e){location.href=e.href}function ko(){return new E}function Ho(e,t){if(typeof t=="string"||typeof t=="number")e.innerHTML+=t.toString();else if(t instanceof Node)e.appendChild(t);else if(Array.isArray(t))for(let r of t)Ho(e,r)}function M(e,t,...r){let n=document.createElement(e);if(t)for(let o of Object.keys(t))typeof t[o]!="undefined"&&(typeof t[o]!="boolean"?n.setAttribute(o,t[o]):n.setAttribute(o,""));for(let o of r)Ho(n,o);return n}function Po(e,t){let r=t;if(e.length>r){for(;e[r]!==" "&&--r>0;);return`${e.substring(0,r)}...`}return e}function lr(e){if(e>999){let t=+((e-950)%1e3>99);return`${((e+1e-6)/1e3).toFixed(t)}k`}else return e.toString()}function $o(){return location.hash.substring(1)}function Io(e){let t=M("a",{href:e});t.addEventListener("click",r=>r.stopPropagation()),t.click()}function cs(){return v(window,"hashchange").pipe(m($o),N($o()),x(e=>e.length>0),X(1))}function jo(){return cs().pipe(m(e=>pe(`[id="${e}"]`)),x(e=>typeof e!="undefined"))}function Kr(e){let t=matchMedia(e);return rr(r=>t.addListener(()=>r(t.matches))).pipe(N(t.matches))}function Fo(){let e=matchMedia("print");return A(v(window,"beforeprint").pipe(m(()=>!0)),v(window,"afterprint").pipe(m(()=>!1))).pipe(N(e.matches))}function Qr(e,t){return e.pipe(S(r=>r?t():R))}function mr(e,t={credentials:"same-origin"}){return ue(fetch(`${e}`,t)).pipe(ce(()=>R),S(r=>r.status!==200?Ot(()=>new Error(r.statusText)):I(r)))}function je(e,t){return mr(e,t).pipe(S(r=>r.json()),X(1))}function Uo(e,t){let r=new DOMParser;return mr(e,t).pipe(S(n=>n.text()),m(n=>r.parseFromString(n,"text/xml")),X(1))}function Do(e){let t=M("script",{src:e});return P(()=>(document.head.appendChild(t),A(v(t,"load"),v(t,"error").pipe(S(()=>Ot(()=>new ReferenceError(`Invalid script: ${e}`))))).pipe(m(()=>{}),C(()=>document.head.removeChild(t)),oe(1))))}function Wo(){return{x:Math.max(0,scrollX),y:Math.max(0,scrollY)}}function Vo(){return A(v(window,"scroll",{passive:!0}),v(window,"resize",{passive:!0})).pipe(m(Wo),N(Wo()))}function zo(){return{width:innerWidth,height:innerHeight}}function No(){return v(window,"resize",{passive:!0}).pipe(m(zo),N(zo()))}function qo(){return Y([Vo(),No()]).pipe(m(([e,t])=>({offset:e,size:t})),X(1))}function dr(e,{viewport$:t,header$:r}){let n=t.pipe(J("size")),o=Y([n,r]).pipe(m(()=>qe(e)));return Y([r,t,o]).pipe(m(([{height:i},{offset:s,size:a},{x:c,y:f}])=>({offset:{x:s.x-c,y:s.y-f+i},size:a})))}function Ko(e,{tx$:t}){let r=v(e,"message").pipe(m(({data:n})=>n));return t.pipe(Lt(()=>r,{leading:!0,trailing:!0}),w(n=>e.postMessage(n)),S(()=>r),ie())}var fs=K("#__config"),ht=JSON.parse(fs.textContent);ht.base=`${new URL(ht.base,Oe())}`;function le(){return ht}function Z(e){return ht.features.includes(e)}function re(e,t){return typeof t!="undefined"?ht.translations[e].replace("#",t.toString()):ht.translations[e]}function _e(e,t=document){return K(`[data-md-component=${e}]`,t)}function te(e,t=document){return Q(`[data-md-component=${e}]`,t)}function us(e){let t=K(".md-typeset > :first-child",e);return v(t,"click",{once:!0}).pipe(m(()=>K(".md-typeset",e)),m(r=>({hash:__md_hash(r.innerHTML)})))}function Qo(e){return!Z("announce.dismiss")||!e.childElementCount?R:P(()=>{let t=new E;return 
t.pipe(N({hash:__md_get("__announce")})).subscribe(({hash:r})=>{var n;r&&r===((n=__md_get("__announce"))!=null?n:r)&&(e.hidden=!0,__md_set("__announce",r))}),us(e).pipe(w(r=>t.next(r)),C(()=>t.complete()),m(r=>H({ref:e},r)))})}function ps(e,{target$:t}){return t.pipe(m(r=>({hidden:r!==e})))}function Yo(e,t){let r=new E;return r.subscribe(({hidden:n})=>{e.hidden=n}),ps(e,t).pipe(w(n=>r.next(n)),C(()=>r.complete()),m(n=>H({ref:e},n)))}var ii=Ye(Br());function Gr(e){return M("div",{class:"md-tooltip",id:e},M("div",{class:"md-tooltip__inner md-typeset"}))}function Bo(e,t){if(t=t?`${t}_annotation_${e}`:void 0,t){let r=t?`#${t}`:void 0;return M("aside",{class:"md-annotation",tabIndex:0},Gr(t),M("a",{href:r,class:"md-annotation__index",tabIndex:-1},M("span",{"data-md-annotation-id":e})))}else return M("aside",{class:"md-annotation",tabIndex:0},Gr(t),M("span",{class:"md-annotation__index",tabIndex:-1},M("span",{"data-md-annotation-id":e})))}function Go(e){return M("button",{class:"md-clipboard md-icon",title:re("clipboard.copy"),"data-clipboard-target":`#${e} > code`})}function Jr(e,t){let r=t&2,n=t&1,o=Object.keys(e.terms).filter(a=>!e.terms[a]).reduce((a,c)=>[...a,M("del",null,c)," "],[]).slice(0,-1),i=new URL(e.location);Z("search.highlight")&&i.searchParams.set("h",Object.entries(e.terms).filter(([,a])=>a).reduce((a,[c])=>`${a} ${c}`.trim(),""));let{tags:s}=le();return M("a",{href:`${i}`,class:"md-search-result__link",tabIndex:-1},M("article",{class:["md-search-result__article",...r?["md-search-result__article--document"]:[]].join(" "),"data-md-score":e.score.toFixed(2)},r>0&&M("div",{class:"md-search-result__icon md-icon"}),M("h1",{class:"md-search-result__title"},e.title),n>0&&e.text.length>0&&M("p",{class:"md-search-result__teaser"},Po(e.text,320)),e.tags&&M("div",{class:"md-typeset"},e.tags.map(a=>{let c=a.replace(/<[^>]+>/g,""),f=s?c in s?`md-tag-icon md-tag-icon--${s[c]}`:"md-tag-icon":"";return M("span",{class:`md-tag ${f}`},a)})),n>0&&o.length>0&&M("p",{class:"md-search-result__terms"},re("search.result.term.missing"),": ",...o)))}function Jo(e){let t=e[0].score,r=[...e],n=r.findIndex(f=>!f.location.includes("#")),[o]=r.splice(n,1),i=r.findIndex(f=>f.scoreJr(f,1)),...a.length?[M("details",{class:"md-search-result__more"},M("summary",{tabIndex:-1},a.length>0&&a.length===1?re("search.result.more.one"):re("search.result.more.other",a.length)),...a.map(f=>Jr(f,1)))]:[]];return M("li",{class:"md-search-result__item"},c)}function Xo(e){return M("ul",{class:"md-source__facts"},Object.entries(e).map(([t,r])=>M("li",{class:`md-source__fact md-source__fact--${t}`},typeof r=="number"?lr(r):r)))}function Xr(e){let t=`tabbed-control tabbed-control--${e}`;return M("div",{class:t,hidden:!0},M("button",{class:"tabbed-button",tabIndex:-1}))}function Zo(e){return M("div",{class:"md-typeset__scrollwrap"},M("div",{class:"md-typeset__table"},e))}function ls(e){let t=le(),r=new URL(`../${e.version}/`,t.base);return M("li",{class:"md-version__item"},M("a",{href:`${r}`,class:"md-version__link"},e.title))}function ei(e,t){return M("div",{class:"md-version"},M("button",{class:"md-version__current","aria-label":re("select.version.title")},t.title),M("ul",{class:"md-version__list"},e.map(ls)))}function ms(e,t){let r=P(()=>Y([yo(e),pt(t)])).pipe(m(([{x:n,y:o},i])=>{let{width:s,height:a}=he(e);return{x:n-i.x+s/2,y:o-i.y+a/2}}));return nr(e).pipe(S(n=>r.pipe(m(o=>({active:n,offset:o})),oe(+!n||1/0))))}function ti(e,t,{target$:r}){let[n,o]=Array.from(e.children);return P(()=>{let i=new E,s=i.pipe(de(1));return 
i.subscribe({next({offset:a}){e.style.setProperty("--md-tooltip-x",`${a.x}px`),e.style.setProperty("--md-tooltip-y",`${a.y}px`)},complete(){e.style.removeProperty("--md-tooltip-x"),e.style.removeProperty("--md-tooltip-y")}}),fr(e).pipe(ee(s)).subscribe(a=>{e.toggleAttribute("data-md-visible",a)}),A(i.pipe(x(({active:a})=>a)),i.pipe(Re(250),x(({active:a})=>!a))).subscribe({next({active:a}){a?e.prepend(n):n.remove()},complete(){e.prepend(n)}}),i.pipe(Ae(16,xe)).subscribe(({active:a})=>{n.classList.toggle("md-tooltip--active",a)}),i.pipe(zr(125,xe),x(()=>!!e.offsetParent),m(()=>e.offsetParent.getBoundingClientRect()),m(({x:a})=>a)).subscribe({next(a){a?e.style.setProperty("--md-tooltip-0",`${-a}px`):e.style.removeProperty("--md-tooltip-0")},complete(){e.style.removeProperty("--md-tooltip-0")}}),v(o,"click").pipe(ee(s),x(a=>!(a.metaKey||a.ctrlKey))).subscribe(a=>a.preventDefault()),v(o,"mousedown").pipe(ee(s),ae(i)).subscribe(([a,{active:c}])=>{var f;if(a.button!==0||a.metaKey||a.ctrlKey)a.preventDefault();else if(c){a.preventDefault();let u=e.parentElement.closest(".md-annotation");u instanceof HTMLElement?u.focus():(f=Ie())==null||f.blur()}}),r.pipe(ee(s),x(a=>a===n),ke(125)).subscribe(()=>e.focus()),ms(e,t).pipe(w(a=>i.next(a)),C(()=>i.complete()),m(a=>H({ref:e},a)))})}function ds(e){let t=[];for(let r of Q(".c, .c1, .cm",e)){let n=[],o=document.createNodeIterator(r,NodeFilter.SHOW_TEXT);for(let i=o.nextNode();i;i=o.nextNode())n.push(i);for(let i of n){let s;for(;s=/(\(\d+\))(!)?/.exec(i.textContent);){let[,a,c]=s;if(typeof c=="undefined"){let f=i.splitText(s.index);i=f.splitText(a.length),t.push(f)}else{i.textContent=a,t.push(i);break}}}}return t}function ri(e,t){t.append(...Array.from(e.childNodes))}function ni(e,t,{target$:r,print$:n}){let o=t.closest("[id]"),i=o==null?void 0:o.id,s=new Map;for(let a of ds(t)){let[,c]=a.textContent.match(/\((\d+)\)/);pe(`li:nth-child(${c})`,e)&&(s.set(c,Bo(c,i)),a.replaceWith(s.get(c)))}return s.size===0?R:P(()=>{let a=new E,c=[];for(let[f,u]of s)c.push([K(".md-typeset",u),K(`li:nth-child(${f})`,e)]);return n.pipe(ee(a.pipe(de(1)))).subscribe(f=>{e.hidden=!f;for(let[u,p]of c)f?ri(u,p):ri(p,u)}),A(...[...s].map(([,f])=>ti(f,t,{target$:r}))).pipe(C(()=>a.complete()),ie())})}var hs=0;function ai(e){if(e.nextElementSibling){let t=e.nextElementSibling;if(t.tagName==="OL")return t;if(t.tagName==="P"&&!t.children.length)return ai(t)}}function oi(e){return ve(e).pipe(m(({width:t})=>({scrollable:mt(e).width>t})),J("scrollable"))}function si(e,t){let{matches:r}=matchMedia("(hover)"),n=P(()=>{let o=new E;if(o.subscribe(({scrollable:s})=>{s&&r?e.setAttribute("tabindex","0"):e.removeAttribute("tabindex")}),ii.default.isSupported()){let s=e.closest("pre");s.id=`__code_${++hs}`,s.insertBefore(Go(s.id),e)}let i=e.closest(".highlight");if(i instanceof HTMLElement){let s=ai(i);if(typeof s!="undefined"&&(i.classList.contains("annotate")||Z("content.code.annotate"))){let a=ni(s,e,t);return oi(e).pipe(w(c=>o.next(c)),C(()=>o.complete()),m(c=>H({ref:e},c)),et(ve(i).pipe(m(({width:c,height:f})=>c&&f),B(),S(c=>c?a:R))))}}return oi(e).pipe(w(s=>o.next(s)),C(()=>o.complete()),m(s=>H({ref:e},s)))});return Z("content.lazy")?fr(e).pipe(x(o=>o),oe(1),S(()=>n)):n}var ci=".node circle,.node ellipse,.node path,.node polygon,.node rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}marker{fill:var(--md-mermaid-edge-color)!important}.edgeLabel .label rect{fill:#0000}.label{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.label 
foreignObject{line-height:normal;overflow:visible}.label div .edgeLabel{color:var(--md-mermaid-label-fg-color)}.edgeLabel,.edgeLabel rect,.label div .edgeLabel{background-color:var(--md-mermaid-label-bg-color)}.edgeLabel,.edgeLabel rect{fill:var(--md-mermaid-label-bg-color);color:var(--md-mermaid-edge-color)}.edgePath .path,.flowchart-link{stroke:var(--md-mermaid-edge-color)}.edgePath .arrowheadPath{fill:var(--md-mermaid-edge-color);stroke:none}.cluster rect{fill:var(--md-default-fg-color--lightest);stroke:var(--md-default-fg-color--lighter)}.cluster span{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}defs #flowchart-circleEnd,defs #flowchart-circleStart,defs #flowchart-crossEnd,defs #flowchart-crossStart,defs #flowchart-pointEnd,defs #flowchart-pointStart{stroke:none}g.classGroup line,g.classGroup rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}g.classGroup text{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.classLabel .box{fill:var(--md-mermaid-label-bg-color);background-color:var(--md-mermaid-label-bg-color);opacity:1}.classLabel .label{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.node .divider{stroke:var(--md-mermaid-node-fg-color)}.relation{stroke:var(--md-mermaid-edge-color)}.cardinality{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.cardinality text{fill:inherit!important}defs #classDiagram-compositionEnd,defs #classDiagram-compositionStart,defs #classDiagram-dependencyEnd,defs #classDiagram-dependencyStart,defs #classDiagram-extensionEnd,defs #classDiagram-extensionStart{fill:var(--md-mermaid-edge-color)!important;stroke:var(--md-mermaid-edge-color)!important}defs #classDiagram-aggregationEnd,defs #classDiagram-aggregationStart{fill:var(--md-mermaid-label-bg-color)!important;stroke:var(--md-mermaid-edge-color)!important}g.stateGroup rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}g.stateGroup .state-title{fill:var(--md-mermaid-label-fg-color)!important;font-family:var(--md-mermaid-font-family)}g.stateGroup .composit{fill:var(--md-mermaid-label-bg-color)}.nodeLabel{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.node circle.state-end,.node circle.state-start,.start-state{fill:var(--md-mermaid-edge-color);stroke:none}.end-state-inner,.end-state-outer{fill:var(--md-mermaid-edge-color)}.end-state-inner,.node circle.state-end{stroke:var(--md-mermaid-label-bg-color)}.transition{stroke:var(--md-mermaid-edge-color)}[id^=state-fork] rect,[id^=state-join] rect{fill:var(--md-mermaid-edge-color)!important;stroke:none!important}.statediagram-cluster.statediagram-cluster .inner{fill:var(--md-default-bg-color)}.statediagram-cluster rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}.statediagram-state rect.divider{fill:var(--md-default-fg-color--lightest);stroke:var(--md-default-fg-color--lighter)}defs #statediagram-barbEnd{stroke:var(--md-mermaid-edge-color)}.entityBox{fill:var(--md-mermaid-label-bg-color);stroke:var(--md-mermaid-node-fg-color)}.entityLabel{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.relationshipLabelBox{fill:var(--md-mermaid-label-bg-color);fill-opacity:1;background-color:var(--md-mermaid-label-bg-color);opacity:1}.relationshipLabel{fill:var(--md-mermaid-label-fg-color)}.relationshipLine{stroke:var(--md-mermaid-edge-color)}defs #ONE_OR_MORE_END *,defs #ONE_OR_MORE_START *,defs 
#ONLY_ONE_END *,defs #ONLY_ONE_START *,defs #ZERO_OR_MORE_END *,defs #ZERO_OR_MORE_START *,defs #ZERO_OR_ONE_END *,defs #ZERO_OR_ONE_START *{stroke:var(--md-mermaid-edge-color)!important}.actor,defs #ZERO_OR_MORE_END circle,defs #ZERO_OR_MORE_START circle{fill:var(--md-mermaid-label-bg-color)}.actor{stroke:var(--md-mermaid-node-fg-color)}text.actor>tspan{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}line{stroke:var(--md-default-fg-color--lighter)}.messageLine0,.messageLine1{stroke:var(--md-mermaid-edge-color)}.loopText>tspan,.messageText,.noteText>tspan{fill:var(--md-mermaid-edge-color);stroke:none;font-family:var(--md-mermaid-font-family)!important}.noteText>tspan{fill:#000}#arrowhead path{fill:var(--md-mermaid-edge-color);stroke:none}.loopLine{stroke:var(--md-mermaid-node-fg-color)}.labelBox,.loopLine{fill:var(--md-mermaid-node-bg-color)}.labelBox{stroke:none}.labelText,.labelText>span{fill:var(--md-mermaid-node-fg-color);font-family:var(--md-mermaid-font-family)}";var Zr,vs=0;function gs(){return typeof mermaid=="undefined"||mermaid instanceof Element?Do("https://unpkg.com/mermaid@9.1.7/dist/mermaid.min.js"):I(void 0)}function fi(e){return e.classList.remove("mermaid"),Zr||(Zr=gs().pipe(w(()=>mermaid.initialize({startOnLoad:!1,themeCSS:ci,sequence:{actorFontSize:"16px",messageFontSize:"16px",noteFontSize:"16px"}})),m(()=>{}),X(1))),Zr.subscribe(()=>{e.classList.add("mermaid");let t=`__mermaid_${vs++}`,r=M("div",{class:"mermaid"});mermaid.mermaidAPI.render(t,e.textContent,n=>{let o=r.attachShadow({mode:"closed"});o.innerHTML=n,e.replaceWith(r)})}),Zr.pipe(m(()=>({ref:e})))}function ys(e,{target$:t,print$:r}){let n=!0;return A(t.pipe(m(o=>o.closest("details:not([open])")),x(o=>e===o),m(()=>({action:"open",reveal:!0}))),r.pipe(x(o=>o||!n),w(()=>n=e.open),m(o=>({action:o?"open":"close"}))))}function ui(e,t){return P(()=>{let r=new E;return r.subscribe(({action:n,reveal:o})=>{e.toggleAttribute("open",n==="open"),o&&e.scrollIntoView()}),ys(e,t).pipe(w(n=>r.next(n)),C(()=>r.complete()),m(n=>H({ref:e},n)))})}var pi=M("table");function li(e){return e.replaceWith(pi),pi.replaceWith(Zo(e)),I({ref:e})}function xs(e){let t=Q(":scope > input",e),r=t.find(n=>n.checked)||t[0];return A(...t.map(n=>v(n,"change").pipe(m(()=>K(`label[for="${n.id}"]`))))).pipe(N(K(`label[for="${r.id}"]`)),m(n=>({active:n})))}function mi(e,{viewport$:t}){let r=Xr("prev");e.append(r);let n=Xr("next");e.append(n);let o=K(".tabbed-labels",e);return P(()=>{let i=new E,s=i.pipe(de(1));return Y([i,ve(e)]).pipe(Ae(1,xe),ee(s)).subscribe({next([{active:a},c]){let f=qe(a),{width:u}=he(a);e.style.setProperty("--md-indicator-x",`${f.x}px`),e.style.setProperty("--md-indicator-width",`${u}px`);let p=or(o);(f.xp.x+c.width)&&o.scrollTo({left:Math.max(0,f.x-16),behavior:"smooth"})},complete(){e.style.removeProperty("--md-indicator-x"),e.style.removeProperty("--md-indicator-width")}}),Y([pt(o),ve(o)]).pipe(ee(s)).subscribe(([a,c])=>{let f=mt(o);r.hidden=a.x<16,n.hidden=a.x>f.width-c.width-16}),A(v(r,"click").pipe(m(()=>-1)),v(n,"click").pipe(m(()=>1))).pipe(ee(s)).subscribe(a=>{let{width:c}=he(o);o.scrollBy({left:c*a,behavior:"smooth"})}),Z("content.tabs.link")&&i.pipe(He(1),ae(t)).subscribe(([{active:a},{offset:c}])=>{let f=a.innerText.trim();if(a.hasAttribute("data-md-switching"))a.removeAttribute("data-md-switching");else{let u=e.offsetTop-c.y;for(let l of Q("[data-tabs]"))for(let d of Q(":scope > input",l)){let 
h=K(`label[for="${d.id}"]`);if(h!==a&&h.innerText.trim()===f){h.setAttribute("data-md-switching",""),d.click();break}}window.scrollTo({top:e.offsetTop-u});let p=__md_get("__tabs")||[];__md_set("__tabs",[...new Set([f,...p])])}}),xs(e).pipe(w(a=>i.next(a)),C(()=>i.complete()),m(a=>H({ref:e},a)))}).pipe(Je(fe))}function di(e,{viewport$:t,target$:r,print$:n}){return A(...Q("pre:not(.mermaid) > code",e).map(o=>si(o,{target$:r,print$:n})),...Q("pre.mermaid",e).map(o=>fi(o)),...Q("table:not([class])",e).map(o=>li(o)),...Q("details",e).map(o=>ui(o,{target$:r,print$:n})),...Q("[data-tabs]",e).map(o=>mi(o,{viewport$:t})))}function ws(e,{alert$:t}){return t.pipe(S(r=>A(I(!0),I(!1).pipe(ke(2e3))).pipe(m(n=>({message:r,active:n})))))}function hi(e,t){let r=K(".md-typeset",e);return P(()=>{let n=new E;return n.subscribe(({message:o,active:i})=>{e.classList.toggle("md-dialog--active",i),r.textContent=o}),ws(e,t).pipe(w(o=>n.next(o)),C(()=>n.complete()),m(o=>H({ref:e},o)))})}function Es({viewport$:e}){if(!Z("header.autohide"))return I(!1);let t=e.pipe(m(({offset:{y:o}})=>o),Ce(2,1),m(([o,i])=>[oMath.abs(i-o.y)>100),m(([,[o]])=>o),B()),n=dt("search");return Y([e,n]).pipe(m(([{offset:o},i])=>o.y>400&&!i),B(),S(o=>o?r:I(!1)),N(!1))}function bi(e,t){return P(()=>Y([ve(e),Es(t)])).pipe(m(([{height:r},n])=>({height:r,hidden:n})),B((r,n)=>r.height===n.height&&r.hidden===n.hidden),X(1))}function vi(e,{header$:t,main$:r}){return P(()=>{let n=new E,o=n.pipe(de(1));return n.pipe(J("active"),Ze(t)).subscribe(([{active:i},{hidden:s}])=>{e.classList.toggle("md-header--shadow",i&&!s),e.hidden=s}),r.subscribe(n),t.pipe(ee(o),m(i=>H({ref:e},i)))})}function Ss(e,{viewport$:t,header$:r}){return dr(e,{viewport$:t,header$:r}).pipe(m(({offset:{y:n}})=>{let{height:o}=he(e);return{active:n>=o}}),J("active"))}function gi(e,t){return P(()=>{let r=new E;r.subscribe(({active:o})=>{e.classList.toggle("md-header__title--active",o)});let n=pe("article h1");return typeof n=="undefined"?R:Ss(n,t).pipe(w(o=>r.next(o)),C(()=>r.complete()),m(o=>H({ref:e},o)))})}function yi(e,{viewport$:t,header$:r}){let n=r.pipe(m(({height:i})=>i),B()),o=n.pipe(S(()=>ve(e).pipe(m(({height:i})=>({top:e.offsetTop,bottom:e.offsetTop+i})),J("bottom"))));return Y([n,o,t]).pipe(m(([i,{top:s,bottom:a},{offset:{y:c},size:{height:f}}])=>(f=Math.max(0,f-Math.max(0,s-c,i)-Math.max(0,f+c-a)),{offset:s-i,height:f,active:s-i<=c})),B((i,s)=>i.offset===s.offset&&i.height===s.height&&i.active===s.active))}function Os(e){let t=__md_get("__palette")||{index:e.findIndex(r=>matchMedia(r.getAttribute("data-md-color-media")).matches)};return I(...e).pipe(se(r=>v(r,"change").pipe(m(()=>r))),N(e[Math.max(0,t.index)]),m(r=>({index:e.indexOf(r),color:{scheme:r.getAttribute("data-md-color-scheme"),primary:r.getAttribute("data-md-color-primary"),accent:r.getAttribute("data-md-color-accent")}})),X(1))}function xi(e){return P(()=>{let t=new E;t.subscribe(n=>{document.body.setAttribute("data-md-color-switching","");for(let[o,i]of Object.entries(n.color))document.body.setAttribute(`data-md-color-${o}`,i);for(let o=0;o{document.body.removeAttribute("data-md-color-switching")});let r=Q("input",e);return Os(r).pipe(w(n=>t.next(n)),C(()=>t.complete()),m(n=>H({ref:e},n)))})}var en=Ye(Br());function _s(e){e.setAttribute("data-md-copying","");let t=e.innerText;return e.removeAttribute("data-md-copying"),t}function wi({alert$:e}){en.default.isSupported()&&new F(t=>{new en.default("[data-clipboard-target], 
[data-clipboard-text]",{text:r=>r.getAttribute("data-clipboard-text")||_s(K(r.getAttribute("data-clipboard-target")))}).on("success",r=>t.next(r))}).pipe(w(t=>{t.trigger.focus()}),m(()=>re("clipboard.copied"))).subscribe(e)}function Ts(e){if(e.length<2)return[""];let[t,r]=[...e].sort((o,i)=>o.length-i.length).map(o=>o.replace(/[^/]+$/,"")),n=0;if(t===r)n=t.length;else for(;t.charCodeAt(n)===r.charCodeAt(n);)n++;return e.map(o=>o.replace(t.slice(0,n),""))}function hr(e){let t=__md_get("__sitemap",sessionStorage,e);if(t)return I(t);{let r=le();return Uo(new URL("sitemap.xml",e||r.base)).pipe(m(n=>Ts(Q("loc",n).map(o=>o.textContent))),ce(()=>R),De([]),w(n=>__md_set("__sitemap",n,sessionStorage,e)))}}function Ei({document$:e,location$:t,viewport$:r}){let n=le();if(location.protocol==="file:")return;"scrollRestoration"in history&&(history.scrollRestoration="manual",v(window,"beforeunload").subscribe(()=>{history.scrollRestoration="auto"}));let o=pe("link[rel=icon]");typeof o!="undefined"&&(o.href=o.href);let i=hr().pipe(m(f=>f.map(u=>`${new URL(u,n.base)}`)),S(f=>v(document.body,"click").pipe(x(u=>!u.metaKey&&!u.ctrlKey),S(u=>{if(u.target instanceof Element){let p=u.target.closest("a");if(p&&!p.target){let l=new URL(p.href);if(l.search="",l.hash="",l.pathname!==location.pathname&&f.includes(l.toString()))return u.preventDefault(),I({url:new URL(p.href)})}}return Se}))),ie()),s=v(window,"popstate").pipe(x(f=>f.state!==null),m(f=>({url:new URL(location.href),offset:f.state})),ie());A(i,s).pipe(B((f,u)=>f.url.href===u.url.href),m(({url:f})=>f)).subscribe(t);let a=t.pipe(J("pathname"),S(f=>mr(f.href).pipe(ce(()=>(pr(f),Se)))),ie());i.pipe(ut(a)).subscribe(({url:f})=>{history.pushState({},"",`${f}`)});let c=new DOMParser;a.pipe(S(f=>f.text()),m(f=>c.parseFromString(f,"text/html"))).subscribe(e),e.pipe(He(1)).subscribe(f=>{for(let u of["title","link[rel=canonical]","meta[name=author]","meta[name=description]","[data-md-component=announce]","[data-md-component=container]","[data-md-component=header-topic]","[data-md-component=outdated]","[data-md-component=logo]","[data-md-component=skip]",...Z("navigation.tabs.sticky")?["[data-md-component=tabs]"]:[]]){let p=pe(u),l=pe(u,f);typeof p!="undefined"&&typeof l!="undefined"&&p.replaceWith(l)}}),e.pipe(He(1),m(()=>_e("container")),S(f=>Q("script",f)),Ir(f=>{let u=M("script");if(f.src){for(let p of f.getAttributeNames())u.setAttribute(p,f.getAttribute(p));return f.replaceWith(u),new F(p=>{u.onload=()=>p.complete()})}else return u.textContent=f.textContent,f.replaceWith(u),R})).subscribe(),A(i,s).pipe(ut(e)).subscribe(({url:f,offset:u})=>{f.hash&&!u?Io(f.hash):window.scrollTo(0,(u==null?void 0:u.y)||0)}),r.pipe(Mt(i),Re(250),J("offset")).subscribe(({offset:f})=>{history.replaceState(f,"")}),A(i,s).pipe(Ce(2,1),x(([f,u])=>f.url.pathname===u.url.pathname),m(([,f])=>f)).subscribe(({offset:f})=>{window.scrollTo(0,(f==null?void 0:f.y)||0)})}var As=Ye(tn());var Oi=Ye(tn());function rn(e,t){let r=new RegExp(e.separator,"img"),n=(o,i,s)=>`${i}${s}`;return o=>{o=o.replace(/[\s*+\-:~^]+/g," ").trim();let i=new RegExp(`(^|${e.separator})(${o.replace(/[|\\{}()[\]^$+*?.-]/g,"\\$&").replace(r,"|")})`,"img");return s=>(t?(0,Oi.default)(s):s).replace(i,n).replace(/<\/mark>(\s+)]*>/img,"$1")}}function _i(e){return e.split(/"([^"]+)"/g).map((t,r)=>r&1?t.replace(/^\b|^(?![^\x00-\x7F]|$)|\s+/g," +"):t).join("").replace(/"|(?:^|\s+)[*+\-:^~]+(?=\s+|$)/g,"").trim()}function bt(e){return e.type===1}function Ti(e){return e.type===2}function vt(e){return e.type===3}function 
Rs({config:e,docs:t}){e.lang.length===1&&e.lang[0]==="en"&&(e.lang=[re("search.config.lang")]),e.separator==="[\\s\\-]+"&&(e.separator=re("search.config.separator"));let n={pipeline:re("search.config.pipeline").split(/\s*,\s*/).filter(Boolean),suggestions:Z("search.suggest")};return{config:e,docs:t,options:n}}function Mi(e,t){let r=le(),n=new Worker(e),o=new E,i=Ko(n,{tx$:o}).pipe(m(s=>{if(vt(s))for(let a of s.data.items)for(let c of a)c.location=`${new URL(c.location,r.base)}`;return s}),ie());return ue(t).pipe(m(s=>({type:0,data:Rs(s)}))).subscribe(o.next.bind(o)),{tx$:o,rx$:i}}function Li({document$:e}){let t=le(),r=je(new URL("../versions.json",t.base)).pipe(ce(()=>R)),n=r.pipe(m(o=>{let[,i]=t.base.match(/([^/]+)\/?$/);return o.find(({version:s,aliases:a})=>s===i||a.includes(i))||o[0]}));r.pipe(m(o=>new Map(o.map(i=>[`${new URL(`../${i.version}/`,t.base)}`,i]))),S(o=>v(document.body,"click").pipe(x(i=>!i.metaKey&&!i.ctrlKey),ae(n),S(([i,s])=>{if(i.target instanceof Element){let a=i.target.closest("a");if(a&&!a.target&&o.has(a.href)){let c=a.href;return!i.target.closest(".md-version")&&o.get(c)===s?R:(i.preventDefault(),I(c))}}return R}),S(i=>{let{version:s}=o.get(i);return hr(new URL(i)).pipe(m(a=>{let f=Oe().href.replace(t.base,"");return a.includes(f.split("#")[0])?new URL(`../${s}/${f}`,t.base):new URL(i)}))})))).subscribe(o=>pr(o)),Y([r,n]).subscribe(([o,i])=>{K(".md-header__topic").appendChild(ei(o,i))}),e.pipe(S(()=>n)).subscribe(o=>{var s;let i=__md_get("__outdated",sessionStorage);if(i===null){let a=((s=t.version)==null?void 0:s.default)||"latest";i=!o.aliases.includes(a),__md_set("__outdated",i,sessionStorage)}if(i)for(let a of te("outdated"))a.hidden=!1})}function ks(e,{rx$:t}){let r=(__search==null?void 0:__search.transform)||_i,{searchParams:n}=Oe();n.has("q")&&Ke("search",!0);let o=t.pipe(x(bt),oe(1),m(()=>n.get("q")||""));dt("search").pipe(x(a=>!a),oe(1)).subscribe(()=>{let a=new URL(location.href);a.searchParams.delete("q"),history.replaceState({},"",`${a}`)}),o.subscribe(a=>{a&&(e.value=a,e.focus())});let i=nr(e),s=A(v(e,"keyup"),v(e,"focus").pipe(ke(1)),o).pipe(m(()=>r(e.value)),N(""),B());return Y([s,i]).pipe(m(([a,c])=>({value:a,focus:c})),X(1))}function Ai(e,{tx$:t,rx$:r}){let n=new E,o=n.pipe(de(1));return n.pipe(J("value"),m(({value:i})=>({type:2,data:i}))).subscribe(t.next.bind(t)),n.pipe(J("focus")).subscribe(({focus:i})=>{i?(Ke("search",i),e.placeholder=""):e.placeholder=re("search.placeholder")}),v(e.form,"reset").pipe(ee(o)).subscribe(()=>e.focus()),ks(e,{tx$:t,rx$:r}).pipe(w(i=>n.next(i)),C(()=>n.complete()),m(i=>H({ref:e},i)),ie())}function Ci(e,{rx$:t},{query$:r}){let n=new E,o=Ao(e.parentElement).pipe(x(Boolean)),i=K(":scope > :first-child",e),s=K(":scope > :last-child",e),a=t.pipe(x(bt),oe(1));return n.pipe(ae(r),Mt(a)).subscribe(([{items:f},{value:u}])=>{if(u)switch(f.length){case 0:i.textContent=re("search.result.none");break;case 1:i.textContent=re("search.result.one");break;default:i.textContent=re("search.result.other",lr(f.length))}else i.textContent=re("search.result.placeholder")}),n.pipe(w(()=>s.innerHTML=""),S(({items:f})=>A(I(...f.slice(0,10)),I(...f.slice(10)).pipe(Ce(4),Nr(o),S(([u])=>u))))).subscribe(f=>s.appendChild(Jo(f))),t.pipe(x(vt),m(({data:f})=>f)).pipe(w(f=>n.next(f)),C(()=>n.complete()),m(f=>H({ref:e},f)))}function Hs(e,{query$:t}){return t.pipe(m(({value:r})=>{let n=Oe();return n.hash="",n.searchParams.delete("h"),n.searchParams.set("q",r),{url:n}}))}function Ri(e,t){let r=new E;return 
r.subscribe(({url:n})=>{e.setAttribute("data-clipboard-text",e.href),e.href=`${n}`}),v(e,"click").subscribe(n=>n.preventDefault()),Hs(e,t).pipe(w(n=>r.next(n)),C(()=>r.complete()),m(n=>H({ref:e},n)))}function ki(e,{rx$:t},{keyboard$:r}){let n=new E,o=_e("search-query"),i=A(v(o,"keydown"),v(o,"focus")).pipe(Le(fe),m(()=>o.value),B());return n.pipe(Ze(i),m(([{suggestions:a},c])=>{let f=c.split(/([\s-]+)/);if((a==null?void 0:a.length)&&f[f.length-1]){let u=a[a.length-1];u.startsWith(f[f.length-1])&&(f[f.length-1]=u)}else f.length=0;return f})).subscribe(a=>e.innerHTML=a.join("").replace(/\s/g," ")),r.pipe(x(({mode:a})=>a==="search")).subscribe(a=>{switch(a.type){case"ArrowRight":e.innerText.length&&o.selectionStart===o.value.length&&(o.value=e.innerText);break}}),t.pipe(x(vt),m(({data:a})=>a)).pipe(w(a=>n.next(a)),C(()=>n.complete()),m(()=>({ref:e})))}function Hi(e,{index$:t,keyboard$:r}){let n=le();try{let o=(__search==null?void 0:__search.worker)||n.search,i=Mi(o,t),s=_e("search-query",e),a=_e("search-result",e),{tx$:c,rx$:f}=i;c.pipe(x(Ti),ut(f.pipe(x(bt))),oe(1)).subscribe(c.next.bind(c)),r.pipe(x(({mode:l})=>l==="search")).subscribe(l=>{let d=Ie();switch(l.type){case"Enter":if(d===s){let h=new Map;for(let b of Q(":first-child [href]",a)){let U=b.firstElementChild;h.set(b,parseFloat(U.getAttribute("data-md-score")))}if(h.size){let[[b]]=[...h].sort(([,U],[,G])=>G-U);b.click()}l.claim()}break;case"Escape":case"Tab":Ke("search",!1),s.blur();break;case"ArrowUp":case"ArrowDown":if(typeof d=="undefined")s.focus();else{let h=[s,...Q(":not(details) > [href], summary, details[open] [href]",a)],b=Math.max(0,(Math.max(0,h.indexOf(d))+h.length+(l.type==="ArrowUp"?-1:1))%h.length);h[b].focus()}l.claim();break;default:s!==Ie()&&s.focus()}}),r.pipe(x(({mode:l})=>l==="global")).subscribe(l=>{switch(l.type){case"f":case"s":case"/":s.focus(),s.select(),l.claim();break}});let u=Ai(s,i),p=Ci(a,i,{query$:u});return A(u,p).pipe(et(...te("search-share",e).map(l=>Ri(l,{query$:u})),...te("search-suggest",e).map(l=>ki(l,i,{keyboard$:r}))))}catch(o){return e.hidden=!0,Se}}function Pi(e,{index$:t,location$:r}){return Y([t,r.pipe(N(Oe()),x(n=>!!n.searchParams.get("h")))]).pipe(m(([n,o])=>rn(n.config,!0)(o.searchParams.get("h"))),m(n=>{var s;let o=new Map,i=document.createNodeIterator(e,NodeFilter.SHOW_TEXT);for(let a=i.nextNode();a;a=i.nextNode())if((s=a.parentElement)!=null&&s.offsetHeight){let c=a.textContent,f=n(c);f.length>c.length&&o.set(a,f)}for(let[a,c]of o){let{childNodes:f}=M("span",null,c);a.replaceWith(...Array.from(f))}return{ref:e,nodes:o}}))}function Ps(e,{viewport$:t,main$:r}){let n=e.parentElement,o=n.offsetTop-n.parentElement.offsetTop;return Y([r,t]).pipe(m(([{offset:i,height:s},{offset:{y:a}}])=>(s=s+Math.min(o,Math.max(0,a-i))-o,{height:s,locked:a>=i+o})),B((i,s)=>i.height===s.height&&i.locked===s.locked))}function nn(e,n){var o=n,{header$:t}=o,r=un(o,["header$"]);let i=K(".md-sidebar__scrollwrap",e),{y:s}=qe(i);return P(()=>{let a=new E;return a.pipe(Ae(0,xe),ae(t)).subscribe({next([{height:c},{height:f}]){i.style.height=`${c-2*s}px`,e.style.top=`${f}px`},complete(){i.style.height="",e.style.top=""}}),a.pipe(Le(xe),oe(1)).subscribe(()=>{for(let c of Q(".md-nav__link--active[href]",e)){let f=cr(c);if(typeof f!="undefined"){let u=c.offsetTop-f.offsetTop,{height:p}=he(f);f.scrollTo({top:u-p/2})}}}),Ps(e,r).pipe(w(c=>a.next(c)),C(()=>a.complete()),m(c=>H({ref:e},c)))})}function $i(e,t){if(typeof t!="undefined"){let r=`https://api.github.com/repos/${e}/${t}`;return 
_t(je(`${r}/releases/latest`).pipe(ce(()=>R),m(n=>({version:n.tag_name})),De({})),je(r).pipe(ce(()=>R),m(n=>({stars:n.stargazers_count,forks:n.forks_count})),De({}))).pipe(m(([n,o])=>H(H({},n),o)))}else{let r=`https://api.github.com/users/${e}`;return je(r).pipe(m(n=>({repositories:n.public_repos})),De({}))}}function Ii(e,t){let r=`https://${e}/api/v4/projects/${encodeURIComponent(t)}`;return je(r).pipe(ce(()=>R),m(({star_count:n,forks_count:o})=>({stars:n,forks:o})),De({}))}function ji(e){let t=e.match(/^.+github\.com\/([^/]+)\/?([^/]+)?/i);if(t){let[,r,n]=t;return $i(r,n)}if(t=e.match(/^.+?([^/]*gitlab[^/]+)\/(.+?)\/?$/i),t){let[,r,n]=t;return Ii(r,n)}return R}var $s;function Is(e){return $s||($s=P(()=>{let t=__md_get("__source",sessionStorage);if(t)return I(t);if(te("consent").length){let n=__md_get("__consent");if(!(n&&n.github))return R}return ji(e.href).pipe(w(n=>__md_set("__source",n,sessionStorage)))}).pipe(ce(()=>R),x(t=>Object.keys(t).length>0),m(t=>({facts:t})),X(1)))}function Fi(e){let t=K(":scope > :last-child",e);return P(()=>{let r=new E;return r.subscribe(({facts:n})=>{t.appendChild(Xo(n)),t.classList.add("md-source__repository--active")}),Is(e).pipe(w(n=>r.next(n)),C(()=>r.complete()),m(n=>H({ref:e},n)))})}function js(e,{viewport$:t,header$:r}){return ve(document.body).pipe(S(()=>dr(e,{header$:r,viewport$:t})),m(({offset:{y:n}})=>({hidden:n>=10})),J("hidden"))}function Ui(e,t){return P(()=>{let r=new E;return r.subscribe({next({hidden:n}){e.hidden=n},complete(){e.hidden=!1}}),(Z("navigation.tabs.sticky")?I({hidden:!1}):js(e,t)).pipe(w(n=>r.next(n)),C(()=>r.complete()),m(n=>H({ref:e},n)))})}function Fs(e,{viewport$:t,header$:r}){let n=new Map,o=Q("[href^=\\#]",e);for(let a of o){let c=decodeURIComponent(a.hash.substring(1)),f=pe(`[id="${c}"]`);typeof f!="undefined"&&n.set(a,f)}let i=r.pipe(J("height"),m(({height:a})=>{let c=_e("main"),f=K(":scope > :first-child",c);return a+.8*(f.offsetTop-c.offsetTop)}),ie());return ve(document.body).pipe(J("height"),S(a=>P(()=>{let c=[];return I([...n].reduce((f,[u,p])=>{for(;c.length&&n.get(c[c.length-1]).tagName>=p.tagName;)c.pop();let l=p.offsetTop;for(;!l&&p.parentElement;)p=p.parentElement,l=p.offsetTop;return f.set([...c=[...c,u]].reverse(),l)},new Map))}).pipe(m(c=>new Map([...c].sort(([,f],[,u])=>f-u))),Ze(i),S(([c,f])=>t.pipe(Ur(([u,p],{offset:{y:l},size:d})=>{let h=l+d.height>=Math.floor(a.height);for(;p.length;){let[,b]=p[0];if(b-f=l&&!h)p=[u.pop(),...p];else break}return[u,p]},[[],[...c]]),B((u,p)=>u[0]===p[0]&&u[1]===p[1])))))).pipe(m(([a,c])=>({prev:a.map(([f])=>f),next:c.map(([f])=>f)})),N({prev:[],next:[]}),Ce(2,1),m(([a,c])=>a.prev.length{let o=new E,i=o.pipe(de(1));if(o.subscribe(({prev:s,next:a})=>{for(let[c]of a)c.classList.remove("md-nav__link--passed"),c.classList.remove("md-nav__link--active");for(let[c,[f]]of s.entries())f.classList.add("md-nav__link--passed"),f.classList.toggle("md-nav__link--active",c===s.length-1)}),Z("toc.follow")){let s=A(t.pipe(Re(1),m(()=>{})),t.pipe(Re(250),m(()=>"smooth")));o.pipe(x(({prev:a})=>a.length>0),ae(s)).subscribe(([{prev:a},c])=>{let[f]=a[a.length-1];if(f.offsetHeight){let u=cr(f);if(typeof u!="undefined"){let p=f.offsetTop-u.offsetTop,{height:l}=he(u);u.scrollTo({top:p-l/2,behavior:c})}}})}return Z("navigation.tracking")&&t.pipe(ee(i),J("offset"),Re(250),He(1),ee(n.pipe(He(1))),Tt({delay:250}),ae(o)).subscribe(([,{prev:s}])=>{let a=Oe(),c=s[s.length-1];if(c&&c.length){let[f]=c,{hash:u}=new URL(f.href);a.hash!==u&&(a.hash=u,history.replaceState({},"",`${a}`))}else 
a.hash="",history.replaceState({},"",`${a}`)}),Fs(e,{viewport$:t,header$:r}).pipe(w(s=>o.next(s)),C(()=>o.complete()),m(s=>H({ref:e},s)))})}function Us(e,{viewport$:t,main$:r,target$:n}){let o=t.pipe(m(({offset:{y:s}})=>s),Ce(2,1),m(([s,a])=>s>a&&a>0),B()),i=r.pipe(m(({active:s})=>s));return Y([i,o]).pipe(m(([s,a])=>!(s&&a)),B(),ee(n.pipe(He(1))),Fr(!0),Tt({delay:250}),m(s=>({hidden:s})))}function Wi(e,{viewport$:t,header$:r,main$:n,target$:o}){let i=new E,s=i.pipe(de(1));return i.subscribe({next({hidden:a}){e.hidden=a,a?(e.setAttribute("tabindex","-1"),e.blur()):e.removeAttribute("tabindex")},complete(){e.style.top="",e.hidden=!0,e.removeAttribute("tabindex")}}),r.pipe(ee(s),J("height")).subscribe(({height:a})=>{e.style.top=`${a+16}px`}),Us(e,{viewport$:t,main$:n,target$:o}).pipe(w(a=>i.next(a)),C(()=>i.complete()),m(a=>H({ref:e},a)))}function Vi({document$:e,tablet$:t}){e.pipe(S(()=>Q(".md-toggle--indeterminate, [data-md-state=indeterminate]")),w(r=>{r.indeterminate=!0,r.checked=!1}),se(r=>v(r,"change").pipe(Wr(()=>r.classList.contains("md-toggle--indeterminate")),m(()=>r))),ae(t)).subscribe(([r,n])=>{r.classList.remove("md-toggle--indeterminate"),n&&(r.checked=!1)})}function Ds(){return/(iPad|iPhone|iPod)/.test(navigator.userAgent)}function zi({document$:e}){e.pipe(S(()=>Q("[data-md-scrollfix]")),w(t=>t.removeAttribute("data-md-scrollfix")),x(Ds),se(t=>v(t,"touchstart").pipe(m(()=>t)))).subscribe(t=>{let r=t.scrollTop;r===0?t.scrollTop=1:r+t.offsetHeight===t.scrollHeight&&(t.scrollTop=r-1)})}function Ni({viewport$:e,tablet$:t}){Y([dt("search"),t]).pipe(m(([r,n])=>r&&!n),S(r=>I(r).pipe(ke(r?400:100))),ae(e)).subscribe(([r,{offset:{y:n}}])=>{if(r)document.body.setAttribute("data-md-scrolllock",""),document.body.style.top=`-${n}px`;else{let o=-1*parseInt(document.body.style.top,10);document.body.removeAttribute("data-md-scrolllock"),document.body.style.top="",o&&window.scrollTo(0,o)}})}Object.entries||(Object.entries=function(e){let t=[];for(let r of Object.keys(e))t.push([r,e[r]]);return t});Object.values||(Object.values=function(e){let t=[];for(let r of Object.keys(e))t.push(e[r]);return t});typeof Element!="undefined"&&(Element.prototype.scrollTo||(Element.prototype.scrollTo=function(e,t){typeof e=="object"?(this.scrollLeft=e.left,this.scrollTop=e.top):(this.scrollLeft=e,this.scrollTop=t)}),Element.prototype.replaceWith||(Element.prototype.replaceWith=function(...e){let t=this.parentNode;if(t){e.length===0&&t.removeChild(this);for(let r=e.length-1;r>=0;r--){let n=e[r];typeof n=="string"?n=document.createTextNode(n):n.parentNode&&n.parentNode.removeChild(n),r?t.insertBefore(this.previousSibling,n):t.replaceChild(n,this)}}}));document.documentElement.classList.remove("no-js");document.documentElement.classList.add("js");var tt=go(),vr=ko(),gt=jo(),on=Ro(),we=qo(),gr=Kr("(min-width: 960px)"),Ki=Kr("(min-width: 1220px)"),Qi=Fo(),Yi=le(),Bi=document.forms.namedItem("search")?(__search==null?void 0:__search.index)||je(new URL("search/search_index.json",Yi.base)):Se,an=new E;wi({alert$:an});Z("navigation.instant")&&Ei({document$:tt,location$:vr,viewport$:we});var qi;((qi=Yi.version)==null?void 0:qi.provider)==="mike"&&Li({document$:tt});A(vr,gt).pipe(ke(125)).subscribe(()=>{Ke("drawer",!1),Ke("search",!1)});on.pipe(x(({mode:e})=>e==="global")).subscribe(e=>{switch(e.type){case"p":case",":let t=pe("[href][rel=prev]");typeof t!="undefined"&&t.click();break;case"n":case".":let r=pe("[href][rel=next]");typeof 
r!="undefined"&&r.click();break}});Vi({document$:tt,tablet$:gr});zi({document$:tt});Ni({viewport$:we,tablet$:gr});var Qe=bi(_e("header"),{viewport$:we}),br=tt.pipe(m(()=>_e("main")),S(e=>yi(e,{viewport$:we,header$:Qe})),X(1)),Ws=A(...te("consent").map(e=>Yo(e,{target$:gt})),...te("dialog").map(e=>hi(e,{alert$:an})),...te("header").map(e=>vi(e,{viewport$:we,header$:Qe,main$:br})),...te("palette").map(e=>xi(e)),...te("search").map(e=>Hi(e,{index$:Bi,keyboard$:on})),...te("source").map(e=>Fi(e))),Vs=P(()=>A(...te("announce").map(e=>Qo(e)),...te("content").map(e=>di(e,{viewport$:we,target$:gt,print$:Qi})),...te("content").map(e=>Z("search.highlight")?Pi(e,{index$:Bi,location$:vr}):R),...te("header-title").map(e=>gi(e,{viewport$:we,header$:Qe})),...te("sidebar").map(e=>e.getAttribute("data-md-type")==="navigation"?Qr(Ki,()=>nn(e,{viewport$:we,header$:Qe,main$:br})):Qr(gr,()=>nn(e,{viewport$:we,header$:Qe,main$:br}))),...te("tabs").map(e=>Ui(e,{viewport$:we,header$:Qe})),...te("toc").map(e=>Di(e,{viewport$:we,header$:Qe,target$:gt})),...te("top").map(e=>Wi(e,{viewport$:we,header$:Qe,main$:br,target$:gt})))),Gi=tt.pipe(S(()=>Vs),et(Ws),X(1));Gi.subscribe();window.document$=tt;window.location$=vr;window.target$=gt;window.keyboard$=on;window.viewport$=we;window.tablet$=gr;window.screen$=Ki;window.print$=Qi;window.alert$=an;window.component$=Gi;})(); +//# sourceMappingURL=bundle.5a2dcb6a.min.js.map + diff --git a/assets/javascripts/bundle.5a2dcb6a.min.js.map b/assets/javascripts/bundle.5a2dcb6a.min.js.map new file mode 100644 index 0000000..34e26a3 --- /dev/null +++ b/assets/javascripts/bundle.5a2dcb6a.min.js.map @@ -0,0 +1,8 @@ +{ + "version": 3, + "sources": ["node_modules/focus-visible/dist/focus-visible.js", "node_modules/url-polyfill/url-polyfill.js", "node_modules/rxjs/node_modules/tslib/tslib.js", "node_modules/clipboard/dist/clipboard.js", "node_modules/escape-html/index.js", "node_modules/array-flat-polyfill/index.mjs", "src/assets/javascripts/bundle.ts", "node_modules/unfetch/polyfill/index.js", "node_modules/rxjs/node_modules/tslib/modules/index.js", "node_modules/rxjs/src/internal/util/isFunction.ts", "node_modules/rxjs/src/internal/util/createErrorClass.ts", "node_modules/rxjs/src/internal/util/UnsubscriptionError.ts", "node_modules/rxjs/src/internal/util/arrRemove.ts", "node_modules/rxjs/src/internal/Subscription.ts", "node_modules/rxjs/src/internal/config.ts", "node_modules/rxjs/src/internal/scheduler/timeoutProvider.ts", "node_modules/rxjs/src/internal/util/reportUnhandledError.ts", "node_modules/rxjs/src/internal/util/noop.ts", "node_modules/rxjs/src/internal/NotificationFactories.ts", "node_modules/rxjs/src/internal/util/errorContext.ts", "node_modules/rxjs/src/internal/Subscriber.ts", "node_modules/rxjs/src/internal/symbol/observable.ts", "node_modules/rxjs/src/internal/util/identity.ts", "node_modules/rxjs/src/internal/util/pipe.ts", "node_modules/rxjs/src/internal/Observable.ts", "node_modules/rxjs/src/internal/util/lift.ts", "node_modules/rxjs/src/internal/operators/OperatorSubscriber.ts", "node_modules/rxjs/src/internal/scheduler/animationFrameProvider.ts", "node_modules/rxjs/src/internal/util/ObjectUnsubscribedError.ts", "node_modules/rxjs/src/internal/Subject.ts", "node_modules/rxjs/src/internal/scheduler/dateTimestampProvider.ts", "node_modules/rxjs/src/internal/ReplaySubject.ts", "node_modules/rxjs/src/internal/scheduler/Action.ts", "node_modules/rxjs/src/internal/scheduler/intervalProvider.ts", "node_modules/rxjs/src/internal/scheduler/AsyncAction.ts", 
"node_modules/rxjs/src/internal/Scheduler.ts", "node_modules/rxjs/src/internal/scheduler/AsyncScheduler.ts", "node_modules/rxjs/src/internal/scheduler/async.ts", "node_modules/rxjs/src/internal/scheduler/AnimationFrameAction.ts", "node_modules/rxjs/src/internal/scheduler/AnimationFrameScheduler.ts", "node_modules/rxjs/src/internal/scheduler/animationFrame.ts", "node_modules/rxjs/src/internal/observable/empty.ts", "node_modules/rxjs/src/internal/util/isScheduler.ts", "node_modules/rxjs/src/internal/util/args.ts", "node_modules/rxjs/src/internal/util/isArrayLike.ts", "node_modules/rxjs/src/internal/util/isPromise.ts", "node_modules/rxjs/src/internal/util/isInteropObservable.ts", "node_modules/rxjs/src/internal/util/isAsyncIterable.ts", "node_modules/rxjs/src/internal/util/throwUnobservableError.ts", "node_modules/rxjs/src/internal/symbol/iterator.ts", "node_modules/rxjs/src/internal/util/isIterable.ts", "node_modules/rxjs/src/internal/util/isReadableStreamLike.ts", "node_modules/rxjs/src/internal/observable/innerFrom.ts", "node_modules/rxjs/src/internal/util/executeSchedule.ts", "node_modules/rxjs/src/internal/operators/observeOn.ts", "node_modules/rxjs/src/internal/operators/subscribeOn.ts", "node_modules/rxjs/src/internal/scheduled/scheduleObservable.ts", "node_modules/rxjs/src/internal/scheduled/schedulePromise.ts", "node_modules/rxjs/src/internal/scheduled/scheduleArray.ts", "node_modules/rxjs/src/internal/scheduled/scheduleIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleAsyncIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleReadableStreamLike.ts", "node_modules/rxjs/src/internal/scheduled/scheduled.ts", "node_modules/rxjs/src/internal/observable/from.ts", "node_modules/rxjs/src/internal/observable/of.ts", "node_modules/rxjs/src/internal/observable/throwError.ts", "node_modules/rxjs/src/internal/util/isDate.ts", "node_modules/rxjs/src/internal/operators/map.ts", "node_modules/rxjs/src/internal/util/mapOneOrManyArgs.ts", "node_modules/rxjs/src/internal/util/argsArgArrayOrObject.ts", "node_modules/rxjs/src/internal/util/createObject.ts", "node_modules/rxjs/src/internal/observable/combineLatest.ts", "node_modules/rxjs/src/internal/operators/mergeInternals.ts", "node_modules/rxjs/src/internal/operators/mergeMap.ts", "node_modules/rxjs/src/internal/operators/mergeAll.ts", "node_modules/rxjs/src/internal/operators/concatAll.ts", "node_modules/rxjs/src/internal/observable/concat.ts", "node_modules/rxjs/src/internal/observable/defer.ts", "node_modules/rxjs/src/internal/observable/fromEvent.ts", "node_modules/rxjs/src/internal/observable/fromEventPattern.ts", "node_modules/rxjs/src/internal/observable/timer.ts", "node_modules/rxjs/src/internal/observable/merge.ts", "node_modules/rxjs/src/internal/observable/never.ts", "node_modules/rxjs/src/internal/util/argsOrArgArray.ts", "node_modules/rxjs/src/internal/operators/filter.ts", "node_modules/rxjs/src/internal/observable/zip.ts", "node_modules/rxjs/src/internal/operators/audit.ts", "node_modules/rxjs/src/internal/operators/auditTime.ts", "node_modules/rxjs/src/internal/operators/bufferCount.ts", "node_modules/rxjs/src/internal/operators/catchError.ts", "node_modules/rxjs/src/internal/operators/scanInternals.ts", "node_modules/rxjs/src/internal/operators/combineLatest.ts", "node_modules/rxjs/src/internal/operators/combineLatestWith.ts", "node_modules/rxjs/src/internal/operators/concatMap.ts", "node_modules/rxjs/src/internal/operators/debounceTime.ts", "node_modules/rxjs/src/internal/operators/defaultIfEmpty.ts", 
"node_modules/rxjs/src/internal/operators/take.ts", "node_modules/rxjs/src/internal/operators/ignoreElements.ts", "node_modules/rxjs/src/internal/operators/mapTo.ts", "node_modules/rxjs/src/internal/operators/delayWhen.ts", "node_modules/rxjs/src/internal/operators/delay.ts", "node_modules/rxjs/src/internal/operators/distinctUntilChanged.ts", "node_modules/rxjs/src/internal/operators/distinctUntilKeyChanged.ts", "node_modules/rxjs/src/internal/operators/endWith.ts", "node_modules/rxjs/src/internal/operators/finalize.ts", "node_modules/rxjs/src/internal/operators/takeLast.ts", "node_modules/rxjs/src/internal/operators/merge.ts", "node_modules/rxjs/src/internal/operators/mergeWith.ts", "node_modules/rxjs/src/internal/operators/repeat.ts", "node_modules/rxjs/src/internal/operators/sample.ts", "node_modules/rxjs/src/internal/operators/scan.ts", "node_modules/rxjs/src/internal/operators/share.ts", "node_modules/rxjs/src/internal/operators/shareReplay.ts", "node_modules/rxjs/src/internal/operators/skip.ts", "node_modules/rxjs/src/internal/operators/skipUntil.ts", "node_modules/rxjs/src/internal/operators/startWith.ts", "node_modules/rxjs/src/internal/operators/switchMap.ts", "node_modules/rxjs/src/internal/operators/takeUntil.ts", "node_modules/rxjs/src/internal/operators/takeWhile.ts", "node_modules/rxjs/src/internal/operators/tap.ts", "node_modules/rxjs/src/internal/operators/throttle.ts", "node_modules/rxjs/src/internal/operators/throttleTime.ts", "node_modules/rxjs/src/internal/operators/withLatestFrom.ts", "node_modules/rxjs/src/internal/operators/zip.ts", "node_modules/rxjs/src/internal/operators/zipWith.ts", "src/assets/javascripts/browser/document/index.ts", "src/assets/javascripts/browser/element/_/index.ts", "src/assets/javascripts/browser/element/focus/index.ts", "src/assets/javascripts/browser/element/offset/_/index.ts", "src/assets/javascripts/browser/element/offset/content/index.ts", "node_modules/resize-observer-polyfill/dist/ResizeObserver.es.js", "src/assets/javascripts/browser/element/size/_/index.ts", "src/assets/javascripts/browser/element/size/content/index.ts", "src/assets/javascripts/browser/element/visibility/index.ts", "src/assets/javascripts/browser/toggle/index.ts", "src/assets/javascripts/browser/keyboard/index.ts", "src/assets/javascripts/browser/location/_/index.ts", "src/assets/javascripts/utilities/h/index.ts", "src/assets/javascripts/utilities/string/index.ts", "src/assets/javascripts/browser/location/hash/index.ts", "src/assets/javascripts/browser/media/index.ts", "src/assets/javascripts/browser/request/index.ts", "src/assets/javascripts/browser/script/index.ts", "src/assets/javascripts/browser/viewport/offset/index.ts", "src/assets/javascripts/browser/viewport/size/index.ts", "src/assets/javascripts/browser/viewport/_/index.ts", "src/assets/javascripts/browser/viewport/at/index.ts", "src/assets/javascripts/browser/worker/index.ts", "src/assets/javascripts/_/index.ts", "src/assets/javascripts/components/_/index.ts", "src/assets/javascripts/components/announce/index.ts", "src/assets/javascripts/components/consent/index.ts", "src/assets/javascripts/components/content/code/_/index.ts", "src/assets/javascripts/templates/tooltip/index.tsx", "src/assets/javascripts/templates/annotation/index.tsx", "src/assets/javascripts/templates/clipboard/index.tsx", "src/assets/javascripts/templates/search/index.tsx", "src/assets/javascripts/templates/source/index.tsx", "src/assets/javascripts/templates/tabbed/index.tsx", "src/assets/javascripts/templates/table/index.tsx", 
"src/assets/javascripts/templates/version/index.tsx", "src/assets/javascripts/components/content/annotation/_/index.ts", "src/assets/javascripts/components/content/annotation/list/index.ts", "src/assets/javascripts/components/content/code/mermaid/index.ts", "src/assets/javascripts/components/content/details/index.ts", "src/assets/javascripts/components/content/table/index.ts", "src/assets/javascripts/components/content/tabs/index.ts", "src/assets/javascripts/components/content/_/index.ts", "src/assets/javascripts/components/dialog/index.ts", "src/assets/javascripts/components/header/_/index.ts", "src/assets/javascripts/components/header/title/index.ts", "src/assets/javascripts/components/main/index.ts", "src/assets/javascripts/components/palette/index.ts", "src/assets/javascripts/integrations/clipboard/index.ts", "src/assets/javascripts/integrations/sitemap/index.ts", "src/assets/javascripts/integrations/instant/index.ts", "src/assets/javascripts/integrations/search/document/index.ts", "src/assets/javascripts/integrations/search/highlighter/index.ts", "src/assets/javascripts/integrations/search/query/transform/index.ts", "src/assets/javascripts/integrations/search/worker/message/index.ts", "src/assets/javascripts/integrations/search/worker/_/index.ts", "src/assets/javascripts/integrations/version/index.ts", "src/assets/javascripts/components/search/query/index.ts", "src/assets/javascripts/components/search/result/index.ts", "src/assets/javascripts/components/search/share/index.ts", "src/assets/javascripts/components/search/suggest/index.ts", "src/assets/javascripts/components/search/_/index.ts", "src/assets/javascripts/components/search/highlight/index.ts", "src/assets/javascripts/components/sidebar/index.ts", "src/assets/javascripts/components/source/facts/github/index.ts", "src/assets/javascripts/components/source/facts/gitlab/index.ts", "src/assets/javascripts/components/source/facts/_/index.ts", "src/assets/javascripts/components/source/_/index.ts", "src/assets/javascripts/components/tabs/index.ts", "src/assets/javascripts/components/toc/index.ts", "src/assets/javascripts/components/top/index.ts", "src/assets/javascripts/patches/indeterminate/index.ts", "src/assets/javascripts/patches/scrollfix/index.ts", "src/assets/javascripts/patches/scrolllock/index.ts", "src/assets/javascripts/polyfills/index.ts"], + "sourceRoot": "../../../..", + "sourcesContent": ["(function (global, factory) {\n typeof exports === 'object' && typeof module !== 'undefined' ? factory() :\n typeof define === 'function' && define.amd ? 
define(factory) :\n (factory());\n}(this, (function () { 'use strict';\n\n /**\n * Applies the :focus-visible polyfill at the given scope.\n * A scope in this case is either the top-level Document or a Shadow Root.\n *\n * @param {(Document|ShadowRoot)} scope\n * @see https://github.com/WICG/focus-visible\n */\n function applyFocusVisiblePolyfill(scope) {\n var hadKeyboardEvent = true;\n var hadFocusVisibleRecently = false;\n var hadFocusVisibleRecentlyTimeout = null;\n\n var inputTypesAllowlist = {\n text: true,\n search: true,\n url: true,\n tel: true,\n email: true,\n password: true,\n number: true,\n date: true,\n month: true,\n week: true,\n time: true,\n datetime: true,\n 'datetime-local': true\n };\n\n /**\n * Helper function for legacy browsers and iframes which sometimes focus\n * elements like document, body, and non-interactive SVG.\n * @param {Element} el\n */\n function isValidFocusTarget(el) {\n if (\n el &&\n el !== document &&\n el.nodeName !== 'HTML' &&\n el.nodeName !== 'BODY' &&\n 'classList' in el &&\n 'contains' in el.classList\n ) {\n return true;\n }\n return false;\n }\n\n /**\n * Computes whether the given element should automatically trigger the\n * `focus-visible` class being added, i.e. whether it should always match\n * `:focus-visible` when focused.\n * @param {Element} el\n * @return {boolean}\n */\n function focusTriggersKeyboardModality(el) {\n var type = el.type;\n var tagName = el.tagName;\n\n if (tagName === 'INPUT' && inputTypesAllowlist[type] && !el.readOnly) {\n return true;\n }\n\n if (tagName === 'TEXTAREA' && !el.readOnly) {\n return true;\n }\n\n if (el.isContentEditable) {\n return true;\n }\n\n return false;\n }\n\n /**\n * Add the `focus-visible` class to the given element if it was not added by\n * the author.\n * @param {Element} el\n */\n function addFocusVisibleClass(el) {\n if (el.classList.contains('focus-visible')) {\n return;\n }\n el.classList.add('focus-visible');\n el.setAttribute('data-focus-visible-added', '');\n }\n\n /**\n * Remove the `focus-visible` class from the given element if it was not\n * originally added by the author.\n * @param {Element} el\n */\n function removeFocusVisibleClass(el) {\n if (!el.hasAttribute('data-focus-visible-added')) {\n return;\n }\n el.classList.remove('focus-visible');\n el.removeAttribute('data-focus-visible-added');\n }\n\n /**\n * If the most recent user interaction was via the keyboard;\n * and the key press did not include a meta, alt/option, or control key;\n * then the modality is keyboard. 
Otherwise, the modality is not keyboard.\n * Apply `focus-visible` to any current active element and keep track\n * of our keyboard modality state with `hadKeyboardEvent`.\n * @param {KeyboardEvent} e\n */\n function onKeyDown(e) {\n if (e.metaKey || e.altKey || e.ctrlKey) {\n return;\n }\n\n if (isValidFocusTarget(scope.activeElement)) {\n addFocusVisibleClass(scope.activeElement);\n }\n\n hadKeyboardEvent = true;\n }\n\n /**\n * If at any point a user clicks with a pointing device, ensure that we change\n * the modality away from keyboard.\n * This avoids the situation where a user presses a key on an already focused\n * element, and then clicks on a different element, focusing it with a\n * pointing device, while we still think we're in keyboard modality.\n * @param {Event} e\n */\n function onPointerDown(e) {\n hadKeyboardEvent = false;\n }\n\n /**\n * On `focus`, add the `focus-visible` class to the target if:\n * - the target received focus as a result of keyboard navigation, or\n * - the event target is an element that will likely require interaction\n * via the keyboard (e.g. a text box)\n * @param {Event} e\n */\n function onFocus(e) {\n // Prevent IE from focusing the document or HTML element.\n if (!isValidFocusTarget(e.target)) {\n return;\n }\n\n if (hadKeyboardEvent || focusTriggersKeyboardModality(e.target)) {\n addFocusVisibleClass(e.target);\n }\n }\n\n /**\n * On `blur`, remove the `focus-visible` class from the target.\n * @param {Event} e\n */\n function onBlur(e) {\n if (!isValidFocusTarget(e.target)) {\n return;\n }\n\n if (\n e.target.classList.contains('focus-visible') ||\n e.target.hasAttribute('data-focus-visible-added')\n ) {\n // To detect a tab/window switch, we look for a blur event followed\n // rapidly by a visibility change.\n // If we don't see a visibility change within 100ms, it's probably a\n // regular focus change.\n hadFocusVisibleRecently = true;\n window.clearTimeout(hadFocusVisibleRecentlyTimeout);\n hadFocusVisibleRecentlyTimeout = window.setTimeout(function() {\n hadFocusVisibleRecently = false;\n }, 100);\n removeFocusVisibleClass(e.target);\n }\n }\n\n /**\n * If the user changes tabs, keep track of whether or not the previously\n * focused element had .focus-visible.\n * @param {Event} e\n */\n function onVisibilityChange(e) {\n if (document.visibilityState === 'hidden') {\n // If the tab becomes active again, the browser will handle calling focus\n // on the element (Safari actually calls it twice).\n // If this tab change caused a blur on an element with focus-visible,\n // re-apply the class when the user switches back to the tab.\n if (hadFocusVisibleRecently) {\n hadKeyboardEvent = true;\n }\n addInitialPointerMoveListeners();\n }\n }\n\n /**\n * Add a group of listeners to detect usage of any pointing devices.\n * These listeners will be added when the polyfill first loads, and anytime\n * the window is blurred, so that they are active when the window regains\n * focus.\n */\n function addInitialPointerMoveListeners() {\n document.addEventListener('mousemove', onInitialPointerMove);\n document.addEventListener('mousedown', onInitialPointerMove);\n document.addEventListener('mouseup', onInitialPointerMove);\n document.addEventListener('pointermove', onInitialPointerMove);\n document.addEventListener('pointerdown', onInitialPointerMove);\n document.addEventListener('pointerup', onInitialPointerMove);\n document.addEventListener('touchmove', onInitialPointerMove);\n document.addEventListener('touchstart', onInitialPointerMove);\n 
document.addEventListener('touchend', onInitialPointerMove);\n }\n\n function removeInitialPointerMoveListeners() {\n document.removeEventListener('mousemove', onInitialPointerMove);\n document.removeEventListener('mousedown', onInitialPointerMove);\n document.removeEventListener('mouseup', onInitialPointerMove);\n document.removeEventListener('pointermove', onInitialPointerMove);\n document.removeEventListener('pointerdown', onInitialPointerMove);\n document.removeEventListener('pointerup', onInitialPointerMove);\n document.removeEventListener('touchmove', onInitialPointerMove);\n document.removeEventListener('touchstart', onInitialPointerMove);\n document.removeEventListener('touchend', onInitialPointerMove);\n }\n\n /**\n * When the polfyill first loads, assume the user is in keyboard modality.\n * If any event is received from a pointing device (e.g. mouse, pointer,\n * touch), turn off keyboard modality.\n * This accounts for situations where focus enters the page from the URL bar.\n * @param {Event} e\n */\n function onInitialPointerMove(e) {\n // Work around a Safari quirk that fires a mousemove on whenever the\n // window blurs, even if you're tabbing out of the page. \u00AF\\_(\u30C4)_/\u00AF\n if (e.target.nodeName && e.target.nodeName.toLowerCase() === 'html') {\n return;\n }\n\n hadKeyboardEvent = false;\n removeInitialPointerMoveListeners();\n }\n\n // For some kinds of state, we are interested in changes at the global scope\n // only. For example, global pointer input, global key presses and global\n // visibility change should affect the state at every scope:\n document.addEventListener('keydown', onKeyDown, true);\n document.addEventListener('mousedown', onPointerDown, true);\n document.addEventListener('pointerdown', onPointerDown, true);\n document.addEventListener('touchstart', onPointerDown, true);\n document.addEventListener('visibilitychange', onVisibilityChange, true);\n\n addInitialPointerMoveListeners();\n\n // For focus and blur, we specifically care about state changes in the local\n // scope. This is because focus / blur events that originate from within a\n // shadow root are not re-dispatched from the host element if it was already\n // the active element in its own scope:\n scope.addEventListener('focus', onFocus, true);\n scope.addEventListener('blur', onBlur, true);\n\n // We detect that a node is a ShadowRoot by ensuring that it is a\n // DocumentFragment and also has a host property. This check covers native\n // implementation and polyfill implementation transparently. If we only cared\n // about the native implementation, we could just check if the scope was\n // an instance of a ShadowRoot.\n if (scope.nodeType === Node.DOCUMENT_FRAGMENT_NODE && scope.host) {\n // Since a ShadowRoot is a special kind of DocumentFragment, it does not\n // have a root element to add a class to. So, we add this attribute to the\n // host element instead:\n scope.host.setAttribute('data-js-focus-visible', '');\n } else if (scope.nodeType === Node.DOCUMENT_NODE) {\n document.documentElement.classList.add('js-focus-visible');\n document.documentElement.setAttribute('data-js-focus-visible', '');\n }\n }\n\n // It is important to wrap all references to global window and document in\n // these checks to support server-side rendering use cases\n // @see https://github.com/WICG/focus-visible/issues/199\n if (typeof window !== 'undefined' && typeof document !== 'undefined') {\n // Make the polyfill helper globally available. 
This can be used as a signal\n // to interested libraries that wish to coordinate with the polyfill for e.g.,\n // applying the polyfill to a shadow root:\n window.applyFocusVisiblePolyfill = applyFocusVisiblePolyfill;\n\n // Notify interested libraries of the polyfill's presence, in case the\n // polyfill was loaded lazily:\n var event;\n\n try {\n event = new CustomEvent('focus-visible-polyfill-ready');\n } catch (error) {\n // IE11 does not support using CustomEvent as a constructor directly:\n event = document.createEvent('CustomEvent');\n event.initCustomEvent('focus-visible-polyfill-ready', false, false, {});\n }\n\n window.dispatchEvent(event);\n }\n\n if (typeof document !== 'undefined') {\n // Apply the polyfill to the global document, so that no JavaScript\n // coordination is required to use the polyfill in the top-level document:\n applyFocusVisiblePolyfill(document);\n }\n\n})));\n", "(function(global) {\r\n /**\r\n * Polyfill URLSearchParams\r\n *\r\n * Inspired from : https://github.com/WebReflection/url-search-params/blob/master/src/url-search-params.js\r\n */\r\n\r\n var checkIfIteratorIsSupported = function() {\r\n try {\r\n return !!Symbol.iterator;\r\n } catch (error) {\r\n return false;\r\n }\r\n };\r\n\r\n\r\n var iteratorSupported = checkIfIteratorIsSupported();\r\n\r\n var createIterator = function(items) {\r\n var iterator = {\r\n next: function() {\r\n var value = items.shift();\r\n return { done: value === void 0, value: value };\r\n }\r\n };\r\n\r\n if (iteratorSupported) {\r\n iterator[Symbol.iterator] = function() {\r\n return iterator;\r\n };\r\n }\r\n\r\n return iterator;\r\n };\r\n\r\n /**\r\n * Search param name and values should be encoded according to https://url.spec.whatwg.org/#urlencoded-serializing\r\n * encodeURIComponent() produces the same result except encoding spaces as `%20` instead of `+`.\r\n */\r\n var serializeParam = function(value) {\r\n return encodeURIComponent(value).replace(/%20/g, '+');\r\n };\r\n\r\n var deserializeParam = function(value) {\r\n return decodeURIComponent(String(value).replace(/\\+/g, ' '));\r\n };\r\n\r\n var polyfillURLSearchParams = function() {\r\n\r\n var URLSearchParams = function(searchString) {\r\n Object.defineProperty(this, '_entries', { writable: true, value: {} });\r\n var typeofSearchString = typeof searchString;\r\n\r\n if (typeofSearchString === 'undefined') {\r\n // do nothing\r\n } else if (typeofSearchString === 'string') {\r\n if (searchString !== '') {\r\n this._fromString(searchString);\r\n }\r\n } else if (searchString instanceof URLSearchParams) {\r\n var _this = this;\r\n searchString.forEach(function(value, name) {\r\n _this.append(name, value);\r\n });\r\n } else if ((searchString !== null) && (typeofSearchString === 'object')) {\r\n if (Object.prototype.toString.call(searchString) === '[object Array]') {\r\n for (var i = 0; i < searchString.length; i++) {\r\n var entry = searchString[i];\r\n if ((Object.prototype.toString.call(entry) === '[object Array]') || (entry.length !== 2)) {\r\n this.append(entry[0], entry[1]);\r\n } else {\r\n throw new TypeError('Expected [string, any] as entry at index ' + i + ' of URLSearchParams\\'s input');\r\n }\r\n }\r\n } else {\r\n for (var key in searchString) {\r\n if (searchString.hasOwnProperty(key)) {\r\n this.append(key, searchString[key]);\r\n }\r\n }\r\n }\r\n } else {\r\n throw new TypeError('Unsupported input\\'s type for URLSearchParams');\r\n }\r\n };\r\n\r\n var proto = URLSearchParams.prototype;\r\n\r\n proto.append = function(name, value) 
{\r\n if (name in this._entries) {\r\n this._entries[name].push(String(value));\r\n } else {\r\n this._entries[name] = [String(value)];\r\n }\r\n };\r\n\r\n proto.delete = function(name) {\r\n delete this._entries[name];\r\n };\r\n\r\n proto.get = function(name) {\r\n return (name in this._entries) ? this._entries[name][0] : null;\r\n };\r\n\r\n proto.getAll = function(name) {\r\n return (name in this._entries) ? this._entries[name].slice(0) : [];\r\n };\r\n\r\n proto.has = function(name) {\r\n return (name in this._entries);\r\n };\r\n\r\n proto.set = function(name, value) {\r\n this._entries[name] = [String(value)];\r\n };\r\n\r\n proto.forEach = function(callback, thisArg) {\r\n var entries;\r\n for (var name in this._entries) {\r\n if (this._entries.hasOwnProperty(name)) {\r\n entries = this._entries[name];\r\n for (var i = 0; i < entries.length; i++) {\r\n callback.call(thisArg, entries[i], name, this);\r\n }\r\n }\r\n }\r\n };\r\n\r\n proto.keys = function() {\r\n var items = [];\r\n this.forEach(function(value, name) {\r\n items.push(name);\r\n });\r\n return createIterator(items);\r\n };\r\n\r\n proto.values = function() {\r\n var items = [];\r\n this.forEach(function(value) {\r\n items.push(value);\r\n });\r\n return createIterator(items);\r\n };\r\n\r\n proto.entries = function() {\r\n var items = [];\r\n this.forEach(function(value, name) {\r\n items.push([name, value]);\r\n });\r\n return createIterator(items);\r\n };\r\n\r\n if (iteratorSupported) {\r\n proto[Symbol.iterator] = proto.entries;\r\n }\r\n\r\n proto.toString = function() {\r\n var searchArray = [];\r\n this.forEach(function(value, name) {\r\n searchArray.push(serializeParam(name) + '=' + serializeParam(value));\r\n });\r\n return searchArray.join('&');\r\n };\r\n\r\n\r\n global.URLSearchParams = URLSearchParams;\r\n };\r\n\r\n var checkIfURLSearchParamsSupported = function() {\r\n try {\r\n var URLSearchParams = global.URLSearchParams;\r\n\r\n return (\r\n (new URLSearchParams('?a=1').toString() === 'a=1') &&\r\n (typeof URLSearchParams.prototype.set === 'function') &&\r\n (typeof URLSearchParams.prototype.entries === 'function')\r\n );\r\n } catch (e) {\r\n return false;\r\n }\r\n };\r\n\r\n if (!checkIfURLSearchParamsSupported()) {\r\n polyfillURLSearchParams();\r\n }\r\n\r\n var proto = global.URLSearchParams.prototype;\r\n\r\n if (typeof proto.sort !== 'function') {\r\n proto.sort = function() {\r\n var _this = this;\r\n var items = [];\r\n this.forEach(function(value, name) {\r\n items.push([name, value]);\r\n if (!_this._entries) {\r\n _this.delete(name);\r\n }\r\n });\r\n items.sort(function(a, b) {\r\n if (a[0] < b[0]) {\r\n return -1;\r\n } else if (a[0] > b[0]) {\r\n return +1;\r\n } else {\r\n return 0;\r\n }\r\n });\r\n if (_this._entries) { // force reset because IE keeps keys index\r\n _this._entries = {};\r\n }\r\n for (var i = 0; i < items.length; i++) {\r\n this.append(items[i][0], items[i][1]);\r\n }\r\n };\r\n }\r\n\r\n if (typeof proto._fromString !== 'function') {\r\n Object.defineProperty(proto, '_fromString', {\r\n enumerable: false,\r\n configurable: false,\r\n writable: false,\r\n value: function(searchString) {\r\n if (this._entries) {\r\n this._entries = {};\r\n } else {\r\n var keys = [];\r\n this.forEach(function(value, name) {\r\n keys.push(name);\r\n });\r\n for (var i = 0; i < keys.length; i++) {\r\n this.delete(keys[i]);\r\n }\r\n }\r\n\r\n searchString = searchString.replace(/^\\?/, '');\r\n var attributes = searchString.split('&');\r\n var attribute;\r\n for (var i = 0; i < 
attributes.length; i++) {\r\n attribute = attributes[i].split('=');\r\n this.append(\r\n deserializeParam(attribute[0]),\r\n (attribute.length > 1) ? deserializeParam(attribute[1]) : ''\r\n );\r\n }\r\n }\r\n });\r\n }\r\n\r\n // HTMLAnchorElement\r\n\r\n})(\r\n (typeof global !== 'undefined') ? global\r\n : ((typeof window !== 'undefined') ? window\r\n : ((typeof self !== 'undefined') ? self : this))\r\n);\r\n\r\n(function(global) {\r\n /**\r\n * Polyfill URL\r\n *\r\n * Inspired from : https://github.com/arv/DOM-URL-Polyfill/blob/master/src/url.js\r\n */\r\n\r\n var checkIfURLIsSupported = function() {\r\n try {\r\n var u = new global.URL('b', 'http://a');\r\n u.pathname = 'c d';\r\n return (u.href === 'http://a/c%20d') && u.searchParams;\r\n } catch (e) {\r\n return false;\r\n }\r\n };\r\n\r\n\r\n var polyfillURL = function() {\r\n var _URL = global.URL;\r\n\r\n var URL = function(url, base) {\r\n if (typeof url !== 'string') url = String(url);\r\n if (base && typeof base !== 'string') base = String(base);\r\n\r\n // Only create another document if the base is different from current location.\r\n var doc = document, baseElement;\r\n if (base && (global.location === void 0 || base !== global.location.href)) {\r\n base = base.toLowerCase();\r\n doc = document.implementation.createHTMLDocument('');\r\n baseElement = doc.createElement('base');\r\n baseElement.href = base;\r\n doc.head.appendChild(baseElement);\r\n try {\r\n if (baseElement.href.indexOf(base) !== 0) throw new Error(baseElement.href);\r\n } catch (err) {\r\n throw new Error('URL unable to set base ' + base + ' due to ' + err);\r\n }\r\n }\r\n\r\n var anchorElement = doc.createElement('a');\r\n anchorElement.href = url;\r\n if (baseElement) {\r\n doc.body.appendChild(anchorElement);\r\n anchorElement.href = anchorElement.href; // force href to refresh\r\n }\r\n\r\n var inputElement = doc.createElement('input');\r\n inputElement.type = 'url';\r\n inputElement.value = url;\r\n\r\n if (anchorElement.protocol === ':' || !/:/.test(anchorElement.href) || (!inputElement.checkValidity() && !base)) {\r\n throw new TypeError('Invalid URL');\r\n }\r\n\r\n Object.defineProperty(this, '_anchorElement', {\r\n value: anchorElement\r\n });\r\n\r\n\r\n // create a linked searchParams which reflect its changes on URL\r\n var searchParams = new global.URLSearchParams(this.search);\r\n var enableSearchUpdate = true;\r\n var enableSearchParamsUpdate = true;\r\n var _this = this;\r\n ['append', 'delete', 'set'].forEach(function(methodName) {\r\n var method = searchParams[methodName];\r\n searchParams[methodName] = function() {\r\n method.apply(searchParams, arguments);\r\n if (enableSearchUpdate) {\r\n enableSearchParamsUpdate = false;\r\n _this.search = searchParams.toString();\r\n enableSearchParamsUpdate = true;\r\n }\r\n };\r\n });\r\n\r\n Object.defineProperty(this, 'searchParams', {\r\n value: searchParams,\r\n enumerable: true\r\n });\r\n\r\n var search = void 0;\r\n Object.defineProperty(this, '_updateSearchParams', {\r\n enumerable: false,\r\n configurable: false,\r\n writable: false,\r\n value: function() {\r\n if (this.search !== search) {\r\n search = this.search;\r\n if (enableSearchParamsUpdate) {\r\n enableSearchUpdate = false;\r\n this.searchParams._fromString(this.search);\r\n enableSearchUpdate = true;\r\n }\r\n }\r\n }\r\n });\r\n };\r\n\r\n var proto = URL.prototype;\r\n\r\n var linkURLWithAnchorAttribute = function(attributeName) {\r\n Object.defineProperty(proto, attributeName, {\r\n get: function() {\r\n return 
this._anchorElement[attributeName];\r\n },\r\n set: function(value) {\r\n this._anchorElement[attributeName] = value;\r\n },\r\n enumerable: true\r\n });\r\n };\r\n\r\n ['hash', 'host', 'hostname', 'port', 'protocol']\r\n .forEach(function(attributeName) {\r\n linkURLWithAnchorAttribute(attributeName);\r\n });\r\n\r\n Object.defineProperty(proto, 'search', {\r\n get: function() {\r\n return this._anchorElement['search'];\r\n },\r\n set: function(value) {\r\n this._anchorElement['search'] = value;\r\n this._updateSearchParams();\r\n },\r\n enumerable: true\r\n });\r\n\r\n Object.defineProperties(proto, {\r\n\r\n 'toString': {\r\n get: function() {\r\n var _this = this;\r\n return function() {\r\n return _this.href;\r\n };\r\n }\r\n },\r\n\r\n 'href': {\r\n get: function() {\r\n return this._anchorElement.href.replace(/\\?$/, '');\r\n },\r\n set: function(value) {\r\n this._anchorElement.href = value;\r\n this._updateSearchParams();\r\n },\r\n enumerable: true\r\n },\r\n\r\n 'pathname': {\r\n get: function() {\r\n return this._anchorElement.pathname.replace(/(^\\/?)/, '/');\r\n },\r\n set: function(value) {\r\n this._anchorElement.pathname = value;\r\n },\r\n enumerable: true\r\n },\r\n\r\n 'origin': {\r\n get: function() {\r\n // get expected port from protocol\r\n var expectedPort = { 'http:': 80, 'https:': 443, 'ftp:': 21 }[this._anchorElement.protocol];\r\n // add port to origin if, expected port is different than actual port\r\n // and it is not empty f.e http://foo:8080\r\n // 8080 != 80 && 8080 != ''\r\n var addPortToOrigin = this._anchorElement.port != expectedPort &&\r\n this._anchorElement.port !== '';\r\n\r\n return this._anchorElement.protocol +\r\n '//' +\r\n this._anchorElement.hostname +\r\n (addPortToOrigin ? (':' + this._anchorElement.port) : '');\r\n },\r\n enumerable: true\r\n },\r\n\r\n 'password': { // TODO\r\n get: function() {\r\n return '';\r\n },\r\n set: function(value) {\r\n },\r\n enumerable: true\r\n },\r\n\r\n 'username': { // TODO\r\n get: function() {\r\n return '';\r\n },\r\n set: function(value) {\r\n },\r\n enumerable: true\r\n },\r\n });\r\n\r\n URL.createObjectURL = function(blob) {\r\n return _URL.createObjectURL.apply(_URL, arguments);\r\n };\r\n\r\n URL.revokeObjectURL = function(url) {\r\n return _URL.revokeObjectURL.apply(_URL, arguments);\r\n };\r\n\r\n global.URL = URL;\r\n\r\n };\r\n\r\n if (!checkIfURLIsSupported()) {\r\n polyfillURL();\r\n }\r\n\r\n if ((global.location !== void 0) && !('origin' in global.location)) {\r\n var getOrigin = function() {\r\n return global.location.protocol + '//' + global.location.hostname + (global.location.port ? (':' + global.location.port) : '');\r\n };\r\n\r\n try {\r\n Object.defineProperty(global.location, 'origin', {\r\n get: getOrigin,\r\n enumerable: true\r\n });\r\n } catch (e) {\r\n setInterval(function() {\r\n global.location.origin = getOrigin();\r\n }, 100);\r\n }\r\n }\r\n\r\n})(\r\n (typeof global !== 'undefined') ? global\r\n : ((typeof window !== 'undefined') ? window\r\n : ((typeof self !== 'undefined') ? self : this))\r\n);\r\n", "/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global global, define, System, Reflect, Promise */\r\nvar __extends;\r\nvar __assign;\r\nvar __rest;\r\nvar __decorate;\r\nvar __param;\r\nvar __metadata;\r\nvar __awaiter;\r\nvar __generator;\r\nvar __exportStar;\r\nvar __values;\r\nvar __read;\r\nvar __spread;\r\nvar __spreadArrays;\r\nvar __spreadArray;\r\nvar __await;\r\nvar __asyncGenerator;\r\nvar __asyncDelegator;\r\nvar __asyncValues;\r\nvar __makeTemplateObject;\r\nvar __importStar;\r\nvar __importDefault;\r\nvar __classPrivateFieldGet;\r\nvar __classPrivateFieldSet;\r\nvar __createBinding;\r\n(function (factory) {\r\n var root = typeof global === \"object\" ? global : typeof self === \"object\" ? self : typeof this === \"object\" ? this : {};\r\n if (typeof define === \"function\" && define.amd) {\r\n define(\"tslib\", [\"exports\"], function (exports) { factory(createExporter(root, createExporter(exports))); });\r\n }\r\n else if (typeof module === \"object\" && typeof module.exports === \"object\") {\r\n factory(createExporter(root, createExporter(module.exports)));\r\n }\r\n else {\r\n factory(createExporter(root));\r\n }\r\n function createExporter(exports, previous) {\r\n if (exports !== root) {\r\n if (typeof Object.create === \"function\") {\r\n Object.defineProperty(exports, \"__esModule\", { value: true });\r\n }\r\n else {\r\n exports.__esModule = true;\r\n }\r\n }\r\n return function (id, v) { return exports[id] = previous ? previous(id, v) : v; };\r\n }\r\n})\r\n(function (exporter) {\r\n var extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n\r\n __extends = function (d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());\r\n };\r\n\r\n __assign = Object.assign || function (t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n };\r\n\r\n __rest = function (s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n };\r\n\r\n __decorate = function (decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n };\r\n\r\n __param = function (paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n };\r\n\r\n __metadata = function (metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n };\r\n\r\n __awaiter = function (thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n };\r\n\r\n __generator = function (thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n };\r\n\r\n __exportStar = function(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n };\r\n\r\n __createBinding = Object.create ? 
(function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n }) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n });\r\n\r\n __values = function (o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? \"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n };\r\n\r\n __read = function (o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n };\r\n\r\n /** @deprecated */\r\n __spread = function () {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n };\r\n\r\n /** @deprecated */\r\n __spreadArrays = function () {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n };\r\n\r\n __spreadArray = function (to, from, pack) {\r\n if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {\r\n if (ar || !(i in from)) {\r\n if (!ar) ar = Array.prototype.slice.call(from, 0, i);\r\n ar[i] = from[i];\r\n }\r\n }\r\n return to.concat(ar || Array.prototype.slice.call(from));\r\n };\r\n\r\n __await = function (v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n };\r\n\r\n __asyncGenerator = function (thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n };\r\n\r\n __asyncDelegator = function (o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n };\r\n\r\n __asyncValues = function (o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? 
m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n };\r\n\r\n __makeTemplateObject = function (cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n };\r\n\r\n var __setModuleDefault = Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n }) : function(o, v) {\r\n o[\"default\"] = v;\r\n };\r\n\r\n __importStar = function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n };\r\n\r\n __importDefault = function (mod) {\r\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\r\n };\r\n\r\n __classPrivateFieldGet = function (receiver, state, kind, f) {\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\r\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\r\n };\r\n\r\n __classPrivateFieldSet = function (receiver, state, value, kind, f) {\r\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\r\n return (kind === \"a\" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value;\r\n };\r\n\r\n exporter(\"__extends\", __extends);\r\n exporter(\"__assign\", __assign);\r\n exporter(\"__rest\", __rest);\r\n exporter(\"__decorate\", __decorate);\r\n exporter(\"__param\", __param);\r\n exporter(\"__metadata\", __metadata);\r\n exporter(\"__awaiter\", __awaiter);\r\n exporter(\"__generator\", __generator);\r\n exporter(\"__exportStar\", __exportStar);\r\n exporter(\"__createBinding\", __createBinding);\r\n exporter(\"__values\", __values);\r\n exporter(\"__read\", __read);\r\n exporter(\"__spread\", __spread);\r\n exporter(\"__spreadArrays\", __spreadArrays);\r\n exporter(\"__spreadArray\", __spreadArray);\r\n exporter(\"__await\", __await);\r\n exporter(\"__asyncGenerator\", __asyncGenerator);\r\n exporter(\"__asyncDelegator\", __asyncDelegator);\r\n exporter(\"__asyncValues\", __asyncValues);\r\n exporter(\"__makeTemplateObject\", __makeTemplateObject);\r\n exporter(\"__importStar\", __importStar);\r\n exporter(\"__importDefault\", __importDefault);\r\n exporter(\"__classPrivateFieldGet\", __classPrivateFieldGet);\r\n exporter(\"__classPrivateFieldSet\", __classPrivateFieldSet);\r\n});\r\n", "/*!\n * clipboard.js v2.0.11\n * https://clipboardjs.com/\n *\n * Licensed MIT \u00A9 Zeno Rocha\n */\n(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"ClipboardJS\"] = factory();\n\telse\n\t\troot[\"ClipboardJS\"] = factory();\n})(this, function() {\nreturn /******/ (function() { // webpackBootstrap\n/******/ \tvar __webpack_modules__ = ({\n\n/***/ 686:\n/***/ (function(__unused_webpack_module, __webpack_exports__, __webpack_require__) {\n\n\"use strict\";\n\n// EXPORTS\n__webpack_require__.d(__webpack_exports__, {\n \"default\": function() { return /* binding */ clipboard; }\n});\n\n// EXTERNAL MODULE: ./node_modules/tiny-emitter/index.js\nvar tiny_emitter = __webpack_require__(279);\nvar tiny_emitter_default = /*#__PURE__*/__webpack_require__.n(tiny_emitter);\n// EXTERNAL MODULE: ./node_modules/good-listener/src/listen.js\nvar listen = __webpack_require__(370);\nvar listen_default = /*#__PURE__*/__webpack_require__.n(listen);\n// EXTERNAL MODULE: ./node_modules/select/src/select.js\nvar src_select = __webpack_require__(817);\nvar select_default = /*#__PURE__*/__webpack_require__.n(src_select);\n;// CONCATENATED MODULE: ./src/common/command.js\n/**\n * Executes a given operation type.\n * @param {String} type\n * @return {Boolean}\n */\nfunction command(type) {\n try {\n return document.execCommand(type);\n } catch (err) {\n return false;\n }\n}\n;// CONCATENATED MODULE: ./src/actions/cut.js\n\n\n/**\n * Cut action wrapper.\n * @param {String|HTMLElement} target\n * @return {String}\n */\n\nvar ClipboardActionCut = function ClipboardActionCut(target) {\n var selectedText = select_default()(target);\n command('cut');\n return selectedText;\n};\n\n/* harmony default export */ var actions_cut = (ClipboardActionCut);\n;// CONCATENATED MODULE: ./src/common/create-fake-element.js\n/**\n * Creates a fake textarea element with a value.\n * @param {String} value\n * @return {HTMLElement}\n */\nfunction createFakeElement(value) {\n var isRTL = document.documentElement.getAttribute('dir') === 'rtl';\n var fakeElement = document.createElement('textarea'); // Prevent zooming on iOS\n\n 
fakeElement.style.fontSize = '12pt'; // Reset box model\n\n fakeElement.style.border = '0';\n fakeElement.style.padding = '0';\n fakeElement.style.margin = '0'; // Move element out of screen horizontally\n\n fakeElement.style.position = 'absolute';\n fakeElement.style[isRTL ? 'right' : 'left'] = '-9999px'; // Move element to the same position vertically\n\n var yPosition = window.pageYOffset || document.documentElement.scrollTop;\n fakeElement.style.top = \"\".concat(yPosition, \"px\");\n fakeElement.setAttribute('readonly', '');\n fakeElement.value = value;\n return fakeElement;\n}\n;// CONCATENATED MODULE: ./src/actions/copy.js\n\n\n\n/**\n * Create fake copy action wrapper using a fake element.\n * @param {String} target\n * @param {Object} options\n * @return {String}\n */\n\nvar fakeCopyAction = function fakeCopyAction(value, options) {\n var fakeElement = createFakeElement(value);\n options.container.appendChild(fakeElement);\n var selectedText = select_default()(fakeElement);\n command('copy');\n fakeElement.remove();\n return selectedText;\n};\n/**\n * Copy action wrapper.\n * @param {String|HTMLElement} target\n * @param {Object} options\n * @return {String}\n */\n\n\nvar ClipboardActionCopy = function ClipboardActionCopy(target) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {\n container: document.body\n };\n var selectedText = '';\n\n if (typeof target === 'string') {\n selectedText = fakeCopyAction(target, options);\n } else if (target instanceof HTMLInputElement && !['text', 'search', 'url', 'tel', 'password'].includes(target === null || target === void 0 ? void 0 : target.type)) {\n // If input type doesn't support `setSelectionRange`. Simulate it. https://developer.mozilla.org/en-US/docs/Web/API/HTMLInputElement/setSelectionRange\n selectedText = fakeCopyAction(target.value, options);\n } else {\n selectedText = select_default()(target);\n command('copy');\n }\n\n return selectedText;\n};\n\n/* harmony default export */ var actions_copy = (ClipboardActionCopy);\n;// CONCATENATED MODULE: ./src/actions/default.js\nfunction _typeof(obj) { \"@babel/helpers - typeof\"; if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj; }; } return _typeof(obj); }\n\n\n\n/**\n * Inner function which performs selection from either `text` or `target`\n * properties and then executes copy or cut operations.\n * @param {Object} options\n */\n\nvar ClipboardActionDefault = function ClipboardActionDefault() {\n var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};\n // Defines base properties passed from constructor.\n var _options$action = options.action,\n action = _options$action === void 0 ? 
'copy' : _options$action,\n container = options.container,\n target = options.target,\n text = options.text; // Sets the `action` to be performed which can be either 'copy' or 'cut'.\n\n if (action !== 'copy' && action !== 'cut') {\n throw new Error('Invalid \"action\" value, use either \"copy\" or \"cut\"');\n } // Sets the `target` property using an element that will be have its content copied.\n\n\n if (target !== undefined) {\n if (target && _typeof(target) === 'object' && target.nodeType === 1) {\n if (action === 'copy' && target.hasAttribute('disabled')) {\n throw new Error('Invalid \"target\" attribute. Please use \"readonly\" instead of \"disabled\" attribute');\n }\n\n if (action === 'cut' && (target.hasAttribute('readonly') || target.hasAttribute('disabled'))) {\n throw new Error('Invalid \"target\" attribute. You can\\'t cut text from elements with \"readonly\" or \"disabled\" attributes');\n }\n } else {\n throw new Error('Invalid \"target\" value, use a valid Element');\n }\n } // Define selection strategy based on `text` property.\n\n\n if (text) {\n return actions_copy(text, {\n container: container\n });\n } // Defines which selection strategy based on `target` property.\n\n\n if (target) {\n return action === 'cut' ? actions_cut(target) : actions_copy(target, {\n container: container\n });\n }\n};\n\n/* harmony default export */ var actions_default = (ClipboardActionDefault);\n;// CONCATENATED MODULE: ./src/clipboard.js\nfunction clipboard_typeof(obj) { \"@babel/helpers - typeof\"; if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { clipboard_typeof = function _typeof(obj) { return typeof obj; }; } else { clipboard_typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
\"symbol\" : typeof obj; }; } return clipboard_typeof(obj); }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }\n\nfunction _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function\"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }\n\nfunction _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }\n\nfunction _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = _getPrototypeOf(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = _getPrototypeOf(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn(this, result); }; }\n\nfunction _possibleConstructorReturn(self, call) { if (call && (clipboard_typeof(call) === \"object\" || typeof call === \"function\")) { return call; } return _assertThisInitialized(self); }\n\nfunction _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return self; }\n\nfunction _isNativeReflectConstruct() { if (typeof Reflect === \"undefined\" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === \"function\") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }\n\nfunction _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? 
Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }\n\n\n\n\n\n\n/**\n * Helper function to retrieve attribute value.\n * @param {String} suffix\n * @param {Element} element\n */\n\nfunction getAttributeValue(suffix, element) {\n var attribute = \"data-clipboard-\".concat(suffix);\n\n if (!element.hasAttribute(attribute)) {\n return;\n }\n\n return element.getAttribute(attribute);\n}\n/**\n * Base class which takes one or more elements, adds event listeners to them,\n * and instantiates a new `ClipboardAction` on each click.\n */\n\n\nvar Clipboard = /*#__PURE__*/function (_Emitter) {\n _inherits(Clipboard, _Emitter);\n\n var _super = _createSuper(Clipboard);\n\n /**\n * @param {String|HTMLElement|HTMLCollection|NodeList} trigger\n * @param {Object} options\n */\n function Clipboard(trigger, options) {\n var _this;\n\n _classCallCheck(this, Clipboard);\n\n _this = _super.call(this);\n\n _this.resolveOptions(options);\n\n _this.listenClick(trigger);\n\n return _this;\n }\n /**\n * Defines if attributes would be resolved using internal setter functions\n * or custom functions that were passed in the constructor.\n * @param {Object} options\n */\n\n\n _createClass(Clipboard, [{\n key: \"resolveOptions\",\n value: function resolveOptions() {\n var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};\n this.action = typeof options.action === 'function' ? options.action : this.defaultAction;\n this.target = typeof options.target === 'function' ? options.target : this.defaultTarget;\n this.text = typeof options.text === 'function' ? options.text : this.defaultText;\n this.container = clipboard_typeof(options.container) === 'object' ? options.container : document.body;\n }\n /**\n * Adds a click event listener to the passed trigger.\n * @param {String|HTMLElement|HTMLCollection|NodeList} trigger\n */\n\n }, {\n key: \"listenClick\",\n value: function listenClick(trigger) {\n var _this2 = this;\n\n this.listener = listen_default()(trigger, 'click', function (e) {\n return _this2.onClick(e);\n });\n }\n /**\n * Defines a new `ClipboardAction` on each click event.\n * @param {Event} e\n */\n\n }, {\n key: \"onClick\",\n value: function onClick(e) {\n var trigger = e.delegateTarget || e.currentTarget;\n var action = this.action(trigger) || 'copy';\n var text = actions_default({\n action: action,\n container: this.container,\n target: this.target(trigger),\n text: this.text(trigger)\n }); // Fires an event based on the copy operation result.\n\n this.emit(text ? 
'success' : 'error', {\n action: action,\n text: text,\n trigger: trigger,\n clearSelection: function clearSelection() {\n if (trigger) {\n trigger.focus();\n }\n\n window.getSelection().removeAllRanges();\n }\n });\n }\n /**\n * Default `action` lookup function.\n * @param {Element} trigger\n */\n\n }, {\n key: \"defaultAction\",\n value: function defaultAction(trigger) {\n return getAttributeValue('action', trigger);\n }\n /**\n * Default `target` lookup function.\n * @param {Element} trigger\n */\n\n }, {\n key: \"defaultTarget\",\n value: function defaultTarget(trigger) {\n var selector = getAttributeValue('target', trigger);\n\n if (selector) {\n return document.querySelector(selector);\n }\n }\n /**\n * Allow fire programmatically a copy action\n * @param {String|HTMLElement} target\n * @param {Object} options\n * @returns Text copied.\n */\n\n }, {\n key: \"defaultText\",\n\n /**\n * Default `text` lookup function.\n * @param {Element} trigger\n */\n value: function defaultText(trigger) {\n return getAttributeValue('text', trigger);\n }\n /**\n * Destroy lifecycle.\n */\n\n }, {\n key: \"destroy\",\n value: function destroy() {\n this.listener.destroy();\n }\n }], [{\n key: \"copy\",\n value: function copy(target) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {\n container: document.body\n };\n return actions_copy(target, options);\n }\n /**\n * Allow fire programmatically a cut action\n * @param {String|HTMLElement} target\n * @returns Text cutted.\n */\n\n }, {\n key: \"cut\",\n value: function cut(target) {\n return actions_cut(target);\n }\n /**\n * Returns the support of the given action, or all actions if no action is\n * given.\n * @param {String} [action]\n */\n\n }, {\n key: \"isSupported\",\n value: function isSupported() {\n var action = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : ['copy', 'cut'];\n var actions = typeof action === 'string' ? 
[action] : action;\n var support = !!document.queryCommandSupported;\n actions.forEach(function (action) {\n support = support && !!document.queryCommandSupported(action);\n });\n return support;\n }\n }]);\n\n return Clipboard;\n}((tiny_emitter_default()));\n\n/* harmony default export */ var clipboard = (Clipboard);\n\n/***/ }),\n\n/***/ 828:\n/***/ (function(module) {\n\nvar DOCUMENT_NODE_TYPE = 9;\n\n/**\n * A polyfill for Element.matches()\n */\nif (typeof Element !== 'undefined' && !Element.prototype.matches) {\n var proto = Element.prototype;\n\n proto.matches = proto.matchesSelector ||\n proto.mozMatchesSelector ||\n proto.msMatchesSelector ||\n proto.oMatchesSelector ||\n proto.webkitMatchesSelector;\n}\n\n/**\n * Finds the closest parent that matches a selector.\n *\n * @param {Element} element\n * @param {String} selector\n * @return {Function}\n */\nfunction closest (element, selector) {\n while (element && element.nodeType !== DOCUMENT_NODE_TYPE) {\n if (typeof element.matches === 'function' &&\n element.matches(selector)) {\n return element;\n }\n element = element.parentNode;\n }\n}\n\nmodule.exports = closest;\n\n\n/***/ }),\n\n/***/ 438:\n/***/ (function(module, __unused_webpack_exports, __webpack_require__) {\n\nvar closest = __webpack_require__(828);\n\n/**\n * Delegates event to a selector.\n *\n * @param {Element} element\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @param {Boolean} useCapture\n * @return {Object}\n */\nfunction _delegate(element, selector, type, callback, useCapture) {\n var listenerFn = listener.apply(this, arguments);\n\n element.addEventListener(type, listenerFn, useCapture);\n\n return {\n destroy: function() {\n element.removeEventListener(type, listenerFn, useCapture);\n }\n }\n}\n\n/**\n * Delegates event to a selector.\n *\n * @param {Element|String|Array} [elements]\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @param {Boolean} useCapture\n * @return {Object}\n */\nfunction delegate(elements, selector, type, callback, useCapture) {\n // Handle the regular Element usage\n if (typeof elements.addEventListener === 'function') {\n return _delegate.apply(null, arguments);\n }\n\n // Handle Element-less usage, it defaults to global delegation\n if (typeof type === 'function') {\n // Use `document` as the first parameter, then apply arguments\n // This is a short way to .unshift `arguments` without running into deoptimizations\n return _delegate.bind(null, document).apply(null, arguments);\n }\n\n // Handle Selector-based usage\n if (typeof elements === 'string') {\n elements = document.querySelectorAll(elements);\n }\n\n // Handle Array-like based usage\n return Array.prototype.map.call(elements, function (element) {\n return _delegate(element, selector, type, callback, useCapture);\n });\n}\n\n/**\n * Finds closest match and invokes callback.\n *\n * @param {Element} element\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @return {Function}\n */\nfunction listener(element, selector, type, callback) {\n return function(e) {\n e.delegateTarget = closest(e.target, selector);\n\n if (e.delegateTarget) {\n callback.call(element, e);\n }\n }\n}\n\nmodule.exports = delegate;\n\n\n/***/ }),\n\n/***/ 879:\n/***/ (function(__unused_webpack_module, exports) {\n\n/**\n * Check if argument is a HTML element.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.node = function(value) {\n return value !== undefined\n && 
value instanceof HTMLElement\n && value.nodeType === 1;\n};\n\n/**\n * Check if argument is a list of HTML elements.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.nodeList = function(value) {\n var type = Object.prototype.toString.call(value);\n\n return value !== undefined\n && (type === '[object NodeList]' || type === '[object HTMLCollection]')\n && ('length' in value)\n && (value.length === 0 || exports.node(value[0]));\n};\n\n/**\n * Check if argument is a string.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.string = function(value) {\n return typeof value === 'string'\n || value instanceof String;\n};\n\n/**\n * Check if argument is a function.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.fn = function(value) {\n var type = Object.prototype.toString.call(value);\n\n return type === '[object Function]';\n};\n\n\n/***/ }),\n\n/***/ 370:\n/***/ (function(module, __unused_webpack_exports, __webpack_require__) {\n\nvar is = __webpack_require__(879);\nvar delegate = __webpack_require__(438);\n\n/**\n * Validates all params and calls the right\n * listener function based on its target type.\n *\n * @param {String|HTMLElement|HTMLCollection|NodeList} target\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listen(target, type, callback) {\n if (!target && !type && !callback) {\n throw new Error('Missing required arguments');\n }\n\n if (!is.string(type)) {\n throw new TypeError('Second argument must be a String');\n }\n\n if (!is.fn(callback)) {\n throw new TypeError('Third argument must be a Function');\n }\n\n if (is.node(target)) {\n return listenNode(target, type, callback);\n }\n else if (is.nodeList(target)) {\n return listenNodeList(target, type, callback);\n }\n else if (is.string(target)) {\n return listenSelector(target, type, callback);\n }\n else {\n throw new TypeError('First argument must be a String, HTMLElement, HTMLCollection, or NodeList');\n }\n}\n\n/**\n * Adds an event listener to a HTML element\n * and returns a remove listener function.\n *\n * @param {HTMLElement} node\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenNode(node, type, callback) {\n node.addEventListener(type, callback);\n\n return {\n destroy: function() {\n node.removeEventListener(type, callback);\n }\n }\n}\n\n/**\n * Add an event listener to a list of HTML elements\n * and returns a remove listener function.\n *\n * @param {NodeList|HTMLCollection} nodeList\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenNodeList(nodeList, type, callback) {\n Array.prototype.forEach.call(nodeList, function(node) {\n node.addEventListener(type, callback);\n });\n\n return {\n destroy: function() {\n Array.prototype.forEach.call(nodeList, function(node) {\n node.removeEventListener(type, callback);\n });\n }\n }\n}\n\n/**\n * Add an event listener to a selector\n * and returns a remove listener function.\n *\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenSelector(selector, type, callback) {\n return delegate(document.body, selector, type, callback);\n}\n\nmodule.exports = listen;\n\n\n/***/ }),\n\n/***/ 817:\n/***/ (function(module) {\n\nfunction select(element) {\n var selectedText;\n\n if (element.nodeName === 'SELECT') {\n element.focus();\n\n selectedText = element.value;\n }\n else if (element.nodeName === 'INPUT' || element.nodeName 
=== 'TEXTAREA') {\n var isReadOnly = element.hasAttribute('readonly');\n\n if (!isReadOnly) {\n element.setAttribute('readonly', '');\n }\n\n element.select();\n element.setSelectionRange(0, element.value.length);\n\n if (!isReadOnly) {\n element.removeAttribute('readonly');\n }\n\n selectedText = element.value;\n }\n else {\n if (element.hasAttribute('contenteditable')) {\n element.focus();\n }\n\n var selection = window.getSelection();\n var range = document.createRange();\n\n range.selectNodeContents(element);\n selection.removeAllRanges();\n selection.addRange(range);\n\n selectedText = selection.toString();\n }\n\n return selectedText;\n}\n\nmodule.exports = select;\n\n\n/***/ }),\n\n/***/ 279:\n/***/ (function(module) {\n\nfunction E () {\n // Keep this empty so it's easier to inherit from\n // (via https://github.com/lipsmack from https://github.com/scottcorgan/tiny-emitter/issues/3)\n}\n\nE.prototype = {\n on: function (name, callback, ctx) {\n var e = this.e || (this.e = {});\n\n (e[name] || (e[name] = [])).push({\n fn: callback,\n ctx: ctx\n });\n\n return this;\n },\n\n once: function (name, callback, ctx) {\n var self = this;\n function listener () {\n self.off(name, listener);\n callback.apply(ctx, arguments);\n };\n\n listener._ = callback\n return this.on(name, listener, ctx);\n },\n\n emit: function (name) {\n var data = [].slice.call(arguments, 1);\n var evtArr = ((this.e || (this.e = {}))[name] || []).slice();\n var i = 0;\n var len = evtArr.length;\n\n for (i; i < len; i++) {\n evtArr[i].fn.apply(evtArr[i].ctx, data);\n }\n\n return this;\n },\n\n off: function (name, callback) {\n var e = this.e || (this.e = {});\n var evts = e[name];\n var liveEvents = [];\n\n if (evts && callback) {\n for (var i = 0, len = evts.length; i < len; i++) {\n if (evts[i].fn !== callback && evts[i].fn._ !== callback)\n liveEvents.push(evts[i]);\n }\n }\n\n // Remove event from queue to prevent memory leak\n // Suggested by https://github.com/lazd\n // Ref: https://github.com/scottcorgan/tiny-emitter/commit/c6ebfaa9bc973b33d110a84a307742b7cf94c953#commitcomment-5024910\n\n (liveEvents.length)\n ? 
e[name] = liveEvents\n : delete e[name];\n\n return this;\n }\n};\n\nmodule.exports = E;\nmodule.exports.TinyEmitter = E;\n\n\n/***/ })\n\n/******/ \t});\n/************************************************************************/\n/******/ \t// The module cache\n/******/ \tvar __webpack_module_cache__ = {};\n/******/ \t\n/******/ \t// The require function\n/******/ \tfunction __webpack_require__(moduleId) {\n/******/ \t\t// Check if module is in cache\n/******/ \t\tif(__webpack_module_cache__[moduleId]) {\n/******/ \t\t\treturn __webpack_module_cache__[moduleId].exports;\n/******/ \t\t}\n/******/ \t\t// Create a new module (and put it into the cache)\n/******/ \t\tvar module = __webpack_module_cache__[moduleId] = {\n/******/ \t\t\t// no module.id needed\n/******/ \t\t\t// no module.loaded needed\n/******/ \t\t\texports: {}\n/******/ \t\t};\n/******/ \t\n/******/ \t\t// Execute the module function\n/******/ \t\t__webpack_modules__[moduleId](module, module.exports, __webpack_require__);\n/******/ \t\n/******/ \t\t// Return the exports of the module\n/******/ \t\treturn module.exports;\n/******/ \t}\n/******/ \t\n/************************************************************************/\n/******/ \t/* webpack/runtime/compat get default export */\n/******/ \t!function() {\n/******/ \t\t// getDefaultExport function for compatibility with non-harmony modules\n/******/ \t\t__webpack_require__.n = function(module) {\n/******/ \t\t\tvar getter = module && module.__esModule ?\n/******/ \t\t\t\tfunction() { return module['default']; } :\n/******/ \t\t\t\tfunction() { return module; };\n/******/ \t\t\t__webpack_require__.d(getter, { a: getter });\n/******/ \t\t\treturn getter;\n/******/ \t\t};\n/******/ \t}();\n/******/ \t\n/******/ \t/* webpack/runtime/define property getters */\n/******/ \t!function() {\n/******/ \t\t// define getter functions for harmony exports\n/******/ \t\t__webpack_require__.d = function(exports, definition) {\n/******/ \t\t\tfor(var key in definition) {\n/******/ \t\t\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n/******/ \t\t\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n/******/ \t\t\t\t}\n/******/ \t\t\t}\n/******/ \t\t};\n/******/ \t}();\n/******/ \t\n/******/ \t/* webpack/runtime/hasOwnProperty shorthand */\n/******/ \t!function() {\n/******/ \t\t__webpack_require__.o = function(obj, prop) { return Object.prototype.hasOwnProperty.call(obj, prop); }\n/******/ \t}();\n/******/ \t\n/************************************************************************/\n/******/ \t// module exports must be returned from runtime so entry inlining is disabled\n/******/ \t// startup\n/******/ \t// Load entry module and return exports\n/******/ \treturn __webpack_require__(686);\n/******/ })()\n.default;\n});", "/*!\n * escape-html\n * Copyright(c) 2012-2013 TJ Holowaychuk\n * Copyright(c) 2015 Andreas Lubbe\n * Copyright(c) 2015 Tiancheng \"Timothy\" Gu\n * MIT Licensed\n */\n\n'use strict';\n\n/**\n * Module variables.\n * @private\n */\n\nvar matchHtmlRegExp = /[\"'&<>]/;\n\n/**\n * Module exports.\n * @public\n */\n\nmodule.exports = escapeHtml;\n\n/**\n * Escape special characters in the given string of html.\n *\n * @param {string} string The string to escape for inserting into HTML\n * @return {string}\n * @public\n */\n\nfunction escapeHtml(string) {\n var str = '' + string;\n var match = matchHtmlRegExp.exec(str);\n\n if (!match) {\n return str;\n }\n\n var escape;\n var html = '';\n var index = 0;\n 
var lastIndex = 0;\n\n for (index = match.index; index < str.length; index++) {\n switch (str.charCodeAt(index)) {\n case 34: // \"\n escape = '"';\n break;\n case 38: // &\n escape = '&';\n break;\n case 39: // '\n escape = ''';\n break;\n case 60: // <\n escape = '<';\n break;\n case 62: // >\n escape = '>';\n break;\n default:\n continue;\n }\n\n if (lastIndex !== index) {\n html += str.substring(lastIndex, index);\n }\n\n lastIndex = index + 1;\n html += escape;\n }\n\n return lastIndex !== index\n ? html + str.substring(lastIndex, index)\n : html;\n}\n", "Array.prototype.flat||Object.defineProperty(Array.prototype,\"flat\",{configurable:!0,value:function r(){var t=isNaN(arguments[0])?1:Number(arguments[0]);return t?Array.prototype.reduce.call(this,function(a,e){return Array.isArray(e)?a.push.apply(a,r.call(e,t-1)):a.push(e),a},[]):Array.prototype.slice.call(this)},writable:!0}),Array.prototype.flatMap||Object.defineProperty(Array.prototype,\"flatMap\",{configurable:!0,value:function(r){return Array.prototype.map.apply(this,arguments).flat()},writable:!0})\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport \"array-flat-polyfill\"\nimport \"focus-visible\"\nimport \"unfetch/polyfill\"\nimport \"url-polyfill\"\n\nimport {\n EMPTY,\n NEVER,\n Subject,\n defer,\n delay,\n filter,\n map,\n merge,\n mergeWith,\n shareReplay,\n switchMap\n} from \"rxjs\"\n\nimport { configuration, feature } from \"./_\"\nimport {\n at,\n getOptionalElement,\n requestJSON,\n setToggle,\n watchDocument,\n watchKeyboard,\n watchLocation,\n watchLocationTarget,\n watchMedia,\n watchPrint,\n watchViewport\n} from \"./browser\"\nimport {\n getComponentElement,\n getComponentElements,\n mountAnnounce,\n mountBackToTop,\n mountConsent,\n mountContent,\n mountDialog,\n mountHeader,\n mountHeaderTitle,\n mountPalette,\n mountSearch,\n mountSearchHiglight,\n mountSidebar,\n mountSource,\n mountTableOfContents,\n mountTabs,\n watchHeader,\n watchMain\n} from \"./components\"\nimport {\n SearchIndex,\n setupClipboardJS,\n setupInstantLoading,\n setupVersionSelector\n} from \"./integrations\"\nimport {\n patchIndeterminate,\n patchScrollfix,\n patchScrolllock\n} from \"./patches\"\nimport \"./polyfills\"\n\n/* ----------------------------------------------------------------------------\n * Application\n * ------------------------------------------------------------------------- */\n\n/* Yay, JavaScript is available */\ndocument.documentElement.classList.remove(\"no-js\")\ndocument.documentElement.classList.add(\"js\")\n\n/* Set up navigation observables and subjects */\nconst document$ = watchDocument()\nconst location$ = watchLocation()\nconst target$ = watchLocationTarget()\nconst keyboard$ = watchKeyboard()\n\n/* Set up media observables */\nconst viewport$ = watchViewport()\nconst tablet$ = watchMedia(\"(min-width: 960px)\")\nconst screen$ = watchMedia(\"(min-width: 1220px)\")\nconst print$ = watchPrint()\n\n/* Retrieve search index, if search is enabled */\nconst config = configuration()\nconst index$ = document.forms.namedItem(\"search\")\n ? 
__search?.index || requestJSON(\n new URL(\"search/search_index.json\", config.base)\n )\n : NEVER\n\n/* Set up Clipboard.js integration */\nconst alert$ = new Subject()\nsetupClipboardJS({ alert$ })\n\n/* Set up instant loading, if enabled */\nif (feature(\"navigation.instant\"))\n setupInstantLoading({ document$, location$, viewport$ })\n\n/* Set up version selector */\nif (config.version?.provider === \"mike\")\n setupVersionSelector({ document$ })\n\n/* Always close drawer and search on navigation */\nmerge(location$, target$)\n .pipe(\n delay(125)\n )\n .subscribe(() => {\n setToggle(\"drawer\", false)\n setToggle(\"search\", false)\n })\n\n/* Set up global keyboard handlers */\nkeyboard$\n .pipe(\n filter(({ mode }) => mode === \"global\")\n )\n .subscribe(key => {\n switch (key.type) {\n\n /* Go to previous page */\n case \"p\":\n case \",\":\n const prev = getOptionalElement(\"[href][rel=prev]\")\n if (typeof prev !== \"undefined\")\n prev.click()\n break\n\n /* Go to next page */\n case \"n\":\n case \".\":\n const next = getOptionalElement(\"[href][rel=next]\")\n if (typeof next !== \"undefined\")\n next.click()\n break\n }\n })\n\n/* Set up patches */\npatchIndeterminate({ document$, tablet$ })\npatchScrollfix({ document$ })\npatchScrolllock({ viewport$, tablet$ })\n\n/* Set up header and main area observable */\nconst header$ = watchHeader(getComponentElement(\"header\"), { viewport$ })\nconst main$ = document$\n .pipe(\n map(() => getComponentElement(\"main\")),\n switchMap(el => watchMain(el, { viewport$, header$ })),\n shareReplay(1)\n )\n\n/* Set up control component observables */\nconst control$ = merge(\n\n /* Consent */\n ...getComponentElements(\"consent\")\n .map(el => mountConsent(el, { target$ })),\n\n /* Dialog */\n ...getComponentElements(\"dialog\")\n .map(el => mountDialog(el, { alert$ })),\n\n /* Header */\n ...getComponentElements(\"header\")\n .map(el => mountHeader(el, { viewport$, header$, main$ })),\n\n /* Color palette */\n ...getComponentElements(\"palette\")\n .map(el => mountPalette(el)),\n\n /* Search */\n ...getComponentElements(\"search\")\n .map(el => mountSearch(el, { index$, keyboard$ })),\n\n /* Repository information */\n ...getComponentElements(\"source\")\n .map(el => mountSource(el))\n)\n\n/* Set up content component observables */\nconst content$ = defer(() => merge(\n\n /* Announcement bar */\n ...getComponentElements(\"announce\")\n .map(el => mountAnnounce(el)),\n\n /* Content */\n ...getComponentElements(\"content\")\n .map(el => mountContent(el, { viewport$, target$, print$ })),\n\n /* Search highlighting */\n ...getComponentElements(\"content\")\n .map(el => feature(\"search.highlight\")\n ? mountSearchHiglight(el, { index$, location$ })\n : EMPTY\n ),\n\n /* Header title */\n ...getComponentElements(\"header-title\")\n .map(el => mountHeaderTitle(el, { viewport$, header$ })),\n\n /* Sidebar */\n ...getComponentElements(\"sidebar\")\n .map(el => el.getAttribute(\"data-md-type\") === \"navigation\"\n ? 
at(screen$, () => mountSidebar(el, { viewport$, header$, main$ }))\n : at(tablet$, () => mountSidebar(el, { viewport$, header$, main$ }))\n ),\n\n /* Navigation tabs */\n ...getComponentElements(\"tabs\")\n .map(el => mountTabs(el, { viewport$, header$ })),\n\n /* Table of contents */\n ...getComponentElements(\"toc\")\n .map(el => mountTableOfContents(el, { viewport$, header$, target$ })),\n\n /* Back-to-top button */\n ...getComponentElements(\"top\")\n .map(el => mountBackToTop(el, { viewport$, header$, main$, target$ }))\n))\n\n/* Set up component observables */\nconst component$ = document$\n .pipe(\n switchMap(() => content$),\n mergeWith(control$),\n shareReplay(1)\n )\n\n/* Subscribe to all components */\ncomponent$.subscribe()\n\n/* ----------------------------------------------------------------------------\n * Exports\n * ------------------------------------------------------------------------- */\n\nwindow.document$ = document$ /* Document observable */\nwindow.location$ = location$ /* Location subject */\nwindow.target$ = target$ /* Location target observable */\nwindow.keyboard$ = keyboard$ /* Keyboard observable */\nwindow.viewport$ = viewport$ /* Viewport observable */\nwindow.tablet$ = tablet$ /* Media tablet observable */\nwindow.screen$ = screen$ /* Media screen observable */\nwindow.print$ = print$ /* Media print observable */\nwindow.alert$ = alert$ /* Alert subject */\nwindow.component$ = component$ /* Component observable */\n", "self.fetch||(self.fetch=function(e,n){return n=n||{},new Promise(function(t,s){var r=new XMLHttpRequest,o=[],u=[],i={},a=function(){return{ok:2==(r.status/100|0),statusText:r.statusText,status:r.status,url:r.responseURL,text:function(){return Promise.resolve(r.responseText)},json:function(){return Promise.resolve(r.responseText).then(JSON.parse)},blob:function(){return Promise.resolve(new Blob([r.response]))},clone:a,headers:{keys:function(){return o},entries:function(){return u},get:function(e){return i[e.toLowerCase()]},has:function(e){return e.toLowerCase()in i}}}};for(var c in r.open(n.method||\"get\",e,!0),r.onload=function(){r.getAllResponseHeaders().replace(/^(.*?):[^\\S\\n]*([\\s\\S]*?)$/gm,function(e,n,t){o.push(n=n.toLowerCase()),u.push([n,t]),i[n]=i[n]?i[n]+\",\"+t:t}),t(a())},r.onerror=s,r.withCredentials=\"include\"==n.credentials,n.headers)r.setRequestHeader(c,n.headers[c]);r.send(n.body||null)})});\n", "import tslib from '../tslib.js';\r\nconst {\r\n __extends,\r\n __assign,\r\n __rest,\r\n __decorate,\r\n __param,\r\n __metadata,\r\n __awaiter,\r\n __generator,\r\n __exportStar,\r\n __createBinding,\r\n __values,\r\n __read,\r\n __spread,\r\n __spreadArrays,\r\n __spreadArray,\r\n __await,\r\n __asyncGenerator,\r\n __asyncDelegator,\r\n __asyncValues,\r\n __makeTemplateObject,\r\n __importStar,\r\n __importDefault,\r\n __classPrivateFieldGet,\r\n __classPrivateFieldSet,\r\n} = tslib;\r\nexport {\r\n __extends,\r\n __assign,\r\n __rest,\r\n __decorate,\r\n __param,\r\n __metadata,\r\n __awaiter,\r\n __generator,\r\n __exportStar,\r\n __createBinding,\r\n __values,\r\n __read,\r\n __spread,\r\n __spreadArrays,\r\n __spreadArray,\r\n __await,\r\n __asyncGenerator,\r\n __asyncDelegator,\r\n __asyncValues,\r\n __makeTemplateObject,\r\n __importStar,\r\n __importDefault,\r\n __classPrivateFieldGet,\r\n __classPrivateFieldSet,\r\n};\r\n", null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, 
null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n ReplaySubject,\n Subject,\n fromEvent\n} from \"rxjs\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch document\n *\n * Documents are implemented as subjects, so all downstream observables are\n * automatically updated when a new document is emitted.\n *\n * @returns Document subject\n */\nexport function watchDocument(): Subject {\n const document$ = new ReplaySubject(1)\n fromEvent(document, \"DOMContentLoaded\", { once: true })\n .subscribe(() => document$.next(document))\n\n /* Return document */\n return document$\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve all elements matching the query selector\n *\n * @template T - Element type\n *\n * @param selector - Query selector\n * @param node - Node of reference\n *\n * @returns Elements\n */\nexport function getElements(\n selector: T, node?: ParentNode\n): HTMLElementTagNameMap[T][]\n\nexport function getElements(\n selector: string, node?: ParentNode\n): T[]\n\nexport function getElements(\n selector: string, node: ParentNode = document\n): T[] {\n return Array.from(node.querySelectorAll(selector))\n}\n\n/**\n * Retrieve an element matching a query selector or throw a reference error\n *\n * Note that this function assumes that the element is present. If unsure if an\n * element is existent, use the `getOptionalElement` function instead.\n *\n * @template T - Element type\n *\n * @param selector - Query selector\n * @param node - Node of reference\n *\n * @returns Element\n */\nexport function getElement(\n selector: T, node?: ParentNode\n): HTMLElementTagNameMap[T]\n\nexport function getElement(\n selector: string, node?: ParentNode\n): T\n\nexport function getElement(\n selector: string, node: ParentNode = document\n): T {\n const el = getOptionalElement(selector, node)\n if (typeof el === \"undefined\")\n throw new ReferenceError(\n `Missing element: expected \"${selector}\" to be present`\n )\n\n /* Return element */\n return el\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Retrieve an optional element matching the query selector\n *\n * @template T - Element type\n *\n * @param selector - Query selector\n * @param node - Node of reference\n *\n * @returns Element or nothing\n */\nexport function getOptionalElement(\n selector: T, node?: ParentNode\n): HTMLElementTagNameMap[T] | undefined\n\nexport function getOptionalElement(\n selector: string, node?: ParentNode\n): T | undefined\n\nexport function getOptionalElement(\n selector: string, node: ParentNode = document\n): T | undefined {\n return node.querySelector(selector) || undefined\n}\n\n/**\n * Retrieve the currently active element\n *\n * @returns Element or nothing\n */\nexport function getActiveElement(): HTMLElement | undefined {\n return document.activeElement instanceof HTMLElement\n ? 
document.activeElement || undefined\n : undefined\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n debounceTime,\n distinctUntilChanged,\n fromEvent,\n map,\n merge,\n startWith\n} from \"rxjs\"\n\nimport { getActiveElement } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch element focus\n *\n * Previously, this function used `focus` and `blur` events to determine whether\n * an element is focused, but this doesn't work if there are focusable elements\n * within the elements itself. A better solutions are `focusin` and `focusout`\n * events, which bubble up the tree and allow for more fine-grained control.\n *\n * `debounceTime` is necessary, because when a focus change happens inside an\n * element, the observable would first emit `false` and then `true` again.\n *\n * @param el - Element\n *\n * @returns Element focus observable\n */\nexport function watchElementFocus(\n el: HTMLElement\n): Observable {\n return merge(\n fromEvent(document.body, \"focusin\"),\n fromEvent(document.body, \"focusout\")\n )\n .pipe(\n debounceTime(1),\n map(() => {\n const active = getActiveElement()\n return typeof active !== \"undefined\"\n ? el.contains(active)\n : false\n }),\n startWith(el === getActiveElement()),\n distinctUntilChanged()\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n animationFrameScheduler,\n auditTime,\n fromEvent,\n map,\n merge,\n startWith\n} from \"rxjs\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Element offset\n */\nexport interface ElementOffset {\n x: number /* Horizontal offset */\n y: number /* Vertical offset */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve element offset\n *\n * @param el - Element\n *\n * @returns Element offset\n */\nexport function getElementOffset(\n el: HTMLElement\n): ElementOffset {\n return {\n x: el.offsetLeft,\n y: el.offsetTop\n }\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Watch element offset\n *\n * @param el - Element\n *\n * @returns Element offset observable\n */\nexport function watchElementOffset(\n el: HTMLElement\n): Observable {\n return merge(\n fromEvent(window, \"load\"),\n fromEvent(window, \"resize\")\n )\n .pipe(\n auditTime(0, animationFrameScheduler),\n map(() => getElementOffset(el)),\n startWith(getElementOffset(el))\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n animationFrameScheduler,\n auditTime,\n fromEvent,\n map,\n merge,\n startWith\n} from \"rxjs\"\n\nimport { ElementOffset } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve element content offset (= scroll offset)\n *\n * @param el - Element\n *\n * @returns Element content offset\n */\nexport function getElementContentOffset(\n el: HTMLElement\n): ElementOffset {\n return {\n x: el.scrollLeft,\n y: el.scrollTop\n }\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Watch element content offset\n *\n * @param el - Element\n *\n * @returns Element content offset observable\n */\nexport function watchElementContentOffset(\n el: HTMLElement\n): Observable {\n return merge(\n fromEvent(el, \"scroll\"),\n fromEvent(window, \"resize\")\n )\n .pipe(\n auditTime(0, animationFrameScheduler),\n map(() => getElementContentOffset(el)),\n startWith(getElementContentOffset(el))\n )\n}\n", "/**\r\n * A collection of shims that provide minimal functionality of the ES6 collections.\r\n *\r\n * These implementations are not meant to be used outside of the ResizeObserver\r\n * modules as they cover only a limited range of use cases.\r\n */\r\n/* eslint-disable require-jsdoc, valid-jsdoc */\r\nvar MapShim = (function () {\r\n if (typeof Map !== 'undefined') {\r\n return Map;\r\n }\r\n /**\r\n * Returns index in provided array that matches the specified key.\r\n *\r\n * @param {Array} arr\r\n * @param {*} key\r\n * @returns {number}\r\n */\r\n function getIndex(arr, key) {\r\n var result = -1;\r\n arr.some(function (entry, index) {\r\n if (entry[0] === key) {\r\n result = index;\r\n return true;\r\n }\r\n return false;\r\n });\r\n return result;\r\n }\r\n return /** @class */ (function () {\r\n function class_1() {\r\n this.__entries__ = [];\r\n }\r\n Object.defineProperty(class_1.prototype, \"size\", {\r\n /**\r\n * @returns {boolean}\r\n */\r\n get: function () {\r\n return this.__entries__.length;\r\n },\r\n enumerable: true,\r\n configurable: true\r\n });\r\n /**\r\n * @param {*} key\r\n * @returns {*}\r\n */\r\n class_1.prototype.get = function (key) {\r\n var index = getIndex(this.__entries__, key);\r\n var entry = this.__entries__[index];\r\n return entry && entry[1];\r\n };\r\n /**\r\n * @param {*} key\r\n * @param {*} value\r\n * @returns {void}\r\n */\r\n class_1.prototype.set = function (key, value) {\r\n var index = getIndex(this.__entries__, key);\r\n if (~index) {\r\n this.__entries__[index][1] = value;\r\n }\r\n else {\r\n this.__entries__.push([key, value]);\r\n }\r\n };\r\n /**\r\n * @param {*} key\r\n * @returns {void}\r\n */\r\n class_1.prototype.delete = function (key) {\r\n var entries = this.__entries__;\r\n var index = getIndex(entries, key);\r\n if (~index) {\r\n entries.splice(index, 1);\r\n }\r\n };\r\n /**\r\n * @param {*} key\r\n * @returns {void}\r\n */\r\n class_1.prototype.has = function (key) {\r\n return !!~getIndex(this.__entries__, key);\r\n };\r\n /**\r\n * @returns {void}\r\n */\r\n class_1.prototype.clear = function () {\r\n this.__entries__.splice(0);\r\n };\r\n 
/**\r\n * @param {Function} callback\r\n * @param {*} [ctx=null]\r\n * @returns {void}\r\n */\r\n class_1.prototype.forEach = function (callback, ctx) {\r\n if (ctx === void 0) { ctx = null; }\r\n for (var _i = 0, _a = this.__entries__; _i < _a.length; _i++) {\r\n var entry = _a[_i];\r\n callback.call(ctx, entry[1], entry[0]);\r\n }\r\n };\r\n return class_1;\r\n }());\r\n})();\n\n/**\r\n * Detects whether window and document objects are available in current environment.\r\n */\r\nvar isBrowser = typeof window !== 'undefined' && typeof document !== 'undefined' && window.document === document;\n\n// Returns global object of a current environment.\r\nvar global$1 = (function () {\r\n if (typeof global !== 'undefined' && global.Math === Math) {\r\n return global;\r\n }\r\n if (typeof self !== 'undefined' && self.Math === Math) {\r\n return self;\r\n }\r\n if (typeof window !== 'undefined' && window.Math === Math) {\r\n return window;\r\n }\r\n // eslint-disable-next-line no-new-func\r\n return Function('return this')();\r\n})();\n\n/**\r\n * A shim for the requestAnimationFrame which falls back to the setTimeout if\r\n * first one is not supported.\r\n *\r\n * @returns {number} Requests' identifier.\r\n */\r\nvar requestAnimationFrame$1 = (function () {\r\n if (typeof requestAnimationFrame === 'function') {\r\n // It's required to use a bounded function because IE sometimes throws\r\n // an \"Invalid calling object\" error if rAF is invoked without the global\r\n // object on the left hand side.\r\n return requestAnimationFrame.bind(global$1);\r\n }\r\n return function (callback) { return setTimeout(function () { return callback(Date.now()); }, 1000 / 60); };\r\n})();\n\n// Defines minimum timeout before adding a trailing call.\r\nvar trailingTimeout = 2;\r\n/**\r\n * Creates a wrapper function which ensures that provided callback will be\r\n * invoked only once during the specified delay period.\r\n *\r\n * @param {Function} callback - Function to be invoked after the delay period.\r\n * @param {number} delay - Delay after which to invoke callback.\r\n * @returns {Function}\r\n */\r\nfunction throttle (callback, delay) {\r\n var leadingCall = false, trailingCall = false, lastCallTime = 0;\r\n /**\r\n * Invokes the original callback function and schedules new invocation if\r\n * the \"proxy\" was called during current request.\r\n *\r\n * @returns {void}\r\n */\r\n function resolvePending() {\r\n if (leadingCall) {\r\n leadingCall = false;\r\n callback();\r\n }\r\n if (trailingCall) {\r\n proxy();\r\n }\r\n }\r\n /**\r\n * Callback invoked after the specified delay. 
It will further postpone\r\n * invocation of the original function delegating it to the\r\n * requestAnimationFrame.\r\n *\r\n * @returns {void}\r\n */\r\n function timeoutCallback() {\r\n requestAnimationFrame$1(resolvePending);\r\n }\r\n /**\r\n * Schedules invocation of the original function.\r\n *\r\n * @returns {void}\r\n */\r\n function proxy() {\r\n var timeStamp = Date.now();\r\n if (leadingCall) {\r\n // Reject immediately following calls.\r\n if (timeStamp - lastCallTime < trailingTimeout) {\r\n return;\r\n }\r\n // Schedule new call to be in invoked when the pending one is resolved.\r\n // This is important for \"transitions\" which never actually start\r\n // immediately so there is a chance that we might miss one if change\r\n // happens amids the pending invocation.\r\n trailingCall = true;\r\n }\r\n else {\r\n leadingCall = true;\r\n trailingCall = false;\r\n setTimeout(timeoutCallback, delay);\r\n }\r\n lastCallTime = timeStamp;\r\n }\r\n return proxy;\r\n}\n\n// Minimum delay before invoking the update of observers.\r\nvar REFRESH_DELAY = 20;\r\n// A list of substrings of CSS properties used to find transition events that\r\n// might affect dimensions of observed elements.\r\nvar transitionKeys = ['top', 'right', 'bottom', 'left', 'width', 'height', 'size', 'weight'];\r\n// Check if MutationObserver is available.\r\nvar mutationObserverSupported = typeof MutationObserver !== 'undefined';\r\n/**\r\n * Singleton controller class which handles updates of ResizeObserver instances.\r\n */\r\nvar ResizeObserverController = /** @class */ (function () {\r\n /**\r\n * Creates a new instance of ResizeObserverController.\r\n *\r\n * @private\r\n */\r\n function ResizeObserverController() {\r\n /**\r\n * Indicates whether DOM listeners have been added.\r\n *\r\n * @private {boolean}\r\n */\r\n this.connected_ = false;\r\n /**\r\n * Tells that controller has subscribed for Mutation Events.\r\n *\r\n * @private {boolean}\r\n */\r\n this.mutationEventsAdded_ = false;\r\n /**\r\n * Keeps reference to the instance of MutationObserver.\r\n *\r\n * @private {MutationObserver}\r\n */\r\n this.mutationsObserver_ = null;\r\n /**\r\n * A list of connected observers.\r\n *\r\n * @private {Array}\r\n */\r\n this.observers_ = [];\r\n this.onTransitionEnd_ = this.onTransitionEnd_.bind(this);\r\n this.refresh = throttle(this.refresh.bind(this), REFRESH_DELAY);\r\n }\r\n /**\r\n * Adds observer to observers list.\r\n *\r\n * @param {ResizeObserverSPI} observer - Observer to be added.\r\n * @returns {void}\r\n */\r\n ResizeObserverController.prototype.addObserver = function (observer) {\r\n if (!~this.observers_.indexOf(observer)) {\r\n this.observers_.push(observer);\r\n }\r\n // Add listeners if they haven't been added yet.\r\n if (!this.connected_) {\r\n this.connect_();\r\n }\r\n };\r\n /**\r\n * Removes observer from observers list.\r\n *\r\n * @param {ResizeObserverSPI} observer - Observer to be removed.\r\n * @returns {void}\r\n */\r\n ResizeObserverController.prototype.removeObserver = function (observer) {\r\n var observers = this.observers_;\r\n var index = observers.indexOf(observer);\r\n // Remove observer if it's present in registry.\r\n if (~index) {\r\n observers.splice(index, 1);\r\n }\r\n // Remove listeners if controller has no connected observers.\r\n if (!observers.length && this.connected_) {\r\n this.disconnect_();\r\n }\r\n };\r\n /**\r\n * Invokes the update of observers. 
It will continue running updates insofar\r\n * it detects changes.\r\n *\r\n * @returns {void}\r\n */\r\n ResizeObserverController.prototype.refresh = function () {\r\n var changesDetected = this.updateObservers_();\r\n // Continue running updates if changes have been detected as there might\r\n // be future ones caused by CSS transitions.\r\n if (changesDetected) {\r\n this.refresh();\r\n }\r\n };\r\n /**\r\n * Updates every observer from observers list and notifies them of queued\r\n * entries.\r\n *\r\n * @private\r\n * @returns {boolean} Returns \"true\" if any observer has detected changes in\r\n * dimensions of it's elements.\r\n */\r\n ResizeObserverController.prototype.updateObservers_ = function () {\r\n // Collect observers that have active observations.\r\n var activeObservers = this.observers_.filter(function (observer) {\r\n return observer.gatherActive(), observer.hasActive();\r\n });\r\n // Deliver notifications in a separate cycle in order to avoid any\r\n // collisions between observers, e.g. when multiple instances of\r\n // ResizeObserver are tracking the same element and the callback of one\r\n // of them changes content dimensions of the observed target. Sometimes\r\n // this may result in notifications being blocked for the rest of observers.\r\n activeObservers.forEach(function (observer) { return observer.broadcastActive(); });\r\n return activeObservers.length > 0;\r\n };\r\n /**\r\n * Initializes DOM listeners.\r\n *\r\n * @private\r\n * @returns {void}\r\n */\r\n ResizeObserverController.prototype.connect_ = function () {\r\n // Do nothing if running in a non-browser environment or if listeners\r\n // have been already added.\r\n if (!isBrowser || this.connected_) {\r\n return;\r\n }\r\n // Subscription to the \"Transitionend\" event is used as a workaround for\r\n // delayed transitions. This way it's possible to capture at least the\r\n // final state of an element.\r\n document.addEventListener('transitionend', this.onTransitionEnd_);\r\n window.addEventListener('resize', this.refresh);\r\n if (mutationObserverSupported) {\r\n this.mutationsObserver_ = new MutationObserver(this.refresh);\r\n this.mutationsObserver_.observe(document, {\r\n attributes: true,\r\n childList: true,\r\n characterData: true,\r\n subtree: true\r\n });\r\n }\r\n else {\r\n document.addEventListener('DOMSubtreeModified', this.refresh);\r\n this.mutationEventsAdded_ = true;\r\n }\r\n this.connected_ = true;\r\n };\r\n /**\r\n * Removes DOM listeners.\r\n *\r\n * @private\r\n * @returns {void}\r\n */\r\n ResizeObserverController.prototype.disconnect_ = function () {\r\n // Do nothing if running in a non-browser environment or if listeners\r\n // have been already removed.\r\n if (!isBrowser || !this.connected_) {\r\n return;\r\n }\r\n document.removeEventListener('transitionend', this.onTransitionEnd_);\r\n window.removeEventListener('resize', this.refresh);\r\n if (this.mutationsObserver_) {\r\n this.mutationsObserver_.disconnect();\r\n }\r\n if (this.mutationEventsAdded_) {\r\n document.removeEventListener('DOMSubtreeModified', this.refresh);\r\n }\r\n this.mutationsObserver_ = null;\r\n this.mutationEventsAdded_ = false;\r\n this.connected_ = false;\r\n };\r\n /**\r\n * \"Transitionend\" event handler.\r\n *\r\n * @private\r\n * @param {TransitionEvent} event\r\n * @returns {void}\r\n */\r\n ResizeObserverController.prototype.onTransitionEnd_ = function (_a) {\r\n var _b = _a.propertyName, propertyName = _b === void 0 ? 
'' : _b;\r\n // Detect whether transition may affect dimensions of an element.\r\n var isReflowProperty = transitionKeys.some(function (key) {\r\n return !!~propertyName.indexOf(key);\r\n });\r\n if (isReflowProperty) {\r\n this.refresh();\r\n }\r\n };\r\n /**\r\n * Returns instance of the ResizeObserverController.\r\n *\r\n * @returns {ResizeObserverController}\r\n */\r\n ResizeObserverController.getInstance = function () {\r\n if (!this.instance_) {\r\n this.instance_ = new ResizeObserverController();\r\n }\r\n return this.instance_;\r\n };\r\n /**\r\n * Holds reference to the controller's instance.\r\n *\r\n * @private {ResizeObserverController}\r\n */\r\n ResizeObserverController.instance_ = null;\r\n return ResizeObserverController;\r\n}());\n\n/**\r\n * Defines non-writable/enumerable properties of the provided target object.\r\n *\r\n * @param {Object} target - Object for which to define properties.\r\n * @param {Object} props - Properties to be defined.\r\n * @returns {Object} Target object.\r\n */\r\nvar defineConfigurable = (function (target, props) {\r\n for (var _i = 0, _a = Object.keys(props); _i < _a.length; _i++) {\r\n var key = _a[_i];\r\n Object.defineProperty(target, key, {\r\n value: props[key],\r\n enumerable: false,\r\n writable: false,\r\n configurable: true\r\n });\r\n }\r\n return target;\r\n});\n\n/**\r\n * Returns the global object associated with provided element.\r\n *\r\n * @param {Object} target\r\n * @returns {Object}\r\n */\r\nvar getWindowOf = (function (target) {\r\n // Assume that the element is an instance of Node, which means that it\r\n // has the \"ownerDocument\" property from which we can retrieve a\r\n // corresponding global object.\r\n var ownerGlobal = target && target.ownerDocument && target.ownerDocument.defaultView;\r\n // Return the local global object if it's not possible extract one from\r\n // provided element.\r\n return ownerGlobal || global$1;\r\n});\n\n// Placeholder of an empty content rectangle.\r\nvar emptyRect = createRectInit(0, 0, 0, 0);\r\n/**\r\n * Converts provided string to a number.\r\n *\r\n * @param {number|string} value\r\n * @returns {number}\r\n */\r\nfunction toFloat(value) {\r\n return parseFloat(value) || 0;\r\n}\r\n/**\r\n * Extracts borders size from provided styles.\r\n *\r\n * @param {CSSStyleDeclaration} styles\r\n * @param {...string} positions - Borders positions (top, right, ...)\r\n * @returns {number}\r\n */\r\nfunction getBordersSize(styles) {\r\n var positions = [];\r\n for (var _i = 1; _i < arguments.length; _i++) {\r\n positions[_i - 1] = arguments[_i];\r\n }\r\n return positions.reduce(function (size, position) {\r\n var value = styles['border-' + position + '-width'];\r\n return size + toFloat(value);\r\n }, 0);\r\n}\r\n/**\r\n * Extracts paddings sizes from provided styles.\r\n *\r\n * @param {CSSStyleDeclaration} styles\r\n * @returns {Object} Paddings box.\r\n */\r\nfunction getPaddings(styles) {\r\n var positions = ['top', 'right', 'bottom', 'left'];\r\n var paddings = {};\r\n for (var _i = 0, positions_1 = positions; _i < positions_1.length; _i++) {\r\n var position = positions_1[_i];\r\n var value = styles['padding-' + position];\r\n paddings[position] = toFloat(value);\r\n }\r\n return paddings;\r\n}\r\n/**\r\n * Calculates content rectangle of provided SVG element.\r\n *\r\n * @param {SVGGraphicsElement} target - Element content rectangle of which needs\r\n * to be calculated.\r\n * @returns {DOMRectInit}\r\n */\r\nfunction getSVGContentRect(target) {\r\n var bbox = target.getBBox();\r\n 
return createRectInit(0, 0, bbox.width, bbox.height);\r\n}\r\n/**\r\n * Calculates content rectangle of provided HTMLElement.\r\n *\r\n * @param {HTMLElement} target - Element for which to calculate the content rectangle.\r\n * @returns {DOMRectInit}\r\n */\r\nfunction getHTMLElementContentRect(target) {\r\n // Client width & height properties can't be\r\n // used exclusively as they provide rounded values.\r\n var clientWidth = target.clientWidth, clientHeight = target.clientHeight;\r\n // By this condition we can catch all non-replaced inline, hidden and\r\n // detached elements. Though elements with width & height properties less\r\n // than 0.5 will be discarded as well.\r\n //\r\n // Without it we would need to implement separate methods for each of\r\n // those cases and it's not possible to perform a precise and performance\r\n // effective test for hidden elements. E.g. even jQuery's ':visible' filter\r\n // gives wrong results for elements with width & height less than 0.5.\r\n if (!clientWidth && !clientHeight) {\r\n return emptyRect;\r\n }\r\n var styles = getWindowOf(target).getComputedStyle(target);\r\n var paddings = getPaddings(styles);\r\n var horizPad = paddings.left + paddings.right;\r\n var vertPad = paddings.top + paddings.bottom;\r\n // Computed styles of width & height are being used because they are the\r\n // only dimensions available to JS that contain non-rounded values. It could\r\n // be possible to utilize the getBoundingClientRect if only it's data wasn't\r\n // affected by CSS transformations let alone paddings, borders and scroll bars.\r\n var width = toFloat(styles.width), height = toFloat(styles.height);\r\n // Width & height include paddings and borders when the 'border-box' box\r\n // model is applied (except for IE).\r\n if (styles.boxSizing === 'border-box') {\r\n // Following conditions are required to handle Internet Explorer which\r\n // doesn't include paddings and borders to computed CSS dimensions.\r\n //\r\n // We can say that if CSS dimensions + paddings are equal to the \"client\"\r\n // properties then it's either IE, and thus we don't need to subtract\r\n // anything, or an element merely doesn't have paddings/borders styles.\r\n if (Math.round(width + horizPad) !== clientWidth) {\r\n width -= getBordersSize(styles, 'left', 'right') + horizPad;\r\n }\r\n if (Math.round(height + vertPad) !== clientHeight) {\r\n height -= getBordersSize(styles, 'top', 'bottom') + vertPad;\r\n }\r\n }\r\n // Following steps can't be applied to the document's root element as its\r\n // client[Width/Height] properties represent viewport area of the window.\r\n // Besides, it's as well not necessary as the itself neither has\r\n // rendered scroll bars nor it can be clipped.\r\n if (!isDocumentElement(target)) {\r\n // In some browsers (only in Firefox, actually) CSS width & height\r\n // include scroll bars size which can be removed at this step as scroll\r\n // bars are the only difference between rounded dimensions + paddings\r\n // and \"client\" properties, though that is not always true in Chrome.\r\n var vertScrollbar = Math.round(width + horizPad) - clientWidth;\r\n var horizScrollbar = Math.round(height + vertPad) - clientHeight;\r\n // Chrome has a rather weird rounding of \"client\" properties.\r\n // E.g. for an element with content width of 314.2px it sometimes gives\r\n // the client width of 315px and for the width of 314.7px it may give\r\n // 314px. And it doesn't happen all the time. 
So just ignore this delta\r\n // as a non-relevant.\r\n if (Math.abs(vertScrollbar) !== 1) {\r\n width -= vertScrollbar;\r\n }\r\n if (Math.abs(horizScrollbar) !== 1) {\r\n height -= horizScrollbar;\r\n }\r\n }\r\n return createRectInit(paddings.left, paddings.top, width, height);\r\n}\r\n/**\r\n * Checks whether provided element is an instance of the SVGGraphicsElement.\r\n *\r\n * @param {Element} target - Element to be checked.\r\n * @returns {boolean}\r\n */\r\nvar isSVGGraphicsElement = (function () {\r\n // Some browsers, namely IE and Edge, don't have the SVGGraphicsElement\r\n // interface.\r\n if (typeof SVGGraphicsElement !== 'undefined') {\r\n return function (target) { return target instanceof getWindowOf(target).SVGGraphicsElement; };\r\n }\r\n // If it's so, then check that element is at least an instance of the\r\n // SVGElement and that it has the \"getBBox\" method.\r\n // eslint-disable-next-line no-extra-parens\r\n return function (target) { return (target instanceof getWindowOf(target).SVGElement &&\r\n typeof target.getBBox === 'function'); };\r\n})();\r\n/**\r\n * Checks whether provided element is a document element ().\r\n *\r\n * @param {Element} target - Element to be checked.\r\n * @returns {boolean}\r\n */\r\nfunction isDocumentElement(target) {\r\n return target === getWindowOf(target).document.documentElement;\r\n}\r\n/**\r\n * Calculates an appropriate content rectangle for provided html or svg element.\r\n *\r\n * @param {Element} target - Element content rectangle of which needs to be calculated.\r\n * @returns {DOMRectInit}\r\n */\r\nfunction getContentRect(target) {\r\n if (!isBrowser) {\r\n return emptyRect;\r\n }\r\n if (isSVGGraphicsElement(target)) {\r\n return getSVGContentRect(target);\r\n }\r\n return getHTMLElementContentRect(target);\r\n}\r\n/**\r\n * Creates rectangle with an interface of the DOMRectReadOnly.\r\n * Spec: https://drafts.fxtf.org/geometry/#domrectreadonly\r\n *\r\n * @param {DOMRectInit} rectInit - Object with rectangle's x/y coordinates and dimensions.\r\n * @returns {DOMRectReadOnly}\r\n */\r\nfunction createReadOnlyRect(_a) {\r\n var x = _a.x, y = _a.y, width = _a.width, height = _a.height;\r\n // If DOMRectReadOnly is available use it as a prototype for the rectangle.\r\n var Constr = typeof DOMRectReadOnly !== 'undefined' ? 
DOMRectReadOnly : Object;\r\n var rect = Object.create(Constr.prototype);\r\n // Rectangle's properties are not writable and non-enumerable.\r\n defineConfigurable(rect, {\r\n x: x, y: y, width: width, height: height,\r\n top: y,\r\n right: x + width,\r\n bottom: height + y,\r\n left: x\r\n });\r\n return rect;\r\n}\r\n/**\r\n * Creates DOMRectInit object based on the provided dimensions and the x/y coordinates.\r\n * Spec: https://drafts.fxtf.org/geometry/#dictdef-domrectinit\r\n *\r\n * @param {number} x - X coordinate.\r\n * @param {number} y - Y coordinate.\r\n * @param {number} width - Rectangle's width.\r\n * @param {number} height - Rectangle's height.\r\n * @returns {DOMRectInit}\r\n */\r\nfunction createRectInit(x, y, width, height) {\r\n return { x: x, y: y, width: width, height: height };\r\n}\n\n/**\r\n * Class that is responsible for computations of the content rectangle of\r\n * provided DOM element and for keeping track of it's changes.\r\n */\r\nvar ResizeObservation = /** @class */ (function () {\r\n /**\r\n * Creates an instance of ResizeObservation.\r\n *\r\n * @param {Element} target - Element to be observed.\r\n */\r\n function ResizeObservation(target) {\r\n /**\r\n * Broadcasted width of content rectangle.\r\n *\r\n * @type {number}\r\n */\r\n this.broadcastWidth = 0;\r\n /**\r\n * Broadcasted height of content rectangle.\r\n *\r\n * @type {number}\r\n */\r\n this.broadcastHeight = 0;\r\n /**\r\n * Reference to the last observed content rectangle.\r\n *\r\n * @private {DOMRectInit}\r\n */\r\n this.contentRect_ = createRectInit(0, 0, 0, 0);\r\n this.target = target;\r\n }\r\n /**\r\n * Updates content rectangle and tells whether it's width or height properties\r\n * have changed since the last broadcast.\r\n *\r\n * @returns {boolean}\r\n */\r\n ResizeObservation.prototype.isActive = function () {\r\n var rect = getContentRect(this.target);\r\n this.contentRect_ = rect;\r\n return (rect.width !== this.broadcastWidth ||\r\n rect.height !== this.broadcastHeight);\r\n };\r\n /**\r\n * Updates 'broadcastWidth' and 'broadcastHeight' properties with a data\r\n * from the corresponding properties of the last observed content rectangle.\r\n *\r\n * @returns {DOMRectInit} Last observed content rectangle.\r\n */\r\n ResizeObservation.prototype.broadcastRect = function () {\r\n var rect = this.contentRect_;\r\n this.broadcastWidth = rect.width;\r\n this.broadcastHeight = rect.height;\r\n return rect;\r\n };\r\n return ResizeObservation;\r\n}());\n\nvar ResizeObserverEntry = /** @class */ (function () {\r\n /**\r\n * Creates an instance of ResizeObserverEntry.\r\n *\r\n * @param {Element} target - Element that is being observed.\r\n * @param {DOMRectInit} rectInit - Data of the element's content rectangle.\r\n */\r\n function ResizeObserverEntry(target, rectInit) {\r\n var contentRect = createReadOnlyRect(rectInit);\r\n // According to the specification following properties are not writable\r\n // and are also not enumerable in the native implementation.\r\n //\r\n // Property accessors are not being used as they'd require to define a\r\n // private WeakMap storage which may cause memory leaks in browsers that\r\n // don't support this type of collections.\r\n defineConfigurable(this, { target: target, contentRect: contentRect });\r\n }\r\n return ResizeObserverEntry;\r\n}());\n\nvar ResizeObserverSPI = /** @class */ (function () {\r\n /**\r\n * Creates a new instance of ResizeObserver.\r\n *\r\n * @param {ResizeObserverCallback} callback - Callback function that is invoked\r\n * 
when one of the observed elements changes it's content dimensions.\r\n * @param {ResizeObserverController} controller - Controller instance which\r\n * is responsible for the updates of observer.\r\n * @param {ResizeObserver} callbackCtx - Reference to the public\r\n * ResizeObserver instance which will be passed to callback function.\r\n */\r\n function ResizeObserverSPI(callback, controller, callbackCtx) {\r\n /**\r\n * Collection of resize observations that have detected changes in dimensions\r\n * of elements.\r\n *\r\n * @private {Array}\r\n */\r\n this.activeObservations_ = [];\r\n /**\r\n * Registry of the ResizeObservation instances.\r\n *\r\n * @private {Map}\r\n */\r\n this.observations_ = new MapShim();\r\n if (typeof callback !== 'function') {\r\n throw new TypeError('The callback provided as parameter 1 is not a function.');\r\n }\r\n this.callback_ = callback;\r\n this.controller_ = controller;\r\n this.callbackCtx_ = callbackCtx;\r\n }\r\n /**\r\n * Starts observing provided element.\r\n *\r\n * @param {Element} target - Element to be observed.\r\n * @returns {void}\r\n */\r\n ResizeObserverSPI.prototype.observe = function (target) {\r\n if (!arguments.length) {\r\n throw new TypeError('1 argument required, but only 0 present.');\r\n }\r\n // Do nothing if current environment doesn't have the Element interface.\r\n if (typeof Element === 'undefined' || !(Element instanceof Object)) {\r\n return;\r\n }\r\n if (!(target instanceof getWindowOf(target).Element)) {\r\n throw new TypeError('parameter 1 is not of type \"Element\".');\r\n }\r\n var observations = this.observations_;\r\n // Do nothing if element is already being observed.\r\n if (observations.has(target)) {\r\n return;\r\n }\r\n observations.set(target, new ResizeObservation(target));\r\n this.controller_.addObserver(this);\r\n // Force the update of observations.\r\n this.controller_.refresh();\r\n };\r\n /**\r\n * Stops observing provided element.\r\n *\r\n * @param {Element} target - Element to stop observing.\r\n * @returns {void}\r\n */\r\n ResizeObserverSPI.prototype.unobserve = function (target) {\r\n if (!arguments.length) {\r\n throw new TypeError('1 argument required, but only 0 present.');\r\n }\r\n // Do nothing if current environment doesn't have the Element interface.\r\n if (typeof Element === 'undefined' || !(Element instanceof Object)) {\r\n return;\r\n }\r\n if (!(target instanceof getWindowOf(target).Element)) {\r\n throw new TypeError('parameter 1 is not of type \"Element\".');\r\n }\r\n var observations = this.observations_;\r\n // Do nothing if element is not being observed.\r\n if (!observations.has(target)) {\r\n return;\r\n }\r\n observations.delete(target);\r\n if (!observations.size) {\r\n this.controller_.removeObserver(this);\r\n }\r\n };\r\n /**\r\n * Stops observing all elements.\r\n *\r\n * @returns {void}\r\n */\r\n ResizeObserverSPI.prototype.disconnect = function () {\r\n this.clearActive();\r\n this.observations_.clear();\r\n this.controller_.removeObserver(this);\r\n };\r\n /**\r\n * Collects observation instances the associated element of which has changed\r\n * it's content rectangle.\r\n *\r\n * @returns {void}\r\n */\r\n ResizeObserverSPI.prototype.gatherActive = function () {\r\n var _this = this;\r\n this.clearActive();\r\n this.observations_.forEach(function (observation) {\r\n if (observation.isActive()) {\r\n _this.activeObservations_.push(observation);\r\n }\r\n });\r\n };\r\n /**\r\n * Invokes initial callback function with a list of ResizeObserverEntry\r\n * instances 
collected from active resize observations.\r\n *\r\n * @returns {void}\r\n */\r\n ResizeObserverSPI.prototype.broadcastActive = function () {\r\n // Do nothing if observer doesn't have active observations.\r\n if (!this.hasActive()) {\r\n return;\r\n }\r\n var ctx = this.callbackCtx_;\r\n // Create ResizeObserverEntry instance for every active observation.\r\n var entries = this.activeObservations_.map(function (observation) {\r\n return new ResizeObserverEntry(observation.target, observation.broadcastRect());\r\n });\r\n this.callback_.call(ctx, entries, ctx);\r\n this.clearActive();\r\n };\r\n /**\r\n * Clears the collection of active observations.\r\n *\r\n * @returns {void}\r\n */\r\n ResizeObserverSPI.prototype.clearActive = function () {\r\n this.activeObservations_.splice(0);\r\n };\r\n /**\r\n * Tells whether observer has active observations.\r\n *\r\n * @returns {boolean}\r\n */\r\n ResizeObserverSPI.prototype.hasActive = function () {\r\n return this.activeObservations_.length > 0;\r\n };\r\n return ResizeObserverSPI;\r\n}());\n\n// Registry of internal observers. If WeakMap is not available use current shim\r\n// for the Map collection as it has all required methods and because WeakMap\r\n// can't be fully polyfilled anyway.\r\nvar observers = typeof WeakMap !== 'undefined' ? new WeakMap() : new MapShim();\r\n/**\r\n * ResizeObserver API. Encapsulates the ResizeObserver SPI implementation\r\n * exposing only those methods and properties that are defined in the spec.\r\n */\r\nvar ResizeObserver = /** @class */ (function () {\r\n /**\r\n * Creates a new instance of ResizeObserver.\r\n *\r\n * @param {ResizeObserverCallback} callback - Callback that is invoked when\r\n * dimensions of the observed elements change.\r\n */\r\n function ResizeObserver(callback) {\r\n if (!(this instanceof ResizeObserver)) {\r\n throw new TypeError('Cannot call a class as a function.');\r\n }\r\n if (!arguments.length) {\r\n throw new TypeError('1 argument required, but only 0 present.');\r\n }\r\n var controller = ResizeObserverController.getInstance();\r\n var observer = new ResizeObserverSPI(callback, controller, this);\r\n observers.set(this, observer);\r\n }\r\n return ResizeObserver;\r\n}());\r\n// Expose public methods of ResizeObserver.\r\n[\r\n 'observe',\r\n 'unobserve',\r\n 'disconnect'\r\n].forEach(function (method) {\r\n ResizeObserver.prototype[method] = function () {\r\n var _a;\r\n return (_a = observers.get(this))[method].apply(_a, arguments);\r\n };\r\n});\n\nvar index = (function () {\r\n // Export existing implementation if available.\r\n if (typeof global$1.ResizeObserver !== 'undefined') {\r\n return global$1.ResizeObserver;\r\n }\r\n return ResizeObserver;\r\n})();\n\nexport default index;\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 
MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport ResizeObserver from \"resize-observer-polyfill\"\nimport {\n NEVER,\n Observable,\n Subject,\n defer,\n filter,\n finalize,\n map,\n merge,\n of,\n shareReplay,\n startWith,\n switchMap,\n tap\n} from \"rxjs\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Element offset\n */\nexport interface ElementSize {\n width: number /* Element width */\n height: number /* Element height */\n}\n\n/* ----------------------------------------------------------------------------\n * Data\n * ------------------------------------------------------------------------- */\n\n/**\n * Resize observer entry subject\n */\nconst entry$ = new Subject()\n\n/**\n * Resize observer observable\n *\n * This observable will create a `ResizeObserver` on the first subscription\n * and will automatically terminate it when there are no more subscribers.\n * It's quite important to centralize observation in a single `ResizeObserver`,\n * as the performance difference can be quite dramatic, as the link shows.\n *\n * @see https://bit.ly/3iIYfEm - Google Groups on performance\n */\nconst observer$ = defer(() => of(\n new ResizeObserver(entries => {\n for (const entry of entries)\n entry$.next(entry)\n })\n))\n .pipe(\n switchMap(observer => merge(NEVER, of(observer))\n .pipe(\n finalize(() => observer.disconnect())\n )\n ),\n shareReplay(1)\n )\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve element size\n *\n * @param el - Element\n *\n * @returns Element size\n */\nexport function getElementSize(\n el: HTMLElement\n): ElementSize {\n return {\n width: el.offsetWidth,\n height: el.offsetHeight\n }\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Watch element size\n *\n * This function returns an observable that subscribes to a single internal\n * instance of `ResizeObserver` upon subscription, and emit resize events until\n * termination. Note that this function should not be called with the same\n * element twice, as the first unsubscription will terminate observation.\n *\n * Sadly, we can't use the `DOMRect` objects returned by the observer, because\n * we need the emitted values to be consistent with `getElementSize`, which will\n * return the used values (rounded) and not actual values (unrounded). Thus, we\n * use the `offset*` properties. 
See the linked GitHub issue.\n *\n * @see https://bit.ly/3m0k3he - GitHub issue\n *\n * @param el - Element\n *\n * @returns Element size observable\n */\nexport function watchElementSize(\n el: HTMLElement\n): Observable {\n return observer$\n .pipe(\n tap(observer => observer.observe(el)),\n switchMap(observer => entry$\n .pipe(\n filter(({ target }) => target === el),\n finalize(() => observer.unobserve(el)),\n map(() => getElementSize(el))\n )\n ),\n startWith(getElementSize(el))\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { ElementSize } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve element content size (= scroll width and height)\n *\n * @param el - Element\n *\n * @returns Element content size\n */\nexport function getElementContentSize(\n el: HTMLElement\n): ElementSize {\n return {\n width: el.scrollWidth,\n height: el.scrollHeight\n }\n}\n\n/**\n * Retrieve the overflowing container of an element, if any\n *\n * @param el - Element\n *\n * @returns Overflowing container or nothing\n */\nexport function getElementContainer(\n el: HTMLElement\n): HTMLElement | undefined {\n let parent = el.parentElement\n while (parent)\n if (\n el.scrollWidth <= parent.scrollWidth &&\n el.scrollHeight <= parent.scrollHeight\n )\n parent = (el = parent).parentElement\n else\n break\n\n /* Return overflowing container */\n return parent ? el : undefined\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n NEVER,\n Observable,\n Subject,\n defer,\n distinctUntilChanged,\n filter,\n finalize,\n map,\n merge,\n of,\n shareReplay,\n switchMap,\n tap\n} from \"rxjs\"\n\nimport {\n getElementContentSize,\n getElementSize,\n watchElementContentOffset\n} from \"~/browser\"\n\n/* ----------------------------------------------------------------------------\n * Data\n * ------------------------------------------------------------------------- */\n\n/**\n * Intersection observer entry subject\n */\nconst entry$ = new Subject()\n\n/**\n * Intersection observer observable\n *\n * This observable will create an `IntersectionObserver` on first subscription\n * and will automatically terminate it when there are no more subscribers.\n *\n * @see https://bit.ly/3iIYfEm - Google Groups on performance\n */\nconst observer$ = defer(() => of(\n new IntersectionObserver(entries => {\n for (const entry of entries)\n entry$.next(entry)\n }, {\n threshold: 0\n })\n))\n .pipe(\n switchMap(observer => merge(NEVER, of(observer))\n .pipe(\n finalize(() => observer.disconnect())\n )\n ),\n shareReplay(1)\n )\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch element visibility\n *\n * @param el - Element\n *\n * @returns Element visibility observable\n */\nexport function watchElementVisibility(\n el: HTMLElement\n): Observable {\n return observer$\n .pipe(\n tap(observer => observer.observe(el)),\n switchMap(observer => entry$\n .pipe(\n filter(({ target }) => target === el),\n finalize(() => observer.unobserve(el)),\n map(({ isIntersecting }) => isIntersecting)\n )\n )\n )\n}\n\n/**\n * Watch element boundary\n *\n * This function returns an observable which emits whether the bottom content\n * boundary (= scroll offset) of an element is within a certain threshold.\n *\n * @param el - Element\n * @param threshold - Threshold\n *\n * @returns Element boundary observable\n */\nexport function watchElementBoundary(\n el: HTMLElement, threshold = 16\n): Observable {\n return watchElementContentOffset(el)\n .pipe(\n map(({ y }) => {\n const visible = getElementSize(el)\n const content = getElementContentSize(el)\n return y >= (\n content.height - visible.height - threshold\n )\n }),\n distinctUntilChanged()\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n fromEvent,\n map,\n startWith\n} from \"rxjs\"\n\nimport { getElement } from \"../element\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Toggle\n */\nexport type Toggle =\n | \"drawer\" /* Toggle for drawer */\n | \"search\" /* Toggle for search */\n\n/* ----------------------------------------------------------------------------\n * Data\n * ------------------------------------------------------------------------- */\n\n/**\n * Toggle map\n */\nconst toggles: Record = {\n drawer: getElement(\"[data-md-toggle=drawer]\"),\n search: getElement(\"[data-md-toggle=search]\")\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve the value of a toggle\n *\n * @param name - Toggle\n *\n * @returns Toggle value\n */\nexport function getToggle(name: Toggle): boolean {\n return toggles[name].checked\n}\n\n/**\n * Set toggle\n *\n * Simulating a click event seems to be the most cross-browser compatible way\n * of changing the value while also emitting a `change` event. Before, Material\n * used `CustomEvent` to programmatically change the value of a toggle, but this\n * is a much simpler and cleaner solution which doesn't require a polyfill.\n *\n * @param name - Toggle\n * @param value - Toggle value\n */\nexport function setToggle(name: Toggle, value: boolean): void {\n if (toggles[name].checked !== value)\n toggles[name].click()\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Watch toggle\n *\n * @param name - Toggle\n *\n * @returns Toggle value observable\n */\nexport function watchToggle(name: Toggle): Observable {\n const el = toggles[name]\n return fromEvent(el, \"change\")\n .pipe(\n map(() => el.checked),\n startWith(el.checked)\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n filter,\n fromEvent,\n map,\n share\n} from \"rxjs\"\n\nimport { getActiveElement } from \"../element\"\nimport { getToggle } from \"../toggle\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Keyboard mode\n */\nexport type KeyboardMode =\n | \"global\" /* Global */\n | \"search\" /* Search is open */\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Keyboard\n */\nexport interface Keyboard {\n mode: KeyboardMode /* Keyboard mode */\n type: string /* Key type */\n claim(): void /* Key claim */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Check whether an element may receive keyboard input\n *\n * @param el - Element\n * @param type - Key type\n *\n * @returns Test result\n */\nfunction isSusceptibleToKeyboard(\n el: HTMLElement, type: string\n): boolean {\n switch (el.constructor) {\n\n /* Input elements */\n case HTMLInputElement:\n /* @ts-expect-error - omit unnecessary type cast */\n if (el.type === \"radio\")\n return /^Arrow/.test(type)\n else\n return true\n\n /* Select element and textarea */\n case HTMLSelectElement:\n case HTMLTextAreaElement:\n return true\n\n /* Everything else */\n default:\n return el.isContentEditable\n }\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch keyboard\n *\n * @returns Keyboard observable\n */\nexport function watchKeyboard(): Observable {\n return fromEvent(window, \"keydown\")\n .pipe(\n filter(ev => !(ev.metaKey || ev.ctrlKey)),\n map(ev => ({\n mode: getToggle(\"search\") ? \"search\" : \"global\",\n type: ev.key,\n claim() {\n ev.preventDefault()\n ev.stopPropagation()\n }\n } as Keyboard)),\n filter(({ mode, type }) => {\n if (mode === \"global\") {\n const active = getActiveElement()\n if (typeof active !== \"undefined\")\n return !isSusceptibleToKeyboard(active, type)\n }\n return true\n }),\n share()\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { Subject } from \"rxjs\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve location\n *\n * This function returns a `URL` object (and not `Location`) to normalize the\n * typings across the application. Furthermore, locations need to be tracked\n * without setting them and `Location` is a singleton which represents the\n * current location.\n *\n * @returns URL\n */\nexport function getLocation(): URL {\n return new URL(location.href)\n}\n\n/**\n * Set location\n *\n * @param url - URL to change to\n */\nexport function setLocation(url: URL): void {\n location.href = url.href\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Watch location\n *\n * @returns Location subject\n */\nexport function watchLocation(): Subject {\n return new Subject()\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { JSX as JSXInternal } from \"preact\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * HTML attributes\n */\ntype Attributes =\n & JSXInternal.HTMLAttributes\n & JSXInternal.SVGAttributes\n & Record\n\n/**\n * Child element\n */\ntype Child =\n | HTMLElement\n | Text\n | string\n | number\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Append a child node to an element\n *\n * @param el - Element\n * @param child - Child node(s)\n */\nfunction appendChild(el: HTMLElement, child: Child | Child[]): void {\n\n /* Handle primitive types (including raw HTML) */\n if (typeof child === \"string\" || typeof child === \"number\") {\n el.innerHTML += child.toString()\n\n /* Handle nodes */\n } else if (child instanceof Node) {\n el.appendChild(child)\n\n /* Handle nested children */\n } else if (Array.isArray(child)) {\n for (const node of child)\n appendChild(el, node)\n }\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * JSX factory\n *\n * @template T - Element type\n *\n * @param tag - HTML tag\n * @param attributes - HTML attributes\n * @param children - Child elements\n *\n * @returns Element\n */\nexport function h(\n tag: T, attributes?: Attributes | null, ...children: Child[]\n): HTMLElementTagNameMap[T]\n\nexport function h(\n tag: string, attributes?: Attributes | null, ...children: Child[]\n): T\n\nexport function h(\n tag: string, attributes?: Attributes | null, ...children: Child[]\n): T {\n const el = document.createElement(tag)\n\n /* Set attributes, if any */\n if (attributes)\n for (const attr of Object.keys(attributes)) {\n if (typeof attributes[attr] === \"undefined\")\n continue\n\n /* Set default attribute or boolean */\n if (typeof attributes[attr] !== \"boolean\")\n el.setAttribute(attr, attributes[attr])\n else\n el.setAttribute(attr, \"\")\n }\n\n /* Append child nodes */\n for (const child of children)\n appendChild(el, child)\n\n /* Return element */\n return el as T\n}\n\n/* ----------------------------------------------------------------------------\n * Namespace\n * ------------------------------------------------------------------------- */\n\nexport declare namespace h {\n namespace JSX {\n type Element = HTMLElement\n type IntrinsicElements = JSXInternal.IntrinsicElements\n }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be 
included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Truncate a string after the given number of characters\n *\n * This is not a very reasonable approach, since the summaries kind of suck.\n * It would be better to create something more intelligent, highlighting the\n * search occurrences and making a better summary out of it, but this note was\n * written three years ago, so who knows if we'll ever fix it.\n *\n * @param value - Value to be truncated\n * @param n - Number of characters\n *\n * @returns Truncated value\n */\nexport function truncate(value: string, n: number): string {\n let i = n\n if (value.length > i) {\n while (value[i] !== \" \" && --i > 0) { /* keep eating */ }\n return `${value.substring(0, i)}...`\n }\n return value\n}\n\n/**\n * Round a number for display with repository facts\n *\n * This is a reverse-engineered version of GitHub's weird rounding algorithm\n * for stars, forks and all other numbers. While all numbers below `1,000` are\n * returned as-is, bigger numbers are converted to fixed numbers:\n *\n * - `1,049` => `1k`\n * - `1,050` => `1.1k`\n * - `1,949` => `1.9k`\n * - `1,950` => `2k`\n *\n * @param value - Original value\n *\n * @returns Rounded value\n */\nexport function round(value: number): string {\n if (value > 999) {\n const digits = +((value - 950) % 1000 > 99)\n return `${((value + 0.000001) / 1000).toFixed(digits)}k`\n } else {\n return value.toString()\n }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n filter,\n fromEvent,\n map,\n shareReplay,\n startWith\n} from \"rxjs\"\n\nimport { getOptionalElement } from \"~/browser\"\nimport { h } from \"~/utilities\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve location hash\n *\n * @returns Location hash\n */\nexport function getLocationHash(): string {\n return location.hash.substring(1)\n}\n\n/**\n * Set location hash\n *\n * Setting a new fragment identifier via `location.hash` will have no effect\n * if the value doesn't change. When a new fragment identifier is set, we want\n * the browser to target the respective element at all times, which is why we\n * use this dirty little trick.\n *\n * @param hash - Location hash\n */\nexport function setLocationHash(hash: string): void {\n const el = h(\"a\", { href: hash })\n el.addEventListener(\"click\", ev => ev.stopPropagation())\n el.click()\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Watch location hash\n *\n * @returns Location hash observable\n */\nexport function watchLocationHash(): Observable {\n return fromEvent(window, \"hashchange\")\n .pipe(\n map(getLocationHash),\n startWith(getLocationHash()),\n filter(hash => hash.length > 0),\n shareReplay(1)\n )\n}\n\n/**\n * Watch location target\n *\n * @returns Location target observable\n */\nexport function watchLocationTarget(): Observable {\n return watchLocationHash()\n .pipe(\n map(id => getOptionalElement(`[id=\"${id}\"]`)!),\n filter(el => typeof el !== \"undefined\")\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n EMPTY,\n Observable,\n fromEvent,\n fromEventPattern,\n map,\n merge,\n startWith,\n switchMap\n} from \"rxjs\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch media query\n *\n * Note that although `MediaQueryList.addListener` is deprecated we have to\n * use it, because it's the only way to ensure proper downward compatibility.\n *\n * @see https://bit.ly/3dUBH2m - GitHub issue\n *\n * @param query - Media query\n *\n * @returns Media observable\n */\nexport function watchMedia(query: string): Observable {\n const media = matchMedia(query)\n return fromEventPattern(next => (\n media.addListener(() => next(media.matches))\n ))\n .pipe(\n startWith(media.matches)\n )\n}\n\n/**\n * Watch print mode\n *\n * @returns Print observable\n */\nexport function watchPrint(): Observable {\n const media = matchMedia(\"print\")\n return merge(\n fromEvent(window, \"beforeprint\").pipe(map(() => true)),\n fromEvent(window, \"afterprint\").pipe(map(() => false))\n )\n .pipe(\n startWith(media.matches)\n )\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Toggle an observable with a media observable\n *\n * @template T - Data type\n *\n * @param query$ - Media observable\n * @param factory - Observable factory\n *\n * @returns Toggled observable\n */\nexport function at(\n query$: Observable, factory: () => Observable\n): Observable {\n return query$\n .pipe(\n switchMap(active => active ? factory() : EMPTY)\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n EMPTY,\n Observable,\n catchError,\n from,\n map,\n of,\n shareReplay,\n switchMap,\n throwError\n} from \"rxjs\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch the given URL\n *\n * If the request fails (e.g. 
when dispatched from `file://` locations), the\n * observable will complete without emitting a value.\n *\n * @param url - Request URL\n * @param options - Options\n *\n * @returns Response observable\n */\nexport function request(\n url: URL | string, options: RequestInit = { credentials: \"same-origin\" }\n): Observable {\n return from(fetch(`${url}`, options))\n .pipe(\n catchError(() => EMPTY),\n switchMap(res => res.status !== 200\n ? throwError(() => new Error(res.statusText))\n : of(res)\n )\n )\n}\n\n/**\n * Fetch JSON from the given URL\n *\n * @template T - Data type\n *\n * @param url - Request URL\n * @param options - Options\n *\n * @returns Data observable\n */\nexport function requestJSON(\n url: URL | string, options?: RequestInit\n): Observable {\n return request(url, options)\n .pipe(\n switchMap(res => res.json()),\n shareReplay(1)\n )\n}\n\n/**\n * Fetch XML from the given URL\n *\n * @param url - Request URL\n * @param options - Options\n *\n * @returns Data observable\n */\nexport function requestXML(\n url: URL | string, options?: RequestInit\n): Observable {\n const dom = new DOMParser()\n return request(url, options)\n .pipe(\n switchMap(res => res.text()),\n map(res => dom.parseFromString(res, \"text/xml\")),\n shareReplay(1)\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
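/*
 * Illustrative only: fetching JSON with the request helpers above. The URL
 * and the `SearchIndex` payload shape are placeholders, not part of the
 * original sources.
 */
import { requestJSON } from "~/browser"

interface SearchIndex {                       /* hypothetical payload shape */
  docs: { location: string; title: string }[]
}

requestJSON<SearchIndex>("search/search_index.json")
  .subscribe(index => console.log(`${index.docs.length} documents indexed`))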
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n defer,\n finalize,\n fromEvent,\n map,\n merge,\n switchMap,\n take,\n throwError\n} from \"rxjs\"\n\nimport { h } from \"~/utilities\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Create and load a `script` element\n *\n * This function returns an observable that will emit when the script was\n * successfully loaded, or throw an error if it didn't.\n *\n * @param src - Script URL\n *\n * @returns Script observable\n */\nexport function watchScript(src: string): Observable {\n const script = h(\"script\", { src })\n return defer(() => {\n document.head.appendChild(script)\n return merge(\n fromEvent(script, \"load\"),\n fromEvent(script, \"error\")\n .pipe(\n switchMap(() => (\n throwError(() => new ReferenceError(`Invalid script: ${src}`))\n ))\n )\n )\n .pipe(\n map(() => undefined),\n finalize(() => document.head.removeChild(script)),\n take(1)\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
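/*
 * A usage sketch for `watchScript`, assuming a third-party script URL. The
 * observable emits once when the script has loaded, errors if it fails, and
 * removes the injected tag again when the subscription is finalized.
 */
import { watchScript } from "~/browser"

watchScript("https://example.com/widget.js").subscribe({
  next: () => console.log("script loaded"),
  error: err => console.error("script failed to load", err)
})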
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n fromEvent,\n map,\n merge,\n startWith\n} from \"rxjs\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Viewport offset\n */\nexport interface ViewportOffset {\n x: number /* Horizontal offset */\n y: number /* Vertical offset */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve viewport offset\n *\n * On iOS Safari, viewport offset can be negative due to overflow scrolling.\n * As this may induce strange behaviors downstream, we'll just limit it to 0.\n *\n * @returns Viewport offset\n */\nexport function getViewportOffset(): ViewportOffset {\n return {\n x: Math.max(0, scrollX),\n y: Math.max(0, scrollY)\n }\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Watch viewport offset\n *\n * @returns Viewport offset observable\n */\nexport function watchViewportOffset(): Observable {\n return merge(\n fromEvent(window, \"scroll\", { passive: true }),\n fromEvent(window, \"resize\", { passive: true })\n )\n .pipe(\n map(getViewportOffset),\n startWith(getViewportOffset())\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n fromEvent,\n map,\n startWith\n} from \"rxjs\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Viewport size\n */\nexport interface ViewportSize {\n width: number /* Viewport width */\n height: number /* Viewport height */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve viewport size\n *\n * @returns Viewport size\n */\nexport function getViewportSize(): ViewportSize {\n return {\n width: innerWidth,\n height: innerHeight\n }\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Watch viewport size\n *\n * @returns Viewport size observable\n */\nexport function watchViewportSize(): Observable {\n return fromEvent(window, \"resize\", { passive: true })\n .pipe(\n map(getViewportSize),\n startWith(getViewportSize())\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n combineLatest,\n map,\n shareReplay\n} from \"rxjs\"\n\nimport {\n ViewportOffset,\n watchViewportOffset\n} from \"../offset\"\nimport {\n ViewportSize,\n watchViewportSize\n} from \"../size\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Viewport\n */\nexport interface Viewport {\n offset: ViewportOffset /* Viewport offset */\n size: ViewportSize /* Viewport size */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch viewport\n *\n * @returns Viewport observable\n */\nexport function watchViewport(): Observable {\n return combineLatest([\n watchViewportOffset(),\n watchViewportSize()\n ])\n .pipe(\n map(([offset, size]) => ({ offset, size })),\n shareReplay(1)\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n combineLatest,\n distinctUntilKeyChanged,\n map\n} from \"rxjs\"\n\nimport { Header } from \"~/components\"\n\nimport { getElementOffset } from \"../../element\"\nimport { Viewport } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n viewport$: Observable /* Viewport observable */\n header$: Observable
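/*
 * Sketch (assumed wiring): `watchViewport` combines the offset and size
 * observables defined above into a single replayed `Viewport` stream, so
 * late subscribers immediately receive the current state.
 */
import { watchViewport } from "~/browser"

const viewport$ = watchViewport()
viewport$.subscribe(({ offset, size }) =>
  console.log(`scrolled to ${offset.y}px in a ${size.height}px viewport`)
)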
/* Header observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch viewport relative to element\n *\n * @param el - Element\n * @param options - Options\n *\n * @returns Viewport observable\n */\nexport function watchViewportAt(\n el: HTMLElement, { viewport$, header$ }: WatchOptions\n): Observable {\n const size$ = viewport$\n .pipe(\n distinctUntilKeyChanged(\"size\")\n )\n\n /* Compute element offset */\n const offset$ = combineLatest([size$, header$])\n .pipe(\n map(() => getElementOffset(el))\n )\n\n /* Compute relative viewport, return hot observable */\n return combineLatest([header$, viewport$, offset$])\n .pipe(\n map(([{ height }, { offset, size }, { x, y }]) => ({\n offset: {\n x: offset.x - x,\n y: offset.y - y + height\n },\n size\n }))\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n fromEvent,\n map,\n share,\n switchMap,\n tap,\n throttle\n} from \"rxjs\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Worker message\n */\nexport interface WorkerMessage {\n type: unknown /* Message type */\n data?: unknown /* Message data */\n}\n\n/**\n * Worker handler\n *\n * @template T - Message type\n */\nexport interface WorkerHandler<\n T extends WorkerMessage\n> {\n tx$: Subject /* Message transmission subject */\n rx$: Observable /* Message receive observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n *\n * @template T - Worker message type\n */\ninterface WatchOptions {\n tx$: Observable /* Message transmission observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch a web worker\n *\n * This function returns an observable that sends all values emitted by the\n * message observable to the web worker. Web worker communication is expected\n * to be bidirectional (request-response) and synchronous. 
Messages that are\n * emitted during a pending request are throttled, the last one is emitted.\n *\n * @param worker - Web worker\n * @param options - Options\n *\n * @returns Worker message observable\n */\nexport function watchWorker(\n worker: Worker, { tx$ }: WatchOptions\n): Observable {\n\n /* Intercept messages from worker-like objects */\n const rx$ = fromEvent(worker, \"message\")\n .pipe(\n map(({ data }) => data as T)\n )\n\n /* Send and receive messages, return hot observable */\n return tx$\n .pipe(\n throttle(() => rx$, { leading: true, trailing: true }),\n tap(message => worker.postMessage(message)),\n switchMap(() => rx$),\n share()\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { getElement, getLocation } from \"~/browser\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Feature flag\n */\nexport type Flag =\n | \"announce.dismiss\" /* Dismissable announcement bar */\n | \"content.code.annotate\" /* Code annotations */\n | \"content.lazy\" /* Lazy content elements */\n | \"content.tabs.link\" /* Link content tabs */\n | \"header.autohide\" /* Hide header */\n | \"navigation.expand\" /* Automatic expansion */\n | \"navigation.indexes\" /* Section pages */\n | \"navigation.instant\" /* Instant loading */\n | \"navigation.sections\" /* Section navigation */\n | \"navigation.tabs\" /* Tabs navigation */\n | \"navigation.tabs.sticky\" /* Tabs navigation (sticky) */\n | \"navigation.top\" /* Back-to-top button */\n | \"navigation.tracking\" /* Anchor tracking */\n | \"search.highlight\" /* Search highlighting */\n | \"search.share\" /* Search sharing */\n | \"search.suggest\" /* Search suggestions */\n | \"toc.follow\" /* Following table of contents */\n | \"toc.integrate\" /* Integrated table of contents */\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Translation\n */\nexport type Translation =\n | \"clipboard.copy\" /* Copy to clipboard */\n | \"clipboard.copied\" /* Copied to clipboard */\n | \"search.config.lang\" /* Search language */\n | \"search.config.pipeline\" /* Search pipeline */\n | \"search.config.separator\" /* Search separator */\n | \"search.placeholder\" /* Search */\n | \"search.result.placeholder\" /* Type to start searching */\n | \"search.result.none\" /* No matching 
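/*
 * A hedged sketch of the request/response pattern `watchWorker` implements:
 * messages pushed on `tx$` are posted to the worker, and each reply is
 * emitted on the returned observable. The worker URL and `PingMessage`
 * shape are assumptions for illustration only.
 */
import { Subject } from "rxjs"
import { watchWorker } from "~/browser"

interface PingMessage { type: "ping"; data?: unknown }   /* hypothetical */

const worker = new Worker("worker.js")                   /* placeholder URL */
const tx$ = new Subject<PingMessage>()

watchWorker<PingMessage>(worker, { tx$ })
  .subscribe(message => console.log("worker replied:", message))

tx$.next({ type: "ping" })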
documents */\n | \"search.result.one\" /* 1 matching document */\n | \"search.result.other\" /* # matching documents */\n | \"search.result.more.one\" /* 1 more on this page */\n | \"search.result.more.other\" /* # more on this page */\n | \"search.result.term.missing\" /* Missing */\n | \"select.version.title\" /* Version selector */\n\n/**\n * Translations\n */\nexport type Translations = Record\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Versioning\n */\nexport interface Versioning {\n provider: \"mike\" /* Version provider */\n default?: string /* Default version */\n}\n\n/**\n * Configuration\n */\nexport interface Config {\n base: string /* Base URL */\n features: Flag[] /* Feature flags */\n translations: Translations /* Translations */\n search: string /* Search worker URL */\n tags?: Record /* Tags mapping */\n version?: Versioning /* Versioning */\n}\n\n/* ----------------------------------------------------------------------------\n * Data\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve global configuration and make base URL absolute\n */\nconst script = getElement(\"#__config\")\nconst config: Config = JSON.parse(script.textContent!)\nconfig.base = `${new URL(config.base, getLocation())}`\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve global configuration\n *\n * @returns Global configuration\n */\nexport function configuration(): Config {\n return config\n}\n\n/**\n * Check whether a feature flag is enabled\n *\n * @param flag - Feature flag\n *\n * @returns Test result\n */\nexport function feature(flag: Flag): boolean {\n return config.features.includes(flag)\n}\n\n/**\n * Retrieve the translation for the given key\n *\n * @param key - Key to be translated\n * @param value - Positional value, if any\n *\n * @returns Translation\n */\nexport function translation(\n key: Translation, value?: string | number\n): string {\n return typeof value !== \"undefined\"\n ? config.translations[key].replace(\"#\", value.toString())\n : config.translations[key]\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
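/*
 * Sketch of how the configuration helpers above are typically consumed; the
 * flag and translation keys come from the `Flag` and `Translation` unions
 * defined in this module.
 */
import { configuration, feature, translation } from "~/_"

const config = configuration()
console.log("base URL:", config.base)

if (feature("search.highlight"))
  console.log("search term highlighting is enabled")

/* Positional values replace the "#" placeholder in the translation */
console.log(translation("search.result.other", 42))   /* e.g. "42 matching documents" */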
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { getElement, getElements } from \"~/browser\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Component type\n */\nexport type ComponentType =\n | \"announce\" /* Announcement bar */\n | \"container\" /* Container */\n | \"consent\" /* Consent */\n | \"content\" /* Content */\n | \"dialog\" /* Dialog */\n | \"header\" /* Header */\n | \"header-title\" /* Header title */\n | \"header-topic\" /* Header topic */\n | \"main\" /* Main area */\n | \"outdated\" /* Version warning */\n | \"palette\" /* Color palette */\n | \"search\" /* Search */\n | \"search-query\" /* Search input */\n | \"search-result\" /* Search results */\n | \"search-share\" /* Search sharing */\n | \"search-suggest\" /* Search suggestions */\n | \"sidebar\" /* Sidebar */\n | \"skip\" /* Skip link */\n | \"source\" /* Repository information */\n | \"tabs\" /* Navigation tabs */\n | \"toc\" /* Table of contents */\n | \"top\" /* Back-to-top button */\n\n/**\n * Component\n *\n * @template T - Component type\n * @template U - Reference type\n */\nexport type Component<\n T extends {} = {},\n U extends HTMLElement = HTMLElement\n> =\n T & {\n ref: U /* Component reference */\n }\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Component type map\n */\ninterface ComponentTypeMap {\n \"announce\": HTMLElement /* Announcement bar */\n \"container\": HTMLElement /* Container */\n \"consent\": HTMLElement /* Consent */\n \"content\": HTMLElement /* Content */\n \"dialog\": HTMLElement /* Dialog */\n \"header\": HTMLElement /* Header */\n \"header-title\": HTMLElement /* Header title */\n \"header-topic\": HTMLElement /* Header topic */\n \"main\": HTMLElement /* Main area */\n \"outdated\": HTMLElement /* Version warning */\n \"palette\": HTMLElement /* Color palette */\n \"search\": HTMLElement /* Search */\n \"search-query\": HTMLInputElement /* Search input */\n \"search-result\": HTMLElement /* Search results */\n \"search-share\": HTMLAnchorElement /* Search sharing */\n \"search-suggest\": HTMLElement /* Search suggestions */\n \"sidebar\": HTMLElement /* Sidebar */\n \"skip\": HTMLAnchorElement /* Skip link */\n \"source\": HTMLAnchorElement /* Repository information */\n \"tabs\": HTMLElement /* Navigation tabs */\n \"toc\": HTMLElement /* Table of contents */\n \"top\": HTMLAnchorElement /* Back-to-top button */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve the element for a given component or throw a reference error\n *\n * @template T - Component type\n *\n * @param type - Component type\n * @param node - Node of reference\n *\n * @returns Element\n */\nexport function getComponentElement(\n type: T, node: ParentNode = document\n): ComponentTypeMap[T] {\n return getElement(`[data-md-component=${type}]`, node)\n}\n\n/**\n * Retrieve all elements for a given component\n *\n * @template T - Component type\n *\n 
* @param type - Component type\n * @param node - Node of reference\n *\n * @returns Elements\n */\nexport function getComponentElements(\n type: T, node: ParentNode = document\n): ComponentTypeMap[T][] {\n return getElements(`[data-md-component=${type}]`, node)\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n EMPTY,\n Observable,\n Subject,\n defer,\n finalize,\n fromEvent,\n map,\n startWith,\n tap\n} from \"rxjs\"\n\nimport { feature } from \"~/_\"\nimport { getElement } from \"~/browser\"\n\nimport { Component } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Announcement bar\n */\nexport interface Announce {\n hash: number /* Content hash */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch announcement bar\n *\n * @param el - Announcement bar element\n *\n * @returns Announcement bar observable\n */\nexport function watchAnnounce(\n el: HTMLElement\n): Observable {\n const button = getElement(\".md-typeset > :first-child\", el)\n return fromEvent(button, \"click\", { once: true })\n .pipe(\n map(() => getElement(\".md-typeset\", el)),\n map(content => ({ hash: __md_hash(content.innerHTML) }))\n )\n}\n\n/**\n * Mount announcement bar\n *\n * @param el - Announcement bar element\n *\n * @returns Announcement bar component observable\n */\nexport function mountAnnounce(\n el: HTMLElement\n): Observable> {\n if (!feature(\"announce.dismiss\") || !el.childElementCount)\n return EMPTY\n\n /* Mount component on subscription */\n return defer(() => {\n const push$ = new Subject()\n push$\n .pipe(\n startWith({ hash: __md_get(\"__announce\") })\n )\n .subscribe(({ hash }) => {\n if (hash && hash === (__md_get(\"__announce\") ?? 
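/*
 * A small sketch of the typed component lookup defined above: the return
 * type is narrowed via `ComponentTypeMap`, so "search-query" yields an
 * HTMLInputElement without a cast. The "~/components" specifier is assumed
 * to re-export these helpers, as elsewhere in the bundle.
 */
import { getComponentElement, getComponentElements } from "~/components"

const query = getComponentElement("search-query")   /* HTMLInputElement */
query.focus()

for (const el of getComponentElements("toc"))
  console.log("table of contents element:", el)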
hash)) {\n el.hidden = true\n\n /* Persist preference in local storage */\n __md_set(\"__announce\", hash)\n }\n })\n\n /* Create and return component */\n return watchAnnounce(el)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n finalize,\n map,\n tap\n} from \"rxjs\"\n\nimport { Component } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Consent\n */\nexport interface Consent {\n hidden: boolean /* Consent is hidden */\n}\n\n/**\n * Consent defaults\n */\nexport interface ConsentDefaults {\n analytics?: boolean /* Consent for Analytics */\n github?: boolean /* Consent for GitHub */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n target$: Observable /* Target observable */\n}\n\n/**\n * Mount options\n */\ninterface MountOptions {\n target$: Observable /* Target observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch consent\n *\n * @param el - Consent element\n * @param options - Options\n *\n * @returns Consent observable\n */\nexport function watchConsent(\n el: HTMLElement, { target$ }: WatchOptions\n): Observable {\n return target$\n .pipe(\n map(target => ({ hidden: target !== el }))\n )\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Mount consent\n *\n * @param el - Consent element\n * @param options - Options\n *\n * @returns Consent component observable\n */\nexport function mountConsent(\n el: HTMLElement, options: MountOptions\n): Observable> {\n const internal$ = new Subject()\n internal$.subscribe(({ hidden }) => {\n el.hidden = hidden\n })\n\n /* Create and return component */\n return watchConsent(el, options)\n .pipe(\n tap(state => internal$.next(state)),\n finalize(() => internal$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n}\n", "/*\n * Copyright (c) 2016-2022 
Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport ClipboardJS from \"clipboard\"\nimport {\n EMPTY,\n Observable,\n Subject,\n defer,\n distinctUntilChanged,\n distinctUntilKeyChanged,\n filter,\n finalize,\n map,\n mergeWith,\n switchMap,\n take,\n tap\n} from \"rxjs\"\n\nimport { feature } from \"~/_\"\nimport {\n getElementContentSize,\n watchElementSize,\n watchElementVisibility\n} from \"~/browser\"\nimport { renderClipboardButton } from \"~/templates\"\n\nimport { Component } from \"../../../_\"\nimport {\n Annotation,\n mountAnnotationList\n} from \"../../annotation\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Code block\n */\nexport interface CodeBlock {\n scrollable: boolean /* Code block overflows */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount options\n */\ninterface MountOptions {\n target$: Observable /* Location target observable */\n print$: Observable /* Media print observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Data\n * ------------------------------------------------------------------------- */\n\n/**\n * Global sequence number for code blocks\n */\nlet sequence = 0\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Find candidate list element directly following a code block\n *\n * @param el - Code block element\n *\n * @returns List element or nothing\n */\nfunction findCandidateList(el: HTMLElement): HTMLElement | undefined {\n if (el.nextElementSibling) {\n const sibling = el.nextElementSibling as HTMLElement\n if (sibling.tagName === \"OL\")\n return sibling\n\n /* Skip empty paragraphs - see https://bit.ly/3r4ZJ2O */\n else if (sibling.tagName === \"P\" && !sibling.children.length)\n return findCandidateList(sibling)\n }\n\n /* Everything else */\n return undefined\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch code block\n *\n * This function monitors size changes of 
the viewport, as well as switches of\n * content tabs with embedded code blocks, as both may trigger overflow.\n *\n * @param el - Code block element\n *\n * @returns Code block observable\n */\nexport function watchCodeBlock(\n el: HTMLElement\n): Observable {\n return watchElementSize(el)\n .pipe(\n map(({ width }) => {\n const content = getElementContentSize(el)\n return {\n scrollable: content.width > width\n }\n }),\n distinctUntilKeyChanged(\"scrollable\")\n )\n}\n\n/**\n * Mount code block\n *\n * This function ensures that an overflowing code block is focusable through\n * keyboard, so it can be scrolled without a mouse to improve on accessibility.\n * Furthermore, if code annotations are enabled, they are mounted if and only\n * if the code block is currently visible, e.g., not in a hidden content tab.\n *\n * Note that code blocks may be mounted eagerly or lazily. If they're mounted\n * lazily (on first visibility), code annotation anchor links will not work,\n * as they are evaluated on initial page load, and code annotations in general\n * might feel a little bumpier.\n *\n * @param el - Code block element\n * @param options - Options\n *\n * @returns Code block and annotation component observable\n */\nexport function mountCodeBlock(\n el: HTMLElement, options: MountOptions\n): Observable> {\n const { matches: hover } = matchMedia(\"(hover)\")\n\n /* Defer mounting of code block - see https://bit.ly/3vHVoVD */\n const factory$ = defer(() => {\n const push$ = new Subject()\n push$.subscribe(({ scrollable }) => {\n if (scrollable && hover)\n el.setAttribute(\"tabindex\", \"0\")\n else\n el.removeAttribute(\"tabindex\")\n })\n\n /* Render button for Clipboard.js integration */\n if (ClipboardJS.isSupported()) {\n const parent = el.closest(\"pre\")!\n parent.id = `__code_${++sequence}`\n parent.insertBefore(\n renderClipboardButton(parent.id),\n el\n )\n }\n\n /* Handle code annotations */\n const container = el.closest(\".highlight\")\n if (container instanceof HTMLElement) {\n const list = findCandidateList(container)\n\n /* Mount code annotations, if enabled */\n if (typeof list !== \"undefined\" && (\n container.classList.contains(\"annotate\") ||\n feature(\"content.code.annotate\")\n )) {\n const annotations$ = mountAnnotationList(list, el, options)\n\n /* Create and return component */\n return watchCodeBlock(el)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state })),\n mergeWith(\n watchElementSize(container)\n .pipe(\n map(({ width, height }) => width && height),\n distinctUntilChanged(),\n switchMap(active => active ? 
annotations$ : EMPTY)\n )\n )\n )\n }\n }\n\n /* Create and return component */\n return watchCodeBlock(el)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n\n /* Mount code block lazily */\n if (feature(\"content.lazy\"))\n return watchElementVisibility(el)\n .pipe(\n filter(visible => visible),\n take(1),\n switchMap(() => factory$)\n )\n\n /* Mount code block */\n return factory$\n}\n", "/*\n * Copyright (c) 2016-2021 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { h } from \"~/utilities\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Render a tooltip\n *\n * @param id - Tooltip identifier\n *\n * @returns Element\n */\nexport function renderTooltip(id?: string): HTMLElement {\n return (\n
\n
\n
\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { h } from \"~/utilities\"\n\nimport { renderTooltip } from \"../tooltip\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Render an annotation\n *\n * @param id - Annotation identifier\n * @param prefix - Tooltip identifier prefix\n *\n * @returns Element\n */\nexport function renderAnnotation(\n id: string | number, prefix?: string\n): HTMLElement {\n prefix = prefix ? `${prefix}_annotation_${id}` : undefined\n\n /* Render tooltip with anchor, if given */\n if (prefix) {\n const anchor = prefix ? `#${prefix}` : undefined\n return (\n \n )\n } else {\n return (\n \n )\n }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { translation } from \"~/_\"\nimport { h } from \"~/utilities\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Render a 'copy-to-clipboard' button\n *\n * @param id - Unique identifier\n *\n * @returns Element\n */\nexport function renderClipboardButton(id: string): HTMLElement {\n return (\n code`}\n >\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { ComponentChild } from \"preact\"\n\nimport { configuration, feature, translation } from \"~/_\"\nimport {\n SearchDocument,\n SearchMetadata,\n SearchResultItem\n} from \"~/integrations/search\"\nimport { h, truncate } from \"~/utilities\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Render flag\n */\nconst enum Flag {\n TEASER = 1, /* Render teaser */\n PARENT = 2 /* Render as parent */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper function\n * ------------------------------------------------------------------------- */\n\n/**\n * Render a search document\n *\n * @param document - Search document\n * @param flag - Render flags\n *\n * @returns Element\n */\nfunction renderSearchDocument(\n document: SearchDocument & SearchMetadata, flag: Flag\n): HTMLElement {\n const parent = flag & Flag.PARENT\n const teaser = flag & Flag.TEASER\n\n /* Render missing query terms */\n const missing = Object.keys(document.terms)\n .filter(key => !document.terms[key])\n .reduce((list, key) => [\n ...list, {key}, \" \"\n ], [])\n .slice(0, -1)\n\n /* Assemble query string for highlighting */\n const url = new URL(document.location)\n if (feature(\"search.highlight\"))\n url.searchParams.set(\"h\", Object.entries(document.terms)\n .filter(([, match]) => match)\n .reduce((highlight, [value]) => `${highlight} ${value}`.trim(), \"\")\n )\n\n /* Render article or section, depending on flags */\n const { tags } = configuration()\n return (\n \n 
\n {parent > 0 &&
}\n

{document.title}

\n {teaser > 0 && document.text.length > 0 &&\n

\n {truncate(document.text, 320)}\n

\n }\n {document.tags && (\n
\n {document.tags.map(tag => {\n const id = tag.replace(/<[^>]+>/g, \"\")\n const type = tags\n ? id in tags\n ? `md-tag-icon md-tag-icon--${tags[id]}`\n : \"md-tag-icon\"\n : \"\"\n return (\n {tag}\n )\n })}\n
\n )}\n {teaser > 0 && missing.length > 0 &&\n

\n {translation(\"search.result.term.missing\")}: {...missing}\n

\n }\n \n
\n )\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Render a search result\n *\n * @param result - Search result\n *\n * @returns Element\n */\nexport function renderSearchResultItem(\n result: SearchResultItem\n): HTMLElement {\n const threshold = result[0].score\n const docs = [...result]\n\n /* Find and extract parent article */\n const parent = docs.findIndex(doc => !doc.location.includes(\"#\"))\n const [article] = docs.splice(parent, 1)\n\n /* Determine last index above threshold */\n let index = docs.findIndex(doc => doc.score < threshold)\n if (index === -1)\n index = docs.length\n\n /* Partition sections */\n const best = docs.slice(0, index)\n const more = docs.slice(index)\n\n /* Render children */\n const children = [\n renderSearchDocument(article, Flag.PARENT | +(!parent && index === 0)),\n ...best.map(section => renderSearchDocument(section, Flag.TEASER)),\n ...more.length ? [\n
\n \n {more.length > 0 && more.length === 1\n ? translation(\"search.result.more.one\")\n : translation(\"search.result.more.other\", more.length)\n }\n \n {...more.map(section => renderSearchDocument(section, Flag.TEASER))}\n
\n ] : []\n ]\n\n /* Render search result */\n return (\n
  • \n {children}\n
  • \n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { SourceFacts } from \"~/components\"\nimport { h, round } from \"~/utilities\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Render repository facts\n *\n * @param facts - Repository facts\n *\n * @returns Element\n */\nexport function renderSourceFacts(facts: SourceFacts): HTMLElement {\n return (\n
      \n {Object.entries(facts).map(([key, value]) => (\n
    • \n {typeof value === \"number\" ? round(value) : value}\n
    • \n ))}\n
    \n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { h } from \"~/utilities\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Tabbed control type\n */\ntype TabbedControlType =\n | \"prev\"\n | \"next\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Render control for content tabs\n *\n * @param type - Control type\n *\n * @returns Element\n */\nexport function renderTabbedControl(\n type: TabbedControlType\n): HTMLElement {\n const classes = `tabbed-control tabbed-control--${type}`\n return (\n \n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { h } from \"~/utilities\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Render a table inside a wrapper to improve scrolling on mobile\n *\n * @param table - Table element\n *\n * @returns Element\n */\nexport function renderTable(table: HTMLElement): HTMLElement {\n return (\n
    \n
    \n {table}\n
    \n
    \n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { configuration, translation } from \"~/_\"\nimport { h } from \"~/utilities\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Version\n */\nexport interface Version {\n version: string /* Version identifier */\n title: string /* Version title */\n aliases: string[] /* Version aliases */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Render a version\n *\n * @param version - Version\n *\n * @returns Element\n */\nfunction renderVersion(version: Version): HTMLElement {\n const config = configuration()\n\n /* Ensure trailing slash - see https://bit.ly/3rL5u3f */\n const url = new URL(`../${version.version}/`, config.base)\n return (\n
  • \n \n {version.title}\n \n
  • \n )\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Render a version selector\n *\n * @param versions - Versions\n * @param active - Active version\n *\n * @returns Element\n */\nexport function renderVersionSelector(\n versions: Version[], active: Version\n): HTMLElement {\n return (\n
    \n \n {active.title}\n \n
      \n {versions.map(renderVersion)}\n
    \n
    \n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n animationFrameScheduler,\n auditTime,\n combineLatest,\n debounceTime,\n defer,\n delay,\n filter,\n finalize,\n fromEvent,\n map,\n merge,\n switchMap,\n take,\n takeLast,\n takeUntil,\n tap,\n throttleTime,\n withLatestFrom\n} from \"rxjs\"\n\nimport {\n ElementOffset,\n getActiveElement,\n getElementSize,\n watchElementContentOffset,\n watchElementFocus,\n watchElementOffset,\n watchElementVisibility\n} from \"~/browser\"\n\nimport { Component } from \"../../../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Annotation\n */\nexport interface Annotation {\n active: boolean /* Annotation is active */\n offset: ElementOffset /* Annotation offset */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount options\n */\ninterface MountOptions {\n target$: Observable /* Location target observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch annotation\n *\n * @param el - Annotation element\n * @param container - Containing element\n *\n * @returns Annotation observable\n */\nexport function watchAnnotation(\n el: HTMLElement, container: HTMLElement\n): Observable {\n const offset$ = defer(() => combineLatest([\n watchElementOffset(el),\n watchElementContentOffset(container)\n ]))\n .pipe(\n map(([{ x, y }, scroll]): ElementOffset => {\n const { width, height } = getElementSize(el)\n return ({\n x: x - scroll.x + width / 2,\n y: y - scroll.y + height / 2\n })\n })\n )\n\n /* Actively watch annotation on focus */\n return watchElementFocus(el)\n .pipe(\n switchMap(active => offset$\n .pipe(\n map(offset => ({ active, offset })),\n take(+!active || Infinity)\n )\n )\n )\n}\n\n/**\n * Mount annotation\n *\n * @param el - Annotation element\n * @param container - Containing element\n * @param options - Options\n *\n * @returns Annotation component observable\n */\nexport function mountAnnotation(\n el: HTMLElement, container: HTMLElement, { target$ }: MountOptions\n): Observable> {\n 
const [tooltip, index] = Array.from(el.children)\n\n /* Mount component on subscription */\n return defer(() => {\n const push$ = new Subject()\n const done$ = push$.pipe(takeLast(1))\n push$.subscribe({\n\n /* Handle emission */\n next({ offset }) {\n el.style.setProperty(\"--md-tooltip-x\", `${offset.x}px`)\n el.style.setProperty(\"--md-tooltip-y\", `${offset.y}px`)\n },\n\n /* Handle complete */\n complete() {\n el.style.removeProperty(\"--md-tooltip-x\")\n el.style.removeProperty(\"--md-tooltip-y\")\n }\n })\n\n /* Start animation only when annotation is visible */\n watchElementVisibility(el)\n .pipe(\n takeUntil(done$)\n )\n .subscribe(visible => {\n el.toggleAttribute(\"data-md-visible\", visible)\n })\n\n /* Toggle tooltip presence to mitigate empty lines when copying */\n merge(\n push$.pipe(filter(({ active }) => active)),\n push$.pipe(debounceTime(250), filter(({ active }) => !active))\n )\n .subscribe({\n\n /* Handle emission */\n next({ active }) {\n if (active)\n el.prepend(tooltip)\n else\n tooltip.remove()\n },\n\n /* Handle complete */\n complete() {\n el.prepend(tooltip)\n }\n })\n\n /* Toggle tooltip visibility */\n push$\n .pipe(\n auditTime(16, animationFrameScheduler)\n )\n .subscribe(({ active }) => {\n tooltip.classList.toggle(\"md-tooltip--active\", active)\n })\n\n /* Track relative origin of tooltip */\n push$\n .pipe(\n throttleTime(125, animationFrameScheduler),\n filter(() => !!el.offsetParent),\n map(() => el.offsetParent!.getBoundingClientRect()),\n map(({ x }) => x)\n )\n .subscribe({\n\n /* Handle emission */\n next(origin) {\n if (origin)\n el.style.setProperty(\"--md-tooltip-0\", `${-origin}px`)\n else\n el.style.removeProperty(\"--md-tooltip-0\")\n },\n\n /* Handle complete */\n complete() {\n el.style.removeProperty(\"--md-tooltip-0\")\n }\n })\n\n /* Allow to copy link without scrolling to anchor */\n fromEvent(index, \"click\")\n .pipe(\n takeUntil(done$),\n filter(ev => !(ev.metaKey || ev.ctrlKey))\n )\n .subscribe(ev => ev.preventDefault())\n\n /* Allow to open link in new tab or blur on close */\n fromEvent(index, \"mousedown\")\n .pipe(\n takeUntil(done$),\n withLatestFrom(push$)\n )\n .subscribe(([ev, { active }]) => {\n\n /* Open in new tab */\n if (ev.button !== 0 || ev.metaKey || ev.ctrlKey) {\n ev.preventDefault()\n\n /* Close annotation */\n } else if (active) {\n ev.preventDefault()\n\n /* Focus parent annotation, if any */\n const parent = el.parentElement!.closest(\".md-annotation\")\n if (parent instanceof HTMLElement)\n parent.focus()\n else\n getActiveElement()?.blur()\n }\n })\n\n /* Open and focus annotation on location target */\n target$\n .pipe(\n takeUntil(done$),\n filter(target => target === tooltip),\n delay(125)\n )\n .subscribe(() => el.focus())\n\n /* Create and return component */\n return watchAnnotation(el, container)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall 
be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n EMPTY,\n Observable,\n Subject,\n defer,\n finalize,\n merge,\n share,\n takeLast,\n takeUntil\n} from \"rxjs\"\n\nimport {\n getElement,\n getElements,\n getOptionalElement\n} from \"~/browser\"\nimport { renderAnnotation } from \"~/templates\"\n\nimport { Component } from \"../../../_\"\nimport {\n Annotation,\n mountAnnotation\n} from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount options\n */\ninterface MountOptions {\n target$: Observable /* Location target observable */\n print$: Observable /* Media print observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Find all annotation markers in the given code block\n *\n * @param container - Containing element\n *\n * @returns Annotation markers\n */\nfunction findAnnotationMarkers(container: HTMLElement): Text[] {\n const markers: Text[] = []\n for (const el of getElements(\".c, .c1, .cm\", container)) {\n const nodes: Text[] = []\n\n /* Find all text nodes in current element */\n const it = document.createNodeIterator(el, NodeFilter.SHOW_TEXT)\n for (let node = it.nextNode(); node; node = it.nextNode())\n nodes.push(node as Text)\n\n /* Find all markers in each text node */\n for (let text of nodes) {\n let match: RegExpExecArray | null\n\n /* Split text at marker and add to list */\n while ((match = /(\\(\\d+\\))(!)?/.exec(text.textContent!))) {\n const [, id, force] = match\n if (typeof force === \"undefined\") {\n const marker = text.splitText(match.index)\n text = marker.splitText(id.length)\n markers.push(marker)\n\n /* Replace entire text with marker */\n } else {\n text.textContent = id\n markers.push(text)\n break\n }\n }\n }\n }\n return markers\n}\n\n/**\n * Swap the child nodes of two elements\n *\n * @param source - Source element\n * @param target - Target element\n */\nfunction swap(source: HTMLElement, target: HTMLElement): void {\n target.append(...Array.from(source.childNodes))\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount annotation list\n *\n * This function analyzes the containing code block and checks for markers\n * referring to elements in the given annotation list. If no markers are found,\n * the list is left untouched. 
Otherwise, list elements are rendered as\n * annotations inside the code block.\n *\n * @param el - Annotation list element\n * @param container - Containing element\n * @param options - Options\n *\n * @returns Annotation component observable\n */\nexport function mountAnnotationList(\n el: HTMLElement, container: HTMLElement, { target$, print$ }: MountOptions\n): Observable> {\n\n /* Compute prefix for tooltip anchors */\n const parent = container.closest(\"[id]\")\n const prefix = parent?.id\n\n /* Find and replace all markers with empty annotations */\n const annotations = new Map()\n for (const marker of findAnnotationMarkers(container)) {\n const [, id] = marker.textContent!.match(/\\((\\d+)\\)/)!\n if (getOptionalElement(`li:nth-child(${id})`, el)) {\n annotations.set(id, renderAnnotation(id, prefix))\n marker.replaceWith(annotations.get(id)!)\n }\n }\n\n /* Keep list if there are no annotations to render */\n if (annotations.size === 0)\n return EMPTY\n\n /* Mount component on subscription */\n return defer(() => {\n const done$ = new Subject()\n\n /* Retrieve container pairs for swapping */\n const pairs: [HTMLElement, HTMLElement][] = []\n for (const [id, annotation] of annotations)\n pairs.push([\n getElement(\".md-typeset\", annotation),\n getElement(`li:nth-child(${id})`, el)\n ])\n\n /* Handle print mode - see https://bit.ly/3rgPdpt */\n print$\n .pipe(\n takeUntil(done$.pipe(takeLast(1)))\n )\n .subscribe(active => {\n el.hidden = !active\n\n /* Show annotations in code block or list (print) */\n for (const [inner, child] of pairs)\n if (!active)\n swap(child, inner)\n else\n swap(inner, child)\n })\n\n /* Create and return component */\n return merge(...[...annotations]\n .map(([, annotation]) => (\n mountAnnotation(annotation, container, { target$ })\n ))\n )\n .pipe(\n finalize(() => done$.complete()),\n share()\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n map,\n of,\n shareReplay,\n tap\n} from \"rxjs\"\n\nimport { watchScript } from \"~/browser\"\nimport { h } from \"~/utilities\"\n\nimport { Component } from \"../../../_\"\n\nimport themeCSS from \"./index.css\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Mermaid diagram\n */\nexport interface Mermaid {}\n\n/* ----------------------------------------------------------------------------\n * Data\n * ------------------------------------------------------------------------- */\n\n/**\n * Mermaid instance observable\n */\nlet mermaid$: Observable\n\n/**\n * Global sequence number for diagrams\n */\nlet sequence = 0\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch Mermaid script\n *\n * @returns Mermaid scripts observable\n */\nfunction fetchScripts(): Observable {\n return typeof mermaid === \"undefined\" || mermaid instanceof Element\n ? watchScript(\"https://unpkg.com/mermaid@9.1.7/dist/mermaid.min.js\")\n : of(undefined)\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount Mermaid diagram\n *\n * @param el - Code block element\n *\n * @returns Mermaid diagram component observable\n */\nexport function mountMermaid(\n el: HTMLElement\n): Observable> {\n el.classList.remove(\"mermaid\") // Hack: mitigate https://bit.ly/3CiN6Du\n mermaid$ ||= fetchScripts()\n .pipe(\n tap(() => mermaid.initialize({\n startOnLoad: false,\n themeCSS,\n sequence: {\n actorFontSize: \"16px\", // Hack: mitigate https://bit.ly/3y0NEi3\n messageFontSize: \"16px\",\n noteFontSize: \"16px\"\n }\n })),\n map(() => undefined),\n shareReplay(1)\n )\n\n /* Render diagram */\n mermaid$.subscribe(() => {\n el.classList.add(\"mermaid\") // Hack: mitigate https://bit.ly/3CiN6Du\n const id = `__mermaid_${sequence++}`\n const host = h(\"div\", { class: \"mermaid\" })\n mermaid.mermaidAPI.render(id, el.textContent, (svg: string) => {\n\n /* Create a shadow root and inject diagram */\n const shadow = host.attachShadow({ mode: \"closed\" })\n shadow.innerHTML = svg\n\n /* Replace code block with diagram */\n el.replaceWith(host)\n })\n })\n\n /* Create and return component */\n return mermaid$\n .pipe(\n map(() => ({ ref: el }))\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE 
IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n defer,\n filter,\n finalize,\n map,\n merge,\n tap\n} from \"rxjs\"\n\nimport { Component } from \"../../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Details\n */\nexport interface Details {\n action: \"open\" | \"close\" /* Details state */\n reveal?: boolean /* Details is revealed */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n target$: Observable /* Location target observable */\n print$: Observable /* Media print observable */\n}\n\n/**\n * Mount options\n */\ninterface MountOptions {\n target$: Observable /* Location target observable */\n print$: Observable /* Media print observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch details\n *\n * @param el - Details element\n * @param options - Options\n *\n * @returns Details observable\n */\nexport function watchDetails(\n el: HTMLDetailsElement, { target$, print$ }: WatchOptions\n): Observable
    {\n let open = true\n return merge(\n\n /* Open and focus details on location target */\n target$\n .pipe(\n map(target => target.closest(\"details:not([open])\")!),\n filter(details => el === details),\n map(() => ({\n action: \"open\", reveal: true\n }) as Details)\n ),\n\n /* Open details on print and close afterwards */\n print$\n .pipe(\n filter(active => active || !open),\n tap(() => open = el.open),\n map(active => ({\n action: active ? \"open\" : \"close\"\n }) as Details)\n )\n )\n}\n\n/**\n * Mount details\n *\n * This function ensures that `details` tags are opened on anchor jumps and\n * prior to printing, so the whole content of the page is visible.\n *\n * @param el - Details element\n * @param options - Options\n *\n * @returns Details component observable\n */\nexport function mountDetails(\n el: HTMLDetailsElement, options: MountOptions\n): Observable> {\n return defer(() => {\n const push$ = new Subject
    ()\n push$.subscribe(({ action, reveal }) => {\n el.toggleAttribute(\"open\", action === \"open\")\n if (reveal)\n el.scrollIntoView()\n })\n\n /* Create and return component */\n return watchDetails(el, options)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { Observable, of } from \"rxjs\"\n\nimport { renderTable } from \"~/templates\"\nimport { h } from \"~/utilities\"\n\nimport { Component } from \"../../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Data table\n */\nexport interface DataTable {}\n\n/* ----------------------------------------------------------------------------\n * Data\n * ------------------------------------------------------------------------- */\n\n/**\n * Sentinel for replacement\n */\nconst sentinel = h(\"table\")\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount data table\n *\n * This function wraps a data table in another scrollable container, so it can\n * be smoothly scrolled on smaller screen sizes and won't break the layout.\n *\n * @param el - Data table element\n *\n * @returns Data table component observable\n */\nexport function mountDataTable(\n el: HTMLElement\n): Observable> {\n el.replaceWith(sentinel)\n sentinel.replaceWith(renderTable(el))\n\n /* Create and return component */\n return of({ ref: el })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * 
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n animationFrameScheduler,\n asyncScheduler,\n auditTime,\n combineLatest,\n defer,\n finalize,\n fromEvent,\n map,\n merge,\n skip,\n startWith,\n subscribeOn,\n takeLast,\n takeUntil,\n tap,\n withLatestFrom\n} from \"rxjs\"\n\nimport { feature } from \"~/_\"\nimport {\n Viewport,\n getElement,\n getElementContentOffset,\n getElementContentSize,\n getElementOffset,\n getElementSize,\n getElements,\n watchElementContentOffset,\n watchElementSize\n} from \"~/browser\"\nimport { renderTabbedControl } from \"~/templates\"\n\nimport { Component } from \"../../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Content tabs\n */\nexport interface ContentTabs {\n active: HTMLLabelElement /* Active tab label */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount options\n */\ninterface MountOptions {\n viewport$: Observable /* Viewport observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch content tabs\n *\n * @param el - Content tabs element\n *\n * @returns Content tabs observable\n */\nexport function watchContentTabs(\n el: HTMLElement\n): Observable {\n const inputs = getElements(\":scope > input\", el)\n const initial = inputs.find(input => input.checked) || inputs[0]\n return merge(...inputs.map(input => fromEvent(input, \"change\")\n .pipe(\n map(() => getElement(`label[for=\"${input.id}\"]`))\n )\n ))\n .pipe(\n startWith(getElement(`label[for=\"${initial.id}\"]`)),\n map(active => ({ active }))\n )\n}\n\n/**\n * Mount content tabs\n *\n * This function scrolls the active tab into view. While this functionality is\n * provided by browsers as part of `scrollInfoView`, browsers will always also\n * scroll the vertical axis, which we do not want. 
Thus, we decided to provide\n * this functionality ourselves.\n *\n * @param el - Content tabs element\n * @param options - Options\n *\n * @returns Content tabs component observable\n */\nexport function mountContentTabs(\n el: HTMLElement, { viewport$ }: MountOptions\n): Observable> {\n\n /* Render content tab previous button for pagination */\n const prev = renderTabbedControl(\"prev\")\n el.append(prev)\n\n /* Render content tab next button for pagination */\n const next = renderTabbedControl(\"next\")\n el.append(next)\n\n /* Mount component on subscription */\n const container = getElement(\".tabbed-labels\", el)\n return defer(() => {\n const push$ = new Subject()\n const done$ = push$.pipe(takeLast(1))\n combineLatest([push$, watchElementSize(el)])\n .pipe(\n auditTime(1, animationFrameScheduler),\n takeUntil(done$)\n )\n .subscribe({\n\n /* Handle emission */\n next([{ active }, size]) {\n const offset = getElementOffset(active)\n const { width } = getElementSize(active)\n\n /* Set tab indicator offset and width */\n el.style.setProperty(\"--md-indicator-x\", `${offset.x}px`)\n el.style.setProperty(\"--md-indicator-width\", `${width}px`)\n\n /* Scroll container to active content tab */\n const content = getElementContentOffset(container)\n if (\n offset.x < content.x ||\n offset.x + width > content.x + size.width\n )\n container.scrollTo({\n left: Math.max(0, offset.x - 16),\n behavior: \"smooth\"\n })\n },\n\n /* Handle complete */\n complete() {\n el.style.removeProperty(\"--md-indicator-x\")\n el.style.removeProperty(\"--md-indicator-width\")\n }\n })\n\n /* Hide content tab buttons on borders */\n combineLatest([\n watchElementContentOffset(container),\n watchElementSize(container)\n ])\n .pipe(\n takeUntil(done$)\n )\n .subscribe(([offset, size]) => {\n const content = getElementContentSize(container)\n prev.hidden = offset.x < 16\n next.hidden = offset.x > content.width - size.width - 16\n })\n\n /* Paginate content tab container on click */\n merge(\n fromEvent(prev, \"click\").pipe(map(() => -1)),\n fromEvent(next, \"click\").pipe(map(() => +1))\n )\n .pipe(\n takeUntil(done$)\n )\n .subscribe(direction => {\n const { width } = getElementSize(container)\n container.scrollBy({\n left: width * direction,\n behavior: \"smooth\"\n })\n })\n\n /* Set up linking of content tabs, if enabled */\n if (feature(\"content.tabs.link\"))\n push$.pipe(\n skip(1),\n withLatestFrom(viewport$)\n )\n .subscribe(([{ active }, { offset }]) => {\n const tab = active.innerText.trim()\n if (active.hasAttribute(\"data-md-switching\")) {\n active.removeAttribute(\"data-md-switching\")\n\n /* Determine viewport offset of active tab */\n } else {\n const y = el.offsetTop - offset.y\n\n /* Passively activate other tabs */\n for (const set of getElements(\"[data-tabs]\"))\n for (const input of getElements(\n \":scope > input\", set\n )) {\n const label = getElement(`label[for=\"${input.id}\"]`)\n if (\n label !== active &&\n label.innerText.trim() === tab\n ) {\n label.setAttribute(\"data-md-switching\", \"\")\n input.click()\n break\n }\n }\n\n /* Bring active tab into view */\n window.scrollTo({\n top: el.offsetTop - y\n })\n\n /* Persist active tabs in local storage */\n const tabs = __md_get(\"__tabs\") || []\n __md_set(\"__tabs\", [...new Set([tab, ...tabs])])\n }\n })\n\n /* Create and return component */\n return watchContentTabs(el)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n .pipe(\n 
subscribeOn(asyncScheduler)\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { Observable, merge } from \"rxjs\"\n\nimport { Viewport, getElements } from \"~/browser\"\n\nimport { Component } from \"../../_\"\nimport { Annotation } from \"../annotation\"\nimport {\n CodeBlock,\n Mermaid,\n mountCodeBlock,\n mountMermaid\n} from \"../code\"\nimport {\n Details,\n mountDetails\n} from \"../details\"\nimport {\n DataTable,\n mountDataTable\n} from \"../table\"\nimport {\n ContentTabs,\n mountContentTabs\n} from \"../tabs\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Content\n */\nexport type Content =\n | Annotation\n | ContentTabs\n | CodeBlock\n | Mermaid\n | DataTable\n | Details\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount options\n */\ninterface MountOptions {\n viewport$: Observable /* Viewport observable */\n target$: Observable /* Location target observable */\n print$: Observable /* Media print observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount content\n *\n * This function mounts all components that are found in the content of the\n * actual article, including code blocks, data tables and details.\n *\n * @param el - Content element\n * @param options - Options\n *\n * @returns Content component observable\n */\nexport function mountContent(\n el: HTMLElement, { viewport$, target$, print$ }: MountOptions\n): Observable> {\n return merge(\n\n /* Code blocks */\n ...getElements(\"pre:not(.mermaid) > code\", el)\n .map(child => mountCodeBlock(child, { target$, print$ })),\n\n /* Mermaid diagrams */\n ...getElements(\"pre.mermaid\", el)\n .map(child => mountMermaid(child)),\n\n /* Data tables */\n ...getElements(\"table:not([class])\", el)\n .map(child => mountDataTable(child)),\n\n /* Details */\n ...getElements(\"details\", el)\n .map(child => mountDetails(child, { target$, print$ })),\n\n /* Content tabs */\n ...getElements(\"[data-tabs]\", el)\n .map(child => mountContentTabs(child, { viewport$ }))\n )\n}\n", "/*\n * Copyright (c) 
2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n defer,\n delay,\n finalize,\n map,\n merge,\n of,\n switchMap,\n tap\n} from \"rxjs\"\n\nimport { getElement } from \"~/browser\"\n\nimport { Component } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Dialog\n */\nexport interface Dialog {\n message: string /* Dialog message */\n active: boolean /* Dialog is active */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n alert$: Subject /* Alert subject */\n}\n\n/**\n * Mount options\n */\ninterface MountOptions {\n alert$: Subject /* Alert subject */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch dialog\n *\n * @param _el - Dialog element\n * @param options - Options\n *\n * @returns Dialog observable\n */\nexport function watchDialog(\n _el: HTMLElement, { alert$ }: WatchOptions\n): Observable {\n return alert$\n .pipe(\n switchMap(message => merge(\n of(true),\n of(false).pipe(delay(2000))\n )\n .pipe(\n map(active => ({ message, active }))\n )\n )\n )\n}\n\n/**\n * Mount dialog\n *\n * This function reveals the dialog in the right corner when a new alert is\n * emitted through the subject that is passed as part of the options.\n *\n * @param el - Dialog element\n * @param options - Options\n *\n * @returns Dialog component observable\n */\nexport function mountDialog(\n el: HTMLElement, options: MountOptions\n): Observable> {\n const inner = getElement(\".md-typeset\", el)\n return defer(() => {\n const push$ = new Subject()\n push$.subscribe(({ message, active }) => {\n el.classList.toggle(\"md-dialog--active\", active)\n inner.textContent = message\n })\n\n /* Create and return component */\n return watchDialog(el, options)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software 
and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n bufferCount,\n combineLatest,\n combineLatestWith,\n defer,\n distinctUntilChanged,\n distinctUntilKeyChanged,\n filter,\n map,\n of,\n shareReplay,\n startWith,\n switchMap,\n takeLast,\n takeUntil\n} from \"rxjs\"\n\nimport { feature } from \"~/_\"\nimport {\n Viewport,\n watchElementSize,\n watchToggle\n} from \"~/browser\"\n\nimport { Component } from \"../../_\"\nimport { Main } from \"../../main\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Header\n */\nexport interface Header {\n height: number /* Header visible height */\n hidden: boolean /* Header is hidden */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n viewport$: Observable /* Viewport observable */\n}\n\n/**\n * Mount options\n */\ninterface MountOptions {\n viewport$: Observable /* Viewport observable */\n header$: Observable
    /* Header observable */\n main$: Observable
    /* Main area observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Compute whether the header is hidden\n *\n * If the user scrolls past a certain threshold, the header can be hidden when\n * scrolling down, and shown when scrolling up.\n *\n * @param options - Options\n *\n * @returns Toggle observable\n */\nfunction isHidden({ viewport$ }: WatchOptions): Observable {\n if (!feature(\"header.autohide\"))\n return of(false)\n\n /* Compute direction and turning point */\n const direction$ = viewport$\n .pipe(\n map(({ offset: { y } }) => y),\n bufferCount(2, 1),\n map(([a, b]) => [a < b, b] as const),\n distinctUntilKeyChanged(0)\n )\n\n /* Compute whether header should be hidden */\n const hidden$ = combineLatest([viewport$, direction$])\n .pipe(\n filter(([{ offset }, [, y]]) => Math.abs(y - offset.y) > 100),\n map(([, [direction]]) => direction),\n distinctUntilChanged()\n )\n\n /* Compute threshold for hiding */\n const search$ = watchToggle(\"search\")\n return combineLatest([viewport$, search$])\n .pipe(\n map(([{ offset }, search]) => offset.y > 400 && !search),\n distinctUntilChanged(),\n switchMap(active => active ? hidden$ : of(false)),\n startWith(false)\n )\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch header\n *\n * @param el - Header element\n * @param options - Options\n *\n * @returns Header observable\n */\nexport function watchHeader(\n el: HTMLElement, options: WatchOptions\n): Observable
    {\n return defer(() => combineLatest([\n watchElementSize(el),\n isHidden(options)\n ]))\n .pipe(\n map(([{ height }, hidden]) => ({\n height,\n hidden\n })),\n distinctUntilChanged((a, b) => (\n a.height === b.height &&\n a.hidden === b.hidden\n )),\n shareReplay(1)\n )\n}\n\n/**\n * Mount header\n *\n * This function manages the different states of the header, i.e. whether it's\n * hidden or rendered with a shadow. This depends heavily on the main area.\n *\n * @param el - Header element\n * @param options - Options\n *\n * @returns Header component observable\n */\nexport function mountHeader(\n el: HTMLElement, { header$, main$ }: MountOptions\n): Observable> {\n return defer(() => {\n const push$ = new Subject
    ()\n const done$ = push$.pipe(takeLast(1))\n push$\n .pipe(\n distinctUntilKeyChanged(\"active\"),\n combineLatestWith(header$)\n )\n .subscribe(([{ active }, { hidden }]) => {\n el.classList.toggle(\"md-header--shadow\", active && !hidden)\n el.hidden = hidden\n })\n\n /* Link to main area */\n main$.subscribe(push$)\n\n /* Create and return component */\n return header$\n .pipe(\n takeUntil(done$),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n EMPTY,\n Observable,\n Subject,\n defer,\n distinctUntilKeyChanged,\n finalize,\n map,\n tap\n} from \"rxjs\"\n\nimport {\n Viewport,\n getElementSize,\n getOptionalElement,\n watchViewportAt\n} from \"~/browser\"\n\nimport { Component } from \"../../_\"\nimport { Header } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Header\n */\nexport interface HeaderTitle {\n active: boolean /* Header title is active */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n viewport$: Observable /* Viewport observable */\n header$: Observable
    /* Header observable */\n}\n\n/**\n * Mount options\n */\ninterface MountOptions {\n viewport$: Observable /* Viewport observable */\n header$: Observable
    /* Header observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch header title\n *\n * @param el - Heading element\n * @param options - Options\n *\n * @returns Header title observable\n */\nexport function watchHeaderTitle(\n el: HTMLElement, { viewport$, header$ }: WatchOptions\n): Observable {\n return watchViewportAt(el, { viewport$, header$ })\n .pipe(\n map(({ offset: { y } }) => {\n const { height } = getElementSize(el)\n return {\n active: y >= height\n }\n }),\n distinctUntilKeyChanged(\"active\")\n )\n}\n\n/**\n * Mount header title\n *\n * This function swaps the header title from the site title to the title of the\n * current page when the user scrolls past the first headline.\n *\n * @param el - Header title element\n * @param options - Options\n *\n * @returns Header title component observable\n */\nexport function mountHeaderTitle(\n el: HTMLElement, options: MountOptions\n): Observable> {\n return defer(() => {\n const push$ = new Subject()\n push$.subscribe(({ active }) => {\n el.classList.toggle(\"md-header__title--active\", active)\n })\n\n /* Obtain headline, if any */\n const heading = getOptionalElement(\"article h1\")\n if (typeof heading === \"undefined\")\n return EMPTY\n\n /* Create and return component */\n return watchHeaderTitle(heading, options)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n combineLatest,\n distinctUntilChanged,\n distinctUntilKeyChanged,\n map,\n switchMap\n} from \"rxjs\"\n\nimport {\n Viewport,\n watchElementSize\n} from \"~/browser\"\n\nimport { Header } from \"../header\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Main area\n */\nexport interface Main {\n offset: number /* Main area top offset */\n height: number /* Main area visible height */\n active: boolean /* Main area is active */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n viewport$: Observable /* Viewport observable */\n header$: Observable
    /* Header observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch main area\n *\n * This function returns an observable that computes the visual parameters of\n * the main area which depends on the viewport vertical offset and height, as\n * well as the height of the header element, if the header is fixed.\n *\n * @param el - Main area element\n * @param options - Options\n *\n * @returns Main area observable\n */\nexport function watchMain(\n el: HTMLElement, { viewport$, header$ }: WatchOptions\n): Observable
    {\n\n /* Compute necessary adjustment for header */\n const adjust$ = header$\n .pipe(\n map(({ height }) => height),\n distinctUntilChanged()\n )\n\n /* Compute the main area's top and bottom borders */\n const border$ = adjust$\n .pipe(\n switchMap(() => watchElementSize(el)\n .pipe(\n map(({ height }) => ({\n top: el.offsetTop,\n bottom: el.offsetTop + height\n })),\n distinctUntilKeyChanged(\"bottom\")\n )\n )\n )\n\n /* Compute the main area's offset, visible height and if we scrolled past */\n return combineLatest([adjust$, border$, viewport$])\n .pipe(\n map(([header, { top, bottom }, { offset: { y }, size: { height } }]) => {\n height = Math.max(0, height\n - Math.max(0, top - y, header)\n - Math.max(0, height + y - bottom)\n )\n return {\n offset: top - header,\n height,\n active: top - header <= y\n }\n }),\n distinctUntilChanged((a, b) => (\n a.offset === b.offset &&\n a.height === b.height &&\n a.active === b.active\n ))\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n asyncScheduler,\n defer,\n finalize,\n fromEvent,\n map,\n mergeMap,\n observeOn,\n of,\n shareReplay,\n startWith,\n tap\n} from \"rxjs\"\n\nimport { getElements } from \"~/browser\"\n\nimport { Component } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Palette colors\n */\nexport interface PaletteColor {\n scheme?: string /* Color scheme */\n primary?: string /* Primary color */\n accent?: string /* Accent color */\n}\n\n/**\n * Palette\n */\nexport interface Palette {\n index: number /* Palette index */\n color: PaletteColor /* Palette colors */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch color palette\n *\n * @param inputs - Color palette element\n *\n * @returns Color palette observable\n */\nexport function watchPalette(\n inputs: HTMLInputElement[]\n): Observable {\n const current = __md_get(\"__palette\") || {\n index: inputs.findIndex(input => matchMedia(\n input.getAttribute(\"data-md-color-media\")!\n ).matches)\n }\n\n /* Emit changes in color palette */\n return of(...inputs)\n .pipe(\n mergeMap(input => fromEvent(input, \"change\")\n .pipe(\n map(() => input)\n )\n ),\n startWith(inputs[Math.max(0, current.index)]),\n map(input => ({\n index: inputs.indexOf(input),\n color: {\n scheme: input.getAttribute(\"data-md-color-scheme\"),\n primary: input.getAttribute(\"data-md-color-primary\"),\n accent: input.getAttribute(\"data-md-color-accent\")\n }\n } as Palette)),\n shareReplay(1)\n )\n}\n\n/**\n * Mount color palette\n *\n * @param el - Color palette element\n *\n * @returns Color palette component observable\n */\nexport function mountPalette(\n el: HTMLElement\n): Observable> {\n return defer(() => {\n const push$ = new Subject()\n push$.subscribe(palette => {\n document.body.setAttribute(\"data-md-color-switching\", \"\")\n\n /* Set color palette */\n for (const [key, value] of Object.entries(palette.color))\n document.body.setAttribute(`data-md-color-${key}`, value)\n\n /* Toggle visibility */\n for (let index = 0; index < inputs.length; index++) {\n const label = inputs[index].nextElementSibling\n if (label instanceof HTMLElement)\n label.hidden = palette.index !== index\n }\n\n /* Persist preference in local storage */\n __md_set(\"__palette\", palette)\n })\n\n /* Revert transition durations after color switch */\n push$.pipe(observeOn(asyncScheduler))\n .subscribe(() => {\n document.body.removeAttribute(\"data-md-color-switching\")\n })\n\n /* Create and return component */\n const inputs = getElements(\"input\", el)\n return watchPalette(inputs)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without 
limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport ClipboardJS from \"clipboard\"\nimport {\n Observable,\n Subject,\n map,\n tap\n} from \"rxjs\"\n\nimport { translation } from \"~/_\"\nimport { getElement } from \"~/browser\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Setup options\n */\ninterface SetupOptions {\n alert$: Subject /* Alert subject */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Extract text to copy\n *\n * @param el - HTML element\n *\n * @returns Extracted text\n */\nfunction extract(el: HTMLElement): string {\n el.setAttribute(\"data-md-copying\", \"\")\n const text = el.innerText\n el.removeAttribute(\"data-md-copying\")\n return text\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Set up Clipboard.js integration\n *\n * @param options - Options\n */\nexport function setupClipboardJS(\n { alert$ }: SetupOptions\n): void {\n if (ClipboardJS.isSupported()) {\n new Observable(subscriber => {\n new ClipboardJS(\"[data-clipboard-target], [data-clipboard-text]\", {\n text: el => (\n el.getAttribute(\"data-clipboard-text\")! ||\n extract(getElement(\n el.getAttribute(\"data-clipboard-target\")!\n ))\n )\n })\n .on(\"success\", ev => subscriber.next(ev))\n })\n .pipe(\n tap(ev => {\n const trigger = ev.trigger as HTMLElement\n trigger.focus()\n }),\n map(() => translation(\"clipboard.copied\"))\n )\n .subscribe(alert$)\n }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n EMPTY,\n Observable,\n catchError,\n defaultIfEmpty,\n map,\n of,\n tap\n} from \"rxjs\"\n\nimport { configuration } from \"~/_\"\nimport { getElements, requestXML } from \"~/browser\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Sitemap, i.e. a list of URLs\n */\nexport type Sitemap = string[]\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Preprocess a list of URLs\n *\n * This function replaces the `site_url` in the sitemap with the actual base\n * URL, to allow instant loading to work in occasions like Netlify previews.\n *\n * @param urls - URLs\n *\n * @returns URL path parts\n */\nfunction preprocess(urls: Sitemap): Sitemap {\n if (urls.length < 2)\n return [\"\"]\n\n /* Take the first two URLs and remove everything after the last slash */\n const [root, next] = [...urls]\n .sort((a, b) => a.length - b.length)\n .map(url => url.replace(/[^/]+$/, \"\"))\n\n /* Compute common prefix */\n let index = 0\n if (root === next)\n index = root.length\n else\n while (root.charCodeAt(index) === next.charCodeAt(index))\n index++\n\n /* Remove common prefix and return in original order */\n return urls.map(url => url.replace(root.slice(0, index), \"\"))\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch the sitemap for the given base URL\n *\n * @param base - Base URL\n *\n * @returns Sitemap observable\n */\nexport function fetchSitemap(base?: URL): Observable {\n const cached = __md_get(\"__sitemap\", sessionStorage, base)\n if (cached) {\n return of(cached)\n } else {\n const config = configuration()\n return requestXML(new URL(\"sitemap.xml\", base || config.base))\n .pipe(\n map(sitemap => preprocess(getElements(\"loc\", sitemap)\n .map(node => node.textContent!)\n )),\n catchError(() => EMPTY), // @todo refactor instant loading\n defaultIfEmpty([]),\n tap(sitemap => __md_set(\"__sitemap\", sitemap, sessionStorage, base))\n )\n }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n EMPTY,\n NEVER,\n Observable,\n Subject,\n bufferCount,\n catchError,\n concatMap,\n debounceTime,\n distinctUntilChanged,\n distinctUntilKeyChanged,\n filter,\n fromEvent,\n map,\n merge,\n of,\n sample,\n share,\n skip,\n skipUntil,\n switchMap\n} from \"rxjs\"\n\nimport { configuration, feature } from \"~/_\"\nimport {\n Viewport,\n ViewportOffset,\n getElements,\n getOptionalElement,\n request,\n setLocation,\n setLocationHash\n} from \"~/browser\"\nimport { getComponentElement } from \"~/components\"\nimport { h } from \"~/utilities\"\n\nimport { fetchSitemap } from \"../sitemap\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * History state\n */\nexport interface HistoryState {\n url: URL /* State URL */\n offset?: ViewportOffset /* State viewport offset */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Setup options\n */\ninterface SetupOptions {\n document$: Subject /* Document subject */\n location$: Subject /* Location subject */\n viewport$: Observable /* Viewport observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Set up instant loading\n *\n * When fetching, theoretically, we could use `responseType: \"document\"`, but\n * since all MkDocs links are relative, we need to make sure that the current\n * location matches the document we just loaded. Otherwise any relative links\n * in the document could use the old location.\n *\n * This is the reason why we need to synchronize history events and the process\n * of fetching the document for navigation changes (except `popstate` events):\n *\n * 1. Fetch document via `XMLHTTPRequest`\n * 2. Set new location via `history.pushState`\n * 3. Parse and emit fetched document\n *\n * For `popstate` events, we must not use `history.pushState`, or the forward\n * history will be irreversibly overwritten. 
In case the request fails, the\n * location change is dispatched regularly.\n *\n * @param options - Options\n */\nexport function setupInstantLoading(\n { document$, location$, viewport$ }: SetupOptions\n): void {\n const config = configuration()\n if (location.protocol === \"file:\")\n return\n\n /* Disable automatic scroll restoration */\n if (\"scrollRestoration\" in history) {\n history.scrollRestoration = \"manual\"\n\n /* Hack: ensure that reloads restore viewport offset */\n fromEvent(window, \"beforeunload\")\n .subscribe(() => {\n history.scrollRestoration = \"auto\"\n })\n }\n\n /* Hack: ensure absolute favicon link to omit 404s when switching */\n const favicon = getOptionalElement(\"link[rel=icon]\")\n if (typeof favicon !== \"undefined\")\n favicon.href = favicon.href\n\n /* Intercept internal navigation */\n const push$ = fetchSitemap()\n .pipe(\n map(paths => paths.map(path => `${new URL(path, config.base)}`)),\n switchMap(urls => fromEvent(document.body, \"click\")\n .pipe(\n filter(ev => !ev.metaKey && !ev.ctrlKey),\n switchMap(ev => {\n if (ev.target instanceof Element) {\n const el = ev.target.closest(\"a\")\n if (el && !el.target) {\n const url = new URL(el.href)\n\n /* Canonicalize URL */\n url.search = \"\"\n url.hash = \"\"\n\n /* Check if URL should be intercepted */\n if (\n url.pathname !== location.pathname &&\n urls.includes(url.toString())\n ) {\n ev.preventDefault()\n return of({\n url: new URL(el.href)\n })\n }\n }\n }\n return NEVER\n })\n )\n ),\n share()\n )\n\n /* Intercept history back and forward */\n const pop$ = fromEvent(window, \"popstate\")\n .pipe(\n filter(ev => ev.state !== null),\n map(ev => ({\n url: new URL(location.href),\n offset: ev.state\n })),\n share()\n )\n\n /* Emit location change */\n merge(push$, pop$)\n .pipe(\n distinctUntilChanged((a, b) => a.url.href === b.url.href),\n map(({ url }) => url)\n )\n .subscribe(location$)\n\n /* Fetch document via `XMLHTTPRequest` */\n const response$ = location$\n .pipe(\n distinctUntilKeyChanged(\"pathname\"),\n switchMap(url => request(url.href)\n .pipe(\n catchError(() => {\n setLocation(url)\n return NEVER\n })\n )\n ),\n share()\n )\n\n /* Set new location via `history.pushState` */\n push$\n .pipe(\n sample(response$)\n )\n .subscribe(({ url }) => {\n history.pushState({}, \"\", `${url}`)\n })\n\n /* Parse and emit fetched document */\n const dom = new DOMParser()\n response$\n .pipe(\n switchMap(res => res.text()),\n map(res => dom.parseFromString(res, \"text/html\"))\n )\n .subscribe(document$)\n\n /* Replace meta tags and components */\n document$\n .pipe(\n skip(1)\n )\n .subscribe(replacement => {\n for (const selector of [\n\n /* Meta tags */\n \"title\",\n \"link[rel=canonical]\",\n \"meta[name=author]\",\n \"meta[name=description]\",\n\n /* Components */\n \"[data-md-component=announce]\",\n \"[data-md-component=container]\",\n \"[data-md-component=header-topic]\",\n \"[data-md-component=outdated]\",\n \"[data-md-component=logo]\",\n \"[data-md-component=skip]\",\n ...feature(\"navigation.tabs.sticky\")\n ? 
[\"[data-md-component=tabs]\"]\n : []\n ]) {\n const source = getOptionalElement(selector)\n const target = getOptionalElement(selector, replacement)\n if (\n typeof source !== \"undefined\" &&\n typeof target !== \"undefined\"\n ) {\n source.replaceWith(target)\n }\n }\n })\n\n /* Re-evaluate scripts */\n document$\n .pipe(\n skip(1),\n map(() => getComponentElement(\"container\")),\n switchMap(el => getElements(\"script\", el)),\n concatMap(el => {\n const script = h(\"script\")\n if (el.src) {\n for (const name of el.getAttributeNames())\n script.setAttribute(name, el.getAttribute(name)!)\n el.replaceWith(script)\n\n /* Complete when script is loaded */\n return new Observable(observer => {\n script.onload = () => observer.complete()\n })\n\n /* Complete immediately */\n } else {\n script.textContent = el.textContent\n el.replaceWith(script)\n return EMPTY\n }\n })\n )\n .subscribe()\n\n /* Emit history state change */\n merge(push$, pop$)\n .pipe(\n sample(document$)\n )\n .subscribe(({ url, offset }) => {\n if (url.hash && !offset) {\n setLocationHash(url.hash)\n } else {\n window.scrollTo(0, offset?.y || 0)\n }\n })\n\n /* Debounce update of viewport offset */\n viewport$\n .pipe(\n skipUntil(push$),\n debounceTime(250),\n distinctUntilKeyChanged(\"offset\")\n )\n .subscribe(({ offset }) => {\n history.replaceState(offset, \"\")\n })\n\n /* Set viewport offset from history */\n merge(push$, pop$)\n .pipe(\n bufferCount(2, 1),\n filter(([a, b]) => a.url.pathname === b.url.pathname),\n map(([, state]) => state)\n )\n .subscribe(({ offset }) => {\n window.scrollTo(0, offset?.y || 0)\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport escapeHTML from \"escape-html\"\n\nimport { SearchIndexDocument } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search document\n */\nexport interface SearchDocument extends SearchIndexDocument {\n parent?: SearchIndexDocument /* Parent article */\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Search document mapping\n */\nexport type SearchDocumentMap = Map\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Create a search document mapping\n *\n * @param docs - Search index documents\n *\n * @returns Search document map\n */\nexport function setupSearchDocumentMap(\n docs: SearchIndexDocument[]\n): SearchDocumentMap {\n const documents = new Map()\n const parents = new Set()\n for (const doc of docs) {\n const [path, hash] = doc.location.split(\"#\")\n\n /* Extract location, title and tags */\n const location = doc.location\n const title = doc.title\n const tags = doc.tags\n\n /* Escape and cleanup text */\n const text = escapeHTML(doc.text)\n .replace(/\\s+(?=[,.:;!?])/g, \"\")\n .replace(/\\s+/g, \" \")\n\n /* Handle section */\n if (hash) {\n const parent = documents.get(path)!\n\n /* Ignore first section, override article */\n if (!parents.has(parent)) {\n parent.title = doc.title\n parent.text = text\n\n /* Remember that we processed the article */\n parents.add(parent)\n\n /* Add subsequent section */\n } else {\n documents.set(location, {\n location,\n title,\n text,\n parent\n })\n }\n\n /* Add article */\n } else {\n documents.set(location, {\n location,\n title,\n text,\n ...tags && { tags }\n })\n }\n }\n return documents\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport escapeHTML from \"escape-html\"\n\nimport { SearchIndexConfig } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search highlight function\n *\n * @param value - Value\n *\n * @returns Highlighted value\n */\nexport type SearchHighlightFn = (value: string) => string\n\n/**\n * Search highlight factory function\n *\n * @param query - Query value\n *\n * @returns Search highlight function\n */\nexport type SearchHighlightFactoryFn = (query: string) => SearchHighlightFn\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Create a search highlighter\n *\n * @param config - Search index configuration\n * @param escape - Whether to escape HTML\n *\n * @returns Search highlight factory function\n */\nexport function setupSearchHighlighter(\n config: SearchIndexConfig, escape: boolean\n): SearchHighlightFactoryFn {\n const separator = new RegExp(config.separator, \"img\")\n const highlight = (_: unknown, data: string, term: string) => {\n return `${data}${term}`\n }\n\n /* Return factory function */\n return (query: string) => {\n query = query\n .replace(/[\\s*+\\-:~^]+/g, \" \")\n .trim()\n\n /* Create search term match expression */\n const match = new RegExp(`(^|${config.separator})(${\n query\n .replace(/[|\\\\{}()[\\]^$+*?.-]/g, \"\\\\$&\")\n .replace(separator, \"|\")\n })`, \"img\")\n\n /* Highlight string value */\n return value => (\n escape\n ? escapeHTML(value)\n : value\n )\n .replace(match, highlight)\n .replace(/<\\/mark>(\\s+)]*>/img, \"$1\")\n }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search transformation function\n *\n * @param value - Query value\n *\n * @returns Transformed query value\n */\nexport type SearchTransformFn = (value: string) => string\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Default transformation function\n *\n * 1. Search for terms in quotation marks and prepend a `+` modifier to denote\n * that the resulting document must contain all terms, converting the query\n * to an `AND` query (as opposed to the default `OR` behavior). While users\n * may expect terms enclosed in quotation marks to map to span queries, i.e.\n * for which order is important, Lunr.js doesn't support them, so the best\n * we can do is to convert the terms to an `AND` query.\n *\n * 2. Replace control characters which are not located at the beginning of the\n * query or preceded by white space, or are not followed by a non-whitespace\n * character or are at the end of the query string. Furthermore, filter\n * unmatched quotation marks.\n *\n * 3. Trim excess whitespace from left and right.\n *\n * @param query - Query value\n *\n * @returns Transformed query value\n */\nexport function defaultTransform(query: string): string {\n return query\n .split(/\"([^\"]+)\"/g) /* => 1 */\n .map((terms, index) => index & 1\n ? terms.replace(/^\\b|^(?![^\\x00-\\x7F]|$)|\\s+/g, \" +\")\n : terms\n )\n .join(\"\")\n .replace(/\"|(?:^|\\s+)[*+\\-:^~]+(?=\\s+|$)/g, \"\") /* => 2 */\n .trim() /* => 3 */\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A RTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { SearchIndex, SearchResult } from \"../../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search message type\n */\nexport const enum SearchMessageType {\n SETUP, /* Search index setup */\n READY, /* Search index ready */\n QUERY, /* Search query */\n RESULT /* Search results */\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Message containing the data necessary to setup the search index\n */\nexport interface SearchSetupMessage {\n type: SearchMessageType.SETUP /* Message type */\n data: SearchIndex /* Message data */\n}\n\n/**\n * Message indicating the search index is ready\n */\nexport interface SearchReadyMessage {\n type: SearchMessageType.READY /* Message type */\n}\n\n/**\n * Message containing a search query\n */\nexport interface SearchQueryMessage {\n type: SearchMessageType.QUERY /* Message type */\n data: string /* Message data */\n}\n\n/**\n * Message containing results for a search query\n */\nexport interface SearchResultMessage {\n type: SearchMessageType.RESULT /* Message type */\n data: SearchResult /* Message data */\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Message exchanged with the search worker\n */\nexport type SearchMessage =\n | SearchSetupMessage\n | SearchReadyMessage\n | SearchQueryMessage\n | SearchResultMessage\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Type guard for search setup messages\n *\n * @param message - Search worker message\n *\n * @returns Test result\n */\nexport function isSearchSetupMessage(\n message: SearchMessage\n): message is SearchSetupMessage {\n return message.type === SearchMessageType.SETUP\n}\n\n/**\n * Type guard for search ready messages\n *\n * @param message - Search worker message\n *\n * @returns Test result\n */\nexport function isSearchReadyMessage(\n message: SearchMessage\n): message is SearchReadyMessage {\n return message.type === SearchMessageType.READY\n}\n\n/**\n * Type guard for search query messages\n *\n * @param message - Search worker message\n *\n * @returns Test result\n */\nexport function isSearchQueryMessage(\n message: SearchMessage\n): message is SearchQueryMessage {\n return message.type === SearchMessageType.QUERY\n}\n\n/**\n * Type guard for search result messages\n *\n * @param message - Search worker message\n *\n * @returns Test result\n */\nexport function isSearchResultMessage(\n message: SearchMessage\n): message is SearchResultMessage {\n return message.type === SearchMessageType.RESULT\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the 
Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A RTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n ObservableInput,\n Subject,\n from,\n map,\n share\n} from \"rxjs\"\n\nimport { configuration, feature, translation } from \"~/_\"\nimport { WorkerHandler, watchWorker } from \"~/browser\"\n\nimport { SearchIndex } from \"../../_\"\nimport {\n SearchOptions,\n SearchPipeline\n} from \"../../options\"\nimport {\n SearchMessage,\n SearchMessageType,\n SearchSetupMessage,\n isSearchResultMessage\n} from \"../message\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search worker\n */\nexport type SearchWorker = WorkerHandler\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Set up search index\n *\n * @param data - Search index\n *\n * @returns Search index\n */\nfunction setupSearchIndex({ config, docs }: SearchIndex): SearchIndex {\n\n /* Override default language with value from translation */\n if (config.lang.length === 1 && config.lang[0] === \"en\")\n config.lang = [\n translation(\"search.config.lang\")\n ]\n\n /* Override default separator with value from translation */\n if (config.separator === \"[\\\\s\\\\-]+\")\n config.separator = translation(\"search.config.separator\")\n\n /* Set pipeline from translation */\n const pipeline = translation(\"search.config.pipeline\")\n .split(/\\s*,\\s*/)\n .filter(Boolean) as SearchPipeline\n\n /* Determine search options */\n const options: SearchOptions = {\n pipeline,\n suggestions: feature(\"search.suggest\")\n }\n\n /* Return search index after defaulting */\n return { config, docs, options }\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Set up search worker\n *\n * This function creates a web worker to set up and query the search index,\n * which is done using Lunr.js. 
The index must be passed as an observable to\n * enable hacks like _localsearch_ via search index embedding as JSON.\n *\n * @param url - Worker URL\n * @param index - Search index observable input\n *\n * @returns Search worker\n */\nexport function setupSearchWorker(\n url: string, index: ObservableInput\n): SearchWorker {\n const config = configuration()\n const worker = new Worker(url)\n\n /* Create communication channels and resolve relative links */\n const tx$ = new Subject()\n const rx$ = watchWorker(worker, { tx$ })\n .pipe(\n map(message => {\n if (isSearchResultMessage(message)) {\n for (const result of message.data.items)\n for (const document of result)\n document.location = `${new URL(document.location, config.base)}`\n }\n return message\n }),\n share()\n )\n\n /* Set up search index */\n from(index)\n .pipe(\n map(data => ({\n type: SearchMessageType.SETUP,\n data: setupSearchIndex(data)\n } as SearchSetupMessage))\n )\n .subscribe(tx$.next.bind(tx$))\n\n /* Return search worker */\n return { tx$, rx$ }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n EMPTY,\n Subject,\n catchError,\n combineLatest,\n filter,\n fromEvent,\n map,\n of,\n switchMap,\n withLatestFrom\n} from \"rxjs\"\n\nimport { configuration } from \"~/_\"\nimport {\n getElement,\n getLocation,\n requestJSON,\n setLocation\n} from \"~/browser\"\nimport { getComponentElements } from \"~/components\"\nimport {\n Version,\n renderVersionSelector\n} from \"~/templates\"\n\nimport { fetchSitemap } from \"../sitemap\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Setup options\n */\ninterface SetupOptions {\n document$: Subject /* Document subject */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Set up version selector\n *\n * @param options - Options\n */\nexport function setupVersionSelector(\n { document$ }: SetupOptions\n): void {\n const config = configuration()\n const versions$ = requestJSON(\n new URL(\"../versions.json\", config.base)\n )\n .pipe(\n catchError(() => EMPTY) // @todo refactor instant loading\n )\n\n /* Determine current version */\n const current$ = versions$\n .pipe(\n map(versions => {\n const [, current] = config.base.match(/([^/]+)\\/?$/)!\n return versions.find(({ version, aliases }) => (\n version === current || aliases.includes(current)\n )) || versions[0]\n })\n )\n\n /* Intercept inter-version navigation */\n versions$\n .pipe(\n map(versions => new Map(versions.map(version => [\n `${new URL(`../${version.version}/`, config.base)}`,\n version\n ]))),\n switchMap(urls => fromEvent(document.body, \"click\")\n .pipe(\n filter(ev => !ev.metaKey && !ev.ctrlKey),\n withLatestFrom(current$),\n switchMap(([ev, current]) => {\n if (ev.target instanceof Element) {\n const el = ev.target.closest(\"a\")\n if (el && !el.target && urls.has(el.href)) {\n const url = el.href\n // This is a temporary hack to detect if a version inside the\n // version selector or on another part of the site was clicked.\n // If we're inside the version selector, we definitely want to\n // find the same page, as we might have different deployments\n // due to aliases. However, if we're outside the version\n // selector, we must abort here, because we might otherwise\n // interfere with instant loading. We need to refactor this\n // at some point together with instant loading.\n //\n // See https://github.com/squidfunk/mkdocs-material/issues/4012\n if (!ev.target.closest(\".md-version\")) {\n const version = urls.get(url)!\n if (version === current)\n return EMPTY\n }\n ev.preventDefault()\n return of(url)\n }\n }\n return EMPTY\n }),\n switchMap(url => {\n const { version } = urls.get(url)!\n return fetchSitemap(new URL(url))\n .pipe(\n map(sitemap => {\n const location = getLocation()\n const path = location.href.replace(config.base, \"\")\n return sitemap.includes(path.split(\"#\")[0])\n ? 
new URL(`../${version}/${path}`, config.base)\n : new URL(url)\n })\n )\n })\n )\n )\n )\n .subscribe(url => setLocation(url))\n\n /* Render version selector and warning */\n combineLatest([versions$, current$])\n .subscribe(([versions, current]) => {\n const topic = getElement(\".md-header__topic\")\n topic.appendChild(renderVersionSelector(versions, current))\n })\n\n /* Integrate outdated version banner with instant loading */\n document$.pipe(switchMap(() => current$))\n .subscribe(current => {\n\n /* Check if version state was already determined */\n let outdated = __md_get(\"__outdated\", sessionStorage)\n if (outdated === null) {\n const latest = config.version?.default || \"latest\"\n outdated = !current.aliases.includes(latest)\n\n /* Persist version state in session storage */\n __md_set(\"__outdated\", outdated, sessionStorage)\n }\n\n /* Unhide outdated version banner */\n if (outdated)\n for (const warning of getComponentElements(\"outdated\"))\n warning.hidden = false\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n combineLatest,\n delay,\n distinctUntilChanged,\n distinctUntilKeyChanged,\n filter,\n finalize,\n fromEvent,\n map,\n merge,\n share,\n shareReplay,\n startWith,\n take,\n takeLast,\n takeUntil,\n tap\n} from \"rxjs\"\n\nimport { translation } from \"~/_\"\nimport {\n getLocation,\n setToggle,\n watchElementFocus,\n watchToggle\n} from \"~/browser\"\nimport {\n SearchMessageType,\n SearchQueryMessage,\n SearchWorker,\n defaultTransform,\n isSearchReadyMessage\n} from \"~/integrations\"\n\nimport { Component } from \"../../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search query\n */\nexport interface SearchQuery {\n value: string /* Query value */\n focus: boolean /* Query focus */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch search query\n *\n * Note that the focus event which triggers re-reading the current query value\n * is delayed by `1ms` so the input's empty state is allowed to propagate.\n *\n * @param el - Search query element\n * @param worker - Search worker\n *\n * @returns Search query observable\n */\nexport function watchSearchQuery(\n el: HTMLInputElement, { rx$ }: SearchWorker\n): Observable {\n const fn = __search?.transform || defaultTransform\n\n /* Immediately show search dialog */\n const { searchParams } = getLocation()\n if (searchParams.has(\"q\"))\n setToggle(\"search\", true)\n\n /* Intercept query parameter (deep link) */\n const param$ = rx$\n .pipe(\n filter(isSearchReadyMessage),\n take(1),\n map(() => searchParams.get(\"q\") || \"\")\n )\n\n /* Remove query parameter when search is closed */\n watchToggle(\"search\")\n .pipe(\n filter(active => !active),\n take(1)\n )\n .subscribe(() => {\n const url = new URL(location.href)\n url.searchParams.delete(\"q\")\n history.replaceState({}, \"\", `${url}`)\n })\n\n /* Set query from parameter */\n param$.subscribe(value => { // TODO: not ideal - find a better way\n if (value) {\n el.value = value\n el.focus()\n }\n })\n\n /* Intercept focus and input events */\n const focus$ = watchElementFocus(el)\n const value$ = merge(\n fromEvent(el, \"keyup\"),\n fromEvent(el, \"focus\").pipe(delay(1)),\n param$\n )\n .pipe(\n map(() => fn(el.value)),\n startWith(\"\"),\n distinctUntilChanged(),\n )\n\n /* Combine into single observable */\n return combineLatest([value$, focus$])\n .pipe(\n map(([value, focus]) => ({ value, focus })),\n shareReplay(1)\n )\n}\n\n/**\n * Mount search query\n *\n * @param el - Search query element\n * @param worker - Search worker\n *\n * @returns Search query component observable\n */\nexport function mountSearchQuery(\n el: HTMLInputElement, { tx$, rx$ }: SearchWorker\n): Observable> {\n const push$ = new Subject()\n const done$ = push$.pipe(takeLast(1))\n\n /* Handle value changes */\n push$\n .pipe(\n distinctUntilKeyChanged(\"value\"),\n map(({ value }): SearchQueryMessage => ({\n type: SearchMessageType.QUERY,\n data: value\n }))\n )\n .subscribe(tx$.next.bind(tx$))\n\n /* Handle 
focus changes */\n push$\n .pipe(\n distinctUntilKeyChanged(\"focus\")\n )\n .subscribe(({ focus }) => {\n if (focus) {\n setToggle(\"search\", focus)\n el.placeholder = \"\"\n } else {\n el.placeholder = translation(\"search.placeholder\")\n }\n })\n\n /* Handle reset */\n fromEvent(el.form!, \"reset\")\n .pipe(\n takeUntil(done$)\n )\n .subscribe(() => el.focus())\n\n /* Create and return component */\n return watchSearchQuery(el, { tx$, rx$ })\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state })),\n share()\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n bufferCount,\n filter,\n finalize,\n map,\n merge,\n of,\n skipUntil,\n switchMap,\n take,\n tap,\n withLatestFrom,\n zipWith\n} from \"rxjs\"\n\nimport { translation } from \"~/_\"\nimport {\n getElement,\n watchElementBoundary\n} from \"~/browser\"\nimport {\n SearchResult,\n SearchWorker,\n isSearchReadyMessage,\n isSearchResultMessage\n} from \"~/integrations\"\nimport { renderSearchResultItem } from \"~/templates\"\nimport { round } from \"~/utilities\"\n\nimport { Component } from \"../../_\"\nimport { SearchQuery } from \"../query\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount options\n */\ninterface MountOptions {\n query$: Observable /* Search query observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount search result list\n *\n * This function performs a lazy rendering of the search results, depending on\n * the vertical offset of the search result container.\n *\n * @param el - Search result list element\n * @param worker - Search worker\n * @param options - Options\n *\n * @returns Search result list component observable\n */\nexport function mountSearchResult(\n el: HTMLElement, { rx$ }: SearchWorker, { query$ }: MountOptions\n): Observable> {\n const push$ = new Subject()\n const boundary$ = watchElementBoundary(el.parentElement!)\n .pipe(\n filter(Boolean)\n )\n\n /* Retrieve nested components */\n const meta = getElement(\":scope > :first-child\", el)\n const list = getElement(\":scope > :last-child\", el)\n\n /* Wait until 
search is ready */\n const ready$ = rx$\n .pipe(\n filter(isSearchReadyMessage),\n take(1)\n )\n\n /* Update search result metadata */\n push$\n .pipe(\n withLatestFrom(query$),\n skipUntil(ready$)\n )\n .subscribe(([{ items }, { value }]) => {\n if (value) {\n switch (items.length) {\n\n /* No results */\n case 0:\n meta.textContent = translation(\"search.result.none\")\n break\n\n /* One result */\n case 1:\n meta.textContent = translation(\"search.result.one\")\n break\n\n /* Multiple result */\n default:\n meta.textContent = translation(\n \"search.result.other\",\n round(items.length)\n )\n }\n } else {\n meta.textContent = translation(\"search.result.placeholder\")\n }\n })\n\n /* Update search result list */\n push$\n .pipe(\n tap(() => list.innerHTML = \"\"),\n switchMap(({ items }) => merge(\n of(...items.slice(0, 10)),\n of(...items.slice(10))\n .pipe(\n bufferCount(4),\n zipWith(boundary$),\n switchMap(([chunk]) => chunk)\n )\n ))\n )\n .subscribe(result => list.appendChild(\n renderSearchResultItem(result)\n ))\n\n /* Filter search result message */\n const result$ = rx$\n .pipe(\n filter(isSearchResultMessage),\n map(({ data }) => data)\n )\n\n /* Create and return component */\n return result$\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n finalize,\n fromEvent,\n map,\n tap\n} from \"rxjs\"\n\nimport { getLocation } from \"~/browser\"\n\nimport { Component } from \"../../_\"\nimport { SearchQuery } from \"../query\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search sharing\n */\nexport interface SearchShare {\n url: URL /* Deep link for sharing */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n query$: Observable /* Search query observable */\n}\n\n/**\n * Mount options\n */\ninterface MountOptions {\n query$: Observable /* Search query observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount search sharing\n *\n * @param _el - Search sharing element\n * @param options - Options\n *\n * @returns Search sharing observable\n */\nexport function watchSearchShare(\n _el: HTMLElement, { query$ }: WatchOptions\n): Observable {\n return query$\n .pipe(\n map(({ value }) => {\n const url = getLocation()\n url.hash = \"\"\n url.searchParams.delete(\"h\")\n url.searchParams.set(\"q\", value)\n return { url }\n })\n )\n}\n\n/**\n * Mount search sharing\n *\n * @param el - Search sharing element\n * @param options - Options\n *\n * @returns Search sharing component observable\n */\nexport function mountSearchShare(\n el: HTMLAnchorElement, options: MountOptions\n): Observable> {\n const push$ = new Subject()\n push$.subscribe(({ url }) => {\n el.setAttribute(\"data-clipboard-text\", el.href)\n el.href = `${url}`\n })\n\n /* Prevent following of link */\n fromEvent(el, \"click\")\n .subscribe(ev => ev.preventDefault())\n\n /* Create and return component */\n return watchSearchShare(el, options)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n asyncScheduler,\n combineLatestWith,\n distinctUntilChanged,\n filter,\n finalize,\n fromEvent,\n map,\n merge,\n observeOn,\n tap\n} from \"rxjs\"\n\nimport { Keyboard } from \"~/browser\"\nimport {\n SearchResult,\n SearchWorker,\n isSearchResultMessage\n} from \"~/integrations\"\n\nimport { Component, getComponentElement } from \"../../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search suggestions\n */\nexport interface SearchSuggest {}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount options\n */\ninterface MountOptions {\n keyboard$: Observable /* Keyboard observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount search suggestions\n *\n * This function will perform a lazy rendering of the search results, depending\n * on the vertical offset of the search result container.\n *\n * @param el - Search result list element\n * @param worker - Search worker\n * @param options - Options\n *\n * @returns Search result list component observable\n */\nexport function mountSearchSuggest(\n el: HTMLElement, { rx$ }: SearchWorker, { keyboard$ }: MountOptions\n): Observable> {\n const push$ = new Subject()\n\n /* Retrieve query component and track all changes */\n const query = getComponentElement(\"search-query\")\n const query$ = merge(\n fromEvent(query, \"keydown\"),\n fromEvent(query, \"focus\")\n )\n .pipe(\n observeOn(asyncScheduler),\n map(() => query.value),\n distinctUntilChanged(),\n )\n\n /* Update search suggestions */\n push$\n .pipe(\n combineLatestWith(query$),\n map(([{ suggestions }, value]) => {\n const words = value.split(/([\\s-]+)/)\n if (suggestions?.length && words[words.length - 1]) {\n const last = suggestions[suggestions.length - 1]\n if (last.startsWith(words[words.length - 1]))\n words[words.length - 1] = last\n } else {\n words.length = 0\n }\n return words\n })\n )\n .subscribe(words => el.innerHTML = words\n .join(\"\")\n .replace(/\\s/g, \" \")\n )\n\n /* Set up search keyboard handlers */\n keyboard$\n .pipe(\n filter(({ mode }) => mode === \"search\")\n )\n .subscribe(key => {\n switch (key.type) {\n\n /* Right arrow: accept current suggestion */\n case \"ArrowRight\":\n if (\n el.innerText.length &&\n query.selectionStart === query.value.length\n )\n query.value = el.innerText\n break\n }\n })\n\n /* Filter search result message */\n const result$ = rx$\n .pipe(\n filter(isSearchResultMessage),\n map(({ data }) => data)\n )\n\n /* Create and return component */\n return result$\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(() => ({ ref: el }))\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal 
in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n NEVER,\n Observable,\n ObservableInput,\n filter,\n merge,\n mergeWith,\n sample,\n take\n} from \"rxjs\"\n\nimport { configuration } from \"~/_\"\nimport {\n Keyboard,\n getActiveElement,\n getElements,\n setToggle\n} from \"~/browser\"\nimport {\n SearchIndex,\n SearchResult,\n isSearchQueryMessage,\n isSearchReadyMessage,\n setupSearchWorker\n} from \"~/integrations\"\n\nimport {\n Component,\n getComponentElement,\n getComponentElements\n} from \"../../_\"\nimport {\n SearchQuery,\n mountSearchQuery\n} from \"../query\"\nimport { mountSearchResult } from \"../result\"\nimport {\n SearchShare,\n mountSearchShare\n} from \"../share\"\nimport {\n SearchSuggest,\n mountSearchSuggest\n} from \"../suggest\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search\n */\nexport type Search =\n | SearchQuery\n | SearchResult\n | SearchShare\n | SearchSuggest\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount options\n */\ninterface MountOptions {\n index$: ObservableInput /* Search index observable */\n keyboard$: Observable /* Keyboard observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount search\n *\n * This function sets up the search functionality, including the underlying\n * web worker and all keyboard bindings.\n *\n * @param el - Search element\n * @param options - Options\n *\n * @returns Search component observable\n */\nexport function mountSearch(\n el: HTMLElement, { index$, keyboard$ }: MountOptions\n): Observable> {\n const config = configuration()\n try {\n const url = __search?.worker || config.search\n const worker = setupSearchWorker(url, index$)\n\n /* Retrieve query and result components */\n const query = getComponentElement(\"search-query\", el)\n const result = getComponentElement(\"search-result\", el)\n\n /* Re-emit query when search is ready */\n const { tx$, rx$ } = worker\n tx$\n .pipe(\n filter(isSearchQueryMessage),\n sample(rx$.pipe(filter(isSearchReadyMessage))),\n take(1)\n )\n .subscribe(tx$.next.bind(tx$))\n\n /* Set up search keyboard handlers */\n keyboard$\n .pipe(\n filter(({ mode }) => mode === \"search\")\n )\n .subscribe(key => {\n const active = getActiveElement()\n switch (key.type) {\n\n /* 
Enter: go to first (best) result */\n case \"Enter\":\n if (active === query) {\n const anchors = new Map()\n for (const anchor of getElements(\n \":first-child [href]\", result\n )) {\n const article = anchor.firstElementChild!\n anchors.set(anchor, parseFloat(\n article.getAttribute(\"data-md-score\")!\n ))\n }\n\n /* Go to result with highest score, if any */\n if (anchors.size) {\n const [[best]] = [...anchors].sort(([, a], [, b]) => b - a)\n best.click()\n }\n\n /* Otherwise omit form submission */\n key.claim()\n }\n break\n\n /* Escape or Tab: close search */\n case \"Escape\":\n case \"Tab\":\n setToggle(\"search\", false)\n query.blur()\n break\n\n /* Vertical arrows: select previous or next search result */\n case \"ArrowUp\":\n case \"ArrowDown\":\n if (typeof active === \"undefined\") {\n query.focus()\n } else {\n const els = [query, ...getElements(\n \":not(details) > [href], summary, details[open] [href]\",\n result\n )]\n const i = Math.max(0, (\n Math.max(0, els.indexOf(active)) + els.length + (\n key.type === \"ArrowUp\" ? -1 : +1\n )\n ) % els.length)\n els[i].focus()\n }\n\n /* Prevent scrolling of page */\n key.claim()\n break\n\n /* All other keys: hand to search query */\n default:\n if (query !== getActiveElement())\n query.focus()\n }\n })\n\n /* Set up global keyboard handlers */\n keyboard$\n .pipe(\n filter(({ mode }) => mode === \"global\"),\n )\n .subscribe(key => {\n switch (key.type) {\n\n /* Open search and select query */\n case \"f\":\n case \"s\":\n case \"/\":\n query.focus()\n query.select()\n\n /* Prevent scrolling of page */\n key.claim()\n break\n }\n })\n\n /* Create and return component */\n const query$ = mountSearchQuery(query, worker)\n const result$ = mountSearchResult(result, worker, { query$ })\n return merge(query$, result$)\n .pipe(\n mergeWith(\n\n /* Search sharing */\n ...getComponentElements(\"search-share\", el)\n .map(child => mountSearchShare(child, { query$ })),\n\n /* Search suggestions */\n ...getComponentElements(\"search-suggest\", el)\n .map(child => mountSearchSuggest(child, worker, { keyboard$ }))\n )\n )\n\n /* Gracefully handle broken search */\n } catch (err) {\n el.hidden = true\n return NEVER\n }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n ObservableInput,\n combineLatest,\n filter,\n map,\n startWith\n} from \"rxjs\"\n\nimport { getLocation } from \"~/browser\"\nimport {\n SearchIndex,\n setupSearchHighlighter\n} from \"~/integrations\"\nimport { h } from \"~/utilities\"\n\nimport { Component } from \"../../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search highlighting\n */\nexport interface SearchHighlight {\n nodes: Map /* Map of replacements */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount options\n */\ninterface MountOptions {\n index$: ObservableInput /* Search index observable */\n location$: Observable /* Location observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount search highlighting\n *\n * @param el - Content element\n * @param options - Options\n *\n * @returns Search highlighting component observable\n */\nexport function mountSearchHiglight(\n el: HTMLElement, { index$, location$ }: MountOptions\n): Observable> {\n return combineLatest([\n index$,\n location$\n .pipe(\n startWith(getLocation()),\n filter(url => !!url.searchParams.get(\"h\"))\n )\n ])\n .pipe(\n map(([index, url]) => setupSearchHighlighter(index.config, true)(\n url.searchParams.get(\"h\")!\n )),\n map(fn => {\n const nodes = new Map()\n\n /* Traverse text nodes and collect matches */\n const it = document.createNodeIterator(el, NodeFilter.SHOW_TEXT)\n for (let node = it.nextNode(); node; node = it.nextNode()) {\n if (node.parentElement?.offsetHeight) {\n const original = node.textContent!\n const replaced = fn(original)\n if (replaced.length > original.length)\n nodes.set(node as ChildNode, replaced)\n }\n }\n\n /* Replace original nodes with matches */\n for (const [node, text] of nodes) {\n const { childNodes } = h(\"span\", null, text)\n node.replaceWith(...Array.from(childNodes))\n }\n\n /* Return component */\n return { ref: el, nodes }\n })\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n animationFrameScheduler,\n auditTime,\n combineLatest,\n defer,\n distinctUntilChanged,\n finalize,\n map,\n observeOn,\n take,\n tap,\n withLatestFrom\n} from \"rxjs\"\n\nimport {\n Viewport,\n getElement,\n getElementContainer,\n getElementOffset,\n getElementSize,\n getElements\n} from \"~/browser\"\n\nimport { Component } from \"../_\"\nimport { Header } from \"../header\"\nimport { Main } from \"../main\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Sidebar\n */\nexport interface Sidebar {\n height: number /* Sidebar height */\n locked: boolean /* Sidebar is locked */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n viewport$: Observable /* Viewport observable */\n main$: Observable
    /* Main area observable */\n}\n\n/**\n * Mount options\n */\ninterface MountOptions {\n viewport$: Observable<Viewport> /* Viewport observable */\n header$: Observable<Header>     /* Header observable */\n main$: Observable<Main>
    /* Main area observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch sidebar\n *\n * This function returns an observable that computes the visual parameters of\n * the sidebar which depends on the vertical viewport offset, as well as the\n * height of the main area. When the page is scrolled beyond the header, the\n * sidebar is locked and fills the remaining space.\n *\n * @param el - Sidebar element\n * @param options - Options\n *\n * @returns Sidebar observable\n */\nexport function watchSidebar(\n el: HTMLElement, { viewport$, main$ }: WatchOptions\n): Observable {\n const parent = el.parentElement!\n const adjust =\n parent.offsetTop -\n parent.parentElement!.offsetTop\n\n /* Compute the sidebar's available height and if it should be locked */\n return combineLatest([main$, viewport$])\n .pipe(\n map(([{ offset, height }, { offset: { y } }]) => {\n height = height\n + Math.min(adjust, Math.max(0, y - offset))\n - adjust\n return {\n height,\n locked: y >= offset + adjust\n }\n }),\n distinctUntilChanged((a, b) => (\n a.height === b.height &&\n a.locked === b.locked\n ))\n )\n}\n\n/**\n * Mount sidebar\n *\n * This function doesn't set the height of the actual sidebar, but of its first\n * child \u2013 the `.md-sidebar__scrollwrap` element in order to mitigiate jittery\n * sidebars when the footer is scrolled into view. At some point we switched\n * from `absolute` / `fixed` positioning to `sticky` positioning, significantly\n * reducing jitter in some browsers (respectively Firefox and Safari) when\n * scrolling from the top. However, top-aligned sticky positioning means that\n * the sidebar snaps to the bottom when the end of the container is reached.\n * This is what leads to the mentioned jitter, as the sidebar's height may be\n * updated too slowly.\n *\n * This behaviour can be mitigiated by setting the height of the sidebar to `0`\n * while preserving the padding, and the height on its first element.\n *\n * @param el - Sidebar element\n * @param options - Options\n *\n * @returns Sidebar component observable\n */\nexport function mountSidebar(\n el: HTMLElement, { header$, ...options }: MountOptions\n): Observable> {\n const inner = getElement(\".md-sidebar__scrollwrap\", el)\n const { y } = getElementOffset(inner)\n return defer(() => {\n const push$ = new Subject()\n push$\n .pipe(\n auditTime(0, animationFrameScheduler),\n withLatestFrom(header$)\n )\n .subscribe({\n\n /* Handle emission */\n next([{ height }, { height: offset }]) {\n inner.style.height = `${height - 2 * y}px`\n el.style.top = `${offset}px`\n },\n\n /* Handle complete */\n complete() {\n inner.style.height = \"\"\n el.style.top = \"\"\n }\n })\n\n /* Bring active item into view on initial load */\n push$\n .pipe(\n observeOn(animationFrameScheduler),\n take(1)\n )\n .subscribe(() => {\n for (const item of getElements(\".md-nav__link--active[href]\", el)) {\n const container = getElementContainer(item)\n if (typeof container !== \"undefined\") {\n const offset = item.offsetTop - container.offsetTop\n const { height } = getElementSize(container)\n container.scrollTo({\n top: offset - height / 2\n })\n }\n }\n })\n\n /* Create and return component */\n return watchSidebar(el, options)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 
Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { Repo, User } from \"github-types\"\nimport {\n EMPTY,\n Observable,\n catchError,\n defaultIfEmpty,\n map,\n zip\n} from \"rxjs\"\n\nimport { requestJSON } from \"~/browser\"\n\nimport { SourceFacts } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * GitHub release (partial)\n */\ninterface Release {\n tag_name: string /* Tag name */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch GitHub repository facts\n *\n * @param user - GitHub user or organization\n * @param repo - GitHub repository\n *\n * @returns Repository facts observable\n */\nexport function fetchSourceFactsFromGitHub(\n user: string, repo?: string\n): Observable {\n if (typeof repo !== \"undefined\") {\n const url = `https://api.github.com/repos/${user}/${repo}`\n return zip(\n\n /* Fetch version */\n requestJSON(`${url}/releases/latest`)\n .pipe(\n catchError(() => EMPTY), // @todo refactor instant loading\n map(release => ({\n version: release.tag_name\n })),\n defaultIfEmpty({})\n ),\n\n /* Fetch stars and forks */\n requestJSON(url)\n .pipe(\n catchError(() => EMPTY), // @todo refactor instant loading\n map(info => ({\n stars: info.stargazers_count,\n forks: info.forks_count\n })),\n defaultIfEmpty({})\n )\n )\n .pipe(\n map(([release, info]) => ({ ...release, ...info }))\n )\n\n /* User or organization */\n } else {\n const url = `https://api.github.com/users/${user}`\n return requestJSON(url)\n .pipe(\n map(info => ({\n repositories: info.public_repos\n })),\n defaultIfEmpty({})\n )\n }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial 
portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { ProjectSchema } from \"gitlab\"\nimport {\n EMPTY,\n Observable,\n catchError,\n defaultIfEmpty,\n map\n} from \"rxjs\"\n\nimport { requestJSON } from \"~/browser\"\n\nimport { SourceFacts } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch GitLab repository facts\n *\n * @param base - GitLab base\n * @param project - GitLab project\n *\n * @returns Repository facts observable\n */\nexport function fetchSourceFactsFromGitLab(\n base: string, project: string\n): Observable {\n const url = `https://${base}/api/v4/projects/${encodeURIComponent(project)}`\n return requestJSON(url)\n .pipe(\n catchError(() => EMPTY), // @todo refactor instant loading\n map(({ star_count, forks_count }) => ({\n stars: star_count,\n forks: forks_count\n })),\n defaultIfEmpty({})\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { EMPTY, Observable } from \"rxjs\"\n\nimport { fetchSourceFactsFromGitHub } from \"../github\"\nimport { fetchSourceFactsFromGitLab } from \"../gitlab\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Repository facts for repositories\n */\nexport interface RepositoryFacts {\n stars?: number /* Number of stars */\n forks?: number /* Number of forks */\n version?: string /* Latest version */\n}\n\n/**\n * Repository facts for organizations\n */\nexport interface OrganizationFacts {\n repositories?: number /* Number of repositories */\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Repository facts\n */\nexport type SourceFacts =\n | RepositoryFacts\n | OrganizationFacts\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch repository facts\n *\n * @param url - Repository URL\n *\n * @returns Repository facts observable\n */\nexport function fetchSourceFacts(\n url: string\n): Observable {\n\n /* Try to match GitHub repository */\n let match = url.match(/^.+github\\.com\\/([^/]+)\\/?([^/]+)?/i)\n if (match) {\n const [, user, repo] = match\n return fetchSourceFactsFromGitHub(user, repo)\n }\n\n /* Try to match GitLab repository */\n match = url.match(/^.+?([^/]*gitlab[^/]+)\\/(.+?)\\/?$/i)\n if (match) {\n const [, base, slug] = match\n return fetchSourceFactsFromGitLab(base, slug)\n }\n\n /* Fallback */\n return EMPTY\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n EMPTY,\n Observable,\n Subject,\n catchError,\n defer,\n filter,\n finalize,\n map,\n of,\n shareReplay,\n tap\n} from \"rxjs\"\n\nimport { getElement } from \"~/browser\"\nimport { ConsentDefaults } from \"~/components/consent\"\nimport { renderSourceFacts } from \"~/templates\"\n\nimport {\n Component,\n getComponentElements\n} from \"../../_\"\nimport {\n SourceFacts,\n fetchSourceFacts\n} from \"../facts\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Repository information\n */\nexport interface Source {\n facts: SourceFacts /* Repository facts */\n}\n\n/* ----------------------------------------------------------------------------\n * Data\n * ------------------------------------------------------------------------- */\n\n/**\n * Repository information observable\n */\nlet fetch$: Observable\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch repository information\n *\n * This function tries to read the repository facts from session storage, and\n * if unsuccessful, fetches them from the underlying provider.\n *\n * @param el - Repository information element\n *\n * @returns Repository information observable\n */\nexport function watchSource(\n el: HTMLAnchorElement\n): Observable {\n return fetch$ ||= defer(() => {\n const cached = __md_get(\"__source\", sessionStorage)\n if (cached) {\n return of(cached)\n } else {\n\n /* Check if consent is configured and was given */\n const els = getComponentElements(\"consent\")\n if (els.length) {\n const consent = __md_get(\"__consent\")\n if (!(consent && consent.github))\n return EMPTY\n }\n\n /* Fetch repository facts */\n return fetchSourceFacts(el.href)\n .pipe(\n tap(facts => __md_set(\"__source\", facts, sessionStorage))\n )\n }\n })\n .pipe(\n catchError(() => EMPTY),\n filter(facts => Object.keys(facts).length > 0),\n map(facts => ({ facts })),\n shareReplay(1)\n )\n}\n\n/**\n * Mount repository information\n *\n * @param el - Repository information element\n *\n * @returns Repository information component observable\n */\nexport function mountSource(\n el: HTMLAnchorElement\n): Observable> {\n const inner = getElement(\":scope > :last-child\", el)\n return defer(() => {\n const push$ = new Subject()\n push$.subscribe(({ facts }) => {\n inner.appendChild(renderSourceFacts(facts))\n inner.classList.add(\"md-source__repository--active\")\n })\n\n /* Create and return component */\n return watchSource(el)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the 
Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n defer,\n distinctUntilKeyChanged,\n finalize,\n map,\n of,\n switchMap,\n tap\n} from \"rxjs\"\n\nimport { feature } from \"~/_\"\nimport {\n Viewport,\n watchElementSize,\n watchViewportAt\n} from \"~/browser\"\n\nimport { Component } from \"../_\"\nimport { Header } from \"../header\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Navigation tabs\n */\nexport interface Tabs {\n hidden: boolean /* Navigation tabs are hidden */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n viewport$: Observable /* Viewport observable */\n header$: Observable
    /* Header observable */\n}\n\n/**\n * Mount options\n */\ninterface MountOptions {\n viewport$: Observable /* Viewport observable */\n header$: Observable
    /* Header observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch navigation tabs\n *\n * @param el - Navigation tabs element\n * @param options - Options\n *\n * @returns Navigation tabs observable\n */\nexport function watchTabs(\n el: HTMLElement, { viewport$, header$ }: WatchOptions\n): Observable {\n return watchElementSize(document.body)\n .pipe(\n switchMap(() => watchViewportAt(el, { header$, viewport$ })),\n map(({ offset: { y } }) => {\n return {\n hidden: y >= 10\n }\n }),\n distinctUntilKeyChanged(\"hidden\")\n )\n}\n\n/**\n * Mount navigation tabs\n *\n * This function hides the navigation tabs when scrolling past the threshold\n * and makes them reappear in a nice CSS animation when scrolling back up.\n *\n * @param el - Navigation tabs element\n * @param options - Options\n *\n * @returns Navigation tabs component observable\n */\nexport function mountTabs(\n el: HTMLElement, options: MountOptions\n): Observable> {\n return defer(() => {\n const push$ = new Subject()\n push$.subscribe({\n\n /* Handle emission */\n next({ hidden }) {\n el.hidden = hidden\n },\n\n /* Handle complete */\n complete() {\n el.hidden = false\n }\n })\n\n /* Create and return component */\n return (\n feature(\"navigation.tabs.sticky\")\n ? of({ hidden: false })\n : watchTabs(el, options)\n )\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n bufferCount,\n combineLatestWith,\n debounceTime,\n defer,\n distinctUntilChanged,\n distinctUntilKeyChanged,\n filter,\n finalize,\n map,\n merge,\n of,\n repeat,\n scan,\n share,\n skip,\n startWith,\n switchMap,\n takeLast,\n takeUntil,\n tap,\n withLatestFrom\n} from \"rxjs\"\n\nimport { feature } from \"~/_\"\nimport {\n Viewport,\n getElement,\n getElementContainer,\n getElementSize,\n getElements,\n getLocation,\n getOptionalElement,\n watchElementSize\n} from \"~/browser\"\n\nimport {\n Component,\n getComponentElement\n} from \"../_\"\nimport { Header } from \"../header\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Table of contents\n */\nexport interface TableOfContents {\n prev: HTMLAnchorElement[][] /* Anchors (previous) */\n next: HTMLAnchorElement[][] /* Anchors (next) */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n viewport$: Observable /* Viewport observable */\n header$: Observable
    /* Header observable */\n}\n\n/**\n * Mount options\n */\ninterface MountOptions {\n viewport$: Observable /* Viewport observable */\n header$: Observable
    /* Header observable */\n target$: Observable /* Location target observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch table of contents\n *\n * This is effectively a scroll spy implementation which will account for the\n * fixed header and automatically re-calculate anchor offsets when the viewport\n * is resized. The returned observable will only emit if the table of contents\n * needs to be repainted.\n *\n * This implementation tracks an anchor element's entire path starting from its\n * level up to the top-most anchor element, e.g. `[h3, h2, h1]`. Although the\n * Material theme currently doesn't make use of this information, it enables\n * the styling of the entire hierarchy through customization.\n *\n * Note that the current anchor is the last item of the `prev` anchor list.\n *\n * @param el - Table of contents element\n * @param options - Options\n *\n * @returns Table of contents observable\n */\nexport function watchTableOfContents(\n el: HTMLElement, { viewport$, header$ }: WatchOptions\n): Observable {\n const table = new Map()\n\n /* Compute anchor-to-target mapping */\n const anchors = getElements(\"[href^=\\\\#]\", el)\n for (const anchor of anchors) {\n const id = decodeURIComponent(anchor.hash.substring(1))\n const target = getOptionalElement(`[id=\"${id}\"]`)\n if (typeof target !== \"undefined\")\n table.set(anchor, target)\n }\n\n /* Compute necessary adjustment for header */\n const adjust$ = header$\n .pipe(\n distinctUntilKeyChanged(\"height\"),\n map(({ height }) => {\n const main = getComponentElement(\"main\")\n const grid = getElement(\":scope > :first-child\", main)\n return height + 0.8 * (\n grid.offsetTop -\n main.offsetTop\n )\n }),\n share()\n )\n\n /* Compute partition of previous and next anchors */\n const partition$ = watchElementSize(document.body)\n .pipe(\n distinctUntilKeyChanged(\"height\"),\n\n /* Build index to map anchor paths to vertical offsets */\n switchMap(body => defer(() => {\n let path: HTMLAnchorElement[] = []\n return of([...table].reduce((index, [anchor, target]) => {\n while (path.length) {\n const last = table.get(path[path.length - 1])!\n if (last.tagName >= target.tagName) {\n path.pop()\n } else {\n break\n }\n }\n\n /* If the current anchor is hidden, continue with its parent */\n let offset = target.offsetTop\n while (!offset && target.parentElement) {\n target = target.parentElement\n offset = target.offsetTop\n }\n\n /* Map reversed anchor path to vertical offset */\n return index.set(\n [...path = [...path, anchor]].reverse(),\n offset\n )\n }, new Map()))\n })\n .pipe(\n\n /* Sort index by vertical offset (see https://bit.ly/30z6QSO) */\n map(index => new Map([...index].sort(([, a], [, b]) => a - b))),\n combineLatestWith(adjust$),\n\n /* Re-compute partition when viewport offset changes */\n switchMap(([index, adjust]) => viewport$\n .pipe(\n scan(([prev, next], { offset: { y }, size }) => {\n const last = y + size.height >= Math.floor(body.height)\n\n /* Look forward */\n while (next.length) {\n const [, offset] = next[0]\n if (offset - adjust < y || last) {\n prev = [...prev, next.shift()!]\n } else {\n break\n }\n }\n\n /* Look backward */\n while (prev.length) {\n const [, offset] = prev[prev.length - 1]\n if (offset - adjust >= y && !last) {\n next = [prev.pop()!, ...next]\n } else {\n break\n }\n }\n\n /* Return partition */\n return [prev, next]\n }, [[], 
[...index]]),\n distinctUntilChanged((a, b) => (\n a[0] === b[0] &&\n a[1] === b[1]\n ))\n )\n )\n )\n )\n )\n\n /* Compute and return anchor list migrations */\n return partition$\n .pipe(\n map(([prev, next]) => ({\n prev: prev.map(([path]) => path),\n next: next.map(([path]) => path)\n })),\n\n /* Extract anchor list migrations */\n startWith({ prev: [], next: [] }),\n bufferCount(2, 1),\n map(([a, b]) => {\n\n /* Moving down */\n if (a.prev.length < b.prev.length) {\n return {\n prev: b.prev.slice(Math.max(0, a.prev.length - 1), b.prev.length),\n next: []\n }\n\n /* Moving up */\n } else {\n return {\n prev: b.prev.slice(-1),\n next: b.next.slice(0, b.next.length - a.next.length)\n }\n }\n })\n )\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Mount table of contents\n *\n * @param el - Table of contents element\n * @param options - Options\n *\n * @returns Table of contents component observable\n */\nexport function mountTableOfContents(\n el: HTMLElement, { viewport$, header$, target$ }: MountOptions\n): Observable> {\n return defer(() => {\n const push$ = new Subject()\n const done$ = push$.pipe(takeLast(1))\n push$.subscribe(({ prev, next }) => {\n\n /* Look forward */\n for (const [anchor] of next) {\n anchor.classList.remove(\"md-nav__link--passed\")\n anchor.classList.remove(\"md-nav__link--active\")\n }\n\n /* Look backward */\n for (const [index, [anchor]] of prev.entries()) {\n anchor.classList.add(\"md-nav__link--passed\")\n anchor.classList.toggle(\n \"md-nav__link--active\",\n index === prev.length - 1\n )\n }\n })\n\n /* Set up following, if enabled */\n if (feature(\"toc.follow\")) {\n\n /* Toggle smooth scrolling only for anchor clicks */\n const smooth$ = merge(\n viewport$.pipe(debounceTime(1), map(() => undefined)),\n viewport$.pipe(debounceTime(250), map(() => \"smooth\" as const))\n )\n\n /* Bring active anchor into view */\n push$\n .pipe(\n filter(({ prev }) => prev.length > 0),\n withLatestFrom(smooth$)\n )\n .subscribe(([{ prev }, behavior]) => {\n const [anchor] = prev[prev.length - 1]\n if (anchor.offsetHeight) {\n\n /* Retrieve overflowing container and scroll */\n const container = getElementContainer(anchor)\n if (typeof container !== \"undefined\") {\n const offset = anchor.offsetTop - container.offsetTop\n const { height } = getElementSize(container)\n container.scrollTo({\n top: offset - height / 2,\n behavior\n })\n }\n }\n })\n }\n\n /* Set up anchor tracking, if enabled */\n if (feature(\"navigation.tracking\"))\n viewport$\n .pipe(\n takeUntil(done$),\n distinctUntilKeyChanged(\"offset\"),\n debounceTime(250),\n skip(1),\n takeUntil(target$.pipe(skip(1))),\n repeat({ delay: 250 }),\n withLatestFrom(push$)\n )\n .subscribe(([, { prev }]) => {\n const url = getLocation()\n\n /* Set hash fragment to active anchor */\n const anchor = prev[prev.length - 1]\n if (anchor && anchor.length) {\n const [active] = anchor\n const { hash } = new URL(active.href)\n if (url.hash !== hash) {\n url.hash = hash\n history.replaceState({}, \"\", `${url}`)\n }\n\n /* Reset anchor when at the top */\n } else {\n url.hash = \"\"\n history.replaceState({}, \"\", `${url}`)\n }\n })\n\n /* Create and return component */\n return watchTableOfContents(el, { viewport$, header$ })\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person 
obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n bufferCount,\n combineLatest,\n distinctUntilChanged,\n distinctUntilKeyChanged,\n endWith,\n finalize,\n map,\n repeat,\n skip,\n takeLast,\n takeUntil,\n tap\n} from \"rxjs\"\n\nimport { Viewport } from \"~/browser\"\n\nimport { Component } from \"../_\"\nimport { Header } from \"../header\"\nimport { Main } from \"../main\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Back-to-top button\n */\nexport interface BackToTop {\n hidden: boolean /* Back-to-top button is hidden */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n viewport$: Observable /* Viewport observable */\n main$: Observable
    /* Main area observable */\n target$: Observable /* Location target observable */\n}\n\n/**\n * Mount options\n */\ninterface MountOptions {\n viewport$: Observable /* Viewport observable */\n header$: Observable
    /* Header observable */\n main$: Observable
    /* Main area observable */\n target$: Observable /* Location target observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch back-to-top\n *\n * @param _el - Back-to-top element\n * @param options - Options\n *\n * @returns Back-to-top observable\n */\nexport function watchBackToTop(\n _el: HTMLElement, { viewport$, main$, target$ }: WatchOptions\n): Observable {\n\n /* Compute direction */\n const direction$ = viewport$\n .pipe(\n map(({ offset: { y } }) => y),\n bufferCount(2, 1),\n map(([a, b]) => a > b && b > 0),\n distinctUntilChanged()\n )\n\n /* Compute whether main area is active */\n const active$ = main$\n .pipe(\n map(({ active }) => active)\n )\n\n /* Compute threshold for hiding */\n return combineLatest([active$, direction$])\n .pipe(\n map(([active, direction]) => !(active && direction)),\n distinctUntilChanged(),\n takeUntil(target$.pipe(skip(1))),\n endWith(true),\n repeat({ delay: 250 }),\n map(hidden => ({ hidden }))\n )\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Mount back-to-top\n *\n * @param el - Back-to-top element\n * @param options - Options\n *\n * @returns Back-to-top component observable\n */\nexport function mountBackToTop(\n el: HTMLElement, { viewport$, header$, main$, target$ }: MountOptions\n): Observable> {\n const push$ = new Subject()\n const done$ = push$.pipe(takeLast(1))\n push$.subscribe({\n\n /* Handle emission */\n next({ hidden }) {\n el.hidden = hidden\n if (hidden) {\n el.setAttribute(\"tabindex\", \"-1\")\n el.blur()\n } else {\n el.removeAttribute(\"tabindex\")\n }\n },\n\n /* Handle complete */\n complete() {\n el.style.top = \"\"\n el.hidden = true\n el.removeAttribute(\"tabindex\")\n }\n })\n\n /* Watch header height */\n header$\n .pipe(\n takeUntil(done$),\n distinctUntilKeyChanged(\"height\")\n )\n .subscribe(({ height }) => {\n el.style.top = `${height + 16}px`\n })\n\n /* Create and return component */\n return watchBackToTop(el, { viewport$, main$, target$ })\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n fromEvent,\n map,\n mergeMap,\n switchMap,\n takeWhile,\n tap,\n withLatestFrom\n} from \"rxjs\"\n\nimport { getElements } from \"~/browser\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Patch options\n */\ninterface PatchOptions {\n document$: Observable /* Document observable */\n tablet$: Observable /* Media tablet observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Patch indeterminate checkboxes\n *\n * This function replaces the indeterminate \"pseudo state\" with the actual\n * indeterminate state, which is used to keep navigation always expanded.\n *\n * @param options - Options\n */\nexport function patchIndeterminate(\n { document$, tablet$ }: PatchOptions\n): void {\n document$\n .pipe(\n switchMap(() => getElements(\n // @todo `data-md-state` is deprecated and removed in v9\n \".md-toggle--indeterminate, [data-md-state=indeterminate]\"\n )),\n tap(el => {\n el.indeterminate = true\n el.checked = false\n }),\n mergeMap(el => fromEvent(el, \"change\")\n .pipe(\n takeWhile(() => el.classList.contains(\"md-toggle--indeterminate\")),\n map(() => el)\n )\n ),\n withLatestFrom(tablet$)\n )\n .subscribe(([el, tablet]) => {\n el.classList.remove(\"md-toggle--indeterminate\")\n if (tablet)\n el.checked = false\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n filter,\n fromEvent,\n map,\n mergeMap,\n switchMap,\n tap\n} from \"rxjs\"\n\nimport { getElements } from \"~/browser\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Patch options\n */\ninterface PatchOptions {\n document$: Observable /* Document observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Check whether the given device is an Apple device\n *\n * @returns Test result\n */\nfunction isAppleDevice(): boolean {\n return /(iPad|iPhone|iPod)/.test(navigator.userAgent)\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Patch all elements with `data-md-scrollfix` attributes\n *\n * This is a year-old patch which ensures that overflow scrolling works at the\n * top and bottom of containers on iOS by ensuring a `1px` scroll offset upon\n * the start of a touch event.\n *\n * @see https://bit.ly/2SCtAOO - Original source\n *\n * @param options - Options\n */\nexport function patchScrollfix(\n { document$ }: PatchOptions\n): void {\n document$\n .pipe(\n switchMap(() => getElements(\"[data-md-scrollfix]\")),\n tap(el => el.removeAttribute(\"data-md-scrollfix\")),\n filter(isAppleDevice),\n mergeMap(el => fromEvent(el, \"touchstart\")\n .pipe(\n map(() => el)\n )\n )\n )\n .subscribe(el => {\n const top = el.scrollTop\n\n /* We're at the top of the container */\n if (top === 0) {\n el.scrollTop = 1\n\n /* We're at the bottom of the container */\n } else if (top + el.offsetHeight === el.scrollHeight) {\n el.scrollTop = top - 1\n }\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n combineLatest,\n delay,\n map,\n of,\n switchMap,\n withLatestFrom\n} from \"rxjs\"\n\nimport {\n Viewport,\n watchToggle\n} from \"~/browser\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Patch options\n */\ninterface PatchOptions {\n viewport$: Observable /* Viewport observable */\n tablet$: Observable /* Media tablet observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Patch the document body to lock when search is open\n *\n * For mobile and tablet viewports, the search is rendered full screen, which\n * leads to scroll leaking when at the top or bottom of the search result. This\n * function locks the body when the search is in full screen mode, and restores\n * the scroll position when leaving.\n *\n * @param options - Options\n */\nexport function patchScrolllock(\n { viewport$, tablet$ }: PatchOptions\n): void {\n combineLatest([watchToggle(\"search\"), tablet$])\n .pipe(\n map(([active, tablet]) => active && !tablet),\n switchMap(active => of(active)\n .pipe(\n delay(active ? 400 : 100)\n )\n ),\n withLatestFrom(viewport$)\n )\n .subscribe(([active, { offset: { y }}]) => {\n if (active) {\n document.body.setAttribute(\"data-md-scrolllock\", \"\")\n document.body.style.top = `-${y}px`\n } else {\n const value = -1 * parseInt(document.body.style.top, 10)\n document.body.removeAttribute(\"data-md-scrolllock\")\n document.body.style.top = \"\"\n if (value)\n window.scrollTo(0, value)\n }\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n/* ----------------------------------------------------------------------------\n * Polyfills\n * ------------------------------------------------------------------------- */\n\n/* Polyfill `Object.entries` */\nif (!Object.entries)\n Object.entries = function (obj: object) {\n const data: [string, string][] = []\n for (const key of Object.keys(obj))\n // @ts-expect-error - ignore property access warning\n data.push([key, obj[key]])\n\n /* Return entries */\n return data\n }\n\n/* Polyfill `Object.values` */\nif (!Object.values)\n Object.values = function (obj: object) {\n const data: string[] = []\n for (const key of Object.keys(obj))\n // @ts-expect-error - ignore property access warning\n data.push(obj[key])\n\n /* Return values */\n return data\n }\n\n/* ------------------------------------------------------------------------- */\n\n/* Polyfills for `Element` */\nif (typeof Element !== \"undefined\") {\n\n /* Polyfill `Element.scrollTo` */\n if (!Element.prototype.scrollTo)\n Element.prototype.scrollTo = function (\n x?: ScrollToOptions | number, y?: number\n ): void {\n if (typeof x === \"object\") {\n this.scrollLeft = x.left!\n this.scrollTop = x.top!\n } else {\n this.scrollLeft = x!\n this.scrollTop = y!\n }\n }\n\n /* Polyfill `Element.replaceWith` */\n if (!Element.prototype.replaceWith)\n Element.prototype.replaceWith = function (\n ...nodes: Array\n ): void {\n const parent = this.parentNode\n if (parent) {\n if (nodes.length === 0)\n parent.removeChild(this)\n\n /* Replace children and create text nodes */\n for (let i = nodes.length - 1; i >= 0; i--) {\n let node = nodes[i]\n if (typeof node === \"string\")\n node = document.createTextNode(node)\n else if (node.parentNode)\n node.parentNode.removeChild(node)\n\n /* Replace child or insert before previous sibling */\n if (!i)\n parent.replaceChild(node, this)\n else\n parent.insertBefore(this.previousSibling!, node)\n }\n }\n }\n}\n"], + "mappings": 
"6+BAAA,IAAAA,GAAAC,GAAA,CAAAC,GAAAC,KAAA,EAAC,SAAUC,EAAQC,EAAS,CAC1B,OAAOH,IAAY,UAAY,OAAOC,IAAW,YAAcE,EAAQ,EACvE,OAAO,QAAW,YAAc,OAAO,IAAM,OAAOA,CAAO,EAC1DA,EAAQ,CACX,GAAEH,GAAO,UAAY,CAAE,aASrB,SAASI,EAA0BC,EAAO,CACxC,IAAIC,EAAmB,GACnBC,EAA0B,GAC1BC,EAAiC,KAEjCC,EAAsB,CACxB,KAAM,GACN,OAAQ,GACR,IAAK,GACL,IAAK,GACL,MAAO,GACP,SAAU,GACV,OAAQ,GACR,KAAM,GACN,MAAO,GACP,KAAM,GACN,KAAM,GACN,SAAU,GACV,iBAAkB,EACpB,EAOA,SAASC,EAAmBC,EAAI,CAC9B,MACE,GAAAA,GACAA,IAAO,UACPA,EAAG,WAAa,QAChBA,EAAG,WAAa,QAChB,cAAeA,GACf,aAAcA,EAAG,UAKrB,CASA,SAASC,EAA8BD,EAAI,CACzC,IAAIE,GAAOF,EAAG,KACVG,GAAUH,EAAG,QAUjB,MARI,GAAAG,KAAY,SAAWL,EAAoBI,KAAS,CAACF,EAAG,UAIxDG,KAAY,YAAc,CAACH,EAAG,UAI9BA,EAAG,kBAKT,CAOA,SAASI,EAAqBJ,EAAI,CAC5BA,EAAG,UAAU,SAAS,eAAe,IAGzCA,EAAG,UAAU,IAAI,eAAe,EAChCA,EAAG,aAAa,2BAA4B,EAAE,EAChD,CAOA,SAASK,EAAwBL,EAAI,CAC/B,CAACA,EAAG,aAAa,0BAA0B,IAG/CA,EAAG,UAAU,OAAO,eAAe,EACnCA,EAAG,gBAAgB,0BAA0B,EAC/C,CAUA,SAASM,EAAUC,EAAG,CAChBA,EAAE,SAAWA,EAAE,QAAUA,EAAE,UAI3BR,EAAmBL,EAAM,aAAa,GACxCU,EAAqBV,EAAM,aAAa,EAG1CC,EAAmB,GACrB,CAUA,SAASa,EAAcD,EAAG,CACxBZ,EAAmB,EACrB,CASA,SAASc,EAAQF,EAAG,CAEd,CAACR,EAAmBQ,EAAE,MAAM,IAI5BZ,GAAoBM,EAA8BM,EAAE,MAAM,IAC5DH,EAAqBG,EAAE,MAAM,CAEjC,CAMA,SAASG,EAAOH,EAAG,CACb,CAACR,EAAmBQ,EAAE,MAAM,IAK9BA,EAAE,OAAO,UAAU,SAAS,eAAe,GAC3CA,EAAE,OAAO,aAAa,0BAA0B,KAMhDX,EAA0B,GAC1B,OAAO,aAAaC,CAA8B,EAClDA,EAAiC,OAAO,WAAW,UAAW,CAC5DD,EAA0B,EAC5B,EAAG,GAAG,EACNS,EAAwBE,EAAE,MAAM,EAEpC,CAOA,SAASI,EAAmBJ,EAAG,CACzB,SAAS,kBAAoB,WAK3BX,IACFD,EAAmB,IAErBiB,EAA+B,EAEnC,CAQA,SAASA,GAAiC,CACxC,SAAS,iBAAiB,YAAaC,CAAoB,EAC3D,SAAS,iBAAiB,YAAaA,CAAoB,EAC3D,SAAS,iBAAiB,UAAWA,CAAoB,EACzD,SAAS,iBAAiB,cAAeA,CAAoB,EAC7D,SAAS,iBAAiB,cAAeA,CAAoB,EAC7D,SAAS,iBAAiB,YAAaA,CAAoB,EAC3D,SAAS,iBAAiB,YAAaA,CAAoB,EAC3D,SAAS,iBAAiB,aAAcA,CAAoB,EAC5D,SAAS,iBAAiB,WAAYA,CAAoB,CAC5D,CAEA,SAASC,GAAoC,CAC3C,SAAS,oBAAoB,YAAaD,CAAoB,EAC9D,SAAS,oBAAoB,YAAaA,CAAoB,EAC9D,SAAS,oBAAoB,UAAWA,CAAoB,EAC5D,SAAS,oBAAoB,cAAeA,CAAoB,EAChE,SAAS,oBAAoB,cAAeA,CAAoB,EAChE,SAAS,oBAAoB,YAAaA,CAAoB,EAC9D,SAAS,oBAAoB,YAAaA,CAAoB,EAC9D,SAAS,oBAAoB,aAAcA,CAAoB,EAC/D,SAAS,oBAAoB,WAAYA,CAAoB,CAC/D,CASA,SAASA,EAAqBN,EAAG,CAG3BA,EAAE,OAAO,UAAYA,EAAE,OAAO,SAAS,YAAY,IAAM,SAI7DZ,EAAmB,GACnBmB,EAAkC,EACpC,CAKA,SAAS,iBAAiB,UAAWR,EAAW,EAAI,EACpD,SAAS,iBAAiB,YAAaE,EAAe,EAAI,EAC1D,SAAS,iBAAiB,cAAeA,EAAe,EAAI,EAC5D,SAAS,iBAAiB,aAAcA,EAAe,EAAI,EAC3D,SAAS,iBAAiB,mBAAoBG,EAAoB,EAAI,EAEtEC,EAA+B,EAM/BlB,EAAM,iBAAiB,QAASe,EAAS,EAAI,EAC7Cf,EAAM,iBAAiB,OAAQgB,EAAQ,EAAI,EAOvChB,EAAM,WAAa,KAAK,wBAA0BA,EAAM,KAI1DA,EAAM,KAAK,aAAa,wBAAyB,EAAE,EAC1CA,EAAM,WAAa,KAAK,gBACjC,SAAS,gBAAgB,UAAU,IAAI,kBAAkB,EACzD,SAAS,gBAAgB,aAAa,wBAAyB,EAAE,EAErE,CAKA,GAAI,OAAO,QAAW,aAAe,OAAO,UAAa,YAAa,CAIpE,OAAO,0BAA4BD,EAInC,IAAIsB,EAEJ,GAAI,CACFA,EAAQ,IAAI,YAAY,8BAA8B,CACxD,OAASC,EAAP,CAEAD,EAAQ,SAAS,YAAY,aAAa,EAC1CA,EAAM,gBAAgB,+BAAgC,GAAO,GAAO,CAAC,CAAC,CACxE,CAEA,OAAO,cAAcA,CAAK,CAC5B,CAEI,OAAO,UAAa,aAGtBtB,EAA0B,QAAQ,CAGtC,CAAE,ICvTF,IAAAwB,GAAAC,GAAAC,IAAA,EAAC,SAASC,EAAQ,CAOhB,IAAIC,EAA6B,UAAW,CAC1C,GAAI,CACF,MAAO,CAAC,CAAC,OAAO,QAClB,OAASC,EAAP,CACA,MAAO,EACT,CACF,EAGIC,EAAoBF,EAA2B,EAE/CG,EAAiB,SAASC,EAAO,CACnC,IAAIC,EAAW,CACb,KAAM,UAAW,CACf,IAAIC,EAAQF,EAAM,MAAM,EACxB,MAAO,CAAE,KAAME,IAAU,OAAQ,MAAOA,CAAM,CAChD,CACF,EAEA,OAAIJ,IACFG,EAAS,OAAO,UAAY,UAAW,CACrC,OAAOA,CACT,GAGKA,CACT,EAMIE,EAAiB,SAASD,EAAO,CACnC,OAAO,mBAAmBA,CAAK,EAAE,QAAQ,OAAQ,GAAG,CACtD,EAEIE,EAAmB,SAASF,EAAO,CACrC,OAAO,mBAAmB,OAAOA,CAAK,EAAE,QAAQ,MAAO,GAAG,CAAC,CAC7D,EAEIG,EAA0B,UAAW,CAEvC,IAAIC,EAAkB,SAASC,EAAc,CAC3C,OAAO,eAAe,KAAM,WAAY,CAAE,SAAU,GAAM,MAAO,CAAC,CAAE,CAAC,EACrE,IAAIC,EAAqB,OAAOD,EAEhC,GAAIC,IAA
uB,YAEpB,GAAIA,IAAuB,SAC5BD,IAAiB,IACnB,KAAK,YAAYA,CAAY,UAEtBA,aAAwBD,EAAiB,CAClD,IAAIG,EAAQ,KACZF,EAAa,QAAQ,SAASL,EAAOQ,EAAM,CACzCD,EAAM,OAAOC,EAAMR,CAAK,CAC1B,CAAC,CACH,SAAYK,IAAiB,MAAUC,IAAuB,SAC5D,GAAI,OAAO,UAAU,SAAS,KAAKD,CAAY,IAAM,iBACnD,QAASI,EAAI,EAAGA,EAAIJ,EAAa,OAAQI,IAAK,CAC5C,IAAIC,EAAQL,EAAaI,GACzB,GAAK,OAAO,UAAU,SAAS,KAAKC,CAAK,IAAM,kBAAsBA,EAAM,SAAW,EACpF,KAAK,OAAOA,EAAM,GAAIA,EAAM,EAAE,MAE9B,OAAM,IAAI,UAAU,4CAA8CD,EAAI,6BAA8B,CAExG,KAEA,SAASE,KAAON,EACVA,EAAa,eAAeM,CAAG,GACjC,KAAK,OAAOA,EAAKN,EAAaM,EAAI,MAKxC,OAAM,IAAI,UAAU,8CAA+C,CAEvE,EAEIC,EAAQR,EAAgB,UAE5BQ,EAAM,OAAS,SAASJ,EAAMR,EAAO,CAC/BQ,KAAQ,KAAK,SACf,KAAK,SAASA,GAAM,KAAK,OAAOR,CAAK,CAAC,EAEtC,KAAK,SAASQ,GAAQ,CAAC,OAAOR,CAAK,CAAC,CAExC,EAEAY,EAAM,OAAS,SAASJ,EAAM,CAC5B,OAAO,KAAK,SAASA,EACvB,EAEAI,EAAM,IAAM,SAASJ,EAAM,CACzB,OAAQA,KAAQ,KAAK,SAAY,KAAK,SAASA,GAAM,GAAK,IAC5D,EAEAI,EAAM,OAAS,SAASJ,EAAM,CAC5B,OAAQA,KAAQ,KAAK,SAAY,KAAK,SAASA,GAAM,MAAM,CAAC,EAAI,CAAC,CACnE,EAEAI,EAAM,IAAM,SAASJ,EAAM,CACzB,OAAQA,KAAQ,KAAK,QACvB,EAEAI,EAAM,IAAM,SAASJ,EAAMR,EAAO,CAChC,KAAK,SAASQ,GAAQ,CAAC,OAAOR,CAAK,CAAC,CACtC,EAEAY,EAAM,QAAU,SAASC,EAAUC,EAAS,CAC1C,IAAIC,EACJ,QAASP,KAAQ,KAAK,SACpB,GAAI,KAAK,SAAS,eAAeA,CAAI,EAAG,CACtCO,EAAU,KAAK,SAASP,GACxB,QAASC,EAAI,EAAGA,EAAIM,EAAQ,OAAQN,IAClCI,EAAS,KAAKC,EAASC,EAAQN,GAAID,EAAM,IAAI,CAEjD,CAEJ,EAEAI,EAAM,KAAO,UAAW,CACtB,IAAId,EAAQ,CAAC,EACb,YAAK,QAAQ,SAASE,EAAOQ,EAAM,CACjCV,EAAM,KAAKU,CAAI,CACjB,CAAC,EACMX,EAAeC,CAAK,CAC7B,EAEAc,EAAM,OAAS,UAAW,CACxB,IAAId,EAAQ,CAAC,EACb,YAAK,QAAQ,SAASE,EAAO,CAC3BF,EAAM,KAAKE,CAAK,CAClB,CAAC,EACMH,EAAeC,CAAK,CAC7B,EAEAc,EAAM,QAAU,UAAW,CACzB,IAAId,EAAQ,CAAC,EACb,YAAK,QAAQ,SAASE,EAAOQ,EAAM,CACjCV,EAAM,KAAK,CAACU,EAAMR,CAAK,CAAC,CAC1B,CAAC,EACMH,EAAeC,CAAK,CAC7B,EAEIF,IACFgB,EAAM,OAAO,UAAYA,EAAM,SAGjCA,EAAM,SAAW,UAAW,CAC1B,IAAII,EAAc,CAAC,EACnB,YAAK,QAAQ,SAAShB,EAAOQ,EAAM,CACjCQ,EAAY,KAAKf,EAAeO,CAAI,EAAI,IAAMP,EAAeD,CAAK,CAAC,CACrE,CAAC,EACMgB,EAAY,KAAK,GAAG,CAC7B,EAGAvB,EAAO,gBAAkBW,CAC3B,EAEIa,EAAkC,UAAW,CAC/C,GAAI,CACF,IAAIb,EAAkBX,EAAO,gBAE7B,OACG,IAAIW,EAAgB,MAAM,EAAE,SAAS,IAAM,OAC3C,OAAOA,EAAgB,UAAU,KAAQ,YACzC,OAAOA,EAAgB,UAAU,SAAY,UAElD,OAASc,EAAP,CACA,MAAO,EACT,CACF,EAEKD,EAAgC,GACnCd,EAAwB,EAG1B,IAAIS,EAAQnB,EAAO,gBAAgB,UAE/B,OAAOmB,EAAM,MAAS,aACxBA,EAAM,KAAO,UAAW,CACtB,IAAIL,EAAQ,KACRT,EAAQ,CAAC,EACb,KAAK,QAAQ,SAASE,EAAOQ,EAAM,CACjCV,EAAM,KAAK,CAACU,EAAMR,CAAK,CAAC,EACnBO,EAAM,UACTA,EAAM,OAAOC,CAAI,CAErB,CAAC,EACDV,EAAM,KAAK,SAASqB,EAAGC,EAAG,CACxB,OAAID,EAAE,GAAKC,EAAE,GACJ,GACED,EAAE,GAAKC,EAAE,GACX,EAEA,CAEX,CAAC,EACGb,EAAM,WACRA,EAAM,SAAW,CAAC,GAEpB,QAASE,EAAI,EAAGA,EAAIX,EAAM,OAAQW,IAChC,KAAK,OAAOX,EAAMW,GAAG,GAAIX,EAAMW,GAAG,EAAE,CAExC,GAGE,OAAOG,EAAM,aAAgB,YAC/B,OAAO,eAAeA,EAAO,cAAe,CAC1C,WAAY,GACZ,aAAc,GACd,SAAU,GACV,MAAO,SAASP,EAAc,CAC5B,GAAI,KAAK,SACP,KAAK,SAAW,CAAC,MACZ,CACL,IAAIgB,EAAO,CAAC,EACZ,KAAK,QAAQ,SAASrB,EAAOQ,EAAM,CACjCa,EAAK,KAAKb,CAAI,CAChB,CAAC,EACD,QAASC,EAAI,EAAGA,EAAIY,EAAK,OAAQZ,IAC/B,KAAK,OAAOY,EAAKZ,EAAE,CAEvB,CAEAJ,EAAeA,EAAa,QAAQ,MAAO,EAAE,EAG7C,QAFIiB,EAAajB,EAAa,MAAM,GAAG,EACnCkB,EACKd,EAAI,EAAGA,EAAIa,EAAW,OAAQb,IACrCc,EAAYD,EAAWb,GAAG,MAAM,GAAG,EACnC,KAAK,OACHP,EAAiBqB,EAAU,EAAE,EAC5BA,EAAU,OAAS,EAAKrB,EAAiBqB,EAAU,EAAE,EAAI,EAC5D,CAEJ,CACF,CAAC,CAKL,GACG,OAAO,QAAW,YAAe,OAC5B,OAAO,QAAW,YAAe,OACjC,OAAO,MAAS,YAAe,KAAO/B,EAC9C,GAEC,SAASC,EAAQ,CAOhB,IAAI+B,EAAwB,UAAW,CACrC,GAAI,CACF,IAAIC,EAAI,IAAIhC,EAAO,IAAI,IAAK,UAAU,EACtC,OAAAgC,EAAE,SAAW,MACLA,EAAE,OAAS,kBAAqBA,EAAE,YAC5C,OAASP,EAAP,CACA,MAAO,EACT,CACF,EAGIQ,EAAc,UAAW,CAC3B,IAAIC,EAAOlC,EAAO,IAEdmC,EAAM,SAASC,EAAKC,EAAM,CACxB,OAAOD,GAAQ,WAAUA,EAAM,OAAOA,CAAG,GACzCC,GA
AQ,OAAOA,GAAS,WAAUA,EAAO,OAAOA,CAAI,GAGxD,IAAIC,EAAM,SAAUC,EACpB,GAAIF,IAASrC,EAAO,WAAa,QAAUqC,IAASrC,EAAO,SAAS,MAAO,CACzEqC,EAAOA,EAAK,YAAY,EACxBC,EAAM,SAAS,eAAe,mBAAmB,EAAE,EACnDC,EAAcD,EAAI,cAAc,MAAM,EACtCC,EAAY,KAAOF,EACnBC,EAAI,KAAK,YAAYC,CAAW,EAChC,GAAI,CACF,GAAIA,EAAY,KAAK,QAAQF,CAAI,IAAM,EAAG,MAAM,IAAI,MAAME,EAAY,IAAI,CAC5E,OAASC,EAAP,CACA,MAAM,IAAI,MAAM,0BAA4BH,EAAO,WAAaG,CAAG,CACrE,CACF,CAEA,IAAIC,EAAgBH,EAAI,cAAc,GAAG,EACzCG,EAAc,KAAOL,EACjBG,IACFD,EAAI,KAAK,YAAYG,CAAa,EAClCA,EAAc,KAAOA,EAAc,MAGrC,IAAIC,EAAeJ,EAAI,cAAc,OAAO,EAI5C,GAHAI,EAAa,KAAO,MACpBA,EAAa,MAAQN,EAEjBK,EAAc,WAAa,KAAO,CAAC,IAAI,KAAKA,EAAc,IAAI,GAAM,CAACC,EAAa,cAAc,GAAK,CAACL,EACxG,MAAM,IAAI,UAAU,aAAa,EAGnC,OAAO,eAAe,KAAM,iBAAkB,CAC5C,MAAOI,CACT,CAAC,EAID,IAAIE,EAAe,IAAI3C,EAAO,gBAAgB,KAAK,MAAM,EACrD4C,EAAqB,GACrBC,EAA2B,GAC3B/B,EAAQ,KACZ,CAAC,SAAU,SAAU,KAAK,EAAE,QAAQ,SAASgC,EAAY,CACvD,IAAIC,GAASJ,EAAaG,GAC1BH,EAAaG,GAAc,UAAW,CACpCC,GAAO,MAAMJ,EAAc,SAAS,EAChCC,IACFC,EAA2B,GAC3B/B,EAAM,OAAS6B,EAAa,SAAS,EACrCE,EAA2B,GAE/B,CACF,CAAC,EAED,OAAO,eAAe,KAAM,eAAgB,CAC1C,MAAOF,EACP,WAAY,EACd,CAAC,EAED,IAAIK,EAAS,OACb,OAAO,eAAe,KAAM,sBAAuB,CACjD,WAAY,GACZ,aAAc,GACd,SAAU,GACV,MAAO,UAAW,CACZ,KAAK,SAAWA,IAClBA,EAAS,KAAK,OACVH,IACFD,EAAqB,GACrB,KAAK,aAAa,YAAY,KAAK,MAAM,EACzCA,EAAqB,IAG3B,CACF,CAAC,CACH,EAEIzB,EAAQgB,EAAI,UAEZc,EAA6B,SAASC,EAAe,CACvD,OAAO,eAAe/B,EAAO+B,EAAe,CAC1C,IAAK,UAAW,CACd,OAAO,KAAK,eAAeA,EAC7B,EACA,IAAK,SAAS3C,EAAO,CACnB,KAAK,eAAe2C,GAAiB3C,CACvC,EACA,WAAY,EACd,CAAC,CACH,EAEA,CAAC,OAAQ,OAAQ,WAAY,OAAQ,UAAU,EAC5C,QAAQ,SAAS2C,EAAe,CAC/BD,EAA2BC,CAAa,CAC1C,CAAC,EAEH,OAAO,eAAe/B,EAAO,SAAU,CACrC,IAAK,UAAW,CACd,OAAO,KAAK,eAAe,MAC7B,EACA,IAAK,SAASZ,EAAO,CACnB,KAAK,eAAe,OAAYA,EAChC,KAAK,oBAAoB,CAC3B,EACA,WAAY,EACd,CAAC,EAED,OAAO,iBAAiBY,EAAO,CAE7B,SAAY,CACV,IAAK,UAAW,CACd,IAAIL,EAAQ,KACZ,OAAO,UAAW,CAChB,OAAOA,EAAM,IACf,CACF,CACF,EAEA,KAAQ,CACN,IAAK,UAAW,CACd,OAAO,KAAK,eAAe,KAAK,QAAQ,MAAO,EAAE,CACnD,EACA,IAAK,SAASP,EAAO,CACnB,KAAK,eAAe,KAAOA,EAC3B,KAAK,oBAAoB,CAC3B,EACA,WAAY,EACd,EAEA,SAAY,CACV,IAAK,UAAW,CACd,OAAO,KAAK,eAAe,SAAS,QAAQ,SAAU,GAAG,CAC3D,EACA,IAAK,SAASA,EAAO,CACnB,KAAK,eAAe,SAAWA,CACjC,EACA,WAAY,EACd,EAEA,OAAU,CACR,IAAK,UAAW,CAEd,IAAI4C,EAAe,CAAE,QAAS,GAAI,SAAU,IAAK,OAAQ,EAAG,EAAE,KAAK,eAAe,UAI9EC,EAAkB,KAAK,eAAe,MAAQD,GAChD,KAAK,eAAe,OAAS,GAE/B,OAAO,KAAK,eAAe,SACzB,KACA,KAAK,eAAe,UACnBC,EAAmB,IAAM,KAAK,eAAe,KAAQ,GAC1D,EACA,WAAY,EACd,EAEA,SAAY,CACV,IAAK,UAAW,CACd,MAAO,EACT,EACA,IAAK,SAAS7C,EAAO,CACrB,EACA,WAAY,EACd,EAEA,SAAY,CACV,IAAK,UAAW,CACd,MAAO,EACT,EACA,IAAK,SAASA,EAAO,CACrB,EACA,WAAY,EACd,CACF,CAAC,EAED4B,EAAI,gBAAkB,SAASkB,EAAM,CACnC,OAAOnB,EAAK,gBAAgB,MAAMA,EAAM,SAAS,CACnD,EAEAC,EAAI,gBAAkB,SAASC,EAAK,CAClC,OAAOF,EAAK,gBAAgB,MAAMA,EAAM,SAAS,CACnD,EAEAlC,EAAO,IAAMmC,CAEf,EAMA,GAJKJ,EAAsB,GACzBE,EAAY,EAGTjC,EAAO,WAAa,QAAW,EAAE,WAAYA,EAAO,UAAW,CAClE,IAAIsD,EAAY,UAAW,CACzB,OAAOtD,EAAO,SAAS,SAAW,KAAOA,EAAO,SAAS,UAAYA,EAAO,SAAS,KAAQ,IAAMA,EAAO,SAAS,KAAQ,GAC7H,EAEA,GAAI,CACF,OAAO,eAAeA,EAAO,SAAU,SAAU,CAC/C,IAAKsD,EACL,WAAY,EACd,CAAC,CACH,OAAS7B,EAAP,CACA,YAAY,UAAW,CACrBzB,EAAO,SAAS,OAASsD,EAAU,CACrC,EAAG,GAAG,CACR,CACF,CAEF,GACG,OAAO,QAAW,YAAe,OAC5B,OAAO,QAAW,YAAe,OACjC,OAAO,MAAS,YAAe,KAAOvD,EAC9C,IC5eA,IAAAwD,GAAAC,GAAA,CAAAC,GAAAC,KAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gFAeA,IAAIC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,IACH,SAAUC,EAAS,CAChB,IAAIC,EAAO,OAAO,QAAW,SAAW,OAAS,OAAO,MAAS,SAAW,KAAO,OAAO,MAAS,SAAW,KAAO,CAAC,EAClH,OAAO,QAAW,YAAc,OAAO,IACvC,OAAO,QAAS,CAAC,SAAS,EAAG,SA
AU3B,EAAS,CAAE0B,EAAQE,EAAeD,EAAMC,EAAe5B,CAAO,CAAC,CAAC,CAAG,CAAC,EAEtG,OAAOC,IAAW,UAAY,OAAOA,GAAO,SAAY,SAC7DyB,EAAQE,EAAeD,EAAMC,EAAe3B,GAAO,OAAO,CAAC,CAAC,EAG5DyB,EAAQE,EAAeD,CAAI,CAAC,EAEhC,SAASC,EAAe5B,EAAS6B,EAAU,CACvC,OAAI7B,IAAY2B,IACR,OAAO,OAAO,QAAW,WACzB,OAAO,eAAe3B,EAAS,aAAc,CAAE,MAAO,EAAK,CAAC,EAG5DA,EAAQ,WAAa,IAGtB,SAAU8B,EAAIC,EAAG,CAAE,OAAO/B,EAAQ8B,GAAMD,EAAWA,EAASC,EAAIC,CAAC,EAAIA,CAAG,CACnF,CACJ,GACC,SAAUC,EAAU,CACjB,IAAIC,EAAgB,OAAO,gBACtB,CAAE,UAAW,CAAC,CAAE,YAAa,OAAS,SAAUC,EAAGC,EAAG,CAAED,EAAE,UAAYC,CAAG,GAC1E,SAAUD,EAAGC,EAAG,CAAE,QAASC,KAAKD,EAAO,OAAO,UAAU,eAAe,KAAKA,EAAGC,CAAC,IAAGF,EAAEE,GAAKD,EAAEC,GAAI,EAEpGlC,GAAY,SAAUgC,EAAGC,EAAG,CACxB,GAAI,OAAOA,GAAM,YAAcA,IAAM,KACjC,MAAM,IAAI,UAAU,uBAAyB,OAAOA,CAAC,EAAI,+BAA+B,EAC5FF,EAAcC,EAAGC,CAAC,EAClB,SAASE,GAAK,CAAE,KAAK,YAAcH,CAAG,CACtCA,EAAE,UAAYC,IAAM,KAAO,OAAO,OAAOA,CAAC,GAAKE,EAAG,UAAYF,EAAE,UAAW,IAAIE,EACnF,EAEAlC,GAAW,OAAO,QAAU,SAAUmC,EAAG,CACrC,QAASC,EAAG,EAAI,EAAGC,EAAI,UAAU,OAAQ,EAAIA,EAAG,IAAK,CACjDD,EAAI,UAAU,GACd,QAASH,KAAKG,EAAO,OAAO,UAAU,eAAe,KAAKA,EAAGH,CAAC,IAAGE,EAAEF,GAAKG,EAAEH,GAC9E,CACA,OAAOE,CACX,EAEAlC,GAAS,SAAUmC,EAAGE,EAAG,CACrB,IAAIH,EAAI,CAAC,EACT,QAASF,KAAKG,EAAO,OAAO,UAAU,eAAe,KAAKA,EAAGH,CAAC,GAAKK,EAAE,QAAQL,CAAC,EAAI,IAC9EE,EAAEF,GAAKG,EAAEH,IACb,GAAIG,GAAK,MAAQ,OAAO,OAAO,uBAA0B,WACrD,QAASG,EAAI,EAAGN,EAAI,OAAO,sBAAsBG,CAAC,EAAGG,EAAIN,EAAE,OAAQM,IAC3DD,EAAE,QAAQL,EAAEM,EAAE,EAAI,GAAK,OAAO,UAAU,qBAAqB,KAAKH,EAAGH,EAAEM,EAAE,IACzEJ,EAAEF,EAAEM,IAAMH,EAAEH,EAAEM,KAE1B,OAAOJ,CACX,EAEAjC,GAAa,SAAUsC,EAAYC,EAAQC,EAAKC,EAAM,CAClD,IAAIC,EAAI,UAAU,OAAQC,EAAID,EAAI,EAAIH,EAASE,IAAS,KAAOA,EAAO,OAAO,yBAAyBF,EAAQC,CAAG,EAAIC,EAAMZ,EAC3H,GAAI,OAAO,SAAY,UAAY,OAAO,QAAQ,UAAa,WAAYc,EAAI,QAAQ,SAASL,EAAYC,EAAQC,EAAKC,CAAI,MACxH,SAASJ,EAAIC,EAAW,OAAS,EAAGD,GAAK,EAAGA,KAASR,EAAIS,EAAWD,MAAIM,GAAKD,EAAI,EAAIb,EAAEc,CAAC,EAAID,EAAI,EAAIb,EAAEU,EAAQC,EAAKG,CAAC,EAAId,EAAEU,EAAQC,CAAG,IAAMG,GAChJ,OAAOD,EAAI,GAAKC,GAAK,OAAO,eAAeJ,EAAQC,EAAKG,CAAC,EAAGA,CAChE,EAEA1C,GAAU,SAAU2C,EAAYC,EAAW,CACvC,OAAO,SAAUN,EAAQC,EAAK,CAAEK,EAAUN,EAAQC,EAAKI,CAAU,CAAG,CACxE,EAEA1C,GAAa,SAAU4C,EAAaC,EAAe,CAC/C,GAAI,OAAO,SAAY,UAAY,OAAO,QAAQ,UAAa,WAAY,OAAO,QAAQ,SAASD,EAAaC,CAAa,CACjI,EAEA5C,GAAY,SAAU6C,EAASC,EAAYC,EAAGC,EAAW,CACrD,SAASC,EAAMC,EAAO,CAAE,OAAOA,aAAiBH,EAAIG,EAAQ,IAAIH,EAAE,SAAUI,EAAS,CAAEA,EAAQD,CAAK,CAAG,CAAC,CAAG,CAC3G,OAAO,IAAKH,IAAMA,EAAI,UAAU,SAAUI,EAASC,EAAQ,CACvD,SAASC,EAAUH,EAAO,CAAE,GAAI,CAAEI,EAAKN,EAAU,KAAKE,CAAK,CAAC,CAAG,OAASjB,EAAP,CAAYmB,EAAOnB,CAAC,CAAG,CAAE,CAC1F,SAASsB,EAASL,EAAO,CAAE,GAAI,CAAEI,EAAKN,EAAU,MAASE,CAAK,CAAC,CAAG,OAASjB,EAAP,CAAYmB,EAAOnB,CAAC,CAAG,CAAE,CAC7F,SAASqB,EAAKE,EAAQ,CAAEA,EAAO,KAAOL,EAAQK,EAAO,KAAK,EAAIP,EAAMO,EAAO,KAAK,EAAE,KAAKH,EAAWE,CAAQ,CAAG,CAC7GD,GAAMN,EAAYA,EAAU,MAAMH,EAASC,GAAc,CAAC,CAAC,GAAG,KAAK,CAAC,CACxE,CAAC,CACL,EAEA7C,GAAc,SAAU4C,EAASY,EAAM,CACnC,IAAIC,EAAI,CAAE,MAAO,EAAG,KAAM,UAAW,CAAE,GAAI5B,EAAE,GAAK,EAAG,MAAMA,EAAE,GAAI,OAAOA,EAAE,EAAI,EAAG,KAAM,CAAC,EAAG,IAAK,CAAC,CAAE,EAAG6B,EAAGC,EAAG9B,EAAG+B,EAC/G,OAAOA,EAAI,CAAE,KAAMC,EAAK,CAAC,EAAG,MAASA,EAAK,CAAC,EAAG,OAAUA,EAAK,CAAC,CAAE,EAAG,OAAO,QAAW,aAAeD,EAAE,OAAO,UAAY,UAAW,CAAE,OAAO,IAAM,GAAIA,EACvJ,SAASC,EAAK9B,EAAG,CAAE,OAAO,SAAUT,EAAG,CAAE,OAAO+B,EAAK,CAACtB,EAAGT,CAAC,CAAC,CAAG,CAAG,CACjE,SAAS+B,EAAKS,EAAI,CACd,GAAIJ,EAAG,MAAM,IAAI,UAAU,iCAAiC,EAC5D,KAAOD,GAAG,GAAI,CACV,GAAIC,EAAI,EAAGC,IAAM9B,EAAIiC,EAAG,GAAK,EAAIH,EAAE,OAAYG,EAAG,GAAKH,EAAE,SAAc9B,EAAI8B,EAAE,SAAc9B,EAAE,KAAK8B,CAAC,EAAG,GAAKA,EAAE,OAAS,EAAE9B,EAAIA,EAAE,KAAK8B,EAAGG,EAAG,EAAE,GAAG,KAAM,OAAOjC,EAE3J,OADI8B,EAAI,EAAG9B,IAAGiC,EAAK,CAACA,EA
,EACDK,EAAUP,CAAM,EAAE,UAAUX,CAAU,EAE1C,CAAC,EAAED,CAAa,CAClB,CACF,CAEA,SAASe,GACPP,EACAY,EAA+C,SAC/CC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,EAAA,GAAA,UAAAA,GAEA,GAAIF,IAAO,GAAM,CACfZ,EAAK,EACL,OAGF,GAAIY,IAAO,GAIX,KAAMG,EAAe,IAAIP,GAAe,CACtC,KAAM,UAAA,CACJO,EAAa,YAAW,EACxBf,EAAK,CACP,EACD,EAED,OAAOY,EAAE,MAAA,OAAAI,EAAA,CAAA,EAAAC,EAAIJ,CAAI,CAAA,CAAA,EAAE,UAAUE,CAAY,EAC3C,CCjHM,SAAUG,EACdC,EACAC,EACAC,EAAyB,WAErBC,EACAC,EAAW,GACf,OAAIJ,GAAsB,OAAOA,GAAuB,UACnDK,EAA8EL,EAAkB,WAAhGG,EAAUE,IAAA,OAAG,IAAQA,EAAEC,EAAuDN,EAAkB,WAAzEC,EAAUK,IAAA,OAAG,IAAQA,EAAEC,EAAgCP,EAAkB,SAAlDI,EAAQG,IAAA,OAAG,GAAKA,EAAEL,EAAcF,EAAkB,WAEnGG,EAAcH,GAAkB,KAAlBA,EAAsB,IAE/BQ,GAAS,CACd,UAAW,UAAA,CAAM,OAAA,IAAIC,GAAcN,EAAYF,EAAYC,CAAS,CAAnD,EACjB,aAAc,GACd,gBAAiB,GACjB,oBAAqBE,EACtB,CACH,CCxIM,SAAUM,GAAQC,EAAa,CACnC,OAAOC,EAAO,SAACC,EAAGC,EAAK,CAAK,OAAAH,GAASG,CAAT,CAAc,CAC5C,CCWM,SAAUC,GAAaC,EAAyB,CACpD,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAChC,IAAIC,EAAS,GAEPC,EAAiBC,EACrBH,EACA,UAAA,CACEE,GAAc,MAAdA,EAAgB,YAAW,EAC3BD,EAAS,EACX,EACAG,EAAI,EAGNC,EAAUR,CAAQ,EAAE,UAAUK,CAAc,EAE5CH,EAAO,UAAUI,EAAyBH,EAAY,SAACM,EAAK,CAAK,OAAAL,GAAUD,EAAW,KAAKM,CAAK,CAA/B,CAAgC,CAAC,CACpG,CAAC,CACH,CCRM,SAAUC,GAAS,SAAOC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GAC9B,IAAMC,EAAYC,GAAaH,CAAM,EACrC,OAAOI,EAAQ,SAACC,EAAQC,EAAU,EAI/BJ,EAAYK,GAAOP,EAAQK,EAAQH,CAAS,EAAIK,GAAOP,EAAQK,CAAM,GAAG,UAAUC,CAAU,CAC/F,CAAC,CACH,CCmBM,SAAUE,EACdC,EACAC,EAA6G,CAE7G,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAChC,IAAIC,EAAyD,KACzDC,EAAQ,EAERC,EAAa,GAIXC,EAAgB,UAAA,CAAM,OAAAD,GAAc,CAACF,GAAmBD,EAAW,SAAQ,CAArD,EAE5BD,EAAO,UACLM,EACEL,EACA,SAACM,EAAK,CAEJL,GAAe,MAAfA,EAAiB,YAAW,EAC5B,IAAIM,EAAa,EACXC,EAAaN,IAEnBO,EAAUb,EAAQU,EAAOE,CAAU,CAAC,EAAE,UACnCP,EAAkBI,EACjBL,EAIA,SAACU,EAAU,CAAK,OAAAV,EAAW,KAAKH,EAAiBA,EAAeS,EAAOI,EAAYF,EAAYD,GAAY,EAAIG,CAAU,CAAzG,EAChB,UAAA,CAIET,EAAkB,KAClBG,EAAa,CACf,CAAC,CACD,CAEN,EACA,UAAA,CACED,EAAa,GACbC,EAAa,CACf,CAAC,CACF,CAEL,CAAC,CACH,CCvFM,SAAUO,GAAaC,EAA8B,CACzD,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAChCC,EAAUJ,CAAQ,EAAE,UAAUK,EAAyBF,EAAY,UAAA,CAAM,OAAAA,EAAW,SAAQ,CAAnB,EAAuBG,EAAI,CAAC,EACrG,CAACH,EAAW,QAAUD,EAAO,UAAUC,CAAU,CACnD,CAAC,CACH,CCIM,SAAUI,GAAaC,EAAiDC,EAAiB,CAAjB,OAAAA,IAAA,SAAAA,EAAA,IACrEC,EAAQ,SAACC,EAAQC,EAAU,CAChC,IAAIC,EAAQ,EACZF,EAAO,UACLG,EAAyBF,EAAY,SAACG,EAAK,CACzC,IAAMC,EAASR,EAAUO,EAAOF,GAAO,GACtCG,GAAUP,IAAcG,EAAW,KAAKG,CAAK,EAC9C,CAACC,GAAUJ,EAAW,SAAQ,CAChC,CAAC,CAAC,CAEN,CAAC,CACH,CCyCM,SAAUK,EACdC,EACAC,EACAC,EAA8B,CAK9B,IAAMC,EACJC,EAAWJ,CAAc,GAAKC,GAASC,EAElC,CAAE,KAAMF,EAA2E,MAAKC,EAAE,SAAQC,CAAA,EACnGF,EAEN,OAAOG,EACHE,EAAQ,SAACC,EAAQC,EAAU,QACzBC,EAAAL,EAAY,aAAS,MAAAK,IAAA,QAAAA,EAAA,KAArBL,CAAW,EACX,IAAIM,EAAU,GACdH,EAAO,UACLI,EACEH,EACA,SAACI,EAAK,QACJH,EAAAL,EAAY,QAAI,MAAAK,IAAA,QAAAA,EAAA,KAAhBL,EAAmBQ,CAAK,EACxBJ,EAAW,KAAKI,CAAK,CACvB,EACA,UAAA,OACEF,EAAU,IACVD,EAAAL,EAAY,YAAQ,MAAAK,IAAA,QAAAA,EAAA,KAApBL,CAAW,EACXI,EAAW,SAAQ,CACrB,EACA,SAACK,EAAG,OACFH,EAAU,IACVD,EAAAL,EAAY,SAAK,MAAAK,IAAA,QAAAA,EAAA,KAAjBL,EAAoBS,CAAG,EACvBL,EAAW,MAAMK,CAAG,CACtB,EACA,UAAA,SACMH,KACFD,EAAAL,EAAY,eAAW,MAAAK,IAAA,QAAAA,EAAA,KAAvBL,CAAW,IAEbU,EAAAV,EAAY,YAAQ,MAAAU,IAAA,QAAAA,EAAA,KAApBV,CAAW,CACb,CAAC,CACF,CAEL,CAAC,EAIDW,EACN,CC9IO,IAAMC,GAAwC,CACnD,QAAS,GACT,SAAU,IAiDN,SAAUC,GACdC,EACAC,EAA8C,CAA9C,OAAAA,IAAA,SAAAA,EAAAH,IAEOI,EAAQ,SAACC,EAAQC,EAAU,CACxB,IAAAC,EAAsBJ,EAAM,QAAnBK,EAAaL,EAAM,SAChCM,EAAW,GACXC,EAAsB,KACtBC,EAAiC,KACjCC,EAAa,GAEXC,EAAgB,UAAA,CACpBF,GAAS,MAATA,EAAW,YAAW,EACtBA,EAAY,KACRH,IACFM,EAAI,EACJF,GAAcN,EAAW,SAAQ,EAErC,EAEMS,EAAoB,UAAA,CACxBJ,EAAY,KACZC,GAAcN,EAAW,SAA
Q,CACnC,EAEMU,EAAgB,SAACC,EAAQ,CAC7B,OAACN,EAAYO,EAAUhB,EAAiBe,CAAK,CAAC,EAAE,UAAUE,EAAyBb,EAAYO,EAAeE,CAAiB,CAAC,CAAhI,EAEID,EAAO,UAAA,CACX,GAAIL,EAAU,CAIZA,EAAW,GACX,IAAMQ,EAAQP,EACdA,EAAY,KAEZJ,EAAW,KAAKW,CAAK,EACrB,CAACL,GAAcI,EAAcC,CAAK,EAEtC,EAEAZ,EAAO,UACLc,EACEb,EAMA,SAACW,EAAK,CACJR,EAAW,GACXC,EAAYO,EACZ,EAAEN,GAAa,CAACA,EAAU,UAAYJ,EAAUO,EAAI,EAAKE,EAAcC,CAAK,EAC9E,EACA,UAAA,CACEL,EAAa,GACb,EAAEJ,GAAYC,GAAYE,GAAa,CAACA,EAAU,SAAWL,EAAW,SAAQ,CAClF,CAAC,CACF,CAEL,CAAC,CACH,CCvEM,SAAUc,GACdC,EACAC,EACAC,EAA8B,CAD9BD,IAAA,SAAAA,EAAAE,IACAD,IAAA,SAAAA,EAAAE,IAEA,IAAMC,EAAYC,GAAMN,EAAUC,CAAS,EAC3C,OAAOM,GAAS,UAAA,CAAM,OAAAF,CAAA,EAAWH,CAAM,CACzC,CCJM,SAAUM,IAAc,SAAOC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACnC,IAAMC,EAAUC,GAAkBH,CAAM,EAExC,OAAOI,EAAQ,SAACC,EAAQC,EAAU,CAehC,QAdMC,EAAMP,EAAO,OACbQ,EAAc,IAAI,MAAMD,CAAG,EAI7BE,EAAWT,EAAO,IAAI,UAAA,CAAM,MAAA,EAAA,CAAK,EAGjCU,EAAQ,cAMHC,EAAC,CACRC,EAAUZ,EAAOW,EAAE,EAAE,UACnBE,EACEP,EACA,SAACQ,EAAK,CACJN,EAAYG,GAAKG,EACb,CAACJ,GAAS,CAACD,EAASE,KAEtBF,EAASE,GAAK,IAKbD,EAAQD,EAAS,MAAMM,EAAQ,KAAON,EAAW,MAEtD,EAGAO,EAAI,CACL,GAnBIL,EAAI,EAAGA,EAAIJ,EAAKI,MAAhBA,CAAC,EAwBVN,EAAO,UACLQ,EAAyBP,EAAY,SAACQ,EAAK,CACzC,GAAIJ,EAAO,CAET,IAAMO,EAAMC,EAAA,CAAIJ,CAAK,EAAAK,EAAKX,CAAW,CAAA,EACrCF,EAAW,KAAKJ,EAAUA,EAAO,MAAA,OAAAgB,EAAA,CAAA,EAAAC,EAAIF,CAAM,CAAA,CAAA,EAAIA,CAAM,EAEzD,CAAC,CAAC,CAEN,CAAC,CACH,CCxFM,SAAUG,IAAG,SAAOC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACxB,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAChCL,GAAS,MAAA,OAAAM,EAAA,CAACF,CAA8B,EAAAG,EAAMN,CAAuC,CAAA,CAAA,EAAE,UAAUI,CAAU,CAC7G,CAAC,CACH,CCCM,SAAUG,IAAO,SAAkCC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACvD,OAAOC,GAAG,MAAA,OAAAC,EAAA,CAAA,EAAAC,EAAIJ,CAAW,CAAA,CAAA,CAC3B,CCYO,SAASK,IAAmC,CACjD,IAAMC,EAAY,IAAIC,GAAwB,CAAC,EAC/C,OAAAC,EAAU,SAAU,mBAAoB,CAAE,KAAM,EAAK,CAAC,EACnD,UAAU,IAAMF,EAAU,KAAK,QAAQ,CAAC,EAGpCA,CACT,CCHO,SAASG,EACdC,EAAkBC,EAAmB,SAChC,CACL,OAAO,MAAM,KAAKA,EAAK,iBAAoBD,CAAQ,CAAC,CACtD,CAuBO,SAASE,EACdF,EAAkBC,EAAmB,SAClC,CACH,IAAME,EAAKC,GAAsBJ,EAAUC,CAAI,EAC/C,GAAI,OAAOE,GAAO,YAChB,MAAM,IAAI,eACR,8BAA8BH,kBAChC,EAGF,OAAOG,CACT,CAsBO,SAASC,GACdJ,EAAkBC,EAAmB,SACtB,CACf,OAAOA,EAAK,cAAiBD,CAAQ,GAAK,MAC5C,CAOO,SAASK,IAA4C,CAC1D,OAAO,SAAS,yBAAyB,aACrC,SAAS,eAAiB,MAEhC,CClEO,SAASC,GACdC,EACqB,CACrB,OAAOC,EACLC,EAAU,SAAS,KAAM,SAAS,EAClCA,EAAU,SAAS,KAAM,UAAU,CACrC,EACG,KACCC,GAAa,CAAC,EACdC,EAAI,IAAM,CACR,IAAMC,EAASC,GAAiB,EAChC,OAAO,OAAOD,GAAW,YACrBL,EAAG,SAASK,CAAM,EAClB,EACN,CAAC,EACDE,EAAUP,IAAOM,GAAiB,CAAC,EACnCE,EAAqB,CACvB,CACJ,CChBO,SAASC,GACdC,EACe,CACf,MAAO,CACL,EAAGA,EAAG,WACN,EAAGA,EAAG,SACR,CACF,CAWO,SAASC,GACdD,EAC2B,CAC3B,OAAOE,EACLC,EAAU,OAAQ,MAAM,EACxBA,EAAU,OAAQ,QAAQ,CAC5B,EACG,KACCC,GAAU,EAAGC,EAAuB,EACpCC,EAAI,IAAMP,GAAiBC,CAAE,CAAC,EAC9BO,EAAUR,GAAiBC,CAAE,CAAC,CAChC,CACJ,CCxCO,SAASQ,GACdC,EACe,CACf,MAAO,CACL,EAAGA,EAAG,WACN,EAAGA,EAAG,SACR,CACF,CAWO,SAASC,GACdD,EAC2B,CAC3B,OAAOE,EACLC,EAAUH,EAAI,QAAQ,EACtBG,EAAU,OAAQ,QAAQ,CAC5B,EACG,KACCC,GAAU,EAAGC,EAAuB,EACpCC,EAAI,IAAMP,GAAwBC,CAAE,CAAC,EACrCO,EAAUR,GAAwBC,CAAE,CAAC,CACvC,CACJ,CCpEA,IAAIQ,GAAW,UAAY,CACvB,GAAI,OAAO,KAAQ,YACf,OAAO,IASX,SAASC,EAASC,EAAKC,EAAK,CACxB,IAAIC,EAAS,GACb,OAAAF,EAAI,KAAK,SAAUG,EAAOC,EAAO,CAC7B,OAAID,EAAM,KAAOF,GACbC,EAASE,EACF,IAEJ,EACX,CAAC,EACMF,CACX,CACA,OAAsB,UAAY,CAC9B,SAASG,GAAU,CACf,KAAK,YAAc,CAAC,CACxB,CACA,cAAO,eAAeA,EAAQ,UAAW,OAAQ,CAI7C,IAAK,UAAY,CACb,OAAO,KAAK,YAAY,MAC5B,EACA,WAAY,GACZ,aAAc,EAClB,CAAC,EAKDA,EAAQ,UAAU,IAAM,SAAUJ,EAAK,CACnC,IAAIG,EAAQL,EAAS,KAAK,YAAaE,CAAG,EACtCE,EAAQ,KAAK,YAAYC,GAC7B,OAAOD
,GAASA,EAAM,EAC1B,EAMAE,EAAQ,UAAU,IAAM,SAAUJ,EAAKK,EAAO,CAC1C,IAAIF,EAAQL,EAAS,KAAK,YAAaE,CAAG,EACtC,CAACG,EACD,KAAK,YAAYA,GAAO,GAAKE,EAG7B,KAAK,YAAY,KAAK,CAACL,EAAKK,CAAK,CAAC,CAE1C,EAKAD,EAAQ,UAAU,OAAS,SAAUJ,EAAK,CACtC,IAAIM,EAAU,KAAK,YACfH,EAAQL,EAASQ,EAASN,CAAG,EAC7B,CAACG,GACDG,EAAQ,OAAOH,EAAO,CAAC,CAE/B,EAKAC,EAAQ,UAAU,IAAM,SAAUJ,EAAK,CACnC,MAAO,CAAC,CAAC,CAACF,EAAS,KAAK,YAAaE,CAAG,CAC5C,EAIAI,EAAQ,UAAU,MAAQ,UAAY,CAClC,KAAK,YAAY,OAAO,CAAC,CAC7B,EAMAA,EAAQ,UAAU,QAAU,SAAUG,EAAUC,EAAK,CAC7CA,IAAQ,SAAUA,EAAM,MAC5B,QAASC,EAAK,EAAGC,EAAK,KAAK,YAAaD,EAAKC,EAAG,OAAQD,IAAM,CAC1D,IAAIP,EAAQQ,EAAGD,GACfF,EAAS,KAAKC,EAAKN,EAAM,GAAIA,EAAM,EAAE,CACzC,CACJ,EACOE,CACX,EAAE,CACN,EAAG,EAKCO,GAAY,OAAO,QAAW,aAAe,OAAO,UAAa,aAAe,OAAO,WAAa,SAGpGC,GAAY,UAAY,CACxB,OAAI,OAAO,QAAW,aAAe,OAAO,OAAS,KAC1C,OAEP,OAAO,MAAS,aAAe,KAAK,OAAS,KACtC,KAEP,OAAO,QAAW,aAAe,OAAO,OAAS,KAC1C,OAGJ,SAAS,aAAa,EAAE,CACnC,EAAG,EAQCC,GAA2B,UAAY,CACvC,OAAI,OAAO,uBAA0B,WAI1B,sBAAsB,KAAKD,EAAQ,EAEvC,SAAUL,EAAU,CAAE,OAAO,WAAW,UAAY,CAAE,OAAOA,EAAS,KAAK,IAAI,CAAC,CAAG,EAAG,IAAO,EAAE,CAAG,CAC7G,EAAG,EAGCO,GAAkB,EAStB,SAASC,GAAUR,EAAUS,EAAO,CAChC,IAAIC,EAAc,GAAOC,EAAe,GAAOC,EAAe,EAO9D,SAASC,GAAiB,CAClBH,IACAA,EAAc,GACdV,EAAS,GAETW,GACAG,EAAM,CAEd,CAQA,SAASC,GAAkB,CACvBT,GAAwBO,CAAc,CAC1C,CAMA,SAASC,GAAQ,CACb,IAAIE,EAAY,KAAK,IAAI,EACzB,GAAIN,EAAa,CAEb,GAAIM,EAAYJ,EAAeL,GAC3B,OAMJI,EAAe,EACnB,MAEID,EAAc,GACdC,EAAe,GACf,WAAWI,EAAiBN,CAAK,EAErCG,EAAeI,CACnB,CACA,OAAOF,CACX,CAGA,IAAIG,GAAgB,GAGhBC,GAAiB,CAAC,MAAO,QAAS,SAAU,OAAQ,QAAS,SAAU,OAAQ,QAAQ,EAEvFC,GAA4B,OAAO,kBAAqB,YAIxDC,GAA0C,UAAY,CAMtD,SAASA,GAA2B,CAMhC,KAAK,WAAa,GAMlB,KAAK,qBAAuB,GAM5B,KAAK,mBAAqB,KAM1B,KAAK,WAAa,CAAC,EACnB,KAAK,iBAAmB,KAAK,iBAAiB,KAAK,IAAI,EACvD,KAAK,QAAUZ,GAAS,KAAK,QAAQ,KAAK,IAAI,EAAGS,EAAa,CAClE,CAOA,OAAAG,EAAyB,UAAU,YAAc,SAAUC,EAAU,CAC5D,CAAC,KAAK,WAAW,QAAQA,CAAQ,GAClC,KAAK,WAAW,KAAKA,CAAQ,EAG5B,KAAK,YACN,KAAK,SAAS,CAEtB,EAOAD,EAAyB,UAAU,eAAiB,SAAUC,EAAU,CACpE,IAAIC,EAAY,KAAK,WACjB1B,EAAQ0B,EAAU,QAAQD,CAAQ,EAElC,CAACzB,GACD0B,EAAU,OAAO1B,EAAO,CAAC,EAGzB,CAAC0B,EAAU,QAAU,KAAK,YAC1B,KAAK,YAAY,CAEzB,EAOAF,EAAyB,UAAU,QAAU,UAAY,CACrD,IAAIG,EAAkB,KAAK,iBAAiB,EAGxCA,GACA,KAAK,QAAQ,CAErB,EASAH,EAAyB,UAAU,iBAAmB,UAAY,CAE9D,IAAII,EAAkB,KAAK,WAAW,OAAO,SAAUH,EAAU,CAC7D,OAAOA,EAAS,aAAa,EAAGA,EAAS,UAAU,CACvD,CAAC,EAMD,OAAAG,EAAgB,QAAQ,SAAUH,EAAU,CAAE,OAAOA,EAAS,gBAAgB,CAAG,CAAC,EAC3EG,EAAgB,OAAS,CACpC,EAOAJ,EAAyB,UAAU,SAAW,UAAY,CAGlD,CAAChB,IAAa,KAAK,aAMvB,SAAS,iBAAiB,gBAAiB,KAAK,gBAAgB,EAChE,OAAO,iBAAiB,SAAU,KAAK,OAAO,EAC1Ce,IACA,KAAK,mBAAqB,IAAI,iBAAiB,KAAK,OAAO,EAC3D,KAAK,mBAAmB,QAAQ,SAAU,CACtC,WAAY,GACZ,UAAW,GACX,cAAe,GACf,QAAS,EACb,CAAC,IAGD,SAAS,iBAAiB,qBAAsB,KAAK,OAAO,EAC5D,KAAK,qBAAuB,IAEhC,KAAK,WAAa,GACtB,EAOAC,EAAyB,UAAU,YAAc,UAAY,CAGrD,CAAChB,IAAa,CAAC,KAAK,aAGxB,SAAS,oBAAoB,gBAAiB,KAAK,gBAAgB,EACnE,OAAO,oBAAoB,SAAU,KAAK,OAAO,EAC7C,KAAK,oBACL,KAAK,mBAAmB,WAAW,EAEnC,KAAK,sBACL,SAAS,oBAAoB,qBAAsB,KAAK,OAAO,EAEnE,KAAK,mBAAqB,KAC1B,KAAK,qBAAuB,GAC5B,KAAK,WAAa,GACtB,EAQAgB,EAAyB,UAAU,iBAAmB,SAAUjB,EAAI,CAChE,IAAIsB,EAAKtB,EAAG,aAAcuB,EAAeD,IAAO,OAAS,GAAKA,EAE1DE,EAAmBT,GAAe,KAAK,SAAUzB,EAAK,CACtD,MAAO,CAAC,CAAC,CAACiC,EAAa,QAAQjC,CAAG,CACtC,CAAC,EACGkC,GACA,KAAK,QAAQ,CAErB,EAMAP,EAAyB,YAAc,UAAY,CAC/C,OAAK,KAAK,YACN,KAAK,UAAY,IAAIA,GAElB,KAAK,SAChB,EAMAA,EAAyB,UAAY,KAC9BA,CACX,EAAE,EASEQ,GAAsB,SAAUC,EAAQC,EAAO,CAC/C,QAAS5B,EAAK,EAAGC,EAAK,OAAO,KAAK2B,CAAK,EAAG5B,EAAKC,EAAG,OAAQD,IAAM,CAC5D,IAAIT,EAAMU,EAAGD,GACb,OAAO,eAAe2B,EAAQpC,EAAK,CAC/B,MAAOqC,EAAMrC,GACb,WAAY,GACZ,SAAU,GACV,aAAc,EAClB,CAAC,CACL,CACA,OAAOoC,CACX,EAQIE,GAAe,SAAUF,EAAQ,CAIjC,IAAIG,EAAcH,GAAUA,EAAO,eAAiBA,EA
AO,cAAc,YAGzE,OAAOG,GAAe3B,EAC1B,EAGI4B,GAAYC,GAAe,EAAG,EAAG,EAAG,CAAC,EAOzC,SAASC,GAAQrC,EAAO,CACpB,OAAO,WAAWA,CAAK,GAAK,CAChC,CAQA,SAASsC,GAAeC,EAAQ,CAE5B,QADIC,EAAY,CAAC,EACRpC,EAAK,EAAGA,EAAK,UAAU,OAAQA,IACpCoC,EAAUpC,EAAK,GAAK,UAAUA,GAElC,OAAOoC,EAAU,OAAO,SAAUC,EAAMC,EAAU,CAC9C,IAAI1C,EAAQuC,EAAO,UAAYG,EAAW,UAC1C,OAAOD,EAAOJ,GAAQrC,CAAK,CAC/B,EAAG,CAAC,CACR,CAOA,SAAS2C,GAAYJ,EAAQ,CAGzB,QAFIC,EAAY,CAAC,MAAO,QAAS,SAAU,MAAM,EAC7CI,EAAW,CAAC,EACPxC,EAAK,EAAGyC,EAAcL,EAAWpC,EAAKyC,EAAY,OAAQzC,IAAM,CACrE,IAAIsC,EAAWG,EAAYzC,GACvBJ,EAAQuC,EAAO,WAAaG,GAChCE,EAASF,GAAYL,GAAQrC,CAAK,CACtC,CACA,OAAO4C,CACX,CAQA,SAASE,GAAkBf,EAAQ,CAC/B,IAAIgB,EAAOhB,EAAO,QAAQ,EAC1B,OAAOK,GAAe,EAAG,EAAGW,EAAK,MAAOA,EAAK,MAAM,CACvD,CAOA,SAASC,GAA0BjB,EAAQ,CAGvC,IAAIkB,EAAclB,EAAO,YAAamB,EAAenB,EAAO,aAS5D,GAAI,CAACkB,GAAe,CAACC,EACjB,OAAOf,GAEX,IAAII,EAASN,GAAYF,CAAM,EAAE,iBAAiBA,CAAM,EACpDa,EAAWD,GAAYJ,CAAM,EAC7BY,EAAWP,EAAS,KAAOA,EAAS,MACpCQ,EAAUR,EAAS,IAAMA,EAAS,OAKlCS,EAAQhB,GAAQE,EAAO,KAAK,EAAGe,EAASjB,GAAQE,EAAO,MAAM,EAqBjE,GAlBIA,EAAO,YAAc,eAOjB,KAAK,MAAMc,EAAQF,CAAQ,IAAMF,IACjCI,GAASf,GAAeC,EAAQ,OAAQ,OAAO,EAAIY,GAEnD,KAAK,MAAMG,EAASF,CAAO,IAAMF,IACjCI,GAAUhB,GAAeC,EAAQ,MAAO,QAAQ,EAAIa,IAOxD,CAACG,GAAkBxB,CAAM,EAAG,CAK5B,IAAIyB,EAAgB,KAAK,MAAMH,EAAQF,CAAQ,EAAIF,EAC/CQ,EAAiB,KAAK,MAAMH,EAASF,CAAO,EAAIF,EAMhD,KAAK,IAAIM,CAAa,IAAM,IAC5BH,GAASG,GAET,KAAK,IAAIC,CAAc,IAAM,IAC7BH,GAAUG,EAElB,CACA,OAAOrB,GAAeQ,EAAS,KAAMA,EAAS,IAAKS,EAAOC,CAAM,CACpE,CAOA,IAAII,GAAwB,UAAY,CAGpC,OAAI,OAAO,oBAAuB,YACvB,SAAU3B,EAAQ,CAAE,OAAOA,aAAkBE,GAAYF,CAAM,EAAE,kBAAoB,EAKzF,SAAUA,EAAQ,CAAE,OAAQA,aAAkBE,GAAYF,CAAM,EAAE,YACrE,OAAOA,EAAO,SAAY,UAAa,CAC/C,EAAG,EAOH,SAASwB,GAAkBxB,EAAQ,CAC/B,OAAOA,IAAWE,GAAYF,CAAM,EAAE,SAAS,eACnD,CAOA,SAAS4B,GAAe5B,EAAQ,CAC5B,OAAKzB,GAGDoD,GAAqB3B,CAAM,EACpBe,GAAkBf,CAAM,EAE5BiB,GAA0BjB,CAAM,EAL5BI,EAMf,CAQA,SAASyB,GAAmBvD,EAAI,CAC5B,IAAIwD,EAAIxD,EAAG,EAAGyD,EAAIzD,EAAG,EAAGgD,EAAQhD,EAAG,MAAOiD,EAASjD,EAAG,OAElD0D,EAAS,OAAO,iBAAoB,YAAc,gBAAkB,OACpEC,EAAO,OAAO,OAAOD,EAAO,SAAS,EAEzC,OAAAjC,GAAmBkC,EAAM,CACrB,EAAGH,EAAG,EAAGC,EAAG,MAAOT,EAAO,OAAQC,EAClC,IAAKQ,EACL,MAAOD,EAAIR,EACX,OAAQC,EAASQ,EACjB,KAAMD,CACV,CAAC,EACMG,CACX,CAWA,SAAS5B,GAAeyB,EAAGC,EAAGT,EAAOC,EAAQ,CACzC,MAAO,CAAE,EAAGO,EAAG,EAAGC,EAAG,MAAOT,EAAO,OAAQC,CAAO,CACtD,CAMA,IAAIW,GAAmC,UAAY,CAM/C,SAASA,EAAkBlC,EAAQ,CAM/B,KAAK,eAAiB,EAMtB,KAAK,gBAAkB,EAMvB,KAAK,aAAeK,GAAe,EAAG,EAAG,EAAG,CAAC,EAC7C,KAAK,OAASL,CAClB,CAOA,OAAAkC,EAAkB,UAAU,SAAW,UAAY,CAC/C,IAAID,EAAOL,GAAe,KAAK,MAAM,EACrC,YAAK,aAAeK,EACZA,EAAK,QAAU,KAAK,gBACxBA,EAAK,SAAW,KAAK,eAC7B,EAOAC,EAAkB,UAAU,cAAgB,UAAY,CACpD,IAAID,EAAO,KAAK,aAChB,YAAK,eAAiBA,EAAK,MAC3B,KAAK,gBAAkBA,EAAK,OACrBA,CACX,EACOC,CACX,EAAE,EAEEC,GAAqC,UAAY,CAOjD,SAASA,EAAoBnC,EAAQoC,EAAU,CAC3C,IAAIC,EAAcR,GAAmBO,CAAQ,EAO7CrC,GAAmB,KAAM,CAAE,OAAQC,EAAQ,YAAaqC,CAAY,CAAC,CACzE,CACA,OAAOF,CACX,EAAE,EAEEG,GAAmC,UAAY,CAW/C,SAASA,EAAkBnE,EAAUoE,EAAYC,EAAa,CAc1D,GAPA,KAAK,oBAAsB,CAAC,EAM5B,KAAK,cAAgB,IAAI/E,GACrB,OAAOU,GAAa,WACpB,MAAM,IAAI,UAAU,yDAAyD,EAEjF,KAAK,UAAYA,EACjB,KAAK,YAAcoE,EACnB,KAAK,aAAeC,CACxB,CAOA,OAAAF,EAAkB,UAAU,QAAU,SAAUtC,EAAQ,CACpD,GAAI,CAAC,UAAU,OACX,MAAM,IAAI,UAAU,0CAA0C,EAGlE,GAAI,SAAO,SAAY,aAAe,EAAE,mBAAmB,SAG3D,IAAI,EAAEA,aAAkBE,GAAYF,CAAM,EAAE,SACxC,MAAM,IAAI,UAAU,uCAAuC,EAE/D,IAAIyC,EAAe,KAAK,cAEpBA,EAAa,IAAIzC,CAAM,IAG3ByC,EAAa,IAAIzC,EAAQ,IAAIkC,GAAkBlC,CAAM,CAAC,EACtD,KAAK,YAAY,YAAY,IAAI,EAEjC,KAAK,YAAY,QAAQ,GAC7B,EAOAsC,EAAkB,UAAU,UAAY,SAAUtC,EAAQ,CACtD,GAAI,CAAC,UAAU,OACX,MAAM,IAAI,UAAU,0CAA0C,EAGlE,GAAI,SAAO,SAAY,aAAe,EAAE,mBAAmB,SAG3D,IAAI,EAAEA,aAAkBE,GAAYF,CAAM,EAAE,SACxC,MAAM,IAAI,UAAU,uCAAu
C,EAE/D,IAAIyC,EAAe,KAAK,cAEpB,CAACA,EAAa,IAAIzC,CAAM,IAG5ByC,EAAa,OAAOzC,CAAM,EACrByC,EAAa,MACd,KAAK,YAAY,eAAe,IAAI,GAE5C,EAMAH,EAAkB,UAAU,WAAa,UAAY,CACjD,KAAK,YAAY,EACjB,KAAK,cAAc,MAAM,EACzB,KAAK,YAAY,eAAe,IAAI,CACxC,EAOAA,EAAkB,UAAU,aAAe,UAAY,CACnD,IAAII,EAAQ,KACZ,KAAK,YAAY,EACjB,KAAK,cAAc,QAAQ,SAAUC,EAAa,CAC1CA,EAAY,SAAS,GACrBD,EAAM,oBAAoB,KAAKC,CAAW,CAElD,CAAC,CACL,EAOAL,EAAkB,UAAU,gBAAkB,UAAY,CAEtD,GAAI,EAAC,KAAK,UAAU,EAGpB,KAAIlE,EAAM,KAAK,aAEXF,EAAU,KAAK,oBAAoB,IAAI,SAAUyE,EAAa,CAC9D,OAAO,IAAIR,GAAoBQ,EAAY,OAAQA,EAAY,cAAc,CAAC,CAClF,CAAC,EACD,KAAK,UAAU,KAAKvE,EAAKF,EAASE,CAAG,EACrC,KAAK,YAAY,EACrB,EAMAkE,EAAkB,UAAU,YAAc,UAAY,CAClD,KAAK,oBAAoB,OAAO,CAAC,CACrC,EAMAA,EAAkB,UAAU,UAAY,UAAY,CAChD,OAAO,KAAK,oBAAoB,OAAS,CAC7C,EACOA,CACX,EAAE,EAKE7C,GAAY,OAAO,SAAY,YAAc,IAAI,QAAY,IAAIhC,GAKjEmF,GAAgC,UAAY,CAO5C,SAASA,EAAezE,EAAU,CAC9B,GAAI,EAAE,gBAAgByE,GAClB,MAAM,IAAI,UAAU,oCAAoC,EAE5D,GAAI,CAAC,UAAU,OACX,MAAM,IAAI,UAAU,0CAA0C,EAElE,IAAIL,EAAahD,GAAyB,YAAY,EAClDC,EAAW,IAAI8C,GAAkBnE,EAAUoE,EAAY,IAAI,EAC/D9C,GAAU,IAAI,KAAMD,CAAQ,CAChC,CACA,OAAOoD,CACX,EAAE,EAEF,CACI,UACA,YACA,YACJ,EAAE,QAAQ,SAAUC,EAAQ,CACxBD,GAAe,UAAUC,GAAU,UAAY,CAC3C,IAAIvE,EACJ,OAAQA,EAAKmB,GAAU,IAAI,IAAI,GAAGoD,GAAQ,MAAMvE,EAAI,SAAS,CACjE,CACJ,CAAC,EAED,IAAIP,GAAS,UAAY,CAErB,OAAI,OAAOS,GAAS,gBAAmB,YAC5BA,GAAS,eAEboE,EACX,EAAG,EAEIE,GAAQ/E,GCr2Bf,IAAMgF,GAAS,IAAIC,EAYbC,GAAYC,EAAM,IAAMC,EAC5B,IAAIC,GAAeC,GAAW,CAC5B,QAAWC,KAASD,EAClBN,GAAO,KAAKO,CAAK,CACrB,CAAC,CACH,CAAC,EACE,KACCC,EAAUC,GAAYC,EAAMC,GAAOP,EAAGK,CAAQ,CAAC,EAC5C,KACCG,EAAS,IAAMH,EAAS,WAAW,CAAC,CACtC,CACF,EACAI,EAAY,CAAC,CACf,EAaK,SAASC,GACdC,EACa,CACb,MAAO,CACL,MAAQA,EAAG,YACX,OAAQA,EAAG,YACb,CACF,CAuBO,SAASC,GACdD,EACyB,CACzB,OAAOb,GACJ,KACCe,EAAIR,GAAYA,EAAS,QAAQM,CAAE,CAAC,EACpCP,EAAUC,GAAYT,GACnB,KACCkB,EAAO,CAAC,CAAE,OAAAC,CAAO,IAAMA,IAAWJ,CAAE,EACpCH,EAAS,IAAMH,EAAS,UAAUM,CAAE,CAAC,EACrCK,EAAI,IAAMN,GAAeC,CAAE,CAAC,CAC9B,CACF,EACAM,EAAUP,GAAeC,CAAE,CAAC,CAC9B,CACJ,CC1GO,SAASO,GACdC,EACa,CACb,MAAO,CACL,MAAQA,EAAG,YACX,OAAQA,EAAG,YACb,CACF,CASO,SAASC,GACdD,EACyB,CACzB,IAAIE,EAASF,EAAG,cAChB,KAAOE,IAEHF,EAAG,aAAeE,EAAO,aACzBF,EAAG,cAAgBE,EAAO,eAE1BA,GAAUF,EAAKE,GAAQ,cAK3B,OAAOA,EAASF,EAAK,MACvB,CCfA,IAAMG,GAAS,IAAIC,EAUbC,GAAYC,EAAM,IAAMC,EAC5B,IAAI,qBAAqBC,GAAW,CAClC,QAAWC,KAASD,EAClBL,GAAO,KAAKM,CAAK,CACrB,EAAG,CACD,UAAW,CACb,CAAC,CACH,CAAC,EACE,KACCC,EAAUC,GAAYC,EAAMC,GAAON,EAAGI,CAAQ,CAAC,EAC5C,KACCG,EAAS,IAAMH,EAAS,WAAW,CAAC,CACtC,CACF,EACAI,EAAY,CAAC,CACf,EAaK,SAASC,GACdC,EACqB,CACrB,OAAOZ,GACJ,KACCa,EAAIP,GAAYA,EAAS,QAAQM,CAAE,CAAC,EACpCP,EAAUC,GAAYR,GACnB,KACCgB,EAAO,CAAC,CAAE,OAAAC,CAAO,IAAMA,IAAWH,CAAE,EACpCH,EAAS,IAAMH,EAAS,UAAUM,CAAE,CAAC,EACrCI,EAAI,CAAC,CAAE,eAAAC,CAAe,IAAMA,CAAc,CAC5C,CACF,CACF,CACJ,CAaO,SAASC,GACdN,EAAiBO,EAAY,GACR,CACrB,OAAOC,GAA0BR,CAAE,EAChC,KACCI,EAAI,CAAC,CAAE,EAAAK,CAAE,IAAM,CACb,IAAMC,EAAUC,GAAeX,CAAE,EAC3BY,EAAUC,GAAsBb,CAAE,EACxC,OAAOS,GACLG,EAAQ,OAASF,EAAQ,OAASH,CAEtC,CAAC,EACDO,EAAqB,CACvB,CACJ,CCjFA,IAAMC,GAA4C,CAChD,OAAQC,EAAW,yBAAyB,EAC5C,OAAQA,EAAW,yBAAyB,CAC9C,EAaO,SAASC,GAAUC,EAAuB,CAC/C,OAAOH,GAAQG,GAAM,OACvB,CAaO,SAASC,GAAUD,EAAcE,EAAsB,CACxDL,GAAQG,GAAM,UAAYE,GAC5BL,GAAQG,GAAM,MAAM,CACxB,CAWO,SAASG,GAAYH,EAAmC,CAC7D,IAAMI,EAAKP,GAAQG,GACnB,OAAOK,EAAUD,EAAI,QAAQ,EAC1B,KACCE,EAAI,IAAMF,EAAG,OAAO,EACpBG,EAAUH,EAAG,OAAO,CACtB,CACJ,CClCA,SAASI,GACPC,EAAiBC,EACR,CACT,OAAQD,EAAG,YAAa,CAGtB,KAAK,iBAEH,OAAIA,EAAG,OAAS,QACP,SAAS,KAAKC,CAAI,EAElB,GAGX,KAAK,kBACL,KAAK,oBACH,MAAO,GAGT,QACE,OAAOD,EAAG,iBACd,CACF,CAWO,SAASE,IAAsC,CACpD,OAAOC,EAAyB,OAAQ,SAAS,EAC9C,KACCC,EAAOC,GAAM,EAAEA,EAAG,SAAWA,EAAG,QAAQ,EACxCC,EAAID,
IAAO,CACT,KAAME,GAAU,QAAQ,EAAI,SAAW,SACvC,KAAMF,EAAG,IACT,OAAQ,CACNA,EAAG,eAAe,EAClBA,EAAG,gBAAgB,CACrB,CACF,EAAc,EACdD,EAAO,CAAC,CAAE,KAAAI,EAAM,KAAAP,CAAK,IAAM,CACzB,GAAIO,IAAS,SAAU,CACrB,IAAMC,EAASC,GAAiB,EAChC,GAAI,OAAOD,GAAW,YACpB,MAAO,CAACV,GAAwBU,EAAQR,CAAI,CAChD,CACA,MAAO,EACT,CAAC,EACDU,GAAM,CACR,CACJ,CCpFO,SAASC,IAAmB,CACjC,OAAO,IAAI,IAAI,SAAS,IAAI,CAC9B,CAOO,SAASC,GAAYC,EAAgB,CAC1C,SAAS,KAAOA,EAAI,IACtB,CASO,SAASC,IAA8B,CAC5C,OAAO,IAAIC,CACb,CCLA,SAASC,GAAYC,EAAiBC,EAA8B,CAGlE,GAAI,OAAOA,GAAU,UAAY,OAAOA,GAAU,SAChDD,EAAG,WAAaC,EAAM,SAAS,UAGtBA,aAAiB,KAC1BD,EAAG,YAAYC,CAAK,UAGX,MAAM,QAAQA,CAAK,EAC5B,QAAWC,KAAQD,EACjBF,GAAYC,EAAIE,CAAI,CAE1B,CAyBO,SAASC,EACdC,EAAaC,KAAmCC,EAC7C,CACH,IAAMN,EAAK,SAAS,cAAcI,CAAG,EAGrC,GAAIC,EACF,QAAWE,KAAQ,OAAO,KAAKF,CAAU,EACnC,OAAOA,EAAWE,IAAU,cAI5B,OAAOF,EAAWE,IAAU,UAC9BP,EAAG,aAAaO,EAAMF,EAAWE,EAAK,EAEtCP,EAAG,aAAaO,EAAM,EAAE,GAI9B,QAAWN,KAASK,EAClBP,GAAYC,EAAIC,CAAK,EAGvB,OAAOD,CACT,CChFO,SAASQ,GAASC,EAAeC,EAAmB,CACzD,IAAIC,EAAID,EACR,GAAID,EAAM,OAASE,EAAG,CACpB,KAAOF,EAAME,KAAO,KAAO,EAAEA,EAAI,GAAG,CACpC,MAAO,GAAGF,EAAM,UAAU,EAAGE,CAAC,MAChC,CACA,OAAOF,CACT,CAkBO,SAASG,GAAMH,EAAuB,CAC3C,GAAIA,EAAQ,IAAK,CACf,IAAMI,EAAS,GAAGJ,EAAQ,KAAO,IAAO,IACxC,MAAO,KAAKA,EAAQ,MAAY,KAAM,QAAQI,CAAM,IACtD,KACE,QAAOJ,EAAM,SAAS,CAE1B,CC5BO,SAASK,IAA0B,CACxC,OAAO,SAAS,KAAK,UAAU,CAAC,CAClC,CAYO,SAASC,GAAgBC,EAAoB,CAClD,IAAMC,EAAKC,EAAE,IAAK,CAAE,KAAMF,CAAK,CAAC,EAChCC,EAAG,iBAAiB,QAASE,GAAMA,EAAG,gBAAgB,CAAC,EACvDF,EAAG,MAAM,CACX,CASO,SAASG,IAAwC,CACtD,OAAOC,EAA2B,OAAQ,YAAY,EACnD,KACCC,EAAIR,EAAe,EACnBS,EAAUT,GAAgB,CAAC,EAC3BU,EAAOR,GAAQA,EAAK,OAAS,CAAC,EAC9BS,EAAY,CAAC,CACf,CACJ,CAOO,SAASC,IAA+C,CAC7D,OAAON,GAAkB,EACtB,KACCE,EAAIK,GAAMC,GAAmB,QAAQD,KAAM,CAAE,EAC7CH,EAAOP,GAAM,OAAOA,GAAO,WAAW,CACxC,CACJ,CC1CO,SAASY,GAAWC,EAAoC,CAC7D,IAAMC,EAAQ,WAAWD,CAAK,EAC9B,OAAOE,GAA0BC,GAC/BF,EAAM,YAAY,IAAME,EAAKF,EAAM,OAAO,CAAC,CAC5C,EACE,KACCG,EAAUH,EAAM,OAAO,CACzB,CACJ,CAOO,SAASI,IAAkC,CAChD,IAAMJ,EAAQ,WAAW,OAAO,EAChC,OAAOK,EACLC,EAAU,OAAQ,aAAa,EAAE,KAAKC,EAAI,IAAM,EAAI,CAAC,EACrDD,EAAU,OAAQ,YAAY,EAAE,KAAKC,EAAI,IAAM,EAAK,CAAC,CACvD,EACG,KACCJ,EAAUH,EAAM,OAAO,CACzB,CACJ,CAcO,SAASQ,GACdC,EAA6BC,EACd,CACf,OAAOD,EACJ,KACCE,EAAUC,GAAUA,EAASF,EAAQ,EAAIG,CAAK,CAChD,CACJ,CC7CO,SAASC,GACdC,EAAmBC,EAAuB,CAAE,YAAa,aAAc,EACjD,CACtB,OAAOC,GAAK,MAAM,GAAGF,IAAOC,CAAO,CAAC,EACjC,KACCE,GAAW,IAAMC,CAAK,EACtBC,EAAUC,GAAOA,EAAI,SAAW,IAC5BC,GAAW,IAAM,IAAI,MAAMD,EAAI,UAAU,CAAC,EAC1CE,EAAGF,CAAG,CACV,CACF,CACJ,CAYO,SAASG,GACdT,EAAmBC,EACJ,CACf,OAAOF,GAAQC,EAAKC,CAAO,EACxB,KACCI,EAAUC,GAAOA,EAAI,KAAK,CAAC,EAC3BI,EAAY,CAAC,CACf,CACJ,CAUO,SAASC,GACdX,EAAmBC,EACG,CACtB,IAAMW,EAAM,IAAI,UAChB,OAAOb,GAAQC,EAAKC,CAAO,EACxB,KACCI,EAAUC,GAAOA,EAAI,KAAK,CAAC,EAC3BO,EAAIP,GAAOM,EAAI,gBAAgBN,EAAK,UAAU,CAAC,EAC/CI,EAAY,CAAC,CACf,CACJ,CClDO,SAASI,GAAYC,EAA+B,CACzD,IAAMC,EAASC,EAAE,SAAU,CAAE,IAAAF,CAAI,CAAC,EAClC,OAAOG,EAAM,KACX,SAAS,KAAK,YAAYF,CAAM,EACzBG,EACLC,EAAUJ,EAAQ,MAAM,EACxBI,EAAUJ,EAAQ,OAAO,EACtB,KACCK,EAAU,IACRC,GAAW,IAAM,IAAI,eAAe,mBAAmBP,GAAK,CAAC,CAC9D,CACH,CACJ,EACG,KACCQ,EAAI,IAAG,EAAY,EACnBC,EAAS,IAAM,SAAS,KAAK,YAAYR,CAAM,CAAC,EAChDS,GAAK,CAAC,CACR,EACH,CACH,CCfO,SAASC,IAAoC,CAClD,MAAO,CACL,EAAG,KAAK,IAAI,EAAG,OAAO,EACtB,EAAG,KAAK,IAAI,EAAG,OAAO,CACxB,CACF,CASO,SAASC,IAAkD,CAChE,OAAOC,EACLC,EAAU,OAAQ,SAAU,CAAE,QAAS,EAAK,CAAC,EAC7CA,EAAU,OAAQ,SAAU,CAAE,QAAS,EAAK,CAAC,CAC/C,EACG,KACCC,EAAIJ,EAAiB,EACrBK,EAAUL,GAAkB,CAAC,CAC/B,CACJ,CC3BO,SAASM,IAAgC,CAC9C,MAAO,CACL,MAAQ,WACR,OAAQ,WACV,CACF,CASO,SAASC,IAA8C,CAC5D,OAAOC,EAAU,OAAQ,SAAU,CAAE,QAAS,EAAK,CAAC,EACjD,KACCC,EAAIH,EAAe,EACnBI,EAAUJ,GAAgB,CAAC,CAC7
B,CACJ,CCXO,SAASK,IAAsC,CACpD,OAAOC,EAAc,CACnBC,GAAoB,EACpBC,GAAkB,CACpB,CAAC,EACE,KACCC,EAAI,CAAC,CAACC,EAAQC,CAAI,KAAO,CAAE,OAAAD,EAAQ,KAAAC,CAAK,EAAE,EAC1CC,EAAY,CAAC,CACf,CACJ,CCVO,SAASC,GACdC,EAAiB,CAAE,UAAAC,EAAW,QAAAC,CAAQ,EAChB,CACtB,IAAMC,EAAQF,EACX,KACCG,EAAwB,MAAM,CAChC,EAGIC,EAAUC,EAAc,CAACH,EAAOD,CAAO,CAAC,EAC3C,KACCK,EAAI,IAAMC,GAAiBR,CAAE,CAAC,CAChC,EAGF,OAAOM,EAAc,CAACJ,EAASD,EAAWI,CAAO,CAAC,EAC/C,KACCE,EAAI,CAAC,CAAC,CAAE,OAAAE,CAAO,EAAG,CAAE,OAAAC,EAAQ,KAAAC,CAAK,EAAG,CAAE,EAAAC,EAAG,EAAAC,CAAE,CAAC,KAAO,CACjD,OAAQ,CACN,EAAGH,EAAO,EAAIE,EACd,EAAGF,EAAO,EAAIG,EAAIJ,CACpB,EACA,KAAAE,CACF,EAAE,CACJ,CACJ,CCIO,SAASG,GACdC,EAAgB,CAAE,IAAAC,CAAI,EACP,CAGf,IAAMC,EAAMC,EAAwBH,EAAQ,SAAS,EAClD,KACCI,EAAI,CAAC,CAAE,KAAAC,CAAK,IAAMA,CAAS,CAC7B,EAGF,OAAOJ,EACJ,KACCK,GAAS,IAAMJ,EAAK,CAAE,QAAS,GAAM,SAAU,EAAK,CAAC,EACrDK,EAAIC,GAAWR,EAAO,YAAYQ,CAAO,CAAC,EAC1CC,EAAU,IAAMP,CAAG,EACnBQ,GAAM,CACR,CACJ,CCCA,IAAMC,GAASC,EAAW,WAAW,EAC/BC,GAAiB,KAAK,MAAMF,GAAO,WAAY,EACrDE,GAAO,KAAO,GAAG,IAAI,IAAIA,GAAO,KAAMC,GAAY,CAAC,IAW5C,SAASC,IAAwB,CACtC,OAAOF,EACT,CASO,SAASG,EAAQC,EAAqB,CAC3C,OAAOJ,GAAO,SAAS,SAASI,CAAI,CACtC,CAUO,SAASC,GACdC,EAAkBC,EACV,CACR,OAAO,OAAOA,GAAU,YACpBP,GAAO,aAAaM,GAAK,QAAQ,IAAKC,EAAM,SAAS,CAAC,EACtDP,GAAO,aAAaM,EAC1B,CCjCO,SAASE,GACdC,EAASC,EAAmB,SACP,CACrB,OAAOC,EAAW,sBAAsBF,KAASC,CAAI,CACvD,CAYO,SAASE,GACdH,EAASC,EAAmB,SACL,CACvB,OAAOG,EAAY,sBAAsBJ,KAASC,CAAI,CACxD,CC1EO,SAASI,GACdC,EACsB,CACtB,IAAMC,EAASC,EAAW,6BAA8BF,CAAE,EAC1D,OAAOG,EAAUF,EAAQ,QAAS,CAAE,KAAM,EAAK,CAAC,EAC7C,KACCG,EAAI,IAAMF,EAAW,cAAeF,CAAE,CAAC,EACvCI,EAAIC,IAAY,CAAE,KAAM,UAAUA,EAAQ,SAAS,CAAE,EAAE,CACzD,CACJ,CASO,SAASC,GACdN,EACiC,CACjC,MAAI,CAACO,EAAQ,kBAAkB,GAAK,CAACP,EAAG,kBAC/BQ,EAGFC,EAAM,IAAM,CACjB,IAAMC,EAAQ,IAAIC,EAClB,OAAAD,EACG,KACCE,EAAU,CAAE,KAAM,SAAiB,YAAY,CAAE,CAAC,CACpD,EACG,UAAU,CAAC,CAAE,KAAAC,CAAK,IAAM,CA5FjC,IAAAC,EA6FcD,GAAQA,MAAUC,EAAA,SAAiB,YAAY,IAA7B,KAAAA,EAAkCD,KACtDb,EAAG,OAAS,GAGZ,SAAiB,aAAca,CAAI,EAEvC,CAAC,EAGEd,GAAcC,CAAE,EACpB,KACCe,EAAIC,GAASN,EAAM,KAAKM,CAAK,CAAC,EAC9BC,EAAS,IAAMP,EAAM,SAAS,CAAC,EAC/BN,EAAIY,GAAUE,EAAA,CAAE,IAAKlB,GAAOgB,EAAQ,CACtC,CACJ,CAAC,CACH,CC5BO,SAASG,GACdC,EAAiB,CAAE,QAAAC,CAAQ,EACN,CACrB,OAAOA,EACJ,KACCC,EAAIC,IAAW,CAAE,OAAQA,IAAWH,CAAG,EAAE,CAC3C,CACJ,CAYO,SAASI,GACdJ,EAAiBK,EACe,CAChC,IAAMC,EAAY,IAAIC,EACtB,OAAAD,EAAU,UAAU,CAAC,CAAE,OAAAE,CAAO,IAAM,CAClCR,EAAG,OAASQ,CACd,CAAC,EAGMT,GAAaC,EAAIK,CAAO,EAC5B,KACCI,EAAIC,GAASJ,EAAU,KAAKI,CAAK,CAAC,EAClCC,EAAS,IAAML,EAAU,SAAS,CAAC,EACnCJ,EAAIQ,GAAUE,EAAA,CAAE,IAAKZ,GAAOU,EAAQ,CACtC,CACJ,CC7FA,IAAAG,GAAwB,SCajB,SAASC,GAAcC,EAA0B,CACtD,OACEC,EAAC,OAAI,MAAM,aAAa,GAAID,GAC1BC,EAAC,OAAI,MAAM,+BAA+B,CAC5C,CAEJ,CCHO,SAASC,GACdC,EAAqBC,EACR,CAIb,GAHAA,EAASA,EAAS,GAAGA,gBAAqBD,IAAO,OAG7CC,EAAQ,CACV,IAAMC,EAASD,EAAS,IAAIA,IAAW,OACvC,OACEE,EAAC,SAAM,MAAM,gBAAgB,SAAU,GACpCC,GAAcH,CAAM,EACrBE,EAAC,KAAE,KAAMD,EAAQ,MAAM,uBAAuB,SAAU,IACtDC,EAAC,QAAK,wBAAuBH,EAAI,CACnC,CACF,CAEJ,KACE,QACEG,EAAC,SAAM,MAAM,gBAAgB,SAAU,GACpCC,GAAcH,CAAM,EACrBE,EAAC,QAAK,MAAM,uBAAuB,SAAU,IAC3CA,EAAC,QAAK,wBAAuBH,EAAI,CACnC,CACF,CAGN,CC5BO,SAASK,GAAsBC,EAAyB,CAC7D,OACEC,EAAC,UACC,MAAM,uBACN,MAAOC,GAAY,gBAAgB,EACnC,wBAAuB,IAAIF,WAC5B,CAEL,CCYA,SAASG,GACPC,EAA2CC,EAC9B,CACb,IAAMC,EAASD,EAAO,EAChBE,EAASF,EAAO,EAGhBG,EAAU,OAAO,KAAKJ,EAAS,KAAK,EACvC,OAAOK,GAAO,CAACL,EAAS,MAAMK,EAAI,EAClC,OAAyB,CAACC,EAAMD,IAAQ,CACvC,GAAGC,EAAMC,EAAC,WAAKF,CAAI,EAAQ,GAC7B,EAAG,CAAC,CAAC,EACJ,MAAM,EAAG,EAAE,EAGRG,EAAM,IAAI,IAAIR,EAAS,QAAQ,EACjCS,EAAQ,kBAAkB,GAC5BD,EAAI,aAAa,IAAI,IAAK,OAAO,QAAQR,EAAS,KAAK,EACpD,OAAO,CAAC,CAAC,CAAEU,CAAK,IAAMA,CAAK,EAC3B,OAAO,CAACC,EAA
W,CAACC,CAAK,IAAM,GAAGD,KAAaC,IAAQ,KAAK,EAAG,EAAE,CACpE,EAGF,GAAM,CAAE,KAAAC,CAAK,EAAIC,GAAc,EAC/B,OACEP,EAAC,KAAE,KAAM,GAAGC,IAAO,MAAM,yBAAyB,SAAU,IAC1DD,EAAC,WACC,MAAO,CAAC,4BAA6B,GAAGL,EACpC,CAAC,qCAAqC,EACtC,CAAC,CACL,EAAE,KAAK,GAAG,EACV,gBAAeF,EAAS,MAAM,QAAQ,CAAC,GAEtCE,EAAS,GAAKK,EAAC,OAAI,MAAM,iCAAiC,EAC3DA,EAAC,MAAG,MAAM,2BAA2BP,EAAS,KAAM,EACnDG,EAAS,GAAKH,EAAS,KAAK,OAAS,GACpCO,EAAC,KAAE,MAAM,4BACNQ,GAASf,EAAS,KAAM,GAAG,CAC9B,EAEDA,EAAS,MACRO,EAAC,OAAI,MAAM,cACRP,EAAS,KAAK,IAAIgB,GAAO,CACxB,IAAMC,EAAKD,EAAI,QAAQ,WAAY,EAAE,EAC/BE,EAAOL,EACTI,KAAMJ,EACJ,4BAA4BA,EAAKI,KACjC,cACF,GACJ,OACEV,EAAC,QAAK,MAAO,UAAUW,KAASF,CAAI,CAExC,CAAC,CACH,EAEDb,EAAS,GAAKC,EAAQ,OAAS,GAC9BG,EAAC,KAAE,MAAM,2BACNY,GAAY,4BAA4B,EAAE,KAAG,GAAGf,CACnD,CAEJ,CACF,CAEJ,CAaO,SAASgB,GACdC,EACa,CACb,IAAMC,EAAYD,EAAO,GAAG,MACtBE,EAAO,CAAC,GAAGF,CAAM,EAGjBnB,EAASqB,EAAK,UAAUC,GAAO,CAACA,EAAI,SAAS,SAAS,GAAG,CAAC,EAC1D,CAACC,CAAO,EAAIF,EAAK,OAAOrB,EAAQ,CAAC,EAGnCwB,EAAQH,EAAK,UAAUC,GAAOA,EAAI,MAAQF,CAAS,EACnDI,IAAU,KACZA,EAAQH,EAAK,QAGf,IAAMI,EAAOJ,EAAK,MAAM,EAAGG,CAAK,EAC1BE,EAAOL,EAAK,MAAMG,CAAK,EAGvBG,EAAW,CACf9B,GAAqB0B,EAAS,EAAc,EAAE,CAACvB,GAAUwB,IAAU,EAAE,EACrE,GAAGC,EAAK,IAAIG,GAAW/B,GAAqB+B,EAAS,CAAW,CAAC,EACjE,GAAGF,EAAK,OAAS,CACfrB,EAAC,WAAQ,MAAM,0BACbA,EAAC,WAAQ,SAAU,IAChBqB,EAAK,OAAS,GAAKA,EAAK,SAAW,EAChCT,GAAY,wBAAwB,EACpCA,GAAY,2BAA4BS,EAAK,MAAM,CAEzD,EACC,GAAGA,EAAK,IAAIE,GAAW/B,GAAqB+B,EAAS,CAAW,CAAC,CACpE,CACF,EAAI,CAAC,CACP,EAGA,OACEvB,EAAC,MAAG,MAAM,0BACPsB,CACH,CAEJ,CC1IO,SAASE,GAAkBC,EAAiC,CACjE,OACEC,EAAC,MAAG,MAAM,oBACP,OAAO,QAAQD,CAAK,EAAE,IAAI,CAAC,CAACE,EAAKC,CAAK,IACrCF,EAAC,MAAG,MAAO,oCAAoCC,KAC5C,OAAOC,GAAU,SAAWC,GAAMD,CAAK,EAAIA,CAC9C,CACD,CACH,CAEJ,CCAO,SAASE,GACdC,EACa,CACb,IAAMC,EAAU,kCAAkCD,IAClD,OACEE,EAAC,OAAI,MAAOD,EAAS,OAAM,IACzBC,EAAC,UAAO,MAAM,gBAAgB,SAAU,GAAI,CAC9C,CAEJ,CCpBO,SAASC,GAAYC,EAAiC,CAC3D,OACEC,EAAC,OAAI,MAAM,0BACTA,EAAC,OAAI,MAAM,qBACRD,CACH,CACF,CAEJ,CCMA,SAASE,GAAcC,EAA+B,CACpD,IAAMC,EAASC,GAAc,EAGvBC,EAAM,IAAI,IAAI,MAAMH,EAAQ,WAAYC,EAAO,IAAI,EACzD,OACEG,EAAC,MAAG,MAAM,oBACRA,EAAC,KAAE,KAAM,GAAGD,IAAO,MAAM,oBACtBH,EAAQ,KACX,CACF,CAEJ,CAcO,SAASK,GACdC,EAAqBC,EACR,CACb,OACEH,EAAC,OAAI,MAAM,cACTA,EAAC,UACC,MAAM,sBACN,aAAYI,GAAY,sBAAsB,GAE7CD,EAAO,KACV,EACAH,EAAC,MAAG,MAAM,oBACPE,EAAS,IAAIP,EAAa,CAC7B,CACF,CAEJ,CCCO,SAASU,GACdC,EAAiBC,EACO,CACxB,IAAMC,EAAUC,EAAM,IAAMC,EAAc,CACxCC,GAAmBL,CAAE,EACrBM,GAA0BL,CAAS,CACrC,CAAC,CAAC,EACC,KACCM,EAAI,CAAC,CAAC,CAAE,EAAAC,EAAG,EAAAC,CAAE,EAAGC,CAAM,IAAqB,CACzC,GAAM,CAAE,MAAAC,EAAO,OAAAC,CAAO,EAAIC,GAAeb,CAAE,EAC3C,MAAQ,CACN,EAAGQ,EAAIE,EAAO,EAAIC,EAAQ,EAC1B,EAAGF,EAAIC,EAAO,EAAIE,EAAS,CAC7B,CACF,CAAC,CACH,EAGF,OAAOE,GAAkBd,CAAE,EACxB,KACCe,EAAUC,GAAUd,EACjB,KACCK,EAAIU,IAAW,CAAE,OAAAD,EAAQ,OAAAC,CAAO,EAAE,EAClCC,GAAK,CAAC,CAACF,GAAU,GAAQ,CAC3B,CACF,CACF,CACJ,CAWO,SAASG,GACdnB,EAAiBC,EAAwB,CAAE,QAAAmB,CAAQ,EAChB,CACnC,GAAM,CAACC,EAASC,CAAK,EAAI,MAAM,KAAKtB,EAAG,QAAQ,EAG/C,OAAOG,EAAM,IAAM,CACjB,IAAMoB,EAAQ,IAAIC,EACZC,EAAQF,EAAM,KAAKG,GAAS,CAAC,CAAC,EACpC,OAAAH,EAAM,UAAU,CAGd,KAAK,CAAE,OAAAN,CAAO,EAAG,CACfjB,EAAG,MAAM,YAAY,iBAAkB,GAAGiB,EAAO,KAAK,EACtDjB,EAAG,MAAM,YAAY,iBAAkB,GAAGiB,EAAO,KAAK,CACxD,EAGA,UAAW,CACTjB,EAAG,MAAM,eAAe,gBAAgB,EACxCA,EAAG,MAAM,eAAe,gBAAgB,CAC1C,CACF,CAAC,EAGD2B,GAAuB3B,CAAE,EACtB,KACC4B,GAAUH,CAAK,CACjB,EACG,UAAUI,GAAW,CACpB7B,EAAG,gBAAgB,kBAAmB6B,CAAO,CAC/C,CAAC,EAGLC,EACEP,EAAM,KAAKQ,EAAO,CAAC,CAAE,OAAAf,CAAO,IAAMA,CAAM,CAAC,EACzCO,EAAM,KAAKS,GAAa,GAAG,EAAGD,EAAO,CAAC,CAAE,OAAAf,CAAO,IAAM,CAACA,CAAM,CAAC,CAC/D,EACG,UAAU,CAGT,KAAK,CAAE,OAAAA,CAAO,EAAG,CACXA,EACFhB,EAAG,QAAQqB,CAAO,EAElBA,EAAQ,OAAO,CACnB,E
AGA,UAAW,CACTrB,EAAG,QAAQqB,CAAO,CACpB,CACF,CAAC,EAGHE,EACG,KACCU,GAAU,GAAIC,EAAuB,CACvC,EACG,UAAU,CAAC,CAAE,OAAAlB,CAAO,IAAM,CACzBK,EAAQ,UAAU,OAAO,qBAAsBL,CAAM,CACvD,CAAC,EAGLO,EACG,KACCY,GAAa,IAAKD,EAAuB,EACzCH,EAAO,IAAM,CAAC,CAAC/B,EAAG,YAAY,EAC9BO,EAAI,IAAMP,EAAG,aAAc,sBAAsB,CAAC,EAClDO,EAAI,CAAC,CAAE,EAAAC,CAAE,IAAMA,CAAC,CAClB,EACG,UAAU,CAGT,KAAK4B,EAAQ,CACPA,EACFpC,EAAG,MAAM,YAAY,iBAAkB,GAAG,CAACoC,KAAU,EAErDpC,EAAG,MAAM,eAAe,gBAAgB,CAC5C,EAGA,UAAW,CACTA,EAAG,MAAM,eAAe,gBAAgB,CAC1C,CACF,CAAC,EAGLqC,EAAsBf,EAAO,OAAO,EACjC,KACCM,GAAUH,CAAK,EACfM,EAAOO,GAAM,EAAEA,EAAG,SAAWA,EAAG,QAAQ,CAC1C,EACG,UAAUA,GAAMA,EAAG,eAAe,CAAC,EAGxCD,EAAsBf,EAAO,WAAW,EACrC,KACCM,GAAUH,CAAK,EACfc,GAAehB,CAAK,CACtB,EACG,UAAU,CAAC,CAACe,EAAI,CAAE,OAAAtB,CAAO,CAAC,IAAM,CAvOzC,IAAAwB,EA0OU,GAAIF,EAAG,SAAW,GAAKA,EAAG,SAAWA,EAAG,QACtCA,EAAG,eAAe,UAGTtB,EAAQ,CACjBsB,EAAG,eAAe,EAGlB,IAAMG,EAASzC,EAAG,cAAe,QAAQ,gBAAgB,EACrDyC,aAAkB,YACpBA,EAAO,MAAM,GAEbD,EAAAE,GAAiB,IAAjB,MAAAF,EAAoB,MACxB,CACF,CAAC,EAGLpB,EACG,KACCQ,GAAUH,CAAK,EACfM,EAAOY,GAAUA,IAAWtB,CAAO,EACnCuB,GAAM,GAAG,CACX,EACG,UAAU,IAAM5C,EAAG,MAAM,CAAC,EAGxBD,GAAgBC,EAAIC,CAAS,EACjC,KACC4C,EAAIC,GAASvB,EAAM,KAAKuB,CAAK,CAAC,EAC9BC,EAAS,IAAMxB,EAAM,SAAS,CAAC,EAC/BhB,EAAIuC,GAAUE,EAAA,CAAE,IAAKhD,GAAO8C,EAAQ,CACtC,CACJ,CAAC,CACH,CCrMA,SAASG,GAAsBC,EAAgC,CAC7D,IAAMC,EAAkB,CAAC,EACzB,QAAWC,KAAMC,EAAY,eAAgBH,CAAS,EAAG,CACvD,IAAMI,EAAgB,CAAC,EAGjBC,EAAK,SAAS,mBAAmBH,EAAI,WAAW,SAAS,EAC/D,QAASI,EAAOD,EAAG,SAAS,EAAGC,EAAMA,EAAOD,EAAG,SAAS,EACtDD,EAAM,KAAKE,CAAY,EAGzB,QAASC,KAAQH,EAAO,CACtB,IAAII,EAGJ,KAAQA,EAAQ,gBAAgB,KAAKD,EAAK,WAAY,GAAI,CACxD,GAAM,CAAC,CAAEE,EAAIC,CAAK,EAAIF,EACtB,GAAI,OAAOE,GAAU,YAAa,CAChC,IAAMC,EAASJ,EAAK,UAAUC,EAAM,KAAK,EACzCD,EAAOI,EAAO,UAAUF,EAAG,MAAM,EACjCR,EAAQ,KAAKU,CAAM,CAGrB,KAAO,CACLJ,EAAK,YAAcE,EACnBR,EAAQ,KAAKM,CAAI,EACjB,KACF,CACF,CACF,CACF,CACA,OAAON,CACT,CAQA,SAASW,GAAKC,EAAqBC,EAA2B,CAC5DA,EAAO,OAAO,GAAG,MAAM,KAAKD,EAAO,UAAU,CAAC,CAChD,CAoBO,SAASE,GACdb,EAAiBF,EAAwB,CAAE,QAAAgB,EAAS,OAAAC,CAAO,EACxB,CAGnC,IAAMC,EAASlB,EAAU,QAAQ,MAAM,EACjCmB,EAASD,GAAA,YAAAA,EAAQ,GAGjBE,EAAc,IAAI,IACxB,QAAWT,KAAUZ,GAAsBC,CAAS,EAAG,CACrD,GAAM,CAAC,CAAES,CAAE,EAAIE,EAAO,YAAa,MAAM,WAAW,EAChDU,GAAmB,gBAAgBZ,KAAOP,CAAE,IAC9CkB,EAAY,IAAIX,EAAIa,GAAiBb,EAAIU,CAAM,CAAC,EAChDR,EAAO,YAAYS,EAAY,IAAIX,CAAE,CAAE,EAE3C,CAGA,OAAIW,EAAY,OAAS,EAChBG,EAGFC,EAAM,IAAM,CACjB,IAAMC,EAAQ,IAAIC,EAGZC,EAAsC,CAAC,EAC7C,OAAW,CAAClB,EAAImB,CAAU,IAAKR,EAC7BO,EAAM,KAAK,CACTE,EAAW,cAAeD,CAAU,EACpCC,EAAW,gBAAgBpB,KAAOP,CAAE,CACtC,CAAC,EAGH,OAAAe,EACG,KACCa,GAAUL,EAAM,KAAKM,GAAS,CAAC,CAAC,CAAC,CACnC,EACG,UAAUC,GAAU,CACnB9B,EAAG,OAAS,CAAC8B,EAGb,OAAW,CAACC,EAAOC,CAAK,IAAKP,EACtBK,EAGHpB,GAAKqB,EAAOC,CAAK,EAFjBtB,GAAKsB,EAAOD,CAAK,CAGvB,CAAC,EAGEE,EAAM,GAAG,CAAC,GAAGf,CAAW,EAC5B,IAAI,CAAC,CAAC,CAAEQ,CAAU,IACjBQ,GAAgBR,EAAY5B,EAAW,CAAE,QAAAgB,CAAQ,CAAC,CACnD,CACH,EACG,KACCqB,EAAS,IAAMZ,EAAM,SAAS,CAAC,EAC/Ba,GAAM,CACR,CACJ,CAAC,CACH,CV9GA,IAAIC,GAAW,EAaf,SAASC,GAAkBC,EAA0C,CACnE,GAAIA,EAAG,mBAAoB,CACzB,IAAMC,EAAUD,EAAG,mBACnB,GAAIC,EAAQ,UAAY,KACtB,OAAOA,EAGJ,GAAIA,EAAQ,UAAY,KAAO,CAACA,EAAQ,SAAS,OACpD,OAAOF,GAAkBE,CAAO,CACpC,CAIF,CAgBO,SAASC,GACdF,EACuB,CACvB,OAAOG,GAAiBH,CAAE,EACvB,KACCI,EAAI,CAAC,CAAE,MAAAC,CAAM,KAEJ,CACL,WAFcC,GAAsBN,CAAE,EAElB,MAAQK,CAC9B,EACD,EACDE,EAAwB,YAAY,CACtC,CACJ,CAoBO,SAASC,GACdR,EAAiBS,EAC8B,CAC/C,GAAM,CAAE,QAASC,CAAM,EAAI,WAAW,SAAS,EAGzCC,EAAWC,EAAM,IAAM,CAC3B,IAAMC,EAAQ,IAAIC,EASlB,GARAD,EAAM,UAAU,CAAC,CAAE,WAAAE,CAAW,IAAM,CAC9BA,GAAcL,EAChBV,EAAG,aAAa,WAAY,GAAG,EAE/BA,EAAG,gBAAgB,UAAU,CACjC,CAAC,EAGG,GAAAgB,QAAY,YAAY,EAAG,CAC7B,IAAMC,EAASjB,EAAG,QAAQ,KAAK
,EAC/BiB,EAAO,GAAK,UAAU,EAAEnB,KACxBmB,EAAO,aACLC,GAAsBD,EAAO,EAAE,EAC/BjB,CACF,CACF,CAGA,IAAMmB,EAAYnB,EAAG,QAAQ,YAAY,EACzC,GAAImB,aAAqB,YAAa,CACpC,IAAMC,EAAOrB,GAAkBoB,CAAS,EAGxC,GAAI,OAAOC,GAAS,cAClBD,EAAU,UAAU,SAAS,UAAU,GACvCE,EAAQ,uBAAuB,GAC9B,CACD,IAAMC,EAAeC,GAAoBH,EAAMpB,EAAIS,CAAO,EAG1D,OAAOP,GAAeF,CAAE,EACrB,KACCwB,EAAIC,GAASZ,EAAM,KAAKY,CAAK,CAAC,EAC9BC,EAAS,IAAMb,EAAM,SAAS,CAAC,EAC/BT,EAAIqB,GAAUE,EAAA,CAAE,IAAK3B,GAAOyB,EAAQ,EACpCG,GACEzB,GAAiBgB,CAAS,EACvB,KACCf,EAAI,CAAC,CAAE,MAAAC,EAAO,OAAAwB,CAAO,IAAMxB,GAASwB,CAAM,EAC1CC,EAAqB,EACrBC,EAAUC,GAAUA,EAASV,EAAeW,CAAK,CACnD,CACJ,CACF,CACJ,CACF,CAGA,OAAO/B,GAAeF,CAAE,EACrB,KACCwB,EAAIC,GAASZ,EAAM,KAAKY,CAAK,CAAC,EAC9BC,EAAS,IAAMb,EAAM,SAAS,CAAC,EAC/BT,EAAIqB,GAAUE,EAAA,CAAE,IAAK3B,GAAOyB,EAAQ,CACtC,CACJ,CAAC,EAGD,OAAIJ,EAAQ,cAAc,EACjBa,GAAuBlC,CAAE,EAC7B,KACCmC,EAAOC,GAAWA,CAAO,EACzBC,GAAK,CAAC,EACNN,EAAU,IAAMpB,CAAQ,CAC1B,EAGGA,CACT,iyJWpLA,IAAI2B,GAKAC,GAAW,EAWf,SAASC,IAAiC,CACxC,OAAO,OAAO,SAAY,aAAe,mBAAmB,QACxDC,GAAY,qDAAqD,EACjEC,EAAG,MAAS,CAClB,CAaO,SAASC,GACdC,EACgC,CAChC,OAAAA,EAAG,UAAU,OAAO,SAAS,EAC7BN,QAAaE,GAAa,EACvB,KACCK,EAAI,IAAM,QAAQ,WAAW,CAC3B,YAAa,GACb,SAAAC,GACA,SAAU,CACR,cAAe,OACf,gBAAiB,OACjB,aAAc,MAChB,CACF,CAAC,CAAC,EACFC,EAAI,IAAG,EAAY,EACnBC,EAAY,CAAC,CACf,GAGFV,GAAS,UAAU,IAAM,CACvBM,EAAG,UAAU,IAAI,SAAS,EAC1B,IAAMK,EAAK,aAAaV,OAClBW,EAAOC,EAAE,MAAO,CAAE,MAAO,SAAU,CAAC,EAC1C,QAAQ,WAAW,OAAOF,EAAIL,EAAG,YAAcQ,GAAgB,CAG7D,IAAMC,EAASH,EAAK,aAAa,CAAE,KAAM,QAAS,CAAC,EACnDG,EAAO,UAAYD,EAGnBR,EAAG,YAAYM,CAAI,CACrB,CAAC,CACH,CAAC,EAGMZ,GACJ,KACCS,EAAI,KAAO,CAAE,IAAKH,CAAG,EAAE,CACzB,CACJ,CC/CO,SAASU,GACdC,EAAwB,CAAE,QAAAC,EAAS,OAAAC,CAAO,EACrB,CACrB,IAAIC,EAAO,GACX,OAAOC,EAGLH,EACG,KACCI,EAAIC,GAAUA,EAAO,QAAQ,qBAAqB,CAAE,EACpDC,EAAOC,GAAWR,IAAOQ,CAAO,EAChCH,EAAI,KAAO,CACT,OAAQ,OAAQ,OAAQ,EAC1B,EAAa,CACf,EAGFH,EACG,KACCK,EAAOE,GAAUA,GAAU,CAACN,CAAI,EAChCO,EAAI,IAAMP,EAAOH,EAAG,IAAI,EACxBK,EAAII,IAAW,CACb,OAAQA,EAAS,OAAS,OAC5B,EAAa,CACf,CACJ,CACF,CAaO,SAASE,GACdX,EAAwBY,EACQ,CAChC,OAAOC,EAAM,IAAM,CACjB,IAAMC,EAAQ,IAAIC,EAClB,OAAAD,EAAM,UAAU,CAAC,CAAE,OAAAE,EAAQ,OAAAC,CAAO,IAAM,CACtCjB,EAAG,gBAAgB,OAAQgB,IAAW,MAAM,EACxCC,GACFjB,EAAG,eAAe,CACtB,CAAC,EAGMD,GAAaC,EAAIY,CAAO,EAC5B,KACCF,EAAIQ,GAASJ,EAAM,KAAKI,CAAK,CAAC,EAC9BC,EAAS,IAAML,EAAM,SAAS,CAAC,EAC/BT,EAAIa,GAAUE,EAAA,CAAE,IAAKpB,GAAOkB,EAAQ,CACtC,CACJ,CAAC,CACH,CC5FA,IAAMG,GAAWC,EAAE,OAAO,EAgBnB,SAASC,GACdC,EACkC,CAClC,OAAAA,EAAG,YAAYH,EAAQ,EACvBA,GAAS,YAAYI,GAAYD,CAAE,CAAC,EAG7BE,EAAG,CAAE,IAAKF,CAAG,CAAC,CACvB,CCuBO,SAASG,GACdC,EACyB,CACzB,IAAMC,EAASC,EAA8B,iBAAkBF,CAAE,EAC3DG,EAAUF,EAAO,KAAKG,GAASA,EAAM,OAAO,GAAKH,EAAO,GAC9D,OAAOI,EAAM,GAAGJ,EAAO,IAAIG,GAASE,EAAUF,EAAO,QAAQ,EAC1D,KACCG,EAAI,IAAMC,EAA6B,cAAcJ,EAAM,MAAM,CAAC,CACpE,CACF,CAAC,EACE,KACCK,EAAUD,EAA6B,cAAcL,EAAQ,MAAM,CAAC,EACpEI,EAAIG,IAAW,CAAE,OAAAA,CAAO,EAAE,CAC5B,CACJ,CAeO,SAASC,GACdX,EAAiB,CAAE,UAAAY,CAAU,EACO,CAGpC,IAAMC,EAAOC,GAAoB,MAAM,EACvCd,EAAG,OAAOa,CAAI,EAGd,IAAME,EAAOD,GAAoB,MAAM,EACvCd,EAAG,OAAOe,CAAI,EAGd,IAAMC,EAAYR,EAAW,iBAAkBR,CAAE,EACjD,OAAOiB,EAAM,IAAM,CACjB,IAAMC,EAAQ,IAAIC,EACZC,EAAQF,EAAM,KAAKG,GAAS,CAAC,CAAC,EACpC,OAAAC,EAAc,CAACJ,EAAOK,GAAiBvB,CAAE,CAAC,CAAC,EACxC,KACCwB,GAAU,EAAGC,EAAuB,EACpCC,GAAUN,CAAK,CACjB,EACG,UAAU,CAGT,KAAK,CAAC,CAAE,OAAAV,CAAO,EAAGiB,CAAI,EAAG,CACvB,IAAMC,EAASC,GAAiBnB,CAAM,EAChC,CAAE,MAAAoB,CAAM,EAAIC,GAAerB,CAAM,EAGvCV,EAAG,MAAM,YAAY,mBAAoB,GAAG4B,EAAO,KAAK,EACxD5B,EAAG,MAAM,YAAY,uBAAwB,GAAG8B,KAAS,EAGzD,IAAME,EAAUC,GAAwBjB,CAAS,GAE/CY,EAAO,EAAYI,EAAQ,GAC3BJ,EAAO,EAAIE,EAAQE,EAAQ,EAAIL,EAAK,QAEpCX,EAAU,SAAS,CACjB,KAAM,KAAK,IAAI,EAAGY,EAAO,EAAI,EAAE,EAC/
B,SAAU,QACZ,CAAC,CACL,EAGA,UAAW,CACT5B,EAAG,MAAM,eAAe,kBAAkB,EAC1CA,EAAG,MAAM,eAAe,sBAAsB,CAChD,CACF,CAAC,EAGLsB,EAAc,CACZY,GAA0BlB,CAAS,EACnCO,GAAiBP,CAAS,CAC5B,CAAC,EACE,KACCU,GAAUN,CAAK,CACjB,EACG,UAAU,CAAC,CAACQ,EAAQD,CAAI,IAAM,CAC7B,IAAMK,EAAUG,GAAsBnB,CAAS,EAC/CH,EAAK,OAASe,EAAO,EAAI,GACzBb,EAAK,OAASa,EAAO,EAAII,EAAQ,MAAQL,EAAK,MAAQ,EACxD,CAAC,EAGLtB,EACEC,EAAUO,EAAM,OAAO,EAAE,KAAKN,EAAI,IAAM,EAAE,CAAC,EAC3CD,EAAUS,EAAM,OAAO,EAAE,KAAKR,EAAI,IAAM,CAAE,CAAC,CAC7C,EACG,KACCmB,GAAUN,CAAK,CACjB,EACG,UAAUgB,GAAa,CACtB,GAAM,CAAE,MAAAN,CAAM,EAAIC,GAAef,CAAS,EAC1CA,EAAU,SAAS,CACjB,KAAMc,EAAQM,EACd,SAAU,QACZ,CAAC,CACH,CAAC,EAGDC,EAAQ,mBAAmB,GAC7BnB,EAAM,KACJoB,GAAK,CAAC,EACNC,GAAe3B,CAAS,CAC1B,EACG,UAAU,CAAC,CAAC,CAAE,OAAAF,CAAO,EAAG,CAAE,OAAAkB,CAAO,CAAC,IAAM,CACvC,IAAMY,EAAM9B,EAAO,UAAU,KAAK,EAClC,GAAIA,EAAO,aAAa,mBAAmB,EACzCA,EAAO,gBAAgB,mBAAmB,MAGrC,CACL,IAAM+B,EAAIzC,EAAG,UAAY4B,EAAO,EAGhC,QAAWc,KAAOxC,EAAY,aAAa,EACzC,QAAWE,KAASF,EAClB,iBAAkBwC,CACpB,EAAG,CACD,IAAMC,EAAQnC,EAAW,cAAcJ,EAAM,MAAM,EACnD,GACEuC,IAAUjC,GACViC,EAAM,UAAU,KAAK,IAAMH,EAC3B,CACAG,EAAM,aAAa,oBAAqB,EAAE,EAC1CvC,EAAM,MAAM,EACZ,KACF,CACF,CAGF,OAAO,SAAS,CACd,IAAKJ,EAAG,UAAYyC,CACtB,CAAC,EAGD,IAAMG,EAAO,SAAmB,QAAQ,GAAK,CAAC,EAC9C,SAAS,SAAU,CAAC,GAAG,IAAI,IAAI,CAACJ,EAAK,GAAGI,CAAI,CAAC,CAAC,CAAC,CACjD,CACF,CAAC,EAGE7C,GAAiBC,CAAE,EACvB,KACC6C,EAAIC,GAAS5B,EAAM,KAAK4B,CAAK,CAAC,EAC9BC,EAAS,IAAM7B,EAAM,SAAS,CAAC,EAC/BX,EAAIuC,GAAUE,EAAA,CAAE,IAAKhD,GAAO8C,EAAQ,CACtC,CACJ,CAAC,EACE,KACCG,GAAYC,EAAc,CAC5B,CACJ,CCtKO,SAASC,GACdC,EAAiB,CAAE,UAAAC,EAAW,QAAAC,EAAS,OAAAC,CAAO,EACd,CAChC,OAAOC,EAGL,GAAGC,EAAY,2BAA4BL,CAAE,EAC1C,IAAIM,GAASC,GAAeD,EAAO,CAAE,QAAAJ,EAAS,OAAAC,CAAO,CAAC,CAAC,EAG1D,GAAGE,EAAY,cAAeL,CAAE,EAC7B,IAAIM,GAASE,GAAaF,CAAK,CAAC,EAGnC,GAAGD,EAAY,qBAAsBL,CAAE,EACpC,IAAIM,GAASG,GAAeH,CAAK,CAAC,EAGrC,GAAGD,EAAY,UAAWL,CAAE,EACzB,IAAIM,GAASI,GAAaJ,EAAO,CAAE,QAAAJ,EAAS,OAAAC,CAAO,CAAC,CAAC,EAGxD,GAAGE,EAAY,cAAeL,CAAE,EAC7B,IAAIM,GAASK,GAAiBL,EAAO,CAAE,UAAAL,CAAU,CAAC,CAAC,CACxD,CACF,CClCO,SAASW,GACdC,EAAkB,CAAE,OAAAC,CAAO,EACP,CACpB,OAAOA,EACJ,KACCC,EAAUC,GAAWC,EACnBC,EAAG,EAAI,EACPA,EAAG,EAAK,EAAE,KAAKC,GAAM,GAAI,CAAC,CAC5B,EACG,KACCC,EAAIC,IAAW,CAAE,QAAAL,EAAS,OAAAK,CAAO,EAAE,CACrC,CACF,CACF,CACJ,CAaO,SAASC,GACdC,EAAiBC,EACc,CAC/B,IAAMC,EAAQC,EAAW,cAAeH,CAAE,EAC1C,OAAOI,EAAM,IAAM,CACjB,IAAMC,EAAQ,IAAIC,EAClB,OAAAD,EAAM,UAAU,CAAC,CAAE,QAAAZ,EAAS,OAAAK,CAAO,IAAM,CACvCE,EAAG,UAAU,OAAO,oBAAqBF,CAAM,EAC/CI,EAAM,YAAcT,CACtB,CAAC,EAGMJ,GAAYW,EAAIC,CAAO,EAC3B,KACCM,EAAIC,GAASH,EAAM,KAAKG,CAAK,CAAC,EAC9BC,EAAS,IAAMJ,EAAM,SAAS,CAAC,EAC/BR,EAAIW,GAAUE,EAAA,CAAE,IAAKV,GAAOQ,EAAQ,CACtC,CACJ,CAAC,CACH,CC9BA,SAASG,GAAS,CAAE,UAAAC,CAAU,EAAsC,CAClE,GAAI,CAACC,EAAQ,iBAAiB,EAC5B,OAAOC,EAAG,EAAK,EAGjB,IAAMC,EAAaH,EAChB,KACCI,EAAI,CAAC,CAAE,OAAQ,CAAE,EAAAC,CAAE,CAAE,IAAMA,CAAC,EAC5BC,GAAY,EAAG,CAAC,EAChBF,EAAI,CAAC,CAACG,EAAGC,CAAC,IAAM,CAACD,EAAIC,EAAGA,CAAC,CAAU,EACnCC,EAAwB,CAAC,CAC3B,EAGIC,EAAUC,EAAc,CAACX,EAAWG,CAAU,CAAC,EAClD,KACCS,EAAO,CAAC,CAAC,CAAE,OAAAC,CAAO,EAAG,CAAC,CAAER,CAAC,CAAC,IAAM,KAAK,IAAIA,EAAIQ,EAAO,CAAC,EAAI,GAAG,EAC5DT,EAAI,CAAC,CAAC,CAAE,CAACU,CAAS,CAAC,IAAMA,CAAS,EAClCC,EAAqB,CACvB,EAGIC,EAAUC,GAAY,QAAQ,EACpC,OAAON,EAAc,CAACX,EAAWgB,CAAO,CAAC,EACtC,KACCZ,EAAI,CAAC,CAAC,CAAE,OAAAS,CAAO,EAAGK,CAAM,IAAML,EAAO,EAAI,KAAO,CAACK,CAAM,EACvDH,EAAqB,EACrBI,EAAUC,GAAUA,EAASV,EAAUR,EAAG,EAAK,CAAC,EAChDmB,EAAU,EAAK,CACjB,CACJ,CAcO,SAASC,GACdC,EAAiBC,EACG,CACpB,OAAOC,EAAM,IAAMd,EAAc,CAC/Be,GAAiBH,CAAE,EACnBxB,GAASyB,CAAO,CAClB,CAAC,CAAC,EACC,KACCpB,EAAI,CAAC,CAAC,CAAE,OAAAuB,CAAO,EAAGC,CAAM,KAAO,CAC7B,OAAAD,EACA,OAAAC,CACF,EAAE
,EACFb,EAAqB,CAACR,EAAGC,IACvBD,EAAE,SAAWC,EAAE,QACfD,EAAE,SAAWC,EAAE,MAChB,EACDqB,EAAY,CAAC,CACf,CACJ,CAaO,SAASC,GACdP,EAAiB,CAAE,QAAAQ,EAAS,MAAAC,CAAM,EACH,CAC/B,OAAOP,EAAM,IAAM,CACjB,IAAMQ,EAAQ,IAAIC,EACZC,EAAQF,EAAM,KAAKG,GAAS,CAAC,CAAC,EACpC,OAAAH,EACG,KACCxB,EAAwB,QAAQ,EAChC4B,GAAkBN,CAAO,CAC3B,EACG,UAAU,CAAC,CAAC,CAAE,OAAAX,CAAO,EAAG,CAAE,OAAAQ,CAAO,CAAC,IAAM,CACvCL,EAAG,UAAU,OAAO,oBAAqBH,GAAU,CAACQ,CAAM,EAC1DL,EAAG,OAASK,CACd,CAAC,EAGLI,EAAM,UAAUC,CAAK,EAGdF,EACJ,KACCO,GAAUH,CAAK,EACf/B,EAAImC,GAAUC,EAAA,CAAE,IAAKjB,GAAOgB,EAAQ,CACtC,CACJ,CAAC,CACH,CChHO,SAASE,GACdC,EAAiB,CAAE,UAAAC,EAAW,QAAAC,CAAQ,EACb,CACzB,OAAOC,GAAgBH,EAAI,CAAE,UAAAC,EAAW,QAAAC,CAAQ,CAAC,EAC9C,KACCE,EAAI,CAAC,CAAE,OAAQ,CAAE,EAAAC,CAAE,CAAE,IAAM,CACzB,GAAM,CAAE,OAAAC,CAAO,EAAIC,GAAeP,CAAE,EACpC,MAAO,CACL,OAAQK,GAAKC,CACf,CACF,CAAC,EACDE,EAAwB,QAAQ,CAClC,CACJ,CAaO,SAASC,GACdT,EAAiBU,EACmB,CACpC,OAAOC,EAAM,IAAM,CACjB,IAAMC,EAAQ,IAAIC,EAClBD,EAAM,UAAU,CAAC,CAAE,OAAAE,CAAO,IAAM,CAC9Bd,EAAG,UAAU,OAAO,2BAA4Bc,CAAM,CACxD,CAAC,EAGD,IAAMC,EAAUC,GAAmB,YAAY,EAC/C,OAAI,OAAOD,GAAY,YACdE,EAGFlB,GAAiBgB,EAASL,CAAO,EACrC,KACCQ,EAAIC,GAASP,EAAM,KAAKO,CAAK,CAAC,EAC9BC,EAAS,IAAMR,EAAM,SAAS,CAAC,EAC/BR,EAAIe,GAAUE,EAAA,CAAE,IAAKrB,GAAOmB,EAAQ,CACtC,CACJ,CAAC,CACH,CCvDO,SAASG,GACdC,EAAiB,CAAE,UAAAC,EAAW,QAAAC,CAAQ,EACpB,CAGlB,IAAMC,EAAUD,EACb,KACCE,EAAI,CAAC,CAAE,OAAAC,CAAO,IAAMA,CAAM,EAC1BC,EAAqB,CACvB,EAGIC,EAAUJ,EACb,KACCK,EAAU,IAAMC,GAAiBT,CAAE,EAChC,KACCI,EAAI,CAAC,CAAE,OAAAC,CAAO,KAAO,CACnB,IAAQL,EAAG,UACX,OAAQA,EAAG,UAAYK,CACzB,EAAE,EACFK,EAAwB,QAAQ,CAClC,CACF,CACF,EAGF,OAAOC,EAAc,CAACR,EAASI,EAASN,CAAS,CAAC,EAC/C,KACCG,EAAI,CAAC,CAACQ,EAAQ,CAAE,IAAAC,EAAK,OAAAC,CAAO,EAAG,CAAE,OAAQ,CAAE,EAAAC,CAAE,EAAG,KAAM,CAAE,OAAAV,CAAO,CAAE,CAAC,KAChEA,EAAS,KAAK,IAAI,EAAGA,EACjB,KAAK,IAAI,EAAGQ,EAASE,EAAIH,CAAM,EAC/B,KAAK,IAAI,EAAGP,EAASU,EAAID,CAAM,CACnC,EACO,CACL,OAAQD,EAAMD,EACd,OAAAP,EACA,OAAQQ,EAAMD,GAAUG,CAC1B,EACD,EACDT,EAAqB,CAACU,EAAGC,IACvBD,EAAE,SAAWC,EAAE,QACfD,EAAE,SAAWC,EAAE,QACfD,EAAE,SAAWC,EAAE,MAChB,CACH,CACJ,CClDO,SAASC,GACdC,EACqB,CACrB,IAAMC,EAAU,SAAkB,WAAW,GAAK,CAChD,MAAOD,EAAO,UAAUE,GAAS,WAC/BA,EAAM,aAAa,qBAAqB,CAC1C,EAAE,OAAO,CACX,EAGA,OAAOC,EAAG,GAAGH,CAAM,EAChB,KACCI,GAASF,GAASG,EAAUH,EAAO,QAAQ,EACxC,KACCI,EAAI,IAAMJ,CAAK,CACjB,CACF,EACAK,EAAUP,EAAO,KAAK,IAAI,EAAGC,EAAQ,KAAK,EAAE,EAC5CK,EAAIJ,IAAU,CACZ,MAAOF,EAAO,QAAQE,CAAK,EAC3B,MAAO,CACL,OAASA,EAAM,aAAa,sBAAsB,EAClD,QAASA,EAAM,aAAa,uBAAuB,EACnD,OAASA,EAAM,aAAa,sBAAsB,CACpD,CACF,EAAa,EACbM,EAAY,CAAC,CACf,CACJ,CASO,SAASC,GACdC,EACgC,CAChC,OAAOC,EAAM,IAAM,CACjB,IAAMC,EAAQ,IAAIC,EAClBD,EAAM,UAAUE,GAAW,CACzB,SAAS,KAAK,aAAa,0BAA2B,EAAE,EAGxD,OAAW,CAACC,EAAKC,CAAK,IAAK,OAAO,QAAQF,EAAQ,KAAK,EACrD,SAAS,KAAK,aAAa,iBAAiBC,IAAOC,CAAK,EAG1D,QAASC,EAAQ,EAAGA,EAAQjB,EAAO,OAAQiB,IAAS,CAClD,IAAMC,EAAQlB,EAAOiB,GAAO,mBACxBC,aAAiB,cACnBA,EAAM,OAASJ,EAAQ,QAAUG,EACrC,CAGA,SAAS,YAAaH,CAAO,CAC/B,CAAC,EAGDF,EAAM,KAAKO,GAAUC,EAAc,CAAC,EACjC,UAAU,IAAM,CACf,SAAS,KAAK,gBAAgB,yBAAyB,CACzD,CAAC,EAGH,IAAMpB,EAASqB,EAA8B,QAASX,CAAE,EACxD,OAAOX,GAAaC,CAAM,EACvB,KACCsB,EAAIC,GAASX,EAAM,KAAKW,CAAK,CAAC,EAC9BC,EAAS,IAAMZ,EAAM,SAAS,CAAC,EAC/BN,EAAIiB,GAAUE,EAAA,CAAE,IAAKf,GAAOa,EAAQ,CACtC,CACJ,CAAC,CACH,CC/HA,IAAAG,GAAwB,SAiCxB,SAASC,GAAQC,EAAyB,CACxCA,EAAG,aAAa,kBAAmB,EAAE,EACrC,IAAMC,EAAOD,EAAG,UAChB,OAAAA,EAAG,gBAAgB,iBAAiB,EAC7BC,CACT,CAWO,SAASC,GACd,CAAE,OAAAC,CAAO,EACH,CACF,GAAAC,QAAY,YAAY,GAC1B,IAAIC,EAA8BC,GAAc,CAC9C,IAAI,GAAAF,QAAY,iDAAkD,CAChE,KAAMJ,GACJA,EAAG,aAAa,qBAAqB,GACrCD,GAAQQ,EACNP,EAAG,aAAa,uBAAuB,CACzC,CAAC,CAEL,CAAC,EACE,GAAG,UAAWQ,GAAMF,EAAW,KAAKE,CAAE,CAAC,CAC5C,CAAC,EACE,KAC
CC,EAAID,GAAM,CACQA,EAAG,QACX,MAAM,CAChB,CAAC,EACDE,EAAI,IAAMC,GAAY,kBAAkB,CAAC,CAC3C,EACG,UAAUR,CAAM,CAEzB,CCrCA,SAASS,GAAWC,EAAwB,CAC1C,GAAIA,EAAK,OAAS,EAChB,MAAO,CAAC,EAAE,EAGZ,GAAM,CAACC,EAAMC,CAAI,EAAI,CAAC,GAAGF,CAAI,EAC1B,KAAK,CAACG,EAAGC,IAAMD,EAAE,OAASC,EAAE,MAAM,EAClC,IAAIC,GAAOA,EAAI,QAAQ,SAAU,EAAE,CAAC,EAGnCC,EAAQ,EACZ,GAAIL,IAASC,EACXI,EAAQL,EAAK,WAEb,MAAOA,EAAK,WAAWK,CAAK,IAAMJ,EAAK,WAAWI,CAAK,GACrDA,IAGJ,OAAON,EAAK,IAAIK,GAAOA,EAAI,QAAQJ,EAAK,MAAM,EAAGK,CAAK,EAAG,EAAE,CAAC,CAC9D,CAaO,SAASC,GAAaC,EAAiC,CAC5D,IAAMC,EAAS,SAAkB,YAAa,eAAgBD,CAAI,EAClE,GAAIC,EACF,OAAOC,EAAGD,CAAM,EACX,CACL,IAAME,EAASC,GAAc,EAC7B,OAAOC,GAAW,IAAI,IAAI,cAAeL,GAAQG,EAAO,IAAI,CAAC,EAC1D,KACCG,EAAIC,GAAWhB,GAAWiB,EAAY,MAAOD,CAAO,EACjD,IAAIE,GAAQA,EAAK,WAAY,CAChC,CAAC,EACDC,GAAW,IAAMC,CAAK,EACtBC,GAAe,CAAC,CAAC,EACjBC,EAAIN,GAAW,SAAS,YAAaA,EAAS,eAAgBP,CAAI,CAAC,CACrE,CACJ,CACF,CCIO,SAASc,GACd,CAAE,UAAAC,EAAW,UAAAC,EAAW,UAAAC,CAAU,EAC5B,CACN,IAAMC,EAASC,GAAc,EAC7B,GAAI,SAAS,WAAa,QACxB,OAGE,sBAAuB,UACzB,QAAQ,kBAAoB,SAG5BC,EAAU,OAAQ,cAAc,EAC7B,UAAU,IAAM,CACf,QAAQ,kBAAoB,MAC9B,CAAC,GAIL,IAAMC,EAAUC,GAAoC,gBAAgB,EAChE,OAAOD,GAAY,cACrBA,EAAQ,KAAOA,EAAQ,MAGzB,IAAME,EAAQC,GAAa,EACxB,KACCC,EAAIC,GAASA,EAAM,IAAIC,GAAQ,GAAG,IAAI,IAAIA,EAAMT,EAAO,IAAI,GAAG,CAAC,EAC/DU,EAAUC,GAAQT,EAAsB,SAAS,KAAM,OAAO,EAC3D,KACCU,EAAOC,GAAM,CAACA,EAAG,SAAW,CAACA,EAAG,OAAO,EACvCH,EAAUG,GAAM,CACd,GAAIA,EAAG,kBAAkB,QAAS,CAChC,IAAMC,EAAKD,EAAG,OAAO,QAAQ,GAAG,EAChC,GAAIC,GAAM,CAACA,EAAG,OAAQ,CACpB,IAAMC,EAAM,IAAI,IAAID,EAAG,IAAI,EAO3B,GAJAC,EAAI,OAAS,GACbA,EAAI,KAAO,GAITA,EAAI,WAAa,SAAS,UAC1BJ,EAAK,SAASI,EAAI,SAAS,CAAC,EAE5B,OAAAF,EAAG,eAAe,EACXG,EAAG,CACR,IAAK,IAAI,IAAIF,EAAG,IAAI,CACtB,CAAC,CAEL,CACF,CACA,OAAOG,EACT,CAAC,CACH,CACF,EACAC,GAAoB,CACtB,EAGIC,EAAOjB,EAAyB,OAAQ,UAAU,EACrD,KACCU,EAAOC,GAAMA,EAAG,QAAU,IAAI,EAC9BN,EAAIM,IAAO,CACT,IAAK,IAAI,IAAI,SAAS,IAAI,EAC1B,OAAQA,EAAG,KACb,EAAE,EACFK,GAAoB,CACtB,EAGFE,EAAMf,EAAOc,CAAI,EACd,KACCE,EAAqB,CAACC,EAAGC,IAAMD,EAAE,IAAI,OAASC,EAAE,IAAI,IAAI,EACxDhB,EAAI,CAAC,CAAE,IAAAQ,CAAI,IAAMA,CAAG,CACtB,EACG,UAAUjB,CAAS,EAGxB,IAAM0B,EAAY1B,EACf,KACC2B,EAAwB,UAAU,EAClCf,EAAUK,GAAOW,GAAQX,EAAI,IAAI,EAC9B,KACCY,GAAW,KACTC,GAAYb,CAAG,EACRE,GACR,CACH,CACF,EACAC,GAAM,CACR,EAGFb,EACG,KACCwB,GAAOL,CAAS,CAClB,EACG,UAAU,CAAC,CAAE,IAAAT,CAAI,IAAM,CACtB,QAAQ,UAAU,CAAC,EAAG,GAAI,GAAGA,GAAK,CACpC,CAAC,EAGL,IAAMe,EAAM,IAAI,UAChBN,EACG,KACCd,EAAUqB,GAAOA,EAAI,KAAK,CAAC,EAC3BxB,EAAIwB,GAAOD,EAAI,gBAAgBC,EAAK,WAAW,CAAC,CAClD,EACG,UAAUlC,CAAS,EAGxBA,EACG,KACCmC,GAAK,CAAC,CACR,EACG,UAAUC,GAAe,CACxB,QAAWC,IAAY,CAGrB,QACA,sBACA,oBACA,yBAGA,+BACA,gCACA,mCACA,+BACA,2BACA,2BACA,GAAGC,EAAQ,wBAAwB,EAC/B,CAAC,0BAA0B,EAC3B,CAAC,CACP,EAAG,CACD,IAAMC,EAAShC,GAAmB8B,CAAQ,EACpCG,EAASjC,GAAmB8B,EAAUD,CAAW,EAErD,OAAOG,GAAW,aAClB,OAAOC,GAAW,aAElBD,EAAO,YAAYC,CAAM,CAE7B,CACF,CAAC,EAGLxC,EACG,KACCmC,GAAK,CAAC,EACNzB,EAAI,IAAM+B,GAAoB,WAAW,CAAC,EAC1C5B,EAAUI,GAAMyB,EAAY,SAAUzB,CAAE,CAAC,EACzC0B,GAAU1B,GAAM,CACd,IAAM2B,EAASC,EAAE,QAAQ,EACzB,GAAI5B,EAAG,IAAK,CACV,QAAW6B,KAAQ7B,EAAG,kBAAkB,EACtC2B,EAAO,aAAaE,EAAM7B,EAAG,aAAa6B,CAAI,CAAE,EAClD,OAAA7B,EAAG,YAAY2B,CAAM,EAGd,IAAIG,EAAWC,GAAY,CAChCJ,EAAO,OAAS,IAAMI,EAAS,SAAS,CAC1C,CAAC,CAGH,KACE,QAAAJ,EAAO,YAAc3B,EAAG,YACxBA,EAAG,YAAY2B,CAAM,EACdK,CAEX,CAAC,CACH,EACG,UAAU,EAGf1B,EAAMf,EAAOc,CAAI,EACd,KACCU,GAAOhC,CAAS,CAClB,EACG,UAAU,CAAC,CAAE,IAAAkB,EAAK,OAAAgC,CAAO,IAAM,CAC1BhC,EAAI,MAAQ,CAACgC,EACfC,GAAgBjC,EAAI,IAAI,EAExB,OAAO,SAAS,GAAGgC,GAAA,YAAAA,EAAQ,IAAK,CAAC,CAErC,CAAC,EAGLhD,EACG,KACCkD,GAAU5C,CAAK,EACf6C,GAAa,GAAG,EAChBzB,EAAwB,QAAQ,CAClC,EACG,UAAU,CAAC,CAAE,OAAAsB,CAAO,IAAM,CACzB,
QAAQ,aAAaA,EAAQ,EAAE,CACjC,CAAC,EAGL3B,EAAMf,EAAOc,CAAI,EACd,KACCgC,GAAY,EAAG,CAAC,EAChBvC,EAAO,CAAC,CAACU,EAAGC,CAAC,IAAMD,EAAE,IAAI,WAAaC,EAAE,IAAI,QAAQ,EACpDhB,EAAI,CAAC,CAAC,CAAE6C,CAAK,IAAMA,CAAK,CAC1B,EACG,UAAU,CAAC,CAAE,OAAAL,CAAO,IAAM,CACzB,OAAO,SAAS,GAAGA,GAAA,YAAAA,EAAQ,IAAK,CAAC,CACnC,CAAC,CACP,CCzSA,IAAAM,GAAuB,SCAvB,IAAAC,GAAuB,SAsChB,SAASC,GACdC,EAA2BC,EACD,CAC1B,IAAMC,EAAY,IAAI,OAAOF,EAAO,UAAW,KAAK,EAC9CG,EAAY,CAACC,EAAYC,EAAcC,IACpC,GAAGD,4BAA+BC,WAI3C,OAAQC,GAAkB,CACxBA,EAAQA,EACL,QAAQ,gBAAiB,GAAG,EAC5B,KAAK,EAGR,IAAMC,EAAQ,IAAI,OAAO,MAAMR,EAAO,cACpCO,EACG,QAAQ,uBAAwB,MAAM,EACtC,QAAQL,EAAW,GAAG,KACtB,KAAK,EAGV,OAAOO,IACLR,KACI,GAAAS,SAAWD,CAAK,EAChBA,GAED,QAAQD,EAAOL,CAAS,EACxB,QAAQ,8BAA+B,IAAI,CAClD,CACF,CC9BO,SAASQ,GAAiBC,EAAuB,CACtD,OAAOA,EACJ,MAAM,YAAY,EAChB,IAAI,CAACC,EAAOC,IAAUA,EAAQ,EAC3BD,EAAM,QAAQ,+BAAgC,IAAI,EAClDA,CACJ,EACC,KAAK,EAAE,EACT,QAAQ,kCAAmC,EAAE,EAC7C,KAAK,CACV,CCoCO,SAASE,GACdC,EAC+B,CAC/B,OAAOA,EAAQ,OAAS,CAC1B,CASO,SAASC,GACdD,EAC+B,CAC/B,OAAOA,EAAQ,OAAS,CAC1B,CASO,SAASE,GACdF,EACgC,CAChC,OAAOA,EAAQ,OAAS,CAC1B,CCvEA,SAASG,GAAiB,CAAE,OAAAC,EAAQ,KAAAC,CAAK,EAA6B,CAGhED,EAAO,KAAK,SAAW,GAAKA,EAAO,KAAK,KAAO,OACjDA,EAAO,KAAO,CACZE,GAAY,oBAAoB,CAClC,GAGEF,EAAO,YAAc,cACvBA,EAAO,UAAYE,GAAY,yBAAyB,GAQ1D,IAAMC,EAAyB,CAC7B,SANeD,GAAY,wBAAwB,EAClD,MAAM,SAAS,EACf,OAAO,OAAO,EAKf,YAAaE,EAAQ,gBAAgB,CACvC,EAGA,MAAO,CAAE,OAAAJ,EAAQ,KAAAC,EAAM,QAAAE,CAAQ,CACjC,CAkBO,SAASE,GACdC,EAAaC,EACC,CACd,IAAMP,EAASQ,GAAc,EACvBC,EAAS,IAAI,OAAOH,CAAG,EAGvBI,EAAM,IAAIC,EACVC,EAAMC,GAAYJ,EAAQ,CAAE,IAAAC,CAAI,CAAC,EACpC,KACCI,EAAIC,GAAW,CACb,GAAIC,GAAsBD,CAAO,EAC/B,QAAWE,KAAUF,EAAQ,KAAK,MAChC,QAAWG,KAAYD,EACrBC,EAAS,SAAW,GAAG,IAAI,IAAIA,EAAS,SAAUlB,EAAO,IAAI,IAEnE,OAAOe,CACT,CAAC,EACDI,GAAM,CACR,EAGF,OAAAC,GAAKb,CAAK,EACP,KACCO,EAAIO,IAAS,CACX,OACA,KAAMtB,GAAiBsB,CAAI,CAC7B,EAAwB,CAC1B,EACG,UAAUX,EAAI,KAAK,KAAKA,CAAG,CAAC,EAG1B,CAAE,IAAAA,EAAK,IAAAE,CAAI,CACpB,CCvEO,SAASU,GACd,CAAE,UAAAC,CAAU,EACN,CACN,IAAMC,EAASC,GAAc,EACvBC,EAAYC,GAChB,IAAI,IAAI,mBAAoBH,EAAO,IAAI,CACzC,EACG,KACCI,GAAW,IAAMC,CAAK,CACxB,EAGIC,EAAWJ,EACd,KACCK,EAAIC,GAAY,CACd,GAAM,CAAC,CAAEC,CAAO,EAAIT,EAAO,KAAK,MAAM,aAAa,EACnD,OAAOQ,EAAS,KAAK,CAAC,CAAE,QAAAE,EAAS,QAAAC,CAAQ,IACvCD,IAAYD,GAAWE,EAAQ,SAASF,CAAO,CAChD,GAAKD,EAAS,EACjB,CAAC,CACH,EAGFN,EACG,KACCK,EAAIC,GAAY,IAAI,IAAIA,EAAS,IAAIE,GAAW,CAC9C,GAAG,IAAI,IAAI,MAAMA,EAAQ,WAAYV,EAAO,IAAI,IAChDU,CACF,CAAC,CAAC,CAAC,EACHE,EAAUC,GAAQC,EAAsB,SAAS,KAAM,OAAO,EAC3D,KACCC,EAAOC,GAAM,CAACA,EAAG,SAAW,CAACA,EAAG,OAAO,EACvCC,GAAeX,CAAQ,EACvBM,EAAU,CAAC,CAACI,EAAIP,CAAO,IAAM,CAC3B,GAAIO,EAAG,kBAAkB,QAAS,CAChC,IAAME,EAAKF,EAAG,OAAO,QAAQ,GAAG,EAChC,GAAIE,GAAM,CAACA,EAAG,QAAUL,EAAK,IAAIK,EAAG,IAAI,EAAG,CACzC,IAAMC,EAAMD,EAAG,KAWf,MAAI,CAACF,EAAG,OAAO,QAAQ,aAAa,GAClBH,EAAK,IAAIM,CAAG,IACZV,EACPJ,GAEXW,EAAG,eAAe,EACXI,EAAGD,CAAG,EACf,CACF,CACA,OAAOd,CACT,CAAC,EACDO,EAAUO,GAAO,CACf,GAAM,CAAE,QAAAT,CAAQ,EAAIG,EAAK,IAAIM,CAAG,EAChC,OAAOE,GAAa,IAAI,IAAIF,CAAG,CAAC,EAC7B,KACCZ,EAAIe,GAAW,CAEb,IAAMC,EADWC,GAAY,EACP,KAAK,QAAQxB,EAAO,KAAM,EAAE,EAClD,OAAOsB,EAAQ,SAASC,EAAK,MAAM,GAAG,EAAE,EAAE,EACtC,IAAI,IAAI,MAAMb,KAAWa,IAAQvB,EAAO,IAAI,EAC5C,IAAI,IAAImB,CAAG,CACjB,CAAC,CACH,CACJ,CAAC,CACH,CACF,CACF,EACG,UAAUA,GAAOM,GAAYN,CAAG,CAAC,EAGtCO,EAAc,CAACxB,EAAWI,CAAQ,CAAC,EAChC,UAAU,CAAC,CAACE,EAAUC,CAAO,IAAM,CACpBkB,EAAW,mBAAmB,EACtC,YAAYC,GAAsBpB,EAAUC,CAAO,CAAC,CAC5D,CAAC,EAGHV,EAAU,KAAKa,EAAU,IAAMN,CAAQ,CAAC,EACrC,UAAUG,GAAW,CA5J1B,IAAAoB,EA+JM,IAAIC,EAAW,SAAS,aAAc,cAAc,EACpD,GAAIA,IAAa,KAAM,CACrB,IAAMC,IAASF,EAAA7B,EAAO,UAAP,YAAA6B,EAAgB,UAAW,SAC1CC,EAAW,CAACrB,EAAQ,QAAQ,SAASsB,CAAM,EAG3C,SAAS
,aAAcD,EAAU,cAAc,CACjD,CAGA,GAAIA,EACF,QAAWE,KAAWC,GAAqB,UAAU,EACnDD,EAAQ,OAAS,EACvB,CAAC,CACL,CCtFO,SAASE,GACdC,EAAsB,CAAE,IAAAC,CAAI,EACH,CACzB,IAAMC,GAAK,+BAAU,YAAaC,GAG5B,CAAE,aAAAC,CAAa,EAAIC,GAAY,EACjCD,EAAa,IAAI,GAAG,GACtBE,GAAU,SAAU,EAAI,EAG1B,IAAMC,EAASN,EACZ,KACCO,EAAOC,EAAoB,EAC3BC,GAAK,CAAC,EACNC,EAAI,IAAMP,EAAa,IAAI,GAAG,GAAK,EAAE,CACvC,EAGFQ,GAAY,QAAQ,EACjB,KACCJ,EAAOK,GAAU,CAACA,CAAM,EACxBH,GAAK,CAAC,CACR,EACG,UAAU,IAAM,CACf,IAAMI,EAAM,IAAI,IAAI,SAAS,IAAI,EACjCA,EAAI,aAAa,OAAO,GAAG,EAC3B,QAAQ,aAAa,CAAC,EAAG,GAAI,GAAGA,GAAK,CACvC,CAAC,EAGLP,EAAO,UAAUQ,GAAS,CACpBA,IACFf,EAAG,MAAQe,EACXf,EAAG,MAAM,EAEb,CAAC,EAGD,IAAMgB,EAASC,GAAkBjB,CAAE,EAC7BkB,EAASC,EACbC,EAAUpB,EAAI,OAAO,EACrBoB,EAAUpB,EAAI,OAAO,EAAE,KAAKqB,GAAM,CAAC,CAAC,EACpCd,CACF,EACG,KACCI,EAAI,IAAMT,EAAGF,EAAG,KAAK,CAAC,EACtBsB,EAAU,EAAE,EACZC,EAAqB,CACvB,EAGF,OAAOC,EAAc,CAACN,EAAQF,CAAM,CAAC,EAClC,KACCL,EAAI,CAAC,CAACI,EAAOU,CAAK,KAAO,CAAE,MAAAV,EAAO,MAAAU,CAAM,EAAE,EAC1CC,EAAY,CAAC,CACf,CACJ,CAUO,SAASC,GACd3B,EAAsB,CAAE,IAAA4B,EAAK,IAAA3B,CAAI,EACqB,CACtD,IAAM4B,EAAQ,IAAIC,EACZC,EAAQF,EAAM,KAAKG,GAAS,CAAC,CAAC,EAGpC,OAAAH,EACG,KACCI,EAAwB,OAAO,EAC/BtB,EAAI,CAAC,CAAE,MAAAI,CAAM,KAA2B,CACtC,OACA,KAAMA,CACR,EAAE,CACJ,EACG,UAAUa,EAAI,KAAK,KAAKA,CAAG,CAAC,EAGjCC,EACG,KACCI,EAAwB,OAAO,CACjC,EACG,UAAU,CAAC,CAAE,MAAAR,CAAM,IAAM,CACpBA,GACFnB,GAAU,SAAUmB,CAAK,EACzBzB,EAAG,YAAc,IAEjBA,EAAG,YAAckC,GAAY,oBAAoB,CAErD,CAAC,EAGLd,EAAUpB,EAAG,KAAO,OAAO,EACxB,KACCmC,GAAUJ,CAAK,CACjB,EACG,UAAU,IAAM/B,EAAG,MAAM,CAAC,EAGxBD,GAAiBC,EAAI,CAAE,IAAA4B,EAAK,IAAA3B,CAAI,CAAC,EACrC,KACCmC,EAAIC,GAASR,EAAM,KAAKQ,CAAK,CAAC,EAC9BC,EAAS,IAAMT,EAAM,SAAS,CAAC,EAC/BlB,EAAI0B,GAAUE,EAAA,CAAE,IAAKvC,GAAOqC,EAAQ,EACpCG,GAAM,CACR,CACJ,CCrHO,SAASC,GACdC,EAAiB,CAAE,IAAAC,CAAI,EAAiB,CAAE,OAAAC,CAAO,EACZ,CACrC,IAAMC,EAAQ,IAAIC,EACZC,EAAYC,GAAqBN,EAAG,aAAc,EACrD,KACCO,EAAO,OAAO,CAChB,EAGIC,EAAOC,EAAW,wBAAyBT,CAAE,EAC7CU,EAAOD,EAAW,uBAAwBT,CAAE,EAG5CW,EAASV,EACZ,KACCM,EAAOK,EAAoB,EAC3BC,GAAK,CAAC,CACR,EAGF,OAAAV,EACG,KACCW,GAAeZ,CAAM,EACrBa,GAAUJ,CAAM,CAClB,EACG,UAAU,CAAC,CAAC,CAAE,MAAAK,CAAM,EAAG,CAAE,MAAAC,CAAM,CAAC,IAAM,CACrC,GAAIA,EACF,OAAQD,EAAM,OAAQ,CAGpB,IAAK,GACHR,EAAK,YAAcU,GAAY,oBAAoB,EACnD,MAGF,IAAK,GACHV,EAAK,YAAcU,GAAY,mBAAmB,EAClD,MAGF,QACEV,EAAK,YAAcU,GACjB,sBACAC,GAAMH,EAAM,MAAM,CACpB,CACJ,MAEAR,EAAK,YAAcU,GAAY,2BAA2B,CAE9D,CAAC,EAGLf,EACG,KACCiB,EAAI,IAAMV,EAAK,UAAY,EAAE,EAC7BW,EAAU,CAAC,CAAE,MAAAL,CAAM,IAAMM,EACvBC,EAAG,GAAGP,EAAM,MAAM,EAAG,EAAE,CAAC,EACxBO,EAAG,GAAGP,EAAM,MAAM,EAAE,CAAC,EAClB,KACCQ,GAAY,CAAC,EACbC,GAAQpB,CAAS,EACjBgB,EAAU,CAAC,CAACK,CAAK,IAAMA,CAAK,CAC9B,CACJ,CAAC,CACH,EACG,UAAUC,GAAUjB,EAAK,YACxBkB,GAAuBD,CAAM,CAC/B,CAAC,EAGW1B,EACb,KACCM,EAAOsB,EAAqB,EAC5BC,EAAI,CAAC,CAAE,KAAAC,CAAK,IAAMA,CAAI,CACxB,EAIC,KACCX,EAAIY,GAAS7B,EAAM,KAAK6B,CAAK,CAAC,EAC9BC,EAAS,IAAM9B,EAAM,SAAS,CAAC,EAC/B2B,EAAIE,GAAUE,EAAA,CAAE,IAAKlC,GAAOgC,EAAQ,CACtC,CACJ,CC1FO,SAASG,GACdC,EAAkB,CAAE,OAAAC,CAAO,EACF,CACzB,OAAOA,EACJ,KACCC,EAAI,CAAC,CAAE,MAAAC,CAAM,IAAM,CACjB,IAAMC,EAAMC,GAAY,EACxB,OAAAD,EAAI,KAAO,GACXA,EAAI,aAAa,OAAO,GAAG,EAC3BA,EAAI,aAAa,IAAI,IAAKD,CAAK,EACxB,CAAE,IAAAC,CAAI,CACf,CAAC,CACH,CACJ,CAUO,SAASE,GACdC,EAAuBC,EACa,CACpC,IAAMC,EAAQ,IAAIC,EAClB,OAAAD,EAAM,UAAU,CAAC,CAAE,IAAAL,CAAI,IAAM,CAC3BG,EAAG,aAAa,sBAAuBA,EAAG,IAAI,EAC9CA,EAAG,KAAO,GAAGH,GACf,CAAC,EAGDO,EAAUJ,EAAI,OAAO,EAClB,UAAUK,GAAMA,EAAG,eAAe,CAAC,EAG/Bb,GAAiBQ,EAAIC,CAAO,EAChC,KACCK,EAAIC,GAASL,EAAM,KAAKK,CAAK,CAAC,EAC9BC,EAAS,IAAMN,EAAM,SAAS,CAAC,EAC/BP,EAAIY,GAAUE,EAAA,CAAE,IAAKT,GAAOO,EAAQ,CACtC,CACJ,CCtCO,SAASG,GACdC,EAAiB,CAAE,IAAAC,CAAI,EAAiB,CAAE,UAAAC,CAAU,EACd,CACtC
,IAAMC,EAAQ,IAAIC,EAGZC,EAASC,GAAoB,cAAc,EAC3CC,EAASC,EACbC,EAAUJ,EAAO,SAAS,EAC1BI,EAAUJ,EAAO,OAAO,CAC1B,EACG,KACCK,GAAUC,EAAc,EACxBC,EAAI,IAAMP,EAAM,KAAK,EACrBQ,EAAqB,CACvB,EAGF,OAAAV,EACG,KACCW,GAAkBP,CAAM,EACxBK,EAAI,CAAC,CAAC,CAAE,YAAAG,CAAY,EAAGC,CAAK,IAAM,CAChC,IAAMC,EAAQD,EAAM,MAAM,UAAU,EACpC,IAAID,GAAA,YAAAA,EAAa,SAAUE,EAAMA,EAAM,OAAS,GAAI,CAClD,IAAMC,EAAOH,EAAYA,EAAY,OAAS,GAC1CG,EAAK,WAAWD,EAAMA,EAAM,OAAS,EAAE,IACzCA,EAAMA,EAAM,OAAS,GAAKC,EAC9B,MACED,EAAM,OAAS,EAEjB,OAAOA,CACT,CAAC,CACH,EACG,UAAUA,GAASjB,EAAG,UAAYiB,EAChC,KAAK,EAAE,EACP,QAAQ,MAAO,QAAQ,CAC1B,EAGJf,EACG,KACCiB,EAAO,CAAC,CAAE,KAAAC,CAAK,IAAMA,IAAS,QAAQ,CACxC,EACG,UAAUC,GAAO,CAChB,OAAQA,EAAI,KAAM,CAGhB,IAAK,aAEDrB,EAAG,UAAU,QACbK,EAAM,iBAAmBA,EAAM,MAAM,SAErCA,EAAM,MAAQL,EAAG,WACnB,KACJ,CACF,CAAC,EAGWC,EACb,KACCkB,EAAOG,EAAqB,EAC5BV,EAAI,CAAC,CAAE,KAAAW,CAAK,IAAMA,CAAI,CACxB,EAIC,KACCC,EAAIC,GAAStB,EAAM,KAAKsB,CAAK,CAAC,EAC9BC,EAAS,IAAMvB,EAAM,SAAS,CAAC,EAC/BS,EAAI,KAAO,CAAE,IAAKZ,CAAG,EAAE,CACzB,CACJ,CC9CO,SAAS2B,GACdC,EAAiB,CAAE,OAAAC,EAAQ,UAAAC,CAAU,EACN,CAC/B,IAAMC,EAASC,GAAc,EAC7B,GAAI,CACF,IAAMC,GAAM,+BAAU,SAAUF,EAAO,OACjCG,EAASC,GAAkBF,EAAKJ,CAAM,EAGtCO,EAASC,GAAoB,eAAgBT,CAAE,EAC/CU,EAASD,GAAoB,gBAAiBT,CAAE,EAGhD,CAAE,IAAAW,EAAK,IAAAC,CAAI,EAAIN,EACrBK,EACG,KACCE,EAAOC,EAAoB,EAC3BC,GAAOH,EAAI,KAAKC,EAAOG,EAAoB,CAAC,CAAC,EAC7CC,GAAK,CAAC,CACR,EACG,UAAUN,EAAI,KAAK,KAAKA,CAAG,CAAC,EAGjCT,EACG,KACCW,EAAO,CAAC,CAAE,KAAAK,CAAK,IAAMA,IAAS,QAAQ,CACxC,EACG,UAAUC,GAAO,CAChB,IAAMC,EAASC,GAAiB,EAChC,OAAQF,EAAI,KAAM,CAGhB,IAAK,QACH,GAAIC,IAAWZ,EAAO,CACpB,IAAMc,EAAU,IAAI,IACpB,QAAWC,KAAUC,EACnB,sBAAuBd,CACzB,EAAG,CACD,IAAMe,EAAUF,EAAO,kBACvBD,EAAQ,IAAIC,EAAQ,WAClBE,EAAQ,aAAa,eAAe,CACtC,CAAC,CACH,CAGA,GAAIH,EAAQ,KAAM,CAChB,GAAM,CAAC,CAACI,CAAI,CAAC,EAAI,CAAC,GAAGJ,CAAO,EAAE,KAAK,CAAC,CAAC,CAAEK,CAAC,EAAG,CAAC,CAAEC,CAAC,IAAMA,EAAID,CAAC,EAC1DD,EAAK,MAAM,CACb,CAGAP,EAAI,MAAM,CACZ,CACA,MAGF,IAAK,SACL,IAAK,MACHU,GAAU,SAAU,EAAK,EACzBrB,EAAM,KAAK,EACX,MAGF,IAAK,UACL,IAAK,YACH,GAAI,OAAOY,GAAW,YACpBZ,EAAM,MAAM,MACP,CACL,IAAMsB,EAAM,CAACtB,EAAO,GAAGgB,EACrB,wDACAd,CACF,CAAC,EACKqB,EAAI,KAAK,IAAI,GACjB,KAAK,IAAI,EAAGD,EAAI,QAAQV,CAAM,CAAC,EAAIU,EAAI,QACrCX,EAAI,OAAS,UAAY,GAAK,IAE9BW,EAAI,MAAM,EACdA,EAAIC,GAAG,MAAM,CACf,CAGAZ,EAAI,MAAM,EACV,MAGF,QACMX,IAAUa,GAAiB,GAC7Bb,EAAM,MAAM,CAClB,CACF,CAAC,EAGLN,EACG,KACCW,EAAO,CAAC,CAAE,KAAAK,CAAK,IAAMA,IAAS,QAAQ,CACxC,EACG,UAAUC,GAAO,CAChB,OAAQA,EAAI,KAAM,CAGhB,IAAK,IACL,IAAK,IACL,IAAK,IACHX,EAAM,MAAM,EACZA,EAAM,OAAO,EAGbW,EAAI,MAAM,EACV,KACJ,CACF,CAAC,EAGL,IAAMa,EAAUC,GAAiBzB,EAAOF,CAAM,EACxC4B,EAAUC,GAAkBzB,EAAQJ,EAAQ,CAAE,OAAA0B,CAAO,CAAC,EAC5D,OAAOI,EAAMJ,EAAQE,CAAO,EACzB,KACCG,GAGE,GAAGC,GAAqB,eAAgBtC,CAAE,EACvC,IAAIuC,GAASC,GAAiBD,EAAO,CAAE,OAAAP,CAAO,CAAC,CAAC,EAGnD,GAAGM,GAAqB,iBAAkBtC,CAAE,EACzC,IAAIuC,GAASE,GAAmBF,EAAOjC,EAAQ,CAAE,UAAAJ,CAAU,CAAC,CAAC,CAClE,CACF,CAGJ,OAASwC,EAAP,CACA,OAAA1C,EAAG,OAAS,GACL2C,EACT,CACF,CCtKO,SAASC,GACdC,EAAiB,CAAE,OAAAC,EAAQ,UAAAC,CAAU,EACG,CACxC,OAAOC,EAAc,CACnBF,EACAC,EACG,KACCE,EAAUC,GAAY,CAAC,EACvBC,EAAOC,GAAO,CAAC,CAACA,EAAI,aAAa,IAAI,GAAG,CAAC,CAC3C,CACJ,CAAC,EACE,KACCC,EAAI,CAAC,CAACC,EAAOF,CAAG,IAAMG,GAAuBD,EAAM,OAAQ,EAAI,EAC7DF,EAAI,aAAa,IAAI,GAAG,CAC1B,CAAC,EACDC,EAAIG,GAAM,CA1FhB,IAAAC,EA2FQ,IAAMC,EAAQ,IAAI,IAGZC,EAAK,SAAS,mBAAmBd,EAAI,WAAW,SAAS,EAC/D,QAASe,EAAOD,EAAG,SAAS,EAAGC,EAAMA,EAAOD,EAAG,SAAS,EACtD,IAAIF,EAAAG,EAAK,gBAAL,MAAAH,EAAoB,aAAc,CACpC,IAAMI,EAAWD,EAAK,YAChBE,EAAWN,EAAGK,CAAQ,EACxBC,EAAS,OAASD,EAAS,QAC7BH,EAAM,IAAIE,EAAmBE,CAAQ,CACzC,CAIF,OAAW,CAACF,EAAMG,CAAI,IAAKL,EAAO,CAChC,GAAM,CAAE,WAAAM,CAAW,EAAIC,EAAE,OAAQ,KAAMF,CAAI,EAC
3CH,EAAK,YAAY,GAAG,MAAM,KAAKI,CAAU,CAAC,CAC5C,CAGA,MAAO,CAAE,IAAKnB,EAAI,MAAAa,CAAM,CAC1B,CAAC,CACH,CACJ,CCbO,SAASQ,GACdC,EAAiB,CAAE,UAAAC,EAAW,MAAAC,CAAM,EACf,CACrB,IAAMC,EAASH,EAAG,cACZI,EACJD,EAAO,UACPA,EAAO,cAAe,UAGxB,OAAOE,EAAc,CAACH,EAAOD,CAAS,CAAC,EACpC,KACCK,EAAI,CAAC,CAAC,CAAE,OAAAC,EAAQ,OAAAC,CAAO,EAAG,CAAE,OAAQ,CAAE,EAAAC,CAAE,CAAE,CAAC,KACzCD,EAASA,EACL,KAAK,IAAIJ,EAAQ,KAAK,IAAI,EAAGK,EAAIF,CAAM,CAAC,EACxCH,EACG,CACL,OAAAI,EACA,OAAQC,GAAKF,EAASH,CACxB,EACD,EACDM,EAAqB,CAACC,EAAGC,IACvBD,EAAE,SAAWC,EAAE,QACfD,EAAE,SAAWC,EAAE,MAChB,CACH,CACJ,CAuBO,SAASC,GACdb,EAAiBc,EACe,CADf,IAAAC,EAAAD,EAAE,SAAAE,CAtJrB,EAsJmBD,EAAcE,EAAAC,GAAdH,EAAc,CAAZ,YAEnB,IAAMI,EAAQC,EAAW,0BAA2BpB,CAAE,EAChD,CAAE,EAAAS,CAAE,EAAIY,GAAiBF,CAAK,EACpC,OAAOG,EAAM,IAAM,CACjB,IAAMC,EAAQ,IAAIC,EAClB,OAAAD,EACG,KACCE,GAAU,EAAGC,EAAuB,EACpCC,GAAeX,CAAO,CACxB,EACG,UAAU,CAGT,KAAK,CAAC,CAAE,OAAAR,CAAO,EAAG,CAAE,OAAQD,CAAO,CAAC,EAAG,CACrCY,EAAM,MAAM,OAAS,GAAGX,EAAS,EAAIC,MACrCT,EAAG,MAAM,IAAY,GAAGO,KAC1B,EAGA,UAAW,CACTY,EAAM,MAAM,OAAS,GACrBnB,EAAG,MAAM,IAAY,EACvB,CACF,CAAC,EAGLuB,EACG,KACCK,GAAUF,EAAuB,EACjCG,GAAK,CAAC,CACR,EACG,UAAU,IAAM,CACf,QAAWC,KAAQC,EAAY,8BAA+B/B,CAAE,EAAG,CACjE,IAAMgC,EAAYC,GAAoBH,CAAI,EAC1C,GAAI,OAAOE,GAAc,YAAa,CACpC,IAAMzB,EAASuB,EAAK,UAAYE,EAAU,UACpC,CAAE,OAAAxB,CAAO,EAAI0B,GAAeF,CAAS,EAC3CA,EAAU,SAAS,CACjB,IAAKzB,EAASC,EAAS,CACzB,CAAC,CACH,CACF,CACF,CAAC,EAGET,GAAaC,EAAIiB,CAAO,EAC5B,KACCkB,EAAIC,GAASb,EAAM,KAAKa,CAAK,CAAC,EAC9BC,EAAS,IAAMd,EAAM,SAAS,CAAC,EAC/BjB,EAAI8B,GAAUE,EAAA,CAAE,IAAKtC,GAAOoC,EAAQ,CACtC,CACJ,CAAC,CACH,CChJO,SAASG,GACdC,EAAcC,EACW,CACzB,GAAI,OAAOA,GAAS,YAAa,CAC/B,IAAMC,EAAM,gCAAgCF,KAAQC,IACpD,OAAOE,GAGLC,GAAqB,GAAGF,mBAAqB,EAC1C,KACCG,GAAW,IAAMC,CAAK,EACtBC,EAAIC,IAAY,CACd,QAASA,EAAQ,QACnB,EAAE,EACFC,GAAe,CAAC,CAAC,CACnB,EAGFL,GAAkBF,CAAG,EAClB,KACCG,GAAW,IAAMC,CAAK,EACtBC,EAAIG,IAAS,CACX,MAAOA,EAAK,iBACZ,MAAOA,EAAK,WACd,EAAE,EACFD,GAAe,CAAC,CAAC,CACnB,CACJ,EACG,KACCF,EAAI,CAAC,CAACC,EAASE,CAAI,IAAOC,IAAA,GAAKH,GAAYE,EAAO,CACpD,CAGJ,KAAO,CACL,IAAMR,EAAM,gCAAgCF,IAC5C,OAAOI,GAAkBF,CAAG,EACzB,KACCK,EAAIG,IAAS,CACX,aAAcA,EAAK,YACrB,EAAE,EACFD,GAAe,CAAC,CAAC,CACnB,CACJ,CACF,CCvDO,SAASG,GACdC,EAAcC,EACW,CACzB,IAAMC,EAAM,WAAWF,qBAAwB,mBAAmBC,CAAO,IACzE,OAAOE,GAA2BD,CAAG,EAClC,KACCE,GAAW,IAAMC,CAAK,EACtBC,EAAI,CAAC,CAAE,WAAAC,EAAY,YAAAC,CAAY,KAAO,CACpC,MAAOD,EACP,MAAOC,CACT,EAAE,EACFC,GAAe,CAAC,CAAC,CACnB,CACJ,CCOO,SAASC,GACdC,EACyB,CAGzB,IAAIC,EAAQD,EAAI,MAAM,qCAAqC,EAC3D,GAAIC,EAAO,CACT,GAAM,CAAC,CAAEC,EAAMC,CAAI,EAAIF,EACvB,OAAOG,GAA2BF,EAAMC,CAAI,CAC9C,CAIA,GADAF,EAAQD,EAAI,MAAM,oCAAoC,EAClDC,EAAO,CACT,GAAM,CAAC,CAAEI,EAAMC,CAAI,EAAIL,EACvB,OAAOM,GAA2BF,EAAMC,CAAI,CAC9C,CAGA,OAAOE,CACT,CCpBA,IAAIC,GAgBG,SAASC,GACdC,EACoB,CACpB,OAAOF,QAAWG,EAAM,IAAM,CAC5B,IAAMC,EAAS,SAAsB,WAAY,cAAc,EAC/D,GAAIA,EACF,OAAOC,EAAGD,CAAM,EAKhB,GADYE,GAAqB,SAAS,EAClC,OAAQ,CACd,IAAMC,EAAU,SAA0B,WAAW,EACrD,GAAI,EAAEA,GAAWA,EAAQ,QACvB,OAAOC,CACX,CAGA,OAAOC,GAAiBP,EAAG,IAAI,EAC5B,KACCQ,EAAIC,GAAS,SAAS,WAAYA,EAAO,cAAc,CAAC,CAC1D,CAEN,CAAC,EACE,KACCC,GAAW,IAAMJ,CAAK,EACtBK,EAAOF,GAAS,OAAO,KAAKA,CAAK,EAAE,OAAS,CAAC,EAC7CG,EAAIH,IAAU,CAAE,MAAAA,CAAM,EAAE,EACxBI,EAAY,CAAC,CACf,EACJ,CASO,SAASC,GACdd,EAC+B,CAC/B,IAAMe,EAAQC,EAAW,uBAAwBhB,CAAE,EACnD,OAAOC,EAAM,IAAM,CACjB,IAAMgB,EAAQ,IAAIC,EAClB,OAAAD,EAAM,UAAU,CAAC,CAAE,MAAAR,CAAM,IAAM,CAC7BM,EAAM,YAAYI,GAAkBV,CAAK,CAAC,EAC1CM,EAAM,UAAU,IAAI,+BAA+B,CACrD,CAAC,EAGMhB,GAAYC,CAAE,EAClB,KACCQ,EAAIY,GAASH,EAAM,KAAKG,CAAK,CAAC,EAC9BC,EAAS,IAAMJ,EAAM,SAAS,CAAC,EAC/BL,EAAIQ,GAAUE,EAAA,CAAE,IAAKtB,GAAOoB,EAAQ,CACtC,CACJ,CAAC,CACH,CCtDO,SAASG,GACdC,EAAiB,CAAE,UAAAC,EA
AW,QAAAC,CAAQ,EACpB,CAClB,OAAOC,GAAiB,SAAS,IAAI,EAClC,KACCC,EAAU,IAAMC,GAAgBL,EAAI,CAAE,QAAAE,EAAS,UAAAD,CAAU,CAAC,CAAC,EAC3DK,EAAI,CAAC,CAAE,OAAQ,CAAE,EAAAC,CAAE,CAAE,KACZ,CACL,OAAQA,GAAK,EACf,EACD,EACDC,EAAwB,QAAQ,CAClC,CACJ,CAaO,SAASC,GACdT,EAAiBU,EACY,CAC7B,OAAOC,EAAM,IAAM,CACjB,IAAMC,EAAQ,IAAIC,EAClB,OAAAD,EAAM,UAAU,CAGd,KAAK,CAAE,OAAAE,CAAO,EAAG,CACfd,EAAG,OAASc,CACd,EAGA,UAAW,CACTd,EAAG,OAAS,EACd,CACF,CAAC,GAICe,EAAQ,wBAAwB,EAC5BC,EAAG,CAAE,OAAQ,EAAM,CAAC,EACpBjB,GAAUC,EAAIU,CAAO,GAExB,KACCO,EAAIC,GAASN,EAAM,KAAKM,CAAK,CAAC,EAC9BC,EAAS,IAAMP,EAAM,SAAS,CAAC,EAC/BN,EAAIY,GAAUE,EAAA,CAAE,IAAKpB,GAAOkB,EAAQ,CACtC,CACJ,CAAC,CACH,CCpBO,SAASG,GACdC,EAAiB,CAAE,UAAAC,EAAW,QAAAC,CAAQ,EACT,CAC7B,IAAMC,EAAQ,IAAI,IAGZC,EAAUC,EAA+B,cAAeL,CAAE,EAChE,QAAWM,KAAUF,EAAS,CAC5B,IAAMG,EAAK,mBAAmBD,EAAO,KAAK,UAAU,CAAC,CAAC,EAChDE,EAASC,GAAmB,QAAQF,KAAM,EAC5C,OAAOC,GAAW,aACpBL,EAAM,IAAIG,EAAQE,CAAM,CAC5B,CAGA,IAAME,EAAUR,EACb,KACCS,EAAwB,QAAQ,EAChCC,EAAI,CAAC,CAAE,OAAAC,CAAO,IAAM,CAClB,IAAMC,EAAOC,GAAoB,MAAM,EACjCC,EAAOC,EAAW,wBAAyBH,CAAI,EACrD,OAAOD,EAAS,IACdG,EAAK,UACLF,EAAK,UAET,CAAC,EACDI,GAAM,CACR,EAgFF,OA7EmBC,GAAiB,SAAS,IAAI,EAC9C,KACCR,EAAwB,QAAQ,EAGhCS,EAAUC,GAAQC,EAAM,IAAM,CAC5B,IAAIC,EAA4B,CAAC,EACjC,OAAOC,EAAG,CAAC,GAAGrB,CAAK,EAAE,OAAO,CAACsB,EAAO,CAACnB,EAAQE,CAAM,IAAM,CACvD,KAAOe,EAAK,QACGpB,EAAM,IAAIoB,EAAKA,EAAK,OAAS,EAAE,EACnC,SAAWf,EAAO,SACzBe,EAAK,IAAI,EAOb,IAAIG,EAASlB,EAAO,UACpB,KAAO,CAACkB,GAAUlB,EAAO,eACvBA,EAASA,EAAO,cAChBkB,EAASlB,EAAO,UAIlB,OAAOiB,EAAM,IACX,CAAC,GAAGF,EAAO,CAAC,GAAGA,EAAMjB,CAAM,CAAC,EAAE,QAAQ,EACtCoB,CACF,CACF,EAAG,IAAI,GAAkC,CAAC,CAC5C,CAAC,EACE,KAGCd,EAAIa,GAAS,IAAI,IAAI,CAAC,GAAGA,CAAK,EAAE,KAAK,CAAC,CAAC,CAAEE,CAAC,EAAG,CAAC,CAAEC,CAAC,IAAMD,EAAIC,CAAC,CAAC,CAAC,EAC9DC,GAAkBnB,CAAO,EAGzBU,EAAU,CAAC,CAACK,EAAOK,CAAM,IAAM7B,EAC5B,KACC8B,GAAK,CAAC,CAACC,EAAMC,CAAI,EAAG,CAAE,OAAQ,CAAE,EAAAC,CAAE,EAAG,KAAAC,CAAK,IAAM,CAC9C,IAAMC,EAAOF,EAAIC,EAAK,QAAU,KAAK,MAAMd,EAAK,MAAM,EAGtD,KAAOY,EAAK,QAAQ,CAClB,GAAM,CAAC,CAAEP,CAAM,EAAIO,EAAK,GACxB,GAAIP,EAASI,EAASI,GAAKE,EACzBJ,EAAO,CAAC,GAAGA,EAAMC,EAAK,MAAM,CAAE,MAE9B,MAEJ,CAGA,KAAOD,EAAK,QAAQ,CAClB,GAAM,CAAC,CAAEN,CAAM,EAAIM,EAAKA,EAAK,OAAS,GACtC,GAAIN,EAASI,GAAUI,GAAK,CAACE,EAC3BH,EAAO,CAACD,EAAK,IAAI,EAAI,GAAGC,CAAI,MAE5B,MAEJ,CAGA,MAAO,CAACD,EAAMC,CAAI,CACpB,EAAG,CAAC,CAAC,EAAG,CAAC,GAAGR,CAAK,CAAC,CAAC,EACnBY,EAAqB,CAACV,EAAGC,IACvBD,EAAE,KAAOC,EAAE,IACXD,EAAE,KAAOC,EAAE,EACZ,CACH,CACF,CACF,CACF,CACF,EAIC,KACChB,EAAI,CAAC,CAACoB,EAAMC,CAAI,KAAO,CACrB,KAAMD,EAAK,IAAI,CAAC,CAACT,CAAI,IAAMA,CAAI,EAC/B,KAAMU,EAAK,IAAI,CAAC,CAACV,CAAI,IAAMA,CAAI,CACjC,EAAE,EAGFe,EAAU,CAAE,KAAM,CAAC,EAAG,KAAM,CAAC,CAAE,CAAC,EAChCC,GAAY,EAAG,CAAC,EAChB3B,EAAI,CAAC,CAAC,EAAGgB,CAAC,IAGJ,EAAE,KAAK,OAASA,EAAE,KAAK,OAClB,CACL,KAAMA,EAAE,KAAK,MAAM,KAAK,IAAI,EAAG,EAAE,KAAK,OAAS,CAAC,EAAGA,EAAE,KAAK,MAAM,EAChE,KAAM,CAAC,CACT,EAIO,CACL,KAAMA,EAAE,KAAK,MAAM,EAAE,EACrB,KAAMA,EAAE,KAAK,MAAM,EAAGA,EAAE,KAAK,OAAS,EAAE,KAAK,MAAM,CACrD,CAEH,CACH,CACJ,CAYO,SAASY,GACdxC,EAAiB,CAAE,UAAAC,EAAW,QAAAC,EAAS,QAAAuC,CAAQ,EACP,CACxC,OAAOnB,EAAM,IAAM,CACjB,IAAMoB,EAAQ,IAAIC,EACZC,EAAQF,EAAM,KAAKG,GAAS,CAAC,CAAC,EAoBpC,GAnBAH,EAAM,UAAU,CAAC,CAAE,KAAAV,EAAM,KAAAC,CAAK,IAAM,CAGlC,OAAW,CAAC3B,CAAM,IAAK2B,EACrB3B,EAAO,UAAU,OAAO,sBAAsB,EAC9CA,EAAO,UAAU,OAAO,sBAAsB,EAIhD,OAAW,CAACmB,EAAO,CAACnB,CAAM,CAAC,IAAK0B,EAAK,QAAQ,EAC3C1B,EAAO,UAAU,IAAI,sBAAsB,EAC3CA,EAAO,UAAU,OACf,uBACAmB,IAAUO,EAAK,OAAS,CAC1B,CAEJ,CAAC,EAGGc,EAAQ,YAAY,EAAG,CAGzB,IAAMC,EAAUC,EACd/C,EAAU,KAAKgD,GAAa,CAAC,EAAGrC,EAAI,IAAG,EAAY,CAAC,EACpDX,EAAU,KAAKgD,GAAa,GAAG,EAAGrC,EAAI,IAAM,QAAiB,CAAC,CAChE,EAGA8B,E
ACG,KACCQ,EAAO,CAAC,CAAE,KAAAlB,CAAK,IAAMA,EAAK,OAAS,CAAC,EACpCmB,GAAeJ,CAAO,CACxB,EACG,UAAU,CAAC,CAAC,CAAE,KAAAf,CAAK,EAAGoB,CAAQ,IAAM,CACnC,GAAM,CAAC9C,CAAM,EAAI0B,EAAKA,EAAK,OAAS,GACpC,GAAI1B,EAAO,aAAc,CAGvB,IAAM+C,EAAYC,GAAoBhD,CAAM,EAC5C,GAAI,OAAO+C,GAAc,YAAa,CACpC,IAAM3B,EAASpB,EAAO,UAAY+C,EAAU,UACtC,CAAE,OAAAxC,CAAO,EAAI0C,GAAeF,CAAS,EAC3CA,EAAU,SAAS,CACjB,IAAK3B,EAASb,EAAS,EACvB,SAAAuC,CACF,CAAC,CACH,CACF,CACF,CAAC,CACP,CAGA,OAAIN,EAAQ,qBAAqB,GAC/B7C,EACG,KACCuD,GAAUZ,CAAK,EACfjC,EAAwB,QAAQ,EAChCsC,GAAa,GAAG,EAChBQ,GAAK,CAAC,EACND,GAAUf,EAAQ,KAAKgB,GAAK,CAAC,CAAC,CAAC,EAC/BC,GAAO,CAAE,MAAO,GAAI,CAAC,EACrBP,GAAeT,CAAK,CACtB,EACG,UAAU,CAAC,CAAC,CAAE,CAAE,KAAAV,CAAK,CAAC,IAAM,CAC3B,IAAM2B,EAAMC,GAAY,EAGlBtD,EAAS0B,EAAKA,EAAK,OAAS,GAClC,GAAI1B,GAAUA,EAAO,OAAQ,CAC3B,GAAM,CAACuD,CAAM,EAAIvD,EACX,CAAE,KAAAwD,CAAK,EAAI,IAAI,IAAID,EAAO,IAAI,EAChCF,EAAI,OAASG,IACfH,EAAI,KAAOG,EACX,QAAQ,aAAa,CAAC,EAAG,GAAI,GAAGH,GAAK,EAIzC,MACEA,EAAI,KAAO,GACX,QAAQ,aAAa,CAAC,EAAG,GAAI,GAAGA,GAAK,CAEzC,CAAC,EAGA5D,GAAqBC,EAAI,CAAE,UAAAC,EAAW,QAAAC,CAAQ,CAAC,EACnD,KACC6D,EAAIC,GAAStB,EAAM,KAAKsB,CAAK,CAAC,EAC9BC,EAAS,IAAMvB,EAAM,SAAS,CAAC,EAC/B9B,EAAIoD,GAAUE,EAAA,CAAE,IAAKlE,GAAOgE,EAAQ,CACtC,CACJ,CAAC,CACH,CCpRO,SAASG,GACdC,EAAkB,CAAE,UAAAC,EAAW,MAAAC,EAAO,QAAAC,CAAQ,EACvB,CAGvB,IAAMC,EAAaH,EAChB,KACCI,EAAI,CAAC,CAAE,OAAQ,CAAE,EAAAC,CAAE,CAAE,IAAMA,CAAC,EAC5BC,GAAY,EAAG,CAAC,EAChBF,EAAI,CAAC,CAACG,EAAGC,CAAC,IAAMD,EAAIC,GAAKA,EAAI,CAAC,EAC9BC,EAAqB,CACvB,EAGIC,EAAUT,EACb,KACCG,EAAI,CAAC,CAAE,OAAAO,CAAO,IAAMA,CAAM,CAC5B,EAGF,OAAOC,EAAc,CAACF,EAASP,CAAU,CAAC,EACvC,KACCC,EAAI,CAAC,CAACO,EAAQE,CAAS,IAAM,EAAEF,GAAUE,EAAU,EACnDJ,EAAqB,EACrBK,GAAUZ,EAAQ,KAAKa,GAAK,CAAC,CAAC,CAAC,EAC/BC,GAAQ,EAAI,EACZC,GAAO,CAAE,MAAO,GAAI,CAAC,EACrBb,EAAIc,IAAW,CAAE,OAAAA,CAAO,EAAE,CAC5B,CACJ,CAYO,SAASC,GACdC,EAAiB,CAAE,UAAApB,EAAW,QAAAqB,EAAS,MAAApB,EAAO,QAAAC,CAAQ,EACpB,CAClC,IAAMoB,EAAQ,IAAIC,EACZC,EAAQF,EAAM,KAAKG,GAAS,CAAC,CAAC,EACpC,OAAAH,EAAM,UAAU,CAGd,KAAK,CAAE,OAAAJ,CAAO,EAAG,CACfE,EAAG,OAASF,EACRA,GACFE,EAAG,aAAa,WAAY,IAAI,EAChCA,EAAG,KAAK,GAERA,EAAG,gBAAgB,UAAU,CAEjC,EAGA,UAAW,CACTA,EAAG,MAAM,IAAM,GACfA,EAAG,OAAS,GACZA,EAAG,gBAAgB,UAAU,CAC/B,CACF,CAAC,EAGDC,EACG,KACCP,GAAUU,CAAK,EACfE,EAAwB,QAAQ,CAClC,EACG,UAAU,CAAC,CAAE,OAAAC,CAAO,IAAM,CACzBP,EAAG,MAAM,IAAM,GAAGO,EAAS,MAC7B,CAAC,EAGE7B,GAAesB,EAAI,CAAE,UAAApB,EAAW,MAAAC,EAAO,QAAAC,CAAQ,CAAC,EACpD,KACC0B,EAAIC,GAASP,EAAM,KAAKO,CAAK,CAAC,EAC9BC,EAAS,IAAMR,EAAM,SAAS,CAAC,EAC/BlB,EAAIyB,GAAUE,EAAA,CAAE,IAAKX,GAAOS,EAAQ,CACtC,CACJ,CCpHO,SAASG,GACd,CAAE,UAAAC,EAAW,QAAAC,CAAQ,EACf,CACND,EACG,KACCE,EAAU,IAAMC,EAEd,0DACF,CAAC,EACDC,EAAIC,GAAM,CACRA,EAAG,cAAgB,GACnBA,EAAG,QAAU,EACf,CAAC,EACDC,GAASD,GAAME,EAAUF,EAAI,QAAQ,EAClC,KACCG,GAAU,IAAMH,EAAG,UAAU,SAAS,0BAA0B,CAAC,EACjEI,EAAI,IAAMJ,CAAE,CACd,CACF,EACAK,GAAeT,CAAO,CACxB,EACG,UAAU,CAAC,CAACI,EAAIM,CAAM,IAAM,CAC3BN,EAAG,UAAU,OAAO,0BAA0B,EAC1CM,IACFN,EAAG,QAAU,GACjB,CAAC,CACP,CC/BA,SAASO,IAAyB,CAChC,MAAO,qBAAqB,KAAK,UAAU,SAAS,CACtD,CAiBO,SAASC,GACd,CAAE,UAAAC,CAAU,EACN,CACNA,EACG,KACCC,EAAU,IAAMC,EAAY,qBAAqB,CAAC,EAClDC,EAAIC,GAAMA,EAAG,gBAAgB,mBAAmB,CAAC,EACjDC,EAAOP,EAAa,EACpBQ,GAASF,GAAMG,EAAUH,EAAI,YAAY,EACtC,KACCI,EAAI,IAAMJ,CAAE,CACd,CACF,CACF,EACG,UAAUA,GAAM,CACf,IAAMK,EAAML,EAAG,UAGXK,IAAQ,EACVL,EAAG,UAAY,EAGNK,EAAML,EAAG,eAAiBA,EAAG,eACtCA,EAAG,UAAYK,EAAM,EAEzB,CAAC,CACP,CCpCO,SAASC,GACd,CAAE,UAAAC,EAAW,QAAAC,CAAQ,EACf,CACNC,EAAc,CAACC,GAAY,QAAQ,EAAGF,CAAO,CAAC,EAC3C,KACCG,EAAI,CAAC,CAACC,EAAQC,CAAM,IAAMD,GAAU,CAACC,CAAM,EAC3CC,EAAUF,GAAUG,EAAGH,CAAM,EAC1B,KACCI,GAAMJ,EAAS,IAAM,GAAG,CAC1B,CACF,EACAK,GAAeV,CAAS,CAC1B,EACG,U
AAU,CAAC,CAACK,EAAQ,CAAE,OAAQ,CAAE,EAAAM,CAAE,CAAC,CAAC,IAAM,CACzC,GAAIN,EACF,SAAS,KAAK,aAAa,qBAAsB,EAAE,EACnD,SAAS,KAAK,MAAM,IAAM,IAAIM,UACzB,CACL,IAAMC,EAAQ,GAAK,SAAS,SAAS,KAAK,MAAM,IAAK,EAAE,EACvD,SAAS,KAAK,gBAAgB,oBAAoB,EAClD,SAAS,KAAK,MAAM,IAAM,GACtBA,GACF,OAAO,SAAS,EAAGA,CAAK,CAC5B,CACF,CAAC,CACP,CC7DK,OAAO,UACV,OAAO,QAAU,SAAUC,EAAa,CACtC,IAAMC,EAA2B,CAAC,EAClC,QAAWC,KAAO,OAAO,KAAKF,CAAG,EAE/BC,EAAK,KAAK,CAACC,EAAKF,EAAIE,EAAI,CAAC,EAG3B,OAAOD,CACT,GAGG,OAAO,SACV,OAAO,OAAS,SAAUD,EAAa,CACrC,IAAMC,EAAiB,CAAC,EACxB,QAAWC,KAAO,OAAO,KAAKF,CAAG,EAE/BC,EAAK,KAAKD,EAAIE,EAAI,EAGpB,OAAOD,CACT,GAKE,OAAO,SAAY,cAGhB,QAAQ,UAAU,WACrB,QAAQ,UAAU,SAAW,SAC3BE,EAA8BC,EACxB,CACF,OAAOD,GAAM,UACf,KAAK,WAAaA,EAAE,KACpB,KAAK,UAAYA,EAAE,MAEnB,KAAK,WAAaA,EAClB,KAAK,UAAYC,EAErB,GAGG,QAAQ,UAAU,cACrB,QAAQ,UAAU,YAAc,YAC3BC,EACG,CACN,IAAMC,EAAS,KAAK,WACpB,GAAIA,EAAQ,CACND,EAAM,SAAW,GACnBC,EAAO,YAAY,IAAI,EAGzB,QAASC,EAAIF,EAAM,OAAS,EAAGE,GAAK,EAAGA,IAAK,CAC1C,IAAIC,EAAOH,EAAME,GACb,OAAOC,GAAS,SAClBA,EAAO,SAAS,eAAeA,CAAI,EAC5BA,EAAK,YACZA,EAAK,WAAW,YAAYA,CAAI,EAG7BD,EAGHD,EAAO,aAAa,KAAK,gBAAkBE,CAAI,EAF/CF,EAAO,aAAaE,EAAM,IAAI,CAGlC,CACF,CACF,IjMDJ,SAAS,gBAAgB,UAAU,OAAO,OAAO,EACjD,SAAS,gBAAgB,UAAU,IAAI,IAAI,EAG3C,IAAMC,GAAYC,GAAc,EAC1BC,GAAYC,GAAc,EAC1BC,GAAYC,GAAoB,EAChCC,GAAYC,GAAc,EAG1BC,GAAYC,GAAc,EAC1BC,GAAYC,GAAW,oBAAoB,EAC3CC,GAAYD,GAAW,qBAAqB,EAC5CE,GAAYC,GAAW,EAGvBC,GAASC,GAAc,EACvBC,GAAS,SAAS,MAAM,UAAU,QAAQ,GAC5C,+BAAU,QAASC,GACnB,IAAI,IAAI,2BAA4BH,GAAO,IAAI,CACjD,EACEI,GAGEC,GAAS,IAAIC,EACnBC,GAAiB,CAAE,OAAAF,EAAO,CAAC,EAGvBG,EAAQ,oBAAoB,GAC9BC,GAAoB,CAAE,UAAAxB,GAAW,UAAAE,GAAW,UAAAM,EAAU,CAAC,EA1HzD,IAAAiB,KA6HIA,GAAAV,GAAO,UAAP,YAAAU,GAAgB,YAAa,QAC/BC,GAAqB,CAAE,UAAA1B,EAAU,CAAC,EAGpC2B,EAAMzB,GAAWE,EAAO,EACrB,KACCwB,GAAM,GAAG,CACX,EACG,UAAU,IAAM,CACfC,GAAU,SAAU,EAAK,EACzBA,GAAU,SAAU,EAAK,CAC3B,CAAC,EAGLvB,GACG,KACCwB,EAAO,CAAC,CAAE,KAAAC,CAAK,IAAMA,IAAS,QAAQ,CACxC,EACG,UAAUC,GAAO,CAChB,OAAQA,EAAI,KAAM,CAGhB,IAAK,IACL,IAAK,IACH,IAAMC,EAAOC,GAAmB,kBAAkB,EAC9C,OAAOD,GAAS,aAClBA,EAAK,MAAM,EACb,MAGF,IAAK,IACL,IAAK,IACH,IAAME,EAAOD,GAAmB,kBAAkB,EAC9C,OAAOC,GAAS,aAClBA,EAAK,MAAM,EACb,KACJ,CACF,CAAC,EAGLC,GAAmB,CAAE,UAAApC,GAAW,QAAAU,EAAQ,CAAC,EACzC2B,GAAe,CAAE,UAAArC,EAAU,CAAC,EAC5BsC,GAAgB,CAAE,UAAA9B,GAAW,QAAAE,EAAQ,CAAC,EAGtC,IAAM6B,GAAUC,GAAYC,GAAoB,QAAQ,EAAG,CAAE,UAAAjC,EAAU,CAAC,EAClEkC,GAAQ1C,GACX,KACC2C,EAAI,IAAMF,GAAoB,MAAM,CAAC,EACrCG,EAAUC,GAAMC,GAAUD,EAAI,CAAE,UAAArC,GAAW,QAAA+B,EAAQ,CAAC,CAAC,EACrDQ,EAAY,CAAC,CACf,EAGIC,GAAWrB,EAGf,GAAGsB,GAAqB,SAAS,EAC9B,IAAIJ,GAAMK,GAAaL,EAAI,CAAE,QAAAzC,EAAQ,CAAC,CAAC,EAG1C,GAAG6C,GAAqB,QAAQ,EAC7B,IAAIJ,GAAMM,GAAYN,EAAI,CAAE,OAAAzB,EAAO,CAAC,CAAC,EAGxC,GAAG6B,GAAqB,QAAQ,EAC7B,IAAIJ,GAAMO,GAAYP,EAAI,CAAE,UAAArC,GAAW,QAAA+B,GAAS,MAAAG,EAAM,CAAC,CAAC,EAG3D,GAAGO,GAAqB,SAAS,EAC9B,IAAIJ,GAAMQ,GAAaR,CAAE,CAAC,EAG7B,GAAGI,GAAqB,QAAQ,EAC7B,IAAIJ,GAAMS,GAAYT,EAAI,CAAE,OAAA5B,GAAQ,UAAAX,EAAU,CAAC,CAAC,EAGnD,GAAG2C,GAAqB,QAAQ,EAC7B,IAAIJ,GAAMU,GAAYV,CAAE,CAAC,CAC9B,EAGMW,GAAWC,EAAM,IAAM9B,EAG3B,GAAGsB,GAAqB,UAAU,EAC/B,IAAIJ,GAAMa,GAAcb,CAAE,CAAC,EAG9B,GAAGI,GAAqB,SAAS,EAC9B,IAAIJ,GAAMc,GAAad,EAAI,CAAE,UAAArC,GAAW,QAAAJ,GAAS,OAAAS,EAAO,CAAC,CAAC,EAG7D,GAAGoC,GAAqB,SAAS,EAC9B,IAAIJ,GAAMtB,EAAQ,kBAAkB,EACjCqC,GAAoBf,EAAI,CAAE,OAAA5B,GAAQ,UAAAf,EAAU,CAAC,EAC7C2D,CACJ,EAGF,GAAGZ,GAAqB,cAAc,EACnC,IAAIJ,GAAMiB,GAAiBjB,EAAI,CAAE,UAAArC,GAAW,QAAA+B,EAAQ,CAAC,CAAC,EAGzD,GAAGU,GAAqB,SAAS,EAC9B,IAAIJ,GAAMA,EAAG,aAAa,cAAc,IAAM,aAC3CkB,GAAGnD,GAAS,IAAMoD,GAAanB,EAAI,CAAE,UAAArC,GAAW,QAAA+B,GAAS,MAAAG,EAAM,CAAC,CAAC,EACjEqB,GAAGrD,GAAS,IAAMsD,GAAanB,EAAI,CAAE,UAAArC,GAAW,QAAA+B,GAAS,MAA
AG,EAAM,CAAC,CAAC,CACrE,EAGF,GAAGO,GAAqB,MAAM,EAC3B,IAAIJ,GAAMoB,GAAUpB,EAAI,CAAE,UAAArC,GAAW,QAAA+B,EAAQ,CAAC,CAAC,EAGlD,GAAGU,GAAqB,KAAK,EAC1B,IAAIJ,GAAMqB,GAAqBrB,EAAI,CAAE,UAAArC,GAAW,QAAA+B,GAAS,QAAAnC,EAAQ,CAAC,CAAC,EAGtE,GAAG6C,GAAqB,KAAK,EAC1B,IAAIJ,GAAMsB,GAAetB,EAAI,CAAE,UAAArC,GAAW,QAAA+B,GAAS,MAAAG,GAAO,QAAAtC,EAAQ,CAAC,CAAC,CACzE,CAAC,EAGKgE,GAAapE,GAChB,KACC4C,EAAU,IAAMY,EAAQ,EACxBa,GAAUrB,EAAQ,EAClBD,EAAY,CAAC,CACf,EAGFqB,GAAW,UAAU,EAMrB,OAAO,UAAapE,GACpB,OAAO,UAAaE,GACpB,OAAO,QAAaE,GACpB,OAAO,UAAaE,GACpB,OAAO,UAAaE,GACpB,OAAO,QAAaE,GACpB,OAAO,QAAaE,GACpB,OAAO,OAAaC,GACpB,OAAO,OAAaO,GACpB,OAAO,WAAagD", + "names": ["require_focus_visible", "__commonJSMin", "exports", "module", "global", "factory", "applyFocusVisiblePolyfill", "scope", "hadKeyboardEvent", "hadFocusVisibleRecently", "hadFocusVisibleRecentlyTimeout", "inputTypesAllowlist", "isValidFocusTarget", "el", "focusTriggersKeyboardModality", "type", "tagName", "addFocusVisibleClass", "removeFocusVisibleClass", "onKeyDown", "e", "onPointerDown", "onFocus", "onBlur", "onVisibilityChange", "addInitialPointerMoveListeners", "onInitialPointerMove", "removeInitialPointerMoveListeners", "event", "error", "require_url_polyfill", "__commonJSMin", "exports", "global", "checkIfIteratorIsSupported", "error", "iteratorSupported", "createIterator", "items", "iterator", "value", "serializeParam", "deserializeParam", "polyfillURLSearchParams", "URLSearchParams", "searchString", "typeofSearchString", "_this", "name", "i", "entry", "key", "proto", "callback", "thisArg", "entries", "searchArray", "checkIfURLSearchParamsSupported", "e", "a", "b", "keys", "attributes", "attribute", "checkIfURLIsSupported", "u", "polyfillURL", "_URL", "URL", "url", "base", "doc", "baseElement", "err", "anchorElement", "inputElement", "searchParams", "enableSearchUpdate", "enableSearchParamsUpdate", "methodName", "method", "search", "linkURLWithAnchorAttribute", "attributeName", "expectedPort", "addPortToOrigin", "blob", "getOrigin", "require_tslib", "__commonJSMin", "exports", "module", "__extends", "__assign", "__rest", "__decorate", "__param", "__metadata", "__awaiter", "__generator", "__exportStar", "__values", "__read", "__spread", "__spreadArrays", "__spreadArray", "__await", "__asyncGenerator", "__asyncDelegator", "__asyncValues", "__makeTemplateObject", "__importStar", "__importDefault", "__classPrivateFieldGet", "__classPrivateFieldSet", "__createBinding", "factory", "root", "createExporter", "previous", "id", "v", "exporter", "extendStatics", "d", "b", "p", "__", "t", "s", "n", "e", "i", "decorators", "target", "key", "desc", "c", "r", "paramIndex", "decorator", "metadataKey", "metadataValue", "thisArg", "_arguments", "P", "generator", "adopt", "value", "resolve", "reject", "fulfilled", "step", "rejected", "result", "body", "_", "f", "y", "g", "verb", "op", "m", "o", "k", "k2", "ar", "error", "il", "a", "j", "jl", "to", "from", "pack", "l", "q", "resume", "settle", "fulfill", "cooked", "raw", "__setModuleDefault", "mod", "receiver", "state", "kind", "require_clipboard", "__commonJSMin", "exports", "module", "root", "factory", "__webpack_modules__", "__unused_webpack_module", "__webpack_exports__", "__webpack_require__", "clipboard", "tiny_emitter", "tiny_emitter_default", "listen", "listen_default", "src_select", "select_default", "command", "type", "err", "ClipboardActionCut", "target", "selectedText", "actions_cut", "createFakeElement", "value", "isRTL", "fakeElement", "yPosition", "fakeCopyAction", "options", "ClipboardActionCopy", "actions_copy", 
"_typeof", "obj", "ClipboardActionDefault", "_options$action", "action", "container", "text", "actions_default", "clipboard_typeof", "_classCallCheck", "instance", "Constructor", "_defineProperties", "props", "i", "descriptor", "_createClass", "protoProps", "staticProps", "_inherits", "subClass", "superClass", "_setPrototypeOf", "o", "p", "_createSuper", "Derived", "hasNativeReflectConstruct", "_isNativeReflectConstruct", "Super", "_getPrototypeOf", "result", "NewTarget", "_possibleConstructorReturn", "self", "call", "_assertThisInitialized", "e", "getAttributeValue", "suffix", "element", "attribute", "Clipboard", "_Emitter", "_super", "trigger", "_this", "_this2", "selector", "actions", "support", "DOCUMENT_NODE_TYPE", "proto", "closest", "__unused_webpack_exports", "_delegate", "callback", "useCapture", "listenerFn", "listener", "delegate", "elements", "is", "listenNode", "listenNodeList", "listenSelector", "node", "nodeList", "select", "isReadOnly", "selection", "range", "E", "name", "ctx", "data", "evtArr", "len", "evts", "liveEvents", "__webpack_module_cache__", "moduleId", "getter", "definition", "key", "prop", "require_escape_html", "__commonJSMin", "exports", "module", "matchHtmlRegExp", "escapeHtml", "string", "str", "match", "escape", "html", "index", "lastIndex", "r", "a", "e", "import_focus_visible", "n", "t", "s", "r", "o", "u", "i", "a", "e", "c", "import_url_polyfill", "import_tslib", "__extends", "__assign", "__rest", "__decorate", "__param", "__metadata", "__awaiter", "__generator", "__exportStar", "__createBinding", "__values", "__read", "__spread", "__spreadArrays", "__spreadArray", "__await", "__asyncGenerator", "__asyncDelegator", "__asyncValues", "__makeTemplateObject", "__importStar", "__importDefault", "__classPrivateFieldGet", "__classPrivateFieldSet", "tslib", "isFunction", "value", "createErrorClass", "createImpl", "_super", "instance", "ctorFunc", "UnsubscriptionError", "createErrorClass", "_super", "errors", "err", "i", "arrRemove", "arr", "item", "index", "Subscription", "initialTeardown", "errors", "_parentage", "_parentage_1", "__values", "_parentage_1_1", "parent_1", "initialFinalizer", "isFunction", "e", "UnsubscriptionError", "_finalizers", "_finalizers_1", "_finalizers_1_1", "finalizer", "execFinalizer", "err", "__spreadArray", "__read", "teardown", "_a", "parent", "arrRemove", "empty", "EMPTY_SUBSCRIPTION", "Subscription", "isSubscription", "value", "isFunction", "execFinalizer", "finalizer", "config", "timeoutProvider", "handler", "timeout", "args", "_i", "delegate", "__spreadArray", "__read", "handle", "reportUnhandledError", "err", "timeoutProvider", "onUnhandledError", "config", "noop", "COMPLETE_NOTIFICATION", "createNotification", "errorNotification", "error", "nextNotification", "value", "kind", "context", "errorContext", "cb", "config", "isRoot", "_a", "errorThrown", "error", "captureError", "err", "Subscriber", "_super", "__extends", "destination", "_this", "isSubscription", "EMPTY_OBSERVER", "next", "error", "complete", "SafeSubscriber", "value", "handleStoppedNotification", "nextNotification", "err", "errorNotification", "COMPLETE_NOTIFICATION", "Subscription", "_bind", "bind", "fn", "thisArg", "ConsumerObserver", "partialObserver", "value", "error", "handleUnhandledError", "err", "SafeSubscriber", "_super", "__extends", "observerOrNext", "complete", "_this", "isFunction", "context_1", "config", "Subscriber", "handleUnhandledError", "error", "config", "captureError", "reportUnhandledError", "defaultErrorHandler", "err", 
"handleStoppedNotification", "notification", "subscriber", "onStoppedNotification", "timeoutProvider", "EMPTY_OBSERVER", "noop", "observable", "identity", "x", "pipe", "fns", "_i", "pipeFromArray", "identity", "input", "prev", "fn", "Observable", "subscribe", "operator", "observable", "observerOrNext", "error", "complete", "_this", "subscriber", "isSubscriber", "SafeSubscriber", "errorContext", "_a", "source", "sink", "err", "next", "promiseCtor", "getPromiseCtor", "resolve", "reject", "value", "operations", "_i", "pipeFromArray", "x", "getPromiseCtor", "promiseCtor", "_a", "config", "isObserver", "value", "isFunction", "isSubscriber", "Subscriber", "isSubscription", "hasLift", "source", "isFunction", "operate", "init", "liftedSource", "err", "createOperatorSubscriber", "destination", "onNext", "onComplete", "onError", "onFinalize", "OperatorSubscriber", "_super", "__extends", "shouldUnsubscribe", "_this", "value", "err", "closed_1", "_a", "Subscriber", "animationFrameProvider", "callback", "request", "cancel", "delegate", "handle", "timestamp", "Subscription", "args", "_i", "__spreadArray", "__read", "ObjectUnsubscribedError", "createErrorClass", "_super", "Subject", "_super", "__extends", "_this", "operator", "subject", "AnonymousSubject", "ObjectUnsubscribedError", "value", "errorContext", "_b", "__values", "_c", "observer", "err", "observers", "_a", "subscriber", "hasError", "isStopped", "EMPTY_SUBSCRIPTION", "Subscription", "arrRemove", "thrownError", "observable", "Observable", "destination", "source", "AnonymousSubject", "_super", "__extends", "destination", "source", "_this", "value", "_b", "_a", "err", "subscriber", "EMPTY_SUBSCRIPTION", "Subject", "dateTimestampProvider", "ReplaySubject", "_super", "__extends", "_bufferSize", "_windowTime", "_timestampProvider", "dateTimestampProvider", "_this", "value", "_a", "isStopped", "_buffer", "_infiniteTimeWindow", "subscriber", "subscription", "copy", "i", "adjustedBufferSize", "now", "last", "Subject", "Action", "_super", "__extends", "scheduler", "work", "state", "delay", "Subscription", "intervalProvider", "handler", "timeout", "args", "_i", "delegate", "__spreadArray", "__read", "handle", "AsyncAction", "_super", "__extends", "scheduler", "work", "_this", "state", "delay", "id", "_a", "_id", "intervalProvider", "_scheduler", "error", "_delay", "errored", "errorValue", "e", "actions", "arrRemove", "Action", "Scheduler", "schedulerActionCtor", "now", "work", "delay", "state", "dateTimestampProvider", "AsyncScheduler", "_super", "__extends", "SchedulerAction", "now", "Scheduler", "_this", "action", "actions", "error", "asyncScheduler", "AsyncScheduler", "AsyncAction", "async", "AnimationFrameAction", "_super", "__extends", "scheduler", "work", "_this", "id", "delay", "animationFrameProvider", "actions", "_a", "AsyncAction", "AnimationFrameScheduler", "_super", "__extends", "action", "flushId", "actions", "error", "AsyncScheduler", "animationFrameScheduler", "AnimationFrameScheduler", "AnimationFrameAction", "EMPTY", "Observable", "subscriber", "isScheduler", "value", "isFunction", "last", "arr", "popResultSelector", "args", "isFunction", "popScheduler", "isScheduler", "popNumber", "defaultValue", "isArrayLike", "x", "isPromise", "value", "isFunction", "isInteropObservable", "input", "isFunction", "observable", "isAsyncIterable", "obj", "isFunction", "createInvalidObservableTypeError", "input", "getSymbolIterator", "iterator", "isIterable", "input", "isFunction", "iterator", "readableStreamLikeToAsyncGenerator", "readableStream", 
"reader", "__await", "_a", "_b", "value", "done", "isReadableStreamLike", "obj", "isFunction", "innerFrom", "input", "Observable", "isInteropObservable", "fromInteropObservable", "isArrayLike", "fromArrayLike", "isPromise", "fromPromise", "isAsyncIterable", "fromAsyncIterable", "isIterable", "fromIterable", "isReadableStreamLike", "fromReadableStreamLike", "createInvalidObservableTypeError", "obj", "subscriber", "obs", "observable", "isFunction", "array", "i", "promise", "value", "err", "reportUnhandledError", "iterable", "iterable_1", "__values", "iterable_1_1", "asyncIterable", "process", "readableStream", "readableStreamLikeToAsyncGenerator", "asyncIterable_1", "__asyncValues", "asyncIterable_1_1", "executeSchedule", "parentSubscription", "scheduler", "work", "delay", "repeat", "scheduleSubscription", "observeOn", "scheduler", "delay", "operate", "source", "subscriber", "createOperatorSubscriber", "value", "executeSchedule", "err", "subscribeOn", "scheduler", "delay", "operate", "source", "subscriber", "scheduleObservable", "input", "scheduler", "innerFrom", "subscribeOn", "observeOn", "schedulePromise", "input", "scheduler", "innerFrom", "subscribeOn", "observeOn", "scheduleArray", "input", "scheduler", "Observable", "subscriber", "i", "scheduleIterable", "input", "scheduler", "Observable", "subscriber", "iterator", "executeSchedule", "value", "done", "_a", "err", "isFunction", "scheduleAsyncIterable", "input", "scheduler", "Observable", "subscriber", "executeSchedule", "iterator", "result", "scheduleReadableStreamLike", "input", "scheduler", "scheduleAsyncIterable", "readableStreamLikeToAsyncGenerator", "scheduled", "input", "scheduler", "isInteropObservable", "scheduleObservable", "isArrayLike", "scheduleArray", "isPromise", "schedulePromise", "isAsyncIterable", "scheduleAsyncIterable", "isIterable", "scheduleIterable", "isReadableStreamLike", "scheduleReadableStreamLike", "createInvalidObservableTypeError", "from", "input", "scheduler", "scheduled", "innerFrom", "of", "args", "_i", "scheduler", "popScheduler", "from", "throwError", "errorOrErrorFactory", "scheduler", "errorFactory", "isFunction", "init", "subscriber", "Observable", "isValidDate", "value", "map", "project", "thisArg", "operate", "source", "subscriber", "index", "createOperatorSubscriber", "value", "isArray", "callOrApply", "fn", "args", "__spreadArray", "__read", "mapOneOrManyArgs", "map", "isArray", "getPrototypeOf", "objectProto", "getKeys", "argsArgArrayOrObject", "args", "first_1", "isPOJO", "keys", "key", "obj", "createObject", "keys", "values", "result", "key", "i", "combineLatest", "args", "_i", "scheduler", "popScheduler", "resultSelector", "popResultSelector", "_a", "argsArgArrayOrObject", "observables", "keys", "from", "result", "Observable", "combineLatestInit", "values", "createObject", "identity", "mapOneOrManyArgs", "valueTransform", "subscriber", "maybeSchedule", "length", "active", "remainingFirstValues", "i", "source", "hasFirstValue", "createOperatorSubscriber", "value", "execute", "subscription", "executeSchedule", "mergeInternals", "source", "subscriber", "project", "concurrent", "onBeforeNext", "expand", "innerSubScheduler", "additionalFinalizer", "buffer", "active", "index", "isComplete", "checkComplete", "outerNext", "value", "doInnerSub", "innerComplete", "innerFrom", "createOperatorSubscriber", "innerValue", "bufferedValue", "executeSchedule", "err", "mergeMap", "project", "resultSelector", "concurrent", "isFunction", "a", "i", "map", "b", "ii", "innerFrom", "operate", "source", "subscriber", 
"mergeInternals", "mergeAll", "concurrent", "mergeMap", "identity", "concatAll", "mergeAll", "concat", "args", "_i", "concatAll", "from", "popScheduler", "defer", "observableFactory", "Observable", "subscriber", "innerFrom", "nodeEventEmitterMethods", "eventTargetMethods", "jqueryMethods", "fromEvent", "target", "eventName", "options", "resultSelector", "isFunction", "mapOneOrManyArgs", "_a", "__read", "isEventTarget", "methodName", "handler", "isNodeStyleEventEmitter", "toCommonHandlerRegistry", "isJQueryStyleEventEmitter", "add", "remove", "isArrayLike", "mergeMap", "subTarget", "innerFrom", "Observable", "subscriber", "args", "_i", "fromEventPattern", "addHandler", "removeHandler", "resultSelector", "mapOneOrManyArgs", "Observable", "subscriber", "handler", "e", "_i", "retValue", "isFunction", "timer", "dueTime", "intervalOrScheduler", "scheduler", "async", "intervalDuration", "isScheduler", "Observable", "subscriber", "due", "isValidDate", "n", "merge", "args", "_i", "scheduler", "popScheduler", "concurrent", "popNumber", "sources", "innerFrom", "mergeAll", "from", "EMPTY", "NEVER", "Observable", "noop", "isArray", "argsOrArgArray", "args", "filter", "predicate", "thisArg", "operate", "source", "subscriber", "index", "createOperatorSubscriber", "value", "zip", "args", "_i", "resultSelector", "popResultSelector", "sources", "argsOrArgArray", "Observable", "subscriber", "buffers", "completed", "sourceIndex", "innerFrom", "createOperatorSubscriber", "value", "buffer", "result", "__spreadArray", "__read", "i", "EMPTY", "audit", "durationSelector", "operate", "source", "subscriber", "hasValue", "lastValue", "durationSubscriber", "isComplete", "endDuration", "value", "cleanupDuration", "createOperatorSubscriber", "innerFrom", "auditTime", "duration", "scheduler", "asyncScheduler", "audit", "timer", "bufferCount", "bufferSize", "startBufferEvery", "operate", "source", "subscriber", "buffers", "count", "createOperatorSubscriber", "value", "toEmit", "buffers_1", "__values", "buffers_1_1", "buffer", "toEmit_1", "toEmit_1_1", "arrRemove", "buffers_2", "buffers_2_1", "catchError", "selector", "operate", "source", "subscriber", "innerSub", "syncUnsub", "handledResult", "createOperatorSubscriber", "err", "innerFrom", "scanInternals", "accumulator", "seed", "hasSeed", "emitOnNext", "emitBeforeComplete", "source", "subscriber", "hasState", "state", "index", "createOperatorSubscriber", "value", "i", "combineLatest", "args", "_i", "resultSelector", "popResultSelector", "pipe", "__spreadArray", "__read", "mapOneOrManyArgs", "operate", "source", "subscriber", "combineLatestInit", "argsOrArgArray", "combineLatestWith", "otherSources", "_i", "combineLatest", "__spreadArray", "__read", "concatMap", "project", "resultSelector", "isFunction", "mergeMap", "debounceTime", "dueTime", "scheduler", "asyncScheduler", "operate", "source", "subscriber", "activeTask", "lastValue", "lastTime", "emit", "value", "emitWhenIdle", "targetTime", "now", "createOperatorSubscriber", "defaultIfEmpty", "defaultValue", "operate", "source", "subscriber", "hasValue", "createOperatorSubscriber", "value", "take", "count", "EMPTY", "operate", "source", "subscriber", "seen", "createOperatorSubscriber", "value", "ignoreElements", "operate", "source", "subscriber", "createOperatorSubscriber", "noop", "mapTo", "value", "map", "delayWhen", "delayDurationSelector", "subscriptionDelay", "source", "concat", "take", "ignoreElements", "mergeMap", "value", "index", "mapTo", "delay", "due", "scheduler", "asyncScheduler", "duration", "timer", 
"delayWhen", "distinctUntilChanged", "comparator", "keySelector", "identity", "defaultCompare", "operate", "source", "subscriber", "previousKey", "first", "createOperatorSubscriber", "value", "currentKey", "a", "b", "distinctUntilKeyChanged", "key", "compare", "distinctUntilChanged", "x", "y", "endWith", "values", "_i", "source", "concat", "of", "__spreadArray", "__read", "finalize", "callback", "operate", "source", "subscriber", "takeLast", "count", "EMPTY", "operate", "source", "subscriber", "buffer", "createOperatorSubscriber", "value", "buffer_1", "__values", "buffer_1_1", "merge", "args", "_i", "scheduler", "popScheduler", "concurrent", "popNumber", "argsOrArgArray", "operate", "source", "subscriber", "mergeAll", "from", "__spreadArray", "__read", "mergeWith", "otherSources", "_i", "merge", "__spreadArray", "__read", "repeat", "countOrConfig", "count", "delay", "_a", "EMPTY", "operate", "source", "subscriber", "soFar", "sourceSub", "resubscribe", "notifier", "timer", "innerFrom", "notifierSubscriber_1", "createOperatorSubscriber", "subscribeToSource", "syncUnsub", "sample", "notifier", "operate", "source", "subscriber", "hasValue", "lastValue", "createOperatorSubscriber", "value", "noop", "scan", "accumulator", "seed", "operate", "scanInternals", "share", "options", "_a", "connector", "Subject", "_b", "resetOnError", "_c", "resetOnComplete", "_d", "resetOnRefCountZero", "wrapperSource", "connection", "resetConnection", "subject", "refCount", "hasCompleted", "hasErrored", "cancelReset", "reset", "resetAndUnsubscribe", "conn", "operate", "source", "subscriber", "dest", "handleReset", "SafeSubscriber", "value", "err", "innerFrom", "on", "args", "_i", "onSubscriber", "__spreadArray", "__read", "shareReplay", "configOrBufferSize", "windowTime", "scheduler", "bufferSize", "refCount", "_a", "_b", "_c", "share", "ReplaySubject", "skip", "count", "filter", "_", "index", "skipUntil", "notifier", "operate", "source", "subscriber", "taking", "skipSubscriber", "createOperatorSubscriber", "noop", "innerFrom", "value", "startWith", "values", "_i", "scheduler", "popScheduler", "operate", "source", "subscriber", "concat", "switchMap", "project", "resultSelector", "operate", "source", "subscriber", "innerSubscriber", "index", "isComplete", "checkComplete", "createOperatorSubscriber", "value", "innerIndex", "outerIndex", "innerFrom", "innerValue", "takeUntil", "notifier", "operate", "source", "subscriber", "innerFrom", "createOperatorSubscriber", "noop", "takeWhile", "predicate", "inclusive", "operate", "source", "subscriber", "index", "createOperatorSubscriber", "value", "result", "tap", "observerOrNext", "error", "complete", "tapObserver", "isFunction", "operate", "source", "subscriber", "_a", "isUnsub", "createOperatorSubscriber", "value", "err", "_b", "identity", "defaultThrottleConfig", "throttle", "durationSelector", "config", "operate", "source", "subscriber", "leading", "trailing", "hasValue", "sendValue", "throttled", "isComplete", "endThrottling", "send", "cleanupThrottling", "startThrottle", "value", "innerFrom", "createOperatorSubscriber", "throttleTime", "duration", "scheduler", "config", "asyncScheduler", "defaultThrottleConfig", "duration$", "timer", "throttle", "withLatestFrom", "inputs", "_i", "project", "popResultSelector", "operate", "source", "subscriber", "len", "otherValues", "hasValue", "ready", "i", "innerFrom", "createOperatorSubscriber", "value", "identity", "noop", "values", "__spreadArray", "__read", "zip", "sources", "_i", "operate", "source", "subscriber", "__spreadArray", 
"__read", "zipWith", "otherInputs", "_i", "zip", "__spreadArray", "__read", "watchDocument", "document$", "ReplaySubject", "fromEvent", "getElements", "selector", "node", "getElement", "el", "getOptionalElement", "getActiveElement", "watchElementFocus", "el", "merge", "fromEvent", "debounceTime", "map", "active", "getActiveElement", "startWith", "distinctUntilChanged", "getElementOffset", "el", "watchElementOffset", "merge", "fromEvent", "auditTime", "animationFrameScheduler", "map", "startWith", "getElementContentOffset", "el", "watchElementContentOffset", "merge", "fromEvent", "auditTime", "animationFrameScheduler", "map", "startWith", "MapShim", "getIndex", "arr", "key", "result", "entry", "index", "class_1", "value", "entries", "callback", "ctx", "_i", "_a", "isBrowser", "global$1", "requestAnimationFrame$1", "trailingTimeout", "throttle", "delay", "leadingCall", "trailingCall", "lastCallTime", "resolvePending", "proxy", "timeoutCallback", "timeStamp", "REFRESH_DELAY", "transitionKeys", "mutationObserverSupported", "ResizeObserverController", "observer", "observers", "changesDetected", "activeObservers", "_b", "propertyName", "isReflowProperty", "defineConfigurable", "target", "props", "getWindowOf", "ownerGlobal", "emptyRect", "createRectInit", "toFloat", "getBordersSize", "styles", "positions", "size", "position", "getPaddings", "paddings", "positions_1", "getSVGContentRect", "bbox", "getHTMLElementContentRect", "clientWidth", "clientHeight", "horizPad", "vertPad", "width", "height", "isDocumentElement", "vertScrollbar", "horizScrollbar", "isSVGGraphicsElement", "getContentRect", "createReadOnlyRect", "x", "y", "Constr", "rect", "ResizeObservation", "ResizeObserverEntry", "rectInit", "contentRect", "ResizeObserverSPI", "controller", "callbackCtx", "observations", "_this", "observation", "ResizeObserver", "method", "ResizeObserver_es_default", "entry$", "Subject", "observer$", "defer", "of", "ResizeObserver_es_default", "entries", "entry", "switchMap", "observer", "merge", "NEVER", "finalize", "shareReplay", "getElementSize", "el", "watchElementSize", "tap", "filter", "target", "map", "startWith", "getElementContentSize", "el", "getElementContainer", "parent", "entry$", "Subject", "observer$", "defer", "of", "entries", "entry", "switchMap", "observer", "merge", "NEVER", "finalize", "shareReplay", "watchElementVisibility", "el", "tap", "filter", "target", "map", "isIntersecting", "watchElementBoundary", "threshold", "watchElementContentOffset", "y", "visible", "getElementSize", "content", "getElementContentSize", "distinctUntilChanged", "toggles", "getElement", "getToggle", "name", "setToggle", "value", "watchToggle", "el", "fromEvent", "map", "startWith", "isSusceptibleToKeyboard", "el", "type", "watchKeyboard", "fromEvent", "filter", "ev", "map", "getToggle", "mode", "active", "getActiveElement", "share", "getLocation", "setLocation", "url", "watchLocation", "Subject", "appendChild", "el", "child", "node", "h", "tag", "attributes", "children", "attr", "truncate", "value", "n", "i", "round", "digits", "getLocationHash", "setLocationHash", "hash", "el", "h", "ev", "watchLocationHash", "fromEvent", "map", "startWith", "filter", "shareReplay", "watchLocationTarget", "id", "getOptionalElement", "watchMedia", "query", "media", "fromEventPattern", "next", "startWith", "watchPrint", "merge", "fromEvent", "map", "at", "query$", "factory", "switchMap", "active", "EMPTY", "request", "url", "options", "from", "catchError", "EMPTY", "switchMap", "res", "throwError", "of", "requestJSON", 
"shareReplay", "requestXML", "dom", "map", "watchScript", "src", "script", "h", "defer", "merge", "fromEvent", "switchMap", "throwError", "map", "finalize", "take", "getViewportOffset", "watchViewportOffset", "merge", "fromEvent", "map", "startWith", "getViewportSize", "watchViewportSize", "fromEvent", "map", "startWith", "watchViewport", "combineLatest", "watchViewportOffset", "watchViewportSize", "map", "offset", "size", "shareReplay", "watchViewportAt", "el", "viewport$", "header$", "size$", "distinctUntilKeyChanged", "offset$", "combineLatest", "map", "getElementOffset", "height", "offset", "size", "x", "y", "watchWorker", "worker", "tx$", "rx$", "fromEvent", "map", "data", "throttle", "tap", "message", "switchMap", "share", "script", "getElement", "config", "getLocation", "configuration", "feature", "flag", "translation", "key", "value", "getComponentElement", "type", "node", "getElement", "getComponentElements", "getElements", "watchAnnounce", "el", "button", "getElement", "fromEvent", "map", "content", "mountAnnounce", "feature", "EMPTY", "defer", "push$", "Subject", "startWith", "hash", "_a", "tap", "state", "finalize", "__spreadValues", "watchConsent", "el", "target$", "map", "target", "mountConsent", "options", "internal$", "Subject", "hidden", "tap", "state", "finalize", "__spreadValues", "import_clipboard", "renderTooltip", "id", "h", "renderAnnotation", "id", "prefix", "anchor", "h", "renderTooltip", "renderClipboardButton", "id", "h", "translation", "renderSearchDocument", "document", "flag", "parent", "teaser", "missing", "key", "list", "h", "url", "feature", "match", "highlight", "value", "tags", "configuration", "truncate", "tag", "id", "type", "translation", "renderSearchResultItem", "result", "threshold", "docs", "doc", "article", "index", "best", "more", "children", "section", "renderSourceFacts", "facts", "h", "key", "value", "round", "renderTabbedControl", "type", "classes", "h", "renderTable", "table", "h", "renderVersion", "version", "config", "configuration", "url", "h", "renderVersionSelector", "versions", "active", "translation", "watchAnnotation", "el", "container", "offset$", "defer", "combineLatest", "watchElementOffset", "watchElementContentOffset", "map", "x", "y", "scroll", "width", "height", "getElementSize", "watchElementFocus", "switchMap", "active", "offset", "take", "mountAnnotation", "target$", "tooltip", "index", "push$", "Subject", "done$", "takeLast", "watchElementVisibility", "takeUntil", "visible", "merge", "filter", "debounceTime", "auditTime", "animationFrameScheduler", "throttleTime", "origin", "fromEvent", "ev", "withLatestFrom", "_a", "parent", "getActiveElement", "target", "delay", "tap", "state", "finalize", "__spreadValues", "findAnnotationMarkers", "container", "markers", "el", "getElements", "nodes", "it", "node", "text", "match", "id", "force", "marker", "swap", "source", "target", "mountAnnotationList", "target$", "print$", "parent", "prefix", "annotations", "getOptionalElement", "renderAnnotation", "EMPTY", "defer", "done$", "Subject", "pairs", "annotation", "getElement", "takeUntil", "takeLast", "active", "inner", "child", "merge", "mountAnnotation", "finalize", "share", "sequence", "findCandidateList", "el", "sibling", "watchCodeBlock", "watchElementSize", "map", "width", "getElementContentSize", "distinctUntilKeyChanged", "mountCodeBlock", "options", "hover", "factory$", "defer", "push$", "Subject", "scrollable", "ClipboardJS", "parent", "renderClipboardButton", "container", "list", "feature", "annotations$", 
"mountAnnotationList", "tap", "state", "finalize", "__spreadValues", "mergeWith", "height", "distinctUntilChanged", "switchMap", "active", "EMPTY", "watchElementVisibility", "filter", "visible", "take", "mermaid$", "sequence", "fetchScripts", "watchScript", "of", "mountMermaid", "el", "tap", "mermaid_default", "map", "shareReplay", "id", "host", "h", "svg", "shadow", "watchDetails", "el", "target$", "print$", "open", "merge", "map", "target", "filter", "details", "active", "tap", "mountDetails", "options", "defer", "push$", "Subject", "action", "reveal", "state", "finalize", "__spreadValues", "sentinel", "h", "mountDataTable", "el", "renderTable", "of", "watchContentTabs", "el", "inputs", "getElements", "initial", "input", "merge", "fromEvent", "map", "getElement", "startWith", "active", "mountContentTabs", "viewport$", "prev", "renderTabbedControl", "next", "container", "defer", "push$", "Subject", "done$", "takeLast", "combineLatest", "watchElementSize", "auditTime", "animationFrameScheduler", "takeUntil", "size", "offset", "getElementOffset", "width", "getElementSize", "content", "getElementContentOffset", "watchElementContentOffset", "getElementContentSize", "direction", "feature", "skip", "withLatestFrom", "tab", "y", "set", "label", "tabs", "tap", "state", "finalize", "__spreadValues", "subscribeOn", "asyncScheduler", "mountContent", "el", "viewport$", "target$", "print$", "merge", "getElements", "child", "mountCodeBlock", "mountMermaid", "mountDataTable", "mountDetails", "mountContentTabs", "watchDialog", "_el", "alert$", "switchMap", "message", "merge", "of", "delay", "map", "active", "mountDialog", "el", "options", "inner", "getElement", "defer", "push$", "Subject", "tap", "state", "finalize", "__spreadValues", "isHidden", "viewport$", "feature", "of", "direction$", "map", "y", "bufferCount", "a", "b", "distinctUntilKeyChanged", "hidden$", "combineLatest", "filter", "offset", "direction", "distinctUntilChanged", "search$", "watchToggle", "search", "switchMap", "active", "startWith", "watchHeader", "el", "options", "defer", "watchElementSize", "height", "hidden", "shareReplay", "mountHeader", "header$", "main$", "push$", "Subject", "done$", "takeLast", "combineLatestWith", "takeUntil", "state", "__spreadValues", "watchHeaderTitle", "el", "viewport$", "header$", "watchViewportAt", "map", "y", "height", "getElementSize", "distinctUntilKeyChanged", "mountHeaderTitle", "options", "defer", "push$", "Subject", "active", "heading", "getOptionalElement", "EMPTY", "tap", "state", "finalize", "__spreadValues", "watchMain", "el", "viewport$", "header$", "adjust$", "map", "height", "distinctUntilChanged", "border$", "switchMap", "watchElementSize", "distinctUntilKeyChanged", "combineLatest", "header", "top", "bottom", "y", "a", "b", "watchPalette", "inputs", "current", "input", "of", "mergeMap", "fromEvent", "map", "startWith", "shareReplay", "mountPalette", "el", "defer", "push$", "Subject", "palette", "key", "value", "index", "label", "observeOn", "asyncScheduler", "getElements", "tap", "state", "finalize", "__spreadValues", "import_clipboard", "extract", "el", "text", "setupClipboardJS", "alert$", "ClipboardJS", "Observable", "subscriber", "getElement", "ev", "tap", "map", "translation", "preprocess", "urls", "root", "next", "a", "b", "url", "index", "fetchSitemap", "base", "cached", "of", "config", "configuration", "requestXML", "map", "sitemap", "getElements", "node", "catchError", "EMPTY", "defaultIfEmpty", "tap", "setupInstantLoading", "document$", "location$", "viewport$", "config", 
"configuration", "fromEvent", "favicon", "getOptionalElement", "push$", "fetchSitemap", "map", "paths", "path", "switchMap", "urls", "filter", "ev", "el", "url", "of", "NEVER", "share", "pop$", "merge", "distinctUntilChanged", "a", "b", "response$", "distinctUntilKeyChanged", "request", "catchError", "setLocation", "sample", "dom", "res", "skip", "replacement", "selector", "feature", "source", "target", "getComponentElement", "getElements", "concatMap", "script", "h", "name", "Observable", "observer", "EMPTY", "offset", "setLocationHash", "skipUntil", "debounceTime", "bufferCount", "state", "import_escape_html", "import_escape_html", "setupSearchHighlighter", "config", "escape", "separator", "highlight", "_", "data", "term", "query", "match", "value", "escapeHTML", "defaultTransform", "query", "terms", "index", "isSearchReadyMessage", "message", "isSearchQueryMessage", "isSearchResultMessage", "setupSearchIndex", "config", "docs", "translation", "options", "feature", "setupSearchWorker", "url", "index", "configuration", "worker", "tx$", "Subject", "rx$", "watchWorker", "map", "message", "isSearchResultMessage", "result", "document", "share", "from", "data", "setupVersionSelector", "document$", "config", "configuration", "versions$", "requestJSON", "catchError", "EMPTY", "current$", "map", "versions", "current", "version", "aliases", "switchMap", "urls", "fromEvent", "filter", "ev", "withLatestFrom", "el", "url", "of", "fetchSitemap", "sitemap", "path", "getLocation", "setLocation", "combineLatest", "getElement", "renderVersionSelector", "_a", "outdated", "latest", "warning", "getComponentElements", "watchSearchQuery", "el", "rx$", "fn", "defaultTransform", "searchParams", "getLocation", "setToggle", "param$", "filter", "isSearchReadyMessage", "take", "map", "watchToggle", "active", "url", "value", "focus$", "watchElementFocus", "value$", "merge", "fromEvent", "delay", "startWith", "distinctUntilChanged", "combineLatest", "focus", "shareReplay", "mountSearchQuery", "tx$", "push$", "Subject", "done$", "takeLast", "distinctUntilKeyChanged", "translation", "takeUntil", "tap", "state", "finalize", "__spreadValues", "share", "mountSearchResult", "el", "rx$", "query$", "push$", "Subject", "boundary$", "watchElementBoundary", "filter", "meta", "getElement", "list", "ready$", "isSearchReadyMessage", "take", "withLatestFrom", "skipUntil", "items", "value", "translation", "round", "tap", "switchMap", "merge", "of", "bufferCount", "zipWith", "chunk", "result", "renderSearchResultItem", "isSearchResultMessage", "map", "data", "state", "finalize", "__spreadValues", "watchSearchShare", "_el", "query$", "map", "value", "url", "getLocation", "mountSearchShare", "el", "options", "push$", "Subject", "fromEvent", "ev", "tap", "state", "finalize", "__spreadValues", "mountSearchSuggest", "el", "rx$", "keyboard$", "push$", "Subject", "query", "getComponentElement", "query$", "merge", "fromEvent", "observeOn", "asyncScheduler", "map", "distinctUntilChanged", "combineLatestWith", "suggestions", "value", "words", "last", "filter", "mode", "key", "isSearchResultMessage", "data", "tap", "state", "finalize", "mountSearch", "el", "index$", "keyboard$", "config", "configuration", "url", "worker", "setupSearchWorker", "query", "getComponentElement", "result", "tx$", "rx$", "filter", "isSearchQueryMessage", "sample", "isSearchReadyMessage", "take", "mode", "key", "active", "getActiveElement", "anchors", "anchor", "getElements", "article", "best", "a", "b", "setToggle", "els", "i", "query$", "mountSearchQuery", "result$", 
"mountSearchResult", "merge", "mergeWith", "getComponentElements", "child", "mountSearchShare", "mountSearchSuggest", "err", "NEVER", "mountSearchHiglight", "el", "index$", "location$", "combineLatest", "startWith", "getLocation", "filter", "url", "map", "index", "setupSearchHighlighter", "fn", "_a", "nodes", "it", "node", "original", "replaced", "text", "childNodes", "h", "watchSidebar", "el", "viewport$", "main$", "parent", "adjust", "combineLatest", "map", "offset", "height", "y", "distinctUntilChanged", "a", "b", "mountSidebar", "_a", "_b", "header$", "options", "__objRest", "inner", "getElement", "getElementOffset", "defer", "push$", "Subject", "auditTime", "animationFrameScheduler", "withLatestFrom", "observeOn", "take", "item", "getElements", "container", "getElementContainer", "getElementSize", "tap", "state", "finalize", "__spreadValues", "fetchSourceFactsFromGitHub", "user", "repo", "url", "zip", "requestJSON", "catchError", "EMPTY", "map", "release", "defaultIfEmpty", "info", "__spreadValues", "fetchSourceFactsFromGitLab", "base", "project", "url", "requestJSON", "catchError", "EMPTY", "map", "star_count", "forks_count", "defaultIfEmpty", "fetchSourceFacts", "url", "match", "user", "repo", "fetchSourceFactsFromGitHub", "base", "slug", "fetchSourceFactsFromGitLab", "EMPTY", "fetch$", "watchSource", "el", "defer", "cached", "of", "getComponentElements", "consent", "EMPTY", "fetchSourceFacts", "tap", "facts", "catchError", "filter", "map", "shareReplay", "mountSource", "inner", "getElement", "push$", "Subject", "renderSourceFacts", "state", "finalize", "__spreadValues", "watchTabs", "el", "viewport$", "header$", "watchElementSize", "switchMap", "watchViewportAt", "map", "y", "distinctUntilKeyChanged", "mountTabs", "options", "defer", "push$", "Subject", "hidden", "feature", "of", "tap", "state", "finalize", "__spreadValues", "watchTableOfContents", "el", "viewport$", "header$", "table", "anchors", "getElements", "anchor", "id", "target", "getOptionalElement", "adjust$", "distinctUntilKeyChanged", "map", "height", "main", "getComponentElement", "grid", "getElement", "share", "watchElementSize", "switchMap", "body", "defer", "path", "of", "index", "offset", "a", "b", "combineLatestWith", "adjust", "scan", "prev", "next", "y", "size", "last", "distinctUntilChanged", "startWith", "bufferCount", "mountTableOfContents", "target$", "push$", "Subject", "done$", "takeLast", "feature", "smooth$", "merge", "debounceTime", "filter", "withLatestFrom", "behavior", "container", "getElementContainer", "getElementSize", "takeUntil", "skip", "repeat", "url", "getLocation", "active", "hash", "tap", "state", "finalize", "__spreadValues", "watchBackToTop", "_el", "viewport$", "main$", "target$", "direction$", "map", "y", "bufferCount", "a", "b", "distinctUntilChanged", "active$", "active", "combineLatest", "direction", "takeUntil", "skip", "endWith", "repeat", "hidden", "mountBackToTop", "el", "header$", "push$", "Subject", "done$", "takeLast", "distinctUntilKeyChanged", "height", "tap", "state", "finalize", "__spreadValues", "patchIndeterminate", "document$", "tablet$", "switchMap", "getElements", "tap", "el", "mergeMap", "fromEvent", "takeWhile", "map", "withLatestFrom", "tablet", "isAppleDevice", "patchScrollfix", "document$", "switchMap", "getElements", "tap", "el", "filter", "mergeMap", "fromEvent", "map", "top", "patchScrolllock", "viewport$", "tablet$", "combineLatest", "watchToggle", "map", "active", "tablet", "switchMap", "of", "delay", "withLatestFrom", "y", "value", "obj", "data", "key", 
"x", "y", "nodes", "parent", "i", "node", "document$", "watchDocument", "location$", "watchLocation", "target$", "watchLocationTarget", "keyboard$", "watchKeyboard", "viewport$", "watchViewport", "tablet$", "watchMedia", "screen$", "print$", "watchPrint", "config", "configuration", "index$", "requestJSON", "NEVER", "alert$", "Subject", "setupClipboardJS", "feature", "setupInstantLoading", "_a", "setupVersionSelector", "merge", "delay", "setToggle", "filter", "mode", "key", "prev", "getOptionalElement", "next", "patchIndeterminate", "patchScrollfix", "patchScrolllock", "header$", "watchHeader", "getComponentElement", "main$", "map", "switchMap", "el", "watchMain", "shareReplay", "control$", "getComponentElements", "mountConsent", "mountDialog", "mountHeader", "mountPalette", "mountSearch", "mountSource", "content$", "defer", "mountAnnounce", "mountContent", "mountSearchHiglight", "EMPTY", "mountHeaderTitle", "at", "mountSidebar", "mountTabs", "mountTableOfContents", "mountBackToTop", "component$", "mergeWith"] +} diff --git a/assets/javascripts/extra/bundle.5f09fbc3.min.js b/assets/javascripts/extra/bundle.5f09fbc3.min.js new file mode 100644 index 0000000..48b752c --- /dev/null +++ b/assets/javascripts/extra/bundle.5f09fbc3.min.js @@ -0,0 +1,18 @@ +"use strict";(()=>{var Je=Object.create;var qr=Object.defineProperty;var $e=Object.getOwnPropertyDescriptor;var Qe=Object.getOwnPropertyNames;var Xe=Object.getPrototypeOf,Ze=Object.prototype.hasOwnProperty;var rt=(r,o)=>()=>(o||r((o={exports:{}}).exports,o),o.exports);var et=(r,o,t,e)=>{if(o&&typeof o=="object"||typeof o=="function")for(let n of Qe(o))!Ze.call(r,n)&&n!==t&&qr(r,n,{get:()=>o[n],enumerable:!(e=$e(o,n))||e.enumerable});return r};var tt=(r,o,t)=>(t=r!=null?Je(Xe(r)):{},et(o||!r||!r.__esModule?qr(t,"default",{value:r,enumerable:!0}):t,r));var me=rt((Tt,er)=>{/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */var Hr,Kr,Jr,$r,Qr,Xr,Zr,re,ee,Z,Ar,te,oe,ne,k,ie,fe,ae,ue,ce,se,pe,le,rr;(function(r){var o=typeof global=="object"?global:typeof self=="object"?self:typeof this=="object"?this:{};typeof define=="function"&&define.amd?define("tslib",["exports"],function(e){r(t(o,t(e)))}):typeof er=="object"&&typeof er.exports=="object"?r(t(o,t(er.exports))):r(t(o));function t(e,n){return e!==o&&(typeof Object.create=="function"?Object.defineProperty(e,"__esModule",{value:!0}):e.__esModule=!0),function(i,f){return e[i]=n?n(i,f):f}}})(function(r){var o=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,n){e.__proto__=n}||function(e,n){for(var i in n)Object.prototype.hasOwnProperty.call(n,i)&&(e[i]=n[i])};Hr=function(e,n){if(typeof n!="function"&&n!==null)throw new TypeError("Class extends value "+String(n)+" is not a constructor or null");o(e,n);function i(){this.constructor=e}e.prototype=n===null?Object.create(n):(i.prototype=n.prototype,new i)},Kr=Object.assign||function(e){for(var n,i=1,f=arguments.length;i=0;s--)(c=e[s])&&(a=(u<3?c(a):u>3?c(n,i,a):c(n,i))||a);return u>3&&a&&Object.defineProperty(n,i,a),a},Qr=function(e,n){return function(i,f){n(i,f,e)}},Xr=function(e,n){if(typeof Reflect=="object"&&typeof Reflect.metadata=="function")return Reflect.metadata(e,n)},Zr=function(e,n,i,f){function u(a){return a instanceof i?a:new i(function(c){c(a)})}return new(i||(i=Promise))(function(a,c){function s(y){try{p(f.next(y))}catch(g){c(g)}}function d(y){try{p(f.throw(y))}catch(g){c(g)}}function p(y){y.done?a(y.value):u(y.value).then(s,d)}p((f=f.apply(e,n||[])).next())})},re=function(e,n){var i={label:0,sent:function(){if(a[0]&1)throw a[1];return a[1]},trys:[],ops:[]},f,u,a,c;return c={next:s(0),throw:s(1),return:s(2)},typeof Symbol=="function"&&(c[Symbol.iterator]=function(){return this}),c;function s(p){return function(y){return d([p,y])}}function d(p){if(f)throw new TypeError("Generator is already executing.");for(;i;)try{if(f=1,u&&(a=p[0]&2?u.return:p[0]?u.throw||((a=u.return)&&a.call(u),0):u.next)&&!(a=a.call(u,p[1])).done)return a;switch(u=0,a&&(p=[p[0]&2,a.value]),p[0]){case 0:case 1:a=p;break;case 4:return i.label++,{value:p[1],done:!1};case 5:i.label++,u=p[1],p=[0];continue;case 7:p=i.ops.pop(),i.trys.pop();continue;default:if(a=i.trys,!(a=a.length>0&&a[a.length-1])&&(p[0]===6||p[0]===2)){i=0;continue}if(p[0]===3&&(!a||p[1]>a[0]&&p[1]=e.length&&(e=void 0),{value:e&&e[f++],done:!e}}};throw new TypeError(n?"Object is not iterable.":"Symbol.iterator is not defined.")},Ar=function(e,n){var i=typeof Symbol=="function"&&e[Symbol.iterator];if(!i)return e;var f=i.call(e),u,a=[],c;try{for(;(n===void 0||n-- >0)&&!(u=f.next()).done;)a.push(u.value)}catch(s){c={error:s}}finally{try{u&&!u.done&&(i=f.return)&&i.call(f)}finally{if(c)throw c.error}}return a},te=function(){for(var e=[],n=0;n1||s(m,P)})})}function s(m,P){try{d(f[m](P))}catch(j){g(a[0][3],j)}}function d(m){m.value instanceof k?Promise.resolve(m.value.v).then(p,y):g(a[0][2],m)}function p(m){s("next",m)}function y(m){s("throw",m)}function g(m,P){m(P),a.shift(),a.length&&s(a[0][0],a[0][1])}},fe=function(e){var n,i;return n={},f("next"),f("throw",function(u){throw u}),f("return"),n[Symbol.iterator]=function(){return this},n;function f(u,a){n[u]=e[u]?function(c){return(i=!i)?{value:k(e[u](c)),done:u==="return"}:a?a(c):c}:a}},ae=function(e){if(!Symbol.asyncIterator)throw new TypeError("Symbol.asyncIterator is not defined.");var n=e[Symbol.asyncIterator],i;return 
n?n.call(e):(e=typeof Z=="function"?Z(e):e[Symbol.iterator](),i={},f("next"),f("throw"),f("return"),i[Symbol.asyncIterator]=function(){return this},i);function f(a){i[a]=e[a]&&function(c){return new Promise(function(s,d){c=e[a](c),u(s,d,c.done,c.value)})}}function u(a,c,s,d){Promise.resolve(d).then(function(p){a({value:p,done:s})},c)}},ue=function(e,n){return Object.defineProperty?Object.defineProperty(e,"raw",{value:n}):e.raw=n,e};var t=Object.create?function(e,n){Object.defineProperty(e,"default",{enumerable:!0,value:n})}:function(e,n){e.default=n};ce=function(e){if(e&&e.__esModule)return e;var n={};if(e!=null)for(var i in e)i!=="default"&&Object.prototype.hasOwnProperty.call(e,i)&&rr(n,e,i);return t(n,e),n},se=function(e){return e&&e.__esModule?e:{default:e}},pe=function(e,n,i,f){if(i==="a"&&!f)throw new TypeError("Private accessor was defined without a getter");if(typeof n=="function"?e!==n||!f:!n.has(e))throw new TypeError("Cannot read private member from an object whose class did not declare it");return i==="m"?f:i==="a"?f.call(e):f?f.value:n.get(e)},le=function(e,n,i,f,u){if(f==="m")throw new TypeError("Private method is not writable");if(f==="a"&&!u)throw new TypeError("Private accessor was defined without a setter");if(typeof n=="function"?e!==n||!u:!n.has(e))throw new TypeError("Cannot write private member to an object whose class did not declare it");return f==="a"?u.call(e,i):u?u.value=i:n.set(e,i),i},r("__extends",Hr),r("__assign",Kr),r("__rest",Jr),r("__decorate",$r),r("__param",Qr),r("__metadata",Xr),r("__awaiter",Zr),r("__generator",re),r("__exportStar",ee),r("__createBinding",rr),r("__values",Z),r("__read",Ar),r("__spread",te),r("__spreadArrays",oe),r("__spreadArray",ne),r("__await",k),r("__asyncGenerator",ie),r("__asyncDelegator",fe),r("__asyncValues",ae),r("__makeTemplateObject",ue),r("__importStar",ce),r("__importDefault",se),r("__classPrivateFieldGet",pe),r("__classPrivateFieldSet",le)})});var de=tt(me(),1),{__extends:_,__assign:Pt,__rest:jt,__decorate:Ft,__param:Mt,__metadata:Ct,__awaiter:he,__generator:tr,__exportStar:Lt,__createBinding:Rt,__values:M,__read:w,__spread:kt,__spreadArrays:Ut,__spreadArray:S,__await:or,__asyncGenerator:ve,__asyncDelegator:Wt,__asyncValues:be,__makeTemplateObject:Dt,__importStar:Vt,__importDefault:Bt,__classPrivateFieldGet:Gt,__classPrivateFieldSet:Nt}=de.default;function l(r){return typeof r=="function"}function nr(r){var o=function(e){Error.call(e),e.stack=new Error().stack},t=r(o);return t.prototype=Object.create(Error.prototype),t.prototype.constructor=t,t}var ir=nr(function(r){return function(t){r(this),this.message=t?t.length+` errors occurred during unsubscription: +`+t.map(function(e,n){return n+1+") "+e.toString()}).join(` + `):"",this.name="UnsubscriptionError",this.errors=t}});function C(r,o){if(r){var t=r.indexOf(o);0<=t&&r.splice(t,1)}}var F=function(){function r(o){this.initialTeardown=o,this.closed=!1,this._parentage=null,this._finalizers=null}return r.prototype.unsubscribe=function(){var o,t,e,n,i;if(!this.closed){this.closed=!0;var f=this._parentage;if(f)if(this._parentage=null,Array.isArray(f))try{for(var u=M(f),a=u.next();!a.done;a=u.next()){var c=a.value;c.remove(this)}}catch(m){o={error:m}}finally{try{a&&!a.done&&(t=u.return)&&t.call(u)}finally{if(o)throw o.error}}else f.remove(this);var s=this.initialTeardown;if(l(s))try{s()}catch(m){i=m instanceof ir?m.errors:[m]}var d=this._finalizers;if(d){this._finalizers=null;try{for(var p=M(d),y=p.next();!y.done;y=p.next()){var g=y.value;try{ye(g)}catch(m){i=i!=null?i:[],m 
instanceof ir?i=S(S([],w(i)),w(m.errors)):i.push(m)}}}catch(m){e={error:m}}finally{try{y&&!y.done&&(n=p.return)&&n.call(p)}finally{if(e)throw e.error}}}if(i)throw new ir(i)}},r.prototype.add=function(o){var t;if(o&&o!==this)if(this.closed)ye(o);else{if(o instanceof r){if(o.closed||o._hasParent(this))return;o._addParent(this)}(this._finalizers=(t=this._finalizers)!==null&&t!==void 0?t:[]).push(o)}},r.prototype._hasParent=function(o){var t=this._parentage;return t===o||Array.isArray(t)&&t.includes(o)},r.prototype._addParent=function(o){var t=this._parentage;this._parentage=Array.isArray(t)?(t.push(o),t):t?[t,o]:o},r.prototype._removeParent=function(o){var t=this._parentage;t===o?this._parentage=null:Array.isArray(t)&&C(t,o)},r.prototype.remove=function(o){var t=this._finalizers;t&&C(t,o),o instanceof r&&o._removeParent(this)},r.EMPTY=function(){var o=new r;return o.closed=!0,o}(),r}();var Ir=F.EMPTY;function fr(r){return r instanceof F||r&&"closed"in r&&l(r.remove)&&l(r.add)&&l(r.unsubscribe)}function ye(r){l(r)?r():r.unsubscribe()}var O={onUnhandledError:null,onStoppedNotification:null,Promise:void 0,useDeprecatedSynchronousErrorHandling:!1,useDeprecatedNextContext:!1};var U={setTimeout:function(r,o){for(var t=[],e=2;e0},enumerable:!1,configurable:!0}),o.prototype._trySubscribe=function(t){return this._throwIfClosed(),r.prototype._trySubscribe.call(this,t)},o.prototype._subscribe=function(t){return this._throwIfClosed(),this._checkFinalizedStatuses(t),this._innerSubscribe(t)},o.prototype._innerSubscribe=function(t){var e=this,n=this,i=n.hasError,f=n.isStopped,u=n.observers;return i||f?Ir:(this.currentObservers=null,u.push(t),new F(function(){e.currentObservers=null,C(u,t)}))},o.prototype._checkFinalizedStatuses=function(t){var e=this,n=e.hasError,i=e.thrownError,f=e.isStopped;n?t.error(i):f&&t.complete()},o.prototype.asObservable=function(){var t=new b;return t.source=this,t},o.create=function(t,e){return new Ae(t,e)},o}(b);var Ae=function(r){_(o,r);function o(t,e){var n=r.call(this)||this;return n.destination=t,n.source=e,n}return o.prototype.next=function(t){var e,n;(n=(e=this.destination)===null||e===void 0?void 0:e.next)===null||n===void 0||n.call(e,t)},o.prototype.error=function(t){var e,n;(n=(e=this.destination)===null||e===void 0?void 0:e.error)===null||n===void 0||n.call(e,t)},o.prototype.complete=function(){var t,e;(e=(t=this.destination)===null||t===void 0?void 0:t.complete)===null||e===void 0||e.call(t)},o.prototype._subscribe=function(t){var e,n;return(n=(e=this.source)===null||e===void 0?void 0:e.subscribe(t))!==null&&n!==void 0?n:Ir},o}(Fr);var J={now:function(){return(J.delegate||Date).now()},delegate:void 0};var Mr=function(r){_(o,r);function o(t,e,n){t===void 0&&(t=1/0),e===void 0&&(e=1/0),n===void 0&&(n=J);var i=r.call(this)||this;return i._bufferSize=t,i._windowTime=e,i._timestampProvider=n,i._buffer=[],i._infiniteTimeWindow=!0,i._infiniteTimeWindow=e===1/0,i._bufferSize=Math.max(1,t),i._windowTime=Math.max(1,e),i}return o.prototype.next=function(t){var e=this,n=e.isStopped,i=e._buffer,f=e._infiniteTimeWindow,u=e._timestampProvider,a=e._windowTime;n||(i.push(t),!f&&i.push(u.now()+a)),this._trimBuffer(),r.prototype.next.call(this,t)},o.prototype._subscribe=function(t){this._throwIfClosed(),this._trimBuffer();for(var 
e=this._innerSubscribe(t),n=this,i=n._infiniteTimeWindow,f=n._buffer,u=f.slice(),a=0;a{sessionStorage.setItem("\u1D34\u2092\u1D34\u2092\u1D34\u2092",`${t}`),r.hidden=!t}),o.next(JSON.parse(sessionStorage.getItem("\u1D34\u2092\u1D34\u2092\u1D34\u2092")||"true")),z(r,"click").pipe(zr(o)).subscribe(([,t])=>o.next(!t)),kr(250).pipe(gr(o.pipe(X(t=>!t))),H(75),Nr({delay:()=>o.pipe(X(t=>t))}),T(()=>{let t=document.createElement("div");return t.className="\u1D34\u2092\u1D34\u2092\u1D34\u2092",t.ariaHidden="true",Ke.appendChild(t),Ur(Wr,Rr(t)).pipe(Gr(()=>t.remove()),gr(o.pipe(X(e=>!e))),Yr(e=>z(e,"click").pipe(Er(()=>e.classList.add("\u1D34\u2092\u1D34\u2092\u1D34\u2092--\u1D4D\u2092\u1D57\uA700\u1D34\u2090")),Vr(1e3),Er(()=>e.classList.remove("\u1D34\u2092\u1D34\u2092\u1D34\u2092--\u1D4D\u2092\u1D57\uA700\u1D34\u2090")))))})).subscribe()}})(); +//# sourceMappingURL=bundle.5f09fbc3.min.js.map + diff --git a/assets/javascripts/extra/bundle.5f09fbc3.min.js.map b/assets/javascripts/extra/bundle.5f09fbc3.min.js.map new file mode 100644 index 0000000..24f3674 --- /dev/null +++ b/assets/javascripts/extra/bundle.5f09fbc3.min.js.map @@ -0,0 +1,8 @@ +{ + "version": 3, + "sources": ["node_modules/rxjs/node_modules/tslib/tslib.js", "node_modules/rxjs/node_modules/tslib/modules/index.js", "node_modules/rxjs/src/internal/util/isFunction.ts", "node_modules/rxjs/src/internal/util/createErrorClass.ts", "node_modules/rxjs/src/internal/util/UnsubscriptionError.ts", "node_modules/rxjs/src/internal/util/arrRemove.ts", "node_modules/rxjs/src/internal/Subscription.ts", "node_modules/rxjs/src/internal/config.ts", "node_modules/rxjs/src/internal/scheduler/timeoutProvider.ts", "node_modules/rxjs/src/internal/util/reportUnhandledError.ts", "node_modules/rxjs/src/internal/util/noop.ts", "node_modules/rxjs/src/internal/NotificationFactories.ts", "node_modules/rxjs/src/internal/util/errorContext.ts", "node_modules/rxjs/src/internal/Subscriber.ts", "node_modules/rxjs/src/internal/symbol/observable.ts", "node_modules/rxjs/src/internal/util/identity.ts", "node_modules/rxjs/src/internal/util/pipe.ts", "node_modules/rxjs/src/internal/Observable.ts", "node_modules/rxjs/src/internal/util/lift.ts", "node_modules/rxjs/src/internal/operators/OperatorSubscriber.ts", "node_modules/rxjs/src/internal/util/ObjectUnsubscribedError.ts", "node_modules/rxjs/src/internal/Subject.ts", "node_modules/rxjs/src/internal/scheduler/dateTimestampProvider.ts", "node_modules/rxjs/src/internal/ReplaySubject.ts", "node_modules/rxjs/src/internal/scheduler/Action.ts", "node_modules/rxjs/src/internal/scheduler/intervalProvider.ts", "node_modules/rxjs/src/internal/scheduler/AsyncAction.ts", "node_modules/rxjs/src/internal/Scheduler.ts", "node_modules/rxjs/src/internal/scheduler/AsyncScheduler.ts", "node_modules/rxjs/src/internal/scheduler/async.ts", "node_modules/rxjs/src/internal/observable/empty.ts", "node_modules/rxjs/src/internal/util/isScheduler.ts", "node_modules/rxjs/src/internal/util/args.ts", "node_modules/rxjs/src/internal/util/isArrayLike.ts", "node_modules/rxjs/src/internal/util/isPromise.ts", "node_modules/rxjs/src/internal/util/isInteropObservable.ts", "node_modules/rxjs/src/internal/util/isAsyncIterable.ts", "node_modules/rxjs/src/internal/util/throwUnobservableError.ts", "node_modules/rxjs/src/internal/symbol/iterator.ts", "node_modules/rxjs/src/internal/util/isIterable.ts", "node_modules/rxjs/src/internal/util/isReadableStreamLike.ts", "node_modules/rxjs/src/internal/observable/innerFrom.ts", 
"node_modules/rxjs/src/internal/util/executeSchedule.ts", "node_modules/rxjs/src/internal/operators/observeOn.ts", "node_modules/rxjs/src/internal/operators/subscribeOn.ts", "node_modules/rxjs/src/internal/scheduled/scheduleObservable.ts", "node_modules/rxjs/src/internal/scheduled/schedulePromise.ts", "node_modules/rxjs/src/internal/scheduled/scheduleArray.ts", "node_modules/rxjs/src/internal/scheduled/scheduleIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleAsyncIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleReadableStreamLike.ts", "node_modules/rxjs/src/internal/scheduled/scheduled.ts", "node_modules/rxjs/src/internal/observable/from.ts", "node_modules/rxjs/src/internal/observable/of.ts", "node_modules/rxjs/src/internal/util/isDate.ts", "node_modules/rxjs/src/internal/operators/map.ts", "node_modules/rxjs/src/internal/util/mapOneOrManyArgs.ts", "node_modules/rxjs/src/internal/operators/mergeInternals.ts", "node_modules/rxjs/src/internal/operators/mergeMap.ts", "node_modules/rxjs/src/internal/operators/mergeAll.ts", "node_modules/rxjs/src/internal/operators/concatAll.ts", "node_modules/rxjs/src/internal/observable/concat.ts", "node_modules/rxjs/src/internal/observable/fromEvent.ts", "node_modules/rxjs/src/internal/observable/timer.ts", "node_modules/rxjs/src/internal/observable/interval.ts", "node_modules/rxjs/src/internal/observable/merge.ts", "node_modules/rxjs/src/internal/observable/never.ts", "node_modules/rxjs/src/internal/operators/filter.ts", "node_modules/rxjs/src/internal/operators/take.ts", "node_modules/rxjs/src/internal/operators/ignoreElements.ts", "node_modules/rxjs/src/internal/operators/mapTo.ts", "node_modules/rxjs/src/internal/operators/delayWhen.ts", "node_modules/rxjs/src/internal/operators/delay.ts", "node_modules/rxjs/src/internal/operators/distinctUntilChanged.ts", "node_modules/rxjs/src/internal/operators/finalize.ts", "node_modules/rxjs/src/internal/operators/repeat.ts", "node_modules/rxjs/src/internal/operators/switchMap.ts", "node_modules/rxjs/src/internal/operators/takeUntil.ts", "node_modules/rxjs/src/internal/operators/tap.ts", "node_modules/rxjs/src/internal/operators/withLatestFrom.ts", "src/assets/javascripts/extra/bundle.ts"], + "sourceRoot": "../../../..", + "sourcesContent": ["/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global global, define, System, Reflect, Promise */\r\nvar __extends;\r\nvar __assign;\r\nvar __rest;\r\nvar __decorate;\r\nvar __param;\r\nvar __metadata;\r\nvar __awaiter;\r\nvar __generator;\r\nvar __exportStar;\r\nvar __values;\r\nvar __read;\r\nvar __spread;\r\nvar __spreadArrays;\r\nvar __spreadArray;\r\nvar __await;\r\nvar __asyncGenerator;\r\nvar __asyncDelegator;\r\nvar __asyncValues;\r\nvar __makeTemplateObject;\r\nvar __importStar;\r\nvar __importDefault;\r\nvar __classPrivateFieldGet;\r\nvar __classPrivateFieldSet;\r\nvar __createBinding;\r\n(function (factory) {\r\n var root = typeof global === \"object\" ? global : typeof self === \"object\" ? self : typeof this === \"object\" ? this : {};\r\n if (typeof define === \"function\" && define.amd) {\r\n define(\"tslib\", [\"exports\"], function (exports) { factory(createExporter(root, createExporter(exports))); });\r\n }\r\n else if (typeof module === \"object\" && typeof module.exports === \"object\") {\r\n factory(createExporter(root, createExporter(module.exports)));\r\n }\r\n else {\r\n factory(createExporter(root));\r\n }\r\n function createExporter(exports, previous) {\r\n if (exports !== root) {\r\n if (typeof Object.create === \"function\") {\r\n Object.defineProperty(exports, \"__esModule\", { value: true });\r\n }\r\n else {\r\n exports.__esModule = true;\r\n }\r\n }\r\n return function (id, v) { return exports[id] = previous ? previous(id, v) : v; };\r\n }\r\n})\r\n(function (exporter) {\r\n var extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n\r\n __extends = function (d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());\r\n };\r\n\r\n __assign = Object.assign || function (t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n };\r\n\r\n __rest = function (s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n };\r\n\r\n __decorate = function (decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n };\r\n\r\n __param = function (paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n };\r\n\r\n __metadata = function (metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n };\r\n\r\n __awaiter = function (thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n };\r\n\r\n __generator = function (thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n };\r\n\r\n __exportStar = function(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n };\r\n\r\n __createBinding = Object.create ? 
(function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n }) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n });\r\n\r\n __values = function (o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? \"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n };\r\n\r\n __read = function (o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n };\r\n\r\n /** @deprecated */\r\n __spread = function () {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n };\r\n\r\n /** @deprecated */\r\n __spreadArrays = function () {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n };\r\n\r\n __spreadArray = function (to, from, pack) {\r\n if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {\r\n if (ar || !(i in from)) {\r\n if (!ar) ar = Array.prototype.slice.call(from, 0, i);\r\n ar[i] = from[i];\r\n }\r\n }\r\n return to.concat(ar || Array.prototype.slice.call(from));\r\n };\r\n\r\n __await = function (v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n };\r\n\r\n __asyncGenerator = function (thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n };\r\n\r\n __asyncDelegator = function (o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n };\r\n\r\n __asyncValues = function (o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? 
m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n };\r\n\r\n __makeTemplateObject = function (cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n };\r\n\r\n var __setModuleDefault = Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n }) : function(o, v) {\r\n o[\"default\"] = v;\r\n };\r\n\r\n __importStar = function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n };\r\n\r\n __importDefault = function (mod) {\r\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\r\n };\r\n\r\n __classPrivateFieldGet = function (receiver, state, kind, f) {\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\r\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\r\n };\r\n\r\n __classPrivateFieldSet = function (receiver, state, value, kind, f) {\r\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\r\n return (kind === \"a\" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value;\r\n };\r\n\r\n exporter(\"__extends\", __extends);\r\n exporter(\"__assign\", __assign);\r\n exporter(\"__rest\", __rest);\r\n exporter(\"__decorate\", __decorate);\r\n exporter(\"__param\", __param);\r\n exporter(\"__metadata\", __metadata);\r\n exporter(\"__awaiter\", __awaiter);\r\n exporter(\"__generator\", __generator);\r\n exporter(\"__exportStar\", __exportStar);\r\n exporter(\"__createBinding\", __createBinding);\r\n exporter(\"__values\", __values);\r\n exporter(\"__read\", __read);\r\n exporter(\"__spread\", __spread);\r\n exporter(\"__spreadArrays\", __spreadArrays);\r\n exporter(\"__spreadArray\", __spreadArray);\r\n exporter(\"__await\", __await);\r\n exporter(\"__asyncGenerator\", __asyncGenerator);\r\n exporter(\"__asyncDelegator\", __asyncDelegator);\r\n exporter(\"__asyncValues\", __asyncValues);\r\n exporter(\"__makeTemplateObject\", __makeTemplateObject);\r\n exporter(\"__importStar\", __importStar);\r\n exporter(\"__importDefault\", __importDefault);\r\n exporter(\"__classPrivateFieldGet\", __classPrivateFieldGet);\r\n exporter(\"__classPrivateFieldSet\", __classPrivateFieldSet);\r\n});\r\n", "import tslib from '../tslib.js';\r\nconst {\r\n __extends,\r\n __assign,\r\n __rest,\r\n __decorate,\r\n __param,\r\n __metadata,\r\n __awaiter,\r\n __generator,\r\n __exportStar,\r\n __createBinding,\r\n __values,\r\n __read,\r\n __spread,\r\n __spreadArrays,\r\n __spreadArray,\r\n __await,\r\n __asyncGenerator,\r\n __asyncDelegator,\r\n __asyncValues,\r\n __makeTemplateObject,\r\n __importStar,\r\n __importDefault,\r\n __classPrivateFieldGet,\r\n __classPrivateFieldSet,\r\n} = tslib;\r\nexport {\r\n __extends,\r\n __assign,\r\n __rest,\r\n __decorate,\r\n __param,\r\n __metadata,\r\n __awaiter,\r\n __generator,\r\n __exportStar,\r\n __createBinding,\r\n __values,\r\n __read,\r\n __spread,\r\n __spreadArrays,\r\n __spreadArray,\r\n __await,\r\n __asyncGenerator,\r\n __asyncDelegator,\r\n __asyncValues,\r\n __makeTemplateObject,\r\n __importStar,\r\n __importDefault,\r\n __classPrivateFieldGet,\r\n __classPrivateFieldSet,\r\n};\r\n", null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n NEVER,\n ReplaySubject,\n delay,\n distinctUntilChanged,\n filter,\n finalize,\n fromEvent,\n interval,\n merge,\n mergeMap,\n of,\n repeat,\n switchMap,\n take,\n takeUntil,\n tap,\n withLatestFrom\n} from \"rxjs\"\n\n/* ----------------------------------------------------------------------------\n * Script\n * ------------------------------------------------------------------------- */\n\n/* Append container for instances */\nconst container = document.createElement(\"div\")\ndocument.body.appendChild(container)\n\n/* Append button next to palette toggle */\nconst header = document.querySelector(\".md-header__option\")\nif (header) {\n const button = document.createElement(\"button\")\n button.className = \"md-header__button md-icon \u1D34\u2092\u1D34\u2092\u1D34\u2092__button\"\n if (header.parentElement)\n header.parentElement.insertBefore(button, header)\n\n /* Toggle animation */\n const on$ = new ReplaySubject(1)\n on$\n .pipe(\n distinctUntilChanged()\n )\n .subscribe(on => {\n sessionStorage.setItem(\"\u1D34\u2092\u1D34\u2092\u1D34\u2092\", `${on}`)\n button.hidden = !on\n })\n\n /* Load state from session storage */\n on$.next(JSON.parse(sessionStorage.getItem(\"\u1D34\u2092\u1D34\u2092\u1D34\u2092\") || \"true\"))\n fromEvent(button, \"click\")\n .pipe(\n withLatestFrom(on$)\n )\n .subscribe(([, on]) => on$.next(!on))\n\n /* Generate instances */\n interval(250)\n .pipe(\n takeUntil(on$.pipe(filter(on => !on))),\n take(75),\n repeat({ delay: () => on$.pipe(filter(on => on)) }),\n mergeMap(() => {\n const instance = document.createElement(\"div\")\n instance.className = \"\u1D34\u2092\u1D34\u2092\u1D34\u2092\"\n instance.ariaHidden = \"true\"\n container.appendChild(instance)\n return merge(NEVER, of(instance))\n .pipe(\n finalize(() => instance.remove()),\n takeUntil(on$.pipe(filter(on => !on))),\n switchMap(el => fromEvent(el, \"click\")\n .pipe(\n tap(() => el.classList.add(\"\u1D34\u2092\u1D34\u2092\u1D34\u2092--\u1D4D\u2092\u1D57\uA700\u1D34\u2090\")),\n delay(1000),\n tap(() => el.classList.remove(\"\u1D34\u2092\u1D34\u2092\u1D34\u2092--\u1D4D\u2092\u1D57\uA700\u1D34\u2090\"))\n )\n )\n )\n })\n )\n .subscribe()\n}\n"], + "mappings": 
"6iBAAA,IAAAA,GAAAC,GAAA,CAAAC,GAAAC,KAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gFAeA,IAAIC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,EACAC,GACAC,GACAC,GACAC,GACAC,EACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,IACH,SAAUC,EAAS,CAChB,IAAIC,EAAO,OAAO,QAAW,SAAW,OAAS,OAAO,MAAS,SAAW,KAAO,OAAO,MAAS,SAAW,KAAO,CAAC,EAClH,OAAO,QAAW,YAAc,OAAO,IACvC,OAAO,QAAS,CAAC,SAAS,EAAG,SAAU3B,EAAS,CAAE0B,EAAQE,EAAeD,EAAMC,EAAe5B,CAAO,CAAC,CAAC,CAAG,CAAC,EAEtG,OAAOC,IAAW,UAAY,OAAOA,GAAO,SAAY,SAC7DyB,EAAQE,EAAeD,EAAMC,EAAe3B,GAAO,OAAO,CAAC,CAAC,EAG5DyB,EAAQE,EAAeD,CAAI,CAAC,EAEhC,SAASC,EAAe5B,EAAS6B,EAAU,CACvC,OAAI7B,IAAY2B,IACR,OAAO,OAAO,QAAW,WACzB,OAAO,eAAe3B,EAAS,aAAc,CAAE,MAAO,EAAK,CAAC,EAG5DA,EAAQ,WAAa,IAGtB,SAAU8B,EAAIC,EAAG,CAAE,OAAO/B,EAAQ8B,GAAMD,EAAWA,EAASC,EAAIC,CAAC,EAAIA,CAAG,CACnF,CACJ,GACC,SAAUC,EAAU,CACjB,IAAIC,EAAgB,OAAO,gBACtB,CAAE,UAAW,CAAC,CAAE,YAAa,OAAS,SAAUC,EAAGC,EAAG,CAAED,EAAE,UAAYC,CAAG,GAC1E,SAAUD,EAAGC,EAAG,CAAE,QAASC,KAAKD,EAAO,OAAO,UAAU,eAAe,KAAKA,EAAGC,CAAC,IAAGF,EAAEE,GAAKD,EAAEC,GAAI,EAEpGlC,GAAY,SAAUgC,EAAGC,EAAG,CACxB,GAAI,OAAOA,GAAM,YAAcA,IAAM,KACjC,MAAM,IAAI,UAAU,uBAAyB,OAAOA,CAAC,EAAI,+BAA+B,EAC5FF,EAAcC,EAAGC,CAAC,EAClB,SAASE,GAAK,CAAE,KAAK,YAAcH,CAAG,CACtCA,EAAE,UAAYC,IAAM,KAAO,OAAO,OAAOA,CAAC,GAAKE,EAAG,UAAYF,EAAE,UAAW,IAAIE,EACnF,EAEAlC,GAAW,OAAO,QAAU,SAAUmC,EAAG,CACrC,QAASC,EAAG,EAAI,EAAGC,EAAI,UAAU,OAAQ,EAAIA,EAAG,IAAK,CACjDD,EAAI,UAAU,GACd,QAASH,KAAKG,EAAO,OAAO,UAAU,eAAe,KAAKA,EAAGH,CAAC,IAAGE,EAAEF,GAAKG,EAAEH,GAC9E,CACA,OAAOE,CACX,EAEAlC,GAAS,SAAUmC,EAAGE,EAAG,CACrB,IAAIH,EAAI,CAAC,EACT,QAASF,KAAKG,EAAO,OAAO,UAAU,eAAe,KAAKA,EAAGH,CAAC,GAAKK,EAAE,QAAQL,CAAC,EAAI,IAC9EE,EAAEF,GAAKG,EAAEH,IACb,GAAIG,GAAK,MAAQ,OAAO,OAAO,uBAA0B,WACrD,QAASG,EAAI,EAAGN,EAAI,OAAO,sBAAsBG,CAAC,EAAGG,EAAIN,EAAE,OAAQM,IAC3DD,EAAE,QAAQL,EAAEM,EAAE,EAAI,GAAK,OAAO,UAAU,qBAAqB,KAAKH,EAAGH,EAAEM,EAAE,IACzEJ,EAAEF,EAAEM,IAAMH,EAAEH,EAAEM,KAE1B,OAAOJ,CACX,EAEAjC,GAAa,SAAUsC,EAAYC,EAAQC,EAAKC,EAAM,CAClD,IAAIC,EAAI,UAAU,OAAQC,EAAID,EAAI,EAAIH,EAASE,IAAS,KAAOA,EAAO,OAAO,yBAAyBF,EAAQC,CAAG,EAAIC,EAAMZ,EAC3H,GAAI,OAAO,SAAY,UAAY,OAAO,QAAQ,UAAa,WAAYc,EAAI,QAAQ,SAASL,EAAYC,EAAQC,EAAKC,CAAI,MACxH,SAASJ,EAAIC,EAAW,OAAS,EAAGD,GAAK,EAAGA,KAASR,EAAIS,EAAWD,MAAIM,GAAKD,EAAI,EAAIb,EAAEc,CAAC,EAAID,EAAI,EAAIb,EAAEU,EAAQC,EAAKG,CAAC,EAAId,EAAEU,EAAQC,CAAG,IAAMG,GAChJ,OAAOD,EAAI,GAAKC,GAAK,OAAO,eAAeJ,EAAQC,EAAKG,CAAC,EAAGA,CAChE,EAEA1C,GAAU,SAAU2C,EAAYC,EAAW,CACvC,OAAO,SAAUN,EAAQC,EAAK,CAAEK,EAAUN,EAAQC,EAAKI,CAAU,CAAG,CACxE,EAEA1C,GAAa,SAAU4C,EAAaC,EAAe,CAC/C,GAAI,OAAO,SAAY,UAAY,OAAO,QAAQ,UAAa,WAAY,OAAO,QAAQ,SAASD,EAAaC,CAAa,CACjI,EAEA5C,GAAY,SAAU6C,EAASC,EAAYC,EAAGC,EAAW,CACrD,SAASC,EAAMC,EAAO,CAAE,OAAOA,aAAiBH,EAAIG,EAAQ,IAAIH,EAAE,SAAUI,EAAS,CAAEA,EAAQD,CAAK,CAAG,CAAC,CAAG,CAC3G,OAAO,IAAKH,IAAMA,EAAI,UAAU,SAAUI,EAASC,EAAQ,CACvD,SAASC,EAAUH,EAAO,CAAE,GAAI,CAAEI,EAAKN,EAAU,KAAKE,CAAK,CAAC,CAAG,OAASjB,EAAP,CAAYmB,EAAOnB,CAAC,CAAG,CAAE,CAC1F,SAASsB,EAASL,EAAO,CAAE,GAAI,CAAEI,EAAKN,EAAU,MAASE,CAAK,CAAC,CAAG,OAASjB,EAAP,CAAYmB,EAAOnB,CAAC,CAAG,CAAE,CAC7F,SAASqB,EAAKE,EAAQ,CAAEA,EAAO,KAAOL,EAAQK,EAAO,KAAK,EAAIP,EAAMO,EAAO,KAAK,EAAE,KAAKH,EAAWE,CAAQ,CAAG,CAC7GD,GAAMN,EAAYA,EAAU,MAAMH,EAASC,GAAc,CAAC,CAAC,GAAG,KAAK,CAAC,CACxE,CAAC,CACL,EAEA7C,GAAc,SAAU4C,EAASY,EAAM,CACnC,IAAIC,EAAI,CAAE,MAAO,EAAG,KAAM,UAAW,CAAE,GAAI5B,EAAE,GAAK,EAAG,MAAMA,EAAE,GAAI,OAAOA,EAAE,EAAI,EAAG,KAAM,CAAC,EAAG,IAAK,CAAC,CAAE,EAAG,EAAG6B,EAAG7B,EAAG8B,EAC/G,OAAOA,EAAI,CAAE,KAAMC,EAAK,CAAC,EAAG,MAASA,EAAK,CAAC,EAAG,OAAUA,EAAK,CAAC,CAAE,EAAG,OAAO,QAAW,aAAeD,EAAE,OAAO,UAAY,UAAW,CAAE,OAAO,IAAM,GAAIA,EACvJ,SAASC,
EAAK7B,EAAG,CAAE,OAAO,SAAUT,EAAG,CAAE,OAAO+B,EAAK,CAACtB,EAAGT,CAAC,CAAC,CAAG,CAAG,CACjE,SAAS+B,EAAKQ,EAAI,CACd,GAAI,EAAG,MAAM,IAAI,UAAU,iCAAiC,EAC5D,KAAOJ,GAAG,GAAI,CACV,GAAI,EAAI,EAAGC,IAAM7B,EAAIgC,EAAG,GAAK,EAAIH,EAAE,OAAYG,EAAG,GAAKH,EAAE,SAAc7B,EAAI6B,EAAE,SAAc7B,EAAE,KAAK6B,CAAC,EAAG,GAAKA,EAAE,OAAS,EAAE7B,EAAIA,EAAE,KAAK6B,EAAGG,EAAG,EAAE,GAAG,KAAM,OAAOhC,EAE3J,OADI6B,EAAI,EAAG7B,IAAGgC,EAAK,CAACA,EAAG,GAAK,EAAGhC,EAAE,KAAK,GAC9BgC,EAAG,GAAI,CACX,IAAK,GAAG,IAAK,GAAGhC,EAAIgC,EAAI,MACxB,IAAK,GAAG,OAAAJ,EAAE,QAAgB,CAAE,MAAOI,EAAG,GAAI,KAAM,EAAM,EACtD,IAAK,GAAGJ,EAAE,QAASC,EAAIG,EAAG,GAAIA,EAAK,CAAC,CAAC,EAAG,SACxC,IAAK,GAAGA,EAAKJ,EAAE,IAAI,IAAI,EAAGA,EAAE,KAAK,IAAI,EAAG,SACxC,QACI,GAAM5B,EAAI4B,EAAE,KAAM,EAAA5B,EAAIA,EAAE,OAAS,GAAKA,EAAEA,EAAE,OAAS,MAAQgC,EAAG,KAAO,GAAKA,EAAG,KAAO,GAAI,CAAEJ,EAAI,EAAG,QAAU,CAC3G,GAAII,EAAG,KAAO,IAAM,CAAChC,GAAMgC,EAAG,GAAKhC,EAAE,IAAMgC,EAAG,GAAKhC,EAAE,IAAM,CAAE4B,EAAE,MAAQI,EAAG,GAAI,KAAO,CACrF,GAAIA,EAAG,KAAO,GAAKJ,EAAE,MAAQ5B,EAAE,GAAI,CAAE4B,EAAE,MAAQ5B,EAAE,GAAIA,EAAIgC,EAAI,KAAO,CACpE,GAAIhC,GAAK4B,EAAE,MAAQ5B,EAAE,GAAI,CAAE4B,EAAE,MAAQ5B,EAAE,GAAI4B,EAAE,IAAI,KAAKI,CAAE,EAAG,KAAO,CAC9DhC,EAAE,IAAI4B,EAAE,IAAI,IAAI,EACpBA,EAAE,KAAK,IAAI,EAAG,QACtB,CACAI,EAAKL,EAAK,KAAKZ,EAASa,CAAC,CAC7B,OAASzB,EAAP,CAAY6B,EAAK,CAAC,EAAG7B,CAAC,EAAG0B,EAAI,CAAG,QAAE,CAAU,EAAI7B,EAAI,CAAG,CACzD,GAAIgC,EAAG,GAAK,EAAG,MAAMA,EAAG,GAAI,MAAO,CAAE,MAAOA,EAAG,GAAKA,EAAG,GAAK,OAAQ,KAAM,EAAK,CACnF,CACJ,EAEA5D,GAAe,SAAS6D,EAAGC,EAAG,CAC1B,QAASpC,KAAKmC,EAAOnC,IAAM,WAAa,CAAC,OAAO,UAAU,eAAe,KAAKoC,EAAGpC,CAAC,GAAGX,GAAgB+C,EAAGD,EAAGnC,CAAC,CAChH,EAEAX,GAAkB,OAAO,OAAU,SAAS+C,EAAGD,EAAGE,EAAGC,EAAI,CACjDA,IAAO,SAAWA,EAAKD,GAC3B,OAAO,eAAeD,EAAGE,EAAI,CAAE,WAAY,GAAM,IAAK,UAAW,CAAE,OAAOH,EAAEE,EAAI,CAAE,CAAC,CACvF,EAAM,SAASD,EAAGD,EAAGE,EAAGC,EAAI,CACpBA,IAAO,SAAWA,EAAKD,GAC3BD,EAAEE,GAAMH,EAAEE,EACd,EAEA9D,EAAW,SAAU6D,EAAG,CACpB,IAAIjC,EAAI,OAAO,QAAW,YAAc,OAAO,SAAUgC,EAAIhC,GAAKiC,EAAEjC,GAAIG,EAAI,EAC5E,GAAI6B,EAAG,OAAOA,EAAE,KAAKC,CAAC,EACtB,GAAIA,GAAK,OAAOA,EAAE,QAAW,SAAU,MAAO,CAC1C,KAAM,UAAY,CACd,OAAIA,GAAK9B,GAAK8B,EAAE,SAAQA,EAAI,QACrB,CAAE,MAAOA,GAAKA,EAAE9B,KAAM,KAAM,CAAC8B,CAAE,CAC1C,CACJ,EACA,MAAM,IAAI,UAAUjC,EAAI,0BAA4B,iCAAiC,CACzF,EAEA3B,GAAS,SAAU4D,EAAG,EAAG,CACrB,IAAID,EAAI,OAAO,QAAW,YAAcC,EAAE,OAAO,UACjD,GAAI,CAACD,EAAG,OAAOC,EACf,IAAI9B,EAAI6B,EAAE,KAAKC,CAAC,EAAGxB,EAAG2B,EAAK,CAAC,EAAGlC,EAC/B,GAAI,CACA,MAAQ,IAAM,QAAU,KAAM,IAAM,EAAEO,EAAIN,EAAE,KAAK,GAAG,MAAMiC,EAAG,KAAK3B,EAAE,KAAK,CAC7E,OACO4B,EAAP,CAAgBnC,EAAI,CAAE,MAAOmC,CAAM,CAAG,QACtC,CACI,GAAI,CACI5B,GAAK,CAACA,EAAE,OAASuB,EAAI7B,EAAE,SAAY6B,EAAE,KAAK7B,CAAC,CACnD,QACA,CAAU,GAAID,EAAG,MAAMA,EAAE,KAAO,CACpC,CACA,OAAOkC,CACX,EAGA9D,GAAW,UAAY,CACnB,QAAS8D,EAAK,CAAC,EAAGjC,EAAI,EAAGA,EAAI,UAAU,OAAQA,IAC3CiC,EAAKA,EAAG,OAAO/D,GAAO,UAAU8B,EAAE,CAAC,EACvC,OAAOiC,CACX,EAGA7D,GAAiB,UAAY,CACzB,QAASyB,EAAI,EAAGG,EAAI,EAAGmC,EAAK,UAAU,OAAQnC,EAAImC,EAAInC,IAAKH,GAAK,UAAUG,GAAG,OAC7E,QAASM,EAAI,MAAMT,CAAC,EAAGkC,EAAI,EAAG/B,EAAI,EAAGA,EAAImC,EAAInC,IACzC,QAAS,EAAI,UAAUA,GAAIoC,EAAI,EAAGC,EAAK,EAAE,OAAQD,EAAIC,EAAID,IAAKL,IAC1DzB,EAAEyB,GAAK,EAAEK,GACjB,OAAO9B,CACX,EAEAjC,GAAgB,SAAUiE,EAAIC,EAAMC,EAAM,CACtC,GAAIA,GAAQ,UAAU,SAAW,EAAG,QAASxC,EAAI,EAAGyC,EAAIF,EAAK,OAAQN,EAAIjC,EAAIyC,EAAGzC,KACxEiC,GAAM,EAAEjC,KAAKuC,MACRN,IAAIA,EAAK,MAAM,UAAU,MAAM,KAAKM,EAAM,EAAGvC,CAAC,GACnDiC,EAAGjC,GAAKuC,EAAKvC,IAGrB,OAAOsC,EAAG,OAAOL,GAAM,MAAM,UAAU,MAAM,KAAKM,CAAI,CAAC,CAC3D,EAEAjE,EAAU,SAAUe,EAAG,CACnB,OAAO,gBAAgBf,GAAW,KAAK,EAAIe,EAAG,MAAQ,IAAIf,EAAQe,CAAC,CACvE,EAEAd,GAAmB,SAAUoC,EAASC,EAAYE,EAAW,CACzD,GAAI,CAAC,OAAO,cAAe,MA
AM,IAAI,UAAU,sCAAsC,EACrF,IAAIY,EAAIZ,EAAU,MAAMH,EAASC,GAAc,CAAC,CAAC,EAAGZ,EAAG0C,EAAI,CAAC,EAC5D,OAAO1C,EAAI,CAAC,EAAG2B,EAAK,MAAM,EAAGA,EAAK,OAAO,EAAGA,EAAK,QAAQ,EAAG3B,EAAE,OAAO,eAAiB,UAAY,CAAE,OAAO,IAAM,EAAGA,EACpH,SAAS2B,EAAK7B,EAAG,CAAM4B,EAAE5B,KAAIE,EAAEF,GAAK,SAAUT,EAAG,CAAE,OAAO,IAAI,QAAQ,SAAUsD,EAAGlD,EAAG,CAAEiD,EAAE,KAAK,CAAC5C,EAAGT,EAAGsD,EAAGlD,CAAC,CAAC,EAAI,GAAKmD,EAAO9C,EAAGT,CAAC,CAAG,CAAC,CAAG,EAAG,CACzI,SAASuD,EAAO9C,EAAGT,EAAG,CAAE,GAAI,CAAE+B,EAAKM,EAAE5B,GAAGT,CAAC,CAAC,CAAG,OAASU,EAAP,CAAY8C,EAAOH,EAAE,GAAG,GAAI3C,CAAC,CAAG,CAAE,CACjF,SAASqB,EAAKd,EAAG,CAAEA,EAAE,iBAAiBhC,EAAU,QAAQ,QAAQgC,EAAE,MAAM,CAAC,EAAE,KAAKwC,EAAS5B,CAAM,EAAI2B,EAAOH,EAAE,GAAG,GAAIpC,CAAC,CAAI,CACxH,SAASwC,EAAQ9B,EAAO,CAAE4B,EAAO,OAAQ5B,CAAK,CAAG,CACjD,SAASE,EAAOF,EAAO,CAAE4B,EAAO,QAAS5B,CAAK,CAAG,CACjD,SAAS6B,EAAOE,EAAG1D,EAAG,CAAM0D,EAAE1D,CAAC,EAAGqD,EAAE,MAAM,EAAGA,EAAE,QAAQE,EAAOF,EAAE,GAAG,GAAIA,EAAE,GAAG,EAAE,CAAG,CACrF,EAEAlE,GAAmB,SAAUsD,EAAG,CAC5B,IAAI9B,EAAGN,EACP,OAAOM,EAAI,CAAC,EAAG2B,EAAK,MAAM,EAAGA,EAAK,QAAS,SAAU5B,EAAG,CAAE,MAAMA,CAAG,CAAC,EAAG4B,EAAK,QAAQ,EAAG3B,EAAE,OAAO,UAAY,UAAY,CAAE,OAAO,IAAM,EAAGA,EAC1I,SAAS2B,EAAK7B,EAAGiD,EAAG,CAAE/C,EAAEF,GAAKgC,EAAEhC,GAAK,SAAUT,EAAG,CAAE,OAAQK,EAAI,CAACA,GAAK,CAAE,MAAOpB,EAAQwD,EAAEhC,GAAGT,CAAC,CAAC,EAAG,KAAMS,IAAM,QAAS,EAAIiD,EAAIA,EAAE1D,CAAC,EAAIA,CAAG,EAAI0D,CAAG,CAClJ,EAEAtE,GAAgB,SAAUqD,EAAG,CACzB,GAAI,CAAC,OAAO,cAAe,MAAM,IAAI,UAAU,sCAAsC,EACrF,IAAID,EAAIC,EAAE,OAAO,eAAgB,EACjC,OAAOD,EAAIA,EAAE,KAAKC,CAAC,GAAKA,EAAI,OAAO7D,GAAa,WAAaA,EAAS6D,CAAC,EAAIA,EAAE,OAAO,UAAU,EAAG,EAAI,CAAC,EAAGH,EAAK,MAAM,EAAGA,EAAK,OAAO,EAAGA,EAAK,QAAQ,EAAG,EAAE,OAAO,eAAiB,UAAY,CAAE,OAAO,IAAM,EAAG,GAC9M,SAASA,EAAK7B,EAAG,CAAE,EAAEA,GAAKgC,EAAEhC,IAAM,SAAUT,EAAG,CAAE,OAAO,IAAI,QAAQ,SAAU4B,EAASC,EAAQ,CAAE7B,EAAIyC,EAAEhC,GAAGT,CAAC,EAAGwD,EAAO5B,EAASC,EAAQ7B,EAAE,KAAMA,EAAE,KAAK,CAAG,CAAC,CAAG,CAAG,CAC/J,SAASwD,EAAO5B,EAASC,EAAQ1B,EAAGH,EAAG,CAAE,QAAQ,QAAQA,CAAC,EAAE,KAAK,SAASA,EAAG,CAAE4B,EAAQ,CAAE,MAAO5B,EAAG,KAAMG,CAAE,CAAC,CAAG,EAAG0B,CAAM,CAAG,CAC/H,EAEAxC,GAAuB,SAAUsE,EAAQC,EAAK,CAC1C,OAAI,OAAO,eAAkB,OAAO,eAAeD,EAAQ,MAAO,CAAE,MAAOC,CAAI,CAAC,EAAYD,EAAO,IAAMC,EAClGD,CACX,EAEA,IAAIE,EAAqB,OAAO,OAAU,SAASpB,EAAGzC,EAAG,CACrD,OAAO,eAAeyC,EAAG,UAAW,CAAE,WAAY,GAAM,MAAOzC,CAAE,CAAC,CACtE,EAAK,SAASyC,EAAGzC,EAAG,CAChByC,EAAE,QAAazC,CACnB,EAEAV,GAAe,SAAUwE,EAAK,CAC1B,GAAIA,GAAOA,EAAI,WAAY,OAAOA,EAClC,IAAI7B,EAAS,CAAC,EACd,GAAI6B,GAAO,KAAM,QAASpB,KAAKoB,EAASpB,IAAM,WAAa,OAAO,UAAU,eAAe,KAAKoB,EAAKpB,CAAC,GAAGhD,GAAgBuC,EAAQ6B,EAAKpB,CAAC,EACvI,OAAAmB,EAAmB5B,EAAQ6B,CAAG,EACvB7B,CACX,EAEA1C,GAAkB,SAAUuE,EAAK,CAC7B,OAAQA,GAAOA,EAAI,WAAcA,EAAM,CAAE,QAAWA,CAAI,CAC5D,EAEAtE,GAAyB,SAAUuE,EAAUC,EAAOC,EAAM,EAAG,CACzD,GAAIA,IAAS,KAAO,CAAC,EAAG,MAAM,IAAI,UAAU,+CAA+C,EAC3F,GAAI,OAAOD,GAAU,WAAaD,IAAaC,GAAS,CAAC,EAAI,CAACA,EAAM,IAAID,CAAQ,EAAG,MAAM,IAAI,UAAU,0EAA0E,EACjL,OAAOE,IAAS,IAAM,EAAIA,IAAS,IAAM,EAAE,KAAKF,CAAQ,EAAI,EAAI,EAAE,MAAQC,EAAM,IAAID,CAAQ,CAChG,EAEAtE,GAAyB,SAAUsE,EAAUC,EAAOrC,EAAOsC,EAAMP,EAAG,CAChE,GAAIO,IAAS,IAAK,MAAM,IAAI,UAAU,gCAAgC,EACtE,GAAIA,IAAS,KAAO,CAACP,EAAG,MAAM,IAAI,UAAU,+CAA+C,EAC3F,GAAI,OAAOM,GAAU,WAAaD,IAAaC,GAAS,CAACN,EAAI,CAACM,EAAM,IAAID,CAAQ,EAAG,MAAM,IAAI,UAAU,yEAAyE,EAChL,OAAQE,IAAS,IAAMP,EAAE,KAAKK,EAAUpC,CAAK,EAAI+B,EAAIA,EAAE,MAAQ/B,EAAQqC,EAAM,IAAID,EAAUpC,CAAK,EAAIA,CACxG,EAEA1B,EAAS,YAAa9B,EAAS,EAC/B8B,EAAS,WAAY7B,EAAQ,EAC7B6B,EAAS,SAAU5B,EAAM,EACzB4B,EAAS,aAAc3B,EAAU,EACjC2B,EAAS,UAAW1B,EAAO,EAC3B0B,EAAS,aAAczB,EAAU,EACjCyB,EAAS,YAAaxB,EAAS,EAC/BwB,EAAS,cAAevB,EAAW,EACnCuB,EAAS,eAAgBtB,EAAY,EACrCsB,EAAS,kBAAmBP,EAAe,EAC3CO,E
AAS,WAAYrB,CAAQ,EAC7BqB,EAAS,SAAUpB,EAAM,EACzBoB,EAAS,WAAYnB,EAAQ,EAC7BmB,EAAS,iBAAkBlB,EAAc,EACzCkB,EAAS,gBAAiBjB,EAAa,EACvCiB,EAAS,UAAWhB,CAAO,EAC3BgB,EAAS,mBAAoBf,EAAgB,EAC7Ce,EAAS,mBAAoBd,EAAgB,EAC7Cc,EAAS,gBAAiBb,EAAa,EACvCa,EAAS,uBAAwBZ,EAAoB,EACrDY,EAAS,eAAgBX,EAAY,EACrCW,EAAS,kBAAmBV,EAAe,EAC3CU,EAAS,yBAA0BT,EAAsB,EACzDS,EAAS,yBAA0BR,EAAsB,CAC7D,CAAC,ICjTD,IAAAyE,GAAkB,WACZ,CACF,UAAAC,EACA,SAAAC,GACA,OAAAC,GACA,WAAAC,GACA,QAAAC,GACA,WAAAC,GACA,UAAAC,GACA,YAAAC,GACA,aAAAC,GACA,gBAAAC,GACA,SAAAC,EACA,OAAAC,EACA,SAAAC,GACA,eAAAC,GACA,cAAAC,EACA,QAAAC,GACA,iBAAAC,GACA,iBAAAC,GACA,cAAAC,GACA,qBAAAC,GACA,aAAAC,GACA,gBAAAC,GACA,uBAAAC,GACA,uBAAAC,EACJ,EAAI,GAAAC,QCtBE,SAAUC,EAAWC,EAAU,CACnC,OAAO,OAAOA,GAAU,UAC1B,CCGM,SAAUC,GAAoBC,EAAgC,CAClE,IAAMC,EAAS,SAACC,EAAa,CAC3B,MAAM,KAAKA,CAAQ,EACnBA,EAAS,MAAQ,IAAI,MAAK,EAAG,KAC/B,EAEMC,EAAWH,EAAWC,CAAM,EAClC,OAAAE,EAAS,UAAY,OAAO,OAAO,MAAM,SAAS,EAClDA,EAAS,UAAU,YAAcA,EAC1BA,CACT,CCDO,IAAMC,GAA+CC,GAC1D,SAACC,EAAM,CACL,OAAA,SAA4CC,EAA0B,CACpED,EAAO,IAAI,EACX,KAAK,QAAUC,EACRA,EAAO,OAAM;EACxBA,EAAO,IAAI,SAACC,EAAKC,EAAC,CAAK,OAAGA,EAAI,EAAC,KAAKD,EAAI,SAAQ,CAAzB,CAA6B,EAAE,KAAK;GAAM,EACzD,GACJ,KAAK,KAAO,sBACZ,KAAK,OAASD,CAChB,CARA,CAQC,ECvBC,SAAUG,EAAaC,EAA6BC,EAAO,CAC/D,GAAID,EAAK,CACP,IAAME,EAAQF,EAAI,QAAQC,CAAI,EAC9B,GAAKC,GAASF,EAAI,OAAOE,EAAO,CAAC,EAErC,CCOA,IAAAC,EAAA,UAAA,CAyBE,SAAAA,EAAoBC,EAA4B,CAA5B,KAAA,gBAAAA,EAdb,KAAA,OAAS,GAER,KAAA,WAAmD,KAMnD,KAAA,YAAqD,IAMV,CAQnD,OAAAD,EAAA,UAAA,YAAA,UAAA,aACME,EAEJ,GAAI,CAAC,KAAK,OAAQ,CAChB,KAAK,OAAS,GAGN,IAAAC,EAAe,KAAI,WAC3B,GAAIA,EAEF,GADA,KAAK,WAAa,KACd,MAAM,QAAQA,CAAU,MAC1B,QAAqBC,EAAAC,EAAAF,CAAU,EAAAG,EAAAF,EAAA,KAAA,EAAA,CAAAE,EAAA,KAAAA,EAAAF,EAAA,KAAA,EAAE,CAA5B,IAAMG,EAAMD,EAAA,MACfC,EAAO,OAAO,IAAI,yGAGpBJ,EAAW,OAAO,IAAI,EAIlB,IAAiBK,EAAqB,KAAI,gBAClD,GAAIC,EAAWD,CAAgB,EAC7B,GAAI,CACFA,EAAgB,QACTE,EAAP,CACAR,EAASQ,aAAaC,GAAsBD,EAAE,OAAS,CAACA,CAAC,EAIrD,IAAAE,EAAgB,KAAI,YAC5B,GAAIA,EAAa,CACf,KAAK,YAAc,SACnB,QAAwBC,EAAAR,EAAAO,CAAW,EAAAE,EAAAD,EAAA,KAAA,EAAA,CAAAC,EAAA,KAAAA,EAAAD,EAAA,KAAA,EAAE,CAAhC,IAAME,EAASD,EAAA,MAClB,GAAI,CACFE,GAAcD,CAAS,QAChBE,EAAP,CACAf,EAASA,GAAM,KAANA,EAAU,CAAA,EACfe,aAAeN,GACjBT,EAAMgB,EAAAA,EAAA,CAAA,EAAAC,EAAOjB,CAAM,CAAA,EAAAiB,EAAKF,EAAI,MAAM,CAAA,EAElCf,EAAO,KAAKe,CAAG,sGAMvB,GAAIf,EACF,MAAM,IAAIS,GAAoBT,CAAM,EAG1C,EAoBAF,EAAA,UAAA,IAAA,SAAIoB,EAAuB,OAGzB,GAAIA,GAAYA,IAAa,KAC3B,GAAI,KAAK,OAGPJ,GAAcI,CAAQ,MACjB,CACL,GAAIA,aAAoBpB,EAAc,CAGpC,GAAIoB,EAAS,QAAUA,EAAS,WAAW,IAAI,EAC7C,OAEFA,EAAS,WAAW,IAAI,GAEzB,KAAK,aAAcC,EAAA,KAAK,eAAW,MAAAA,IAAA,OAAAA,EAAI,CAAA,GAAI,KAAKD,CAAQ,EAG/D,EAOQpB,EAAA,UAAA,WAAR,SAAmBsB,EAAoB,CAC7B,IAAAnB,EAAe,KAAI,WAC3B,OAAOA,IAAemB,GAAW,MAAM,QAAQnB,CAAU,GAAKA,EAAW,SAASmB,CAAM,CAC1F,EASQtB,EAAA,UAAA,WAAR,SAAmBsB,EAAoB,CAC7B,IAAAnB,EAAe,KAAI,WAC3B,KAAK,WAAa,MAAM,QAAQA,CAAU,GAAKA,EAAW,KAAKmB,CAAM,EAAGnB,GAAcA,EAAa,CAACA,EAAYmB,CAAM,EAAIA,CAC5H,EAMQtB,EAAA,UAAA,cAAR,SAAsBsB,EAAoB,CAChC,IAAAnB,EAAe,KAAI,WACvBA,IAAemB,EACjB,KAAK,WAAa,KACT,MAAM,QAAQnB,CAAU,GACjCoB,EAAUpB,EAAYmB,CAAM,CAEhC,EAgBAtB,EAAA,UAAA,OAAA,SAAOoB,EAAsC,CACnC,IAAAR,EAAgB,KAAI,YAC5BA,GAAeW,EAAUX,EAAaQ,CAAQ,EAE1CA,aAAoBpB,GACtBoB,EAAS,cAAc,IAAI,CAE/B,EAlLcpB,EAAA,MAAS,UAAA,CACrB,IAAMwB,EAAQ,IAAIxB,EAClB,OAAAwB,EAAM,OAAS,GACRA,CACT,EAAE,EA+KJxB,GArLA,EAuLO,IAAMyB,GAAqBC,EAAa,MAEzC,SAAUC,GAAeC,EAAU,CACvC,OACEA,aAAiBF,GAChBE,GAAS,WAAYA,GAASC,EAAWD,EAAM,MAAM,GAAKC,EAAWD,EAAM,GAAG,GAAKC,EAAWD,EAAM,WAAW,CAEpH,CAEA,SAASE,GAAcC,EAAwC,CACzDF,EAAWE,CAAS,EACtBA,EAAS,EAETA,EAAU,YAAW,CAEzB,CChNO,IAAMC,EAAuB,CAClC,iBAAkB,KAClB,sBAAuB,KACvB,QAAS,OACT,sCAAuC,GACvC,yBAA0B,ICGrB,IAAMC,EAAmC
,CAG9C,WAAA,SAAWC,EAAqBC,EAAgB,SAAEC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,EAAA,GAAA,UAAAA,GACxC,IAAAC,EAAaL,EAAe,SACpC,OAAIK,GAAQ,MAARA,EAAU,WACLA,EAAS,WAAU,MAAnBA,EAAQC,EAAA,CAAYL,EAASC,CAAO,EAAAK,EAAKJ,CAAI,CAAA,CAAA,EAE/C,WAAU,MAAA,OAAAG,EAAA,CAACL,EAASC,CAAO,EAAAK,EAAKJ,CAAI,CAAA,CAAA,CAC7C,EACA,aAAA,SAAaK,EAAM,CACT,IAAAH,EAAaL,EAAe,SACpC,QAAQK,GAAQ,KAAA,OAARA,EAAU,eAAgB,cAAcG,CAAa,CAC/D,EACA,SAAU,QCjBN,SAAUC,GAAqBC,EAAQ,CAC3CC,EAAgB,WAAW,UAAA,CACjB,IAAAC,EAAqBC,EAAM,iBACnC,GAAID,EAEFA,EAAiBF,CAAG,MAGpB,OAAMA,CAEV,CAAC,CACH,CCtBM,SAAUI,GAAI,CAAK,CCMlB,IAAMC,GAAyB,UAAA,CAAM,OAAAC,GAAmB,IAAK,OAAW,MAAS,CAA5C,EAAsE,EAO5G,SAAUC,GAAkBC,EAAU,CAC1C,OAAOF,GAAmB,IAAK,OAAWE,CAAK,CACjD,CAOM,SAAUC,GAAoBC,EAAQ,CAC1C,OAAOJ,GAAmB,IAAKI,EAAO,MAAS,CACjD,CAQM,SAAUJ,GAAmBK,EAAuBD,EAAYF,EAAU,CAC9E,MAAO,CACL,KAAIG,EACJ,MAAKD,EACL,MAAKF,EAET,CCrCA,IAAII,EAAuD,KASrD,SAAUC,EAAaC,EAAc,CACzC,GAAIC,EAAO,sCAAuC,CAChD,IAAMC,EAAS,CAACJ,EAKhB,GAJII,IACFJ,EAAU,CAAE,YAAa,GAAO,MAAO,IAAI,GAE7CE,EAAE,EACEE,EAAQ,CACJ,IAAAC,EAAyBL,EAAvBM,EAAWD,EAAA,YAAEE,EAAKF,EAAA,MAE1B,GADAL,EAAU,KACNM,EACF,MAAMC,QAMVL,EAAE,CAEN,CAMM,SAAUM,GAAaC,EAAQ,CAC/BN,EAAO,uCAAyCH,IAClDA,EAAQ,YAAc,GACtBA,EAAQ,MAAQS,EAEpB,CCrBA,IAAAC,EAAA,SAAAC,EAAA,CAAmCC,EAAAF,EAAAC,CAAA,EA6BjC,SAAAD,EAAYG,EAA6C,CAAzD,IAAAC,EACEH,EAAA,KAAA,IAAA,GAAO,KATC,OAAAG,EAAA,UAAqB,GAUzBD,GACFC,EAAK,YAAcD,EAGfE,GAAeF,CAAW,GAC5BA,EAAY,IAAIC,CAAI,GAGtBA,EAAK,YAAcE,IAEvB,CAzBO,OAAAN,EAAA,OAAP,SAAiBO,EAAwBC,EAA2BC,EAAqB,CACvF,OAAO,IAAIC,GAAeH,EAAMC,EAAOC,CAAQ,CACjD,EAgCAT,EAAA,UAAA,KAAA,SAAKW,EAAS,CACR,KAAK,UACPC,GAA0BC,GAAiBF,CAAK,EAAG,IAAI,EAEvD,KAAK,MAAMA,CAAM,CAErB,EASAX,EAAA,UAAA,MAAA,SAAMc,EAAS,CACT,KAAK,UACPF,GAA0BG,GAAkBD,CAAG,EAAG,IAAI,GAEtD,KAAK,UAAY,GACjB,KAAK,OAAOA,CAAG,EAEnB,EAQAd,EAAA,UAAA,SAAA,UAAA,CACM,KAAK,UACPY,GAA0BI,GAAuB,IAAI,GAErD,KAAK,UAAY,GACjB,KAAK,UAAS,EAElB,EAEAhB,EAAA,UAAA,YAAA,UAAA,CACO,KAAK,SACR,KAAK,UAAY,GACjBC,EAAA,UAAM,YAAW,KAAA,IAAA,EACjB,KAAK,YAAc,KAEvB,EAEUD,EAAA,UAAA,MAAV,SAAgBW,EAAQ,CACtB,KAAK,YAAY,KAAKA,CAAK,CAC7B,EAEUX,EAAA,UAAA,OAAV,SAAiBc,EAAQ,CACvB,GAAI,CACF,KAAK,YAAY,MAAMA,CAAG,UAE1B,KAAK,YAAW,EAEpB,EAEUd,EAAA,UAAA,UAAV,UAAA,CACE,GAAI,CACF,KAAK,YAAY,SAAQ,UAEzB,KAAK,YAAW,EAEpB,EACFA,CAAA,EApHmCiB,CAAY,EA2H/C,IAAMC,GAAQ,SAAS,UAAU,KAEjC,SAASC,GAAyCC,EAAQC,EAAY,CACpE,OAAOH,GAAM,KAAKE,EAAIC,CAAO,CAC/B,CAMA,IAAAC,GAAA,UAAA,CACE,SAAAA,EAAoBC,EAAqC,CAArC,KAAA,gBAAAA,CAAwC,CAE5D,OAAAD,EAAA,UAAA,KAAA,SAAKE,EAAQ,CACH,IAAAD,EAAoB,KAAI,gBAChC,GAAIA,EAAgB,KAClB,GAAI,CACFA,EAAgB,KAAKC,CAAK,QACnBC,EAAP,CACAC,GAAqBD,CAAK,EAGhC,EAEAH,EAAA,UAAA,MAAA,SAAMK,EAAQ,CACJ,IAAAJ,EAAoB,KAAI,gBAChC,GAAIA,EAAgB,MAClB,GAAI,CACFA,EAAgB,MAAMI,CAAG,QAClBF,EAAP,CACAC,GAAqBD,CAAK,OAG5BC,GAAqBC,CAAG,CAE5B,EAEAL,EAAA,UAAA,SAAA,UAAA,CACU,IAAAC,EAAoB,KAAI,gBAChC,GAAIA,EAAgB,SAClB,GAAI,CACFA,EAAgB,SAAQ,QACjBE,EAAP,CACAC,GAAqBD,CAAK,EAGhC,EACFH,CAAA,EArCA,EAuCAM,GAAA,SAAAC,EAAA,CAAuCC,EAAAF,EAAAC,CAAA,EACrC,SAAAD,EACEG,EACAN,EACAO,EAA8B,CAHhC,IAAAC,EAKEJ,EAAA,KAAA,IAAA,GAAO,KAEHN,EACJ,GAAIW,EAAWH,CAAc,GAAK,CAACA,EAGjCR,EAAkB,CAChB,KAAOQ,GAAc,KAAdA,EAAkB,OACzB,MAAON,GAAK,KAALA,EAAS,OAChB,SAAUO,GAAQ,KAARA,EAAY,YAEnB,CAEL,IAAIG,EACAF,GAAQG,EAAO,0BAIjBD,EAAU,OAAO,OAAOJ,CAAc,EACtCI,EAAQ,YAAc,UAAA,CAAM,OAAAF,EAAK,YAAW,CAAhB,EAC5BV,EAAkB,CAChB,KAAMQ,EAAe,MAAQZ,GAAKY,EAAe,KAAMI,CAAO,EAC9D,MAAOJ,EAAe,OAASZ,GAAKY,EAAe,MAAOI,CAAO,EACjE,SAAUJ,EAAe,UAAYZ,GAAKY,EAAe,SAAUI,CAAO,IAI5EZ,EAAkBQ,EAMtB,OAAAE,EAAK,YAAc,IAAIX,GAAiBC,CAAe,GACzD,CACF,OAAAK,CAAA,EAzCuCS,CAAU,EA2CjD,SAASC,GAAqBC,EAAU,CAClCC,EAAO,sCACTC,GAAaF,CAAK,EAIlBG,GAAqBH,CAAK,CAE9B,CAQA,SAASI,GAAoBC,EAAQ,CACnC,MAA
MA,CACR,CAOA,SAASC,GAA0BC,EAA2CC,EAA2B,CAC/F,IAAAC,EAA0BR,EAAM,sBACxCQ,GAAyBC,EAAgB,WAAW,UAAA,CAAM,OAAAD,EAAsBF,EAAcC,CAAU,CAA9C,CAA+C,CAC3G,CAOO,IAAMG,GAA6D,CACxE,OAAQ,GACR,KAAMC,EACN,MAAOR,GACP,SAAUQ,GCjRL,IAAMC,EAA+B,UAAA,CAAM,OAAC,OAAO,QAAW,YAAc,OAAO,YAAe,cAAvD,EAAsE,ECyClH,SAAUC,EAAYC,EAAI,CAC9B,OAAOA,CACT,CCsCM,SAAUC,GAAoBC,EAA+B,CACjE,OAAIA,EAAI,SAAW,EACVC,EAGLD,EAAI,SAAW,EACVA,EAAI,GAGN,SAAeE,EAAQ,CAC5B,OAAOF,EAAI,OAAO,SAACG,EAAWC,EAAuB,CAAK,OAAAA,EAAGD,CAAI,CAAP,EAAUD,CAAY,CAClF,CACF,CC9EA,IAAAG,EAAA,UAAA,CAkBE,SAAAA,EAAYC,EAA6E,CACnFA,IACF,KAAK,WAAaA,EAEtB,CA4BA,OAAAD,EAAA,UAAA,KAAA,SAAQE,EAAyB,CAC/B,IAAMC,EAAa,IAAIH,EACvB,OAAAG,EAAW,OAAS,KACpBA,EAAW,SAAWD,EACfC,CACT,EA8IAH,EAAA,UAAA,UAAA,SACEI,EACAC,EACAC,EAA8B,CAHhC,IAAAC,EAAA,KAKQC,EAAaC,GAAaL,CAAc,EAAIA,EAAiB,IAAIM,GAAeN,EAAgBC,EAAOC,CAAQ,EAErH,OAAAK,EAAa,UAAA,CACL,IAAAC,EAAuBL,EAArBL,EAAQU,EAAA,SAAEC,EAAMD,EAAA,OACxBJ,EAAW,IACTN,EAGIA,EAAS,KAAKM,EAAYK,CAAM,EAChCA,EAIAN,EAAK,WAAWC,CAAU,EAG1BD,EAAK,cAAcC,CAAU,CAAC,CAEtC,CAAC,EAEMA,CACT,EAGUR,EAAA,UAAA,cAAV,SAAwBc,EAAmB,CACzC,GAAI,CACF,OAAO,KAAK,WAAWA,CAAI,QACpBC,EAAP,CAIAD,EAAK,MAAMC,CAAG,EAElB,EA6DAf,EAAA,UAAA,QAAA,SAAQgB,EAA0BC,EAAoC,CAAtE,IAAAV,EAAA,KACE,OAAAU,EAAcC,GAAeD,CAAW,EAEjC,IAAIA,EAAkB,SAACE,EAASC,EAAM,CAC3C,IAAMZ,EAAa,IAAIE,GAAkB,CACvC,KAAM,SAACW,EAAK,CACV,GAAI,CACFL,EAAKK,CAAK,QACHN,EAAP,CACAK,EAAOL,CAAG,EACVP,EAAW,YAAW,EAE1B,EACA,MAAOY,EACP,SAAUD,EACX,EACDZ,EAAK,UAAUC,CAAU,CAC3B,CAAC,CACH,EAGUR,EAAA,UAAA,WAAV,SAAqBQ,EAA2B,OAC9C,OAAOI,EAAA,KAAK,UAAM,MAAAA,IAAA,OAAA,OAAAA,EAAE,UAAUJ,CAAU,CAC1C,EAOAR,EAAA,UAACG,GAAD,UAAA,CACE,OAAO,IACT,EA4FAH,EAAA,UAAA,KAAA,UAAA,SAAKsB,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACH,OAAOC,GAAcF,CAAU,EAAE,IAAI,CACvC,EA6BAtB,EAAA,UAAA,UAAA,SAAUiB,EAAoC,CAA9C,IAAAV,EAAA,KACE,OAAAU,EAAcC,GAAeD,CAAW,EAEjC,IAAIA,EAAY,SAACE,EAASC,EAAM,CACrC,IAAIC,EACJd,EAAK,UACH,SAACkB,EAAI,CAAK,OAACJ,EAAQI,CAAT,EACV,SAACV,EAAQ,CAAK,OAAAK,EAAOL,CAAG,CAAV,EACd,UAAA,CAAM,OAAAI,EAAQE,CAAK,CAAb,CAAc,CAExB,CAAC,CACH,EA3aOrB,EAAA,OAAkC,SAAIC,EAAwD,CACnG,OAAO,IAAID,EAAcC,CAAS,CACpC,EA0aFD,GA/cA,EAwdA,SAAS0B,GAAeC,EAA+C,OACrE,OAAOC,EAAAD,GAAW,KAAXA,EAAeE,EAAO,WAAO,MAAAD,IAAA,OAAAA,EAAI,OAC1C,CAEA,SAASE,GAAcC,EAAU,CAC/B,OAAOA,GAASC,EAAWD,EAAM,IAAI,GAAKC,EAAWD,EAAM,KAAK,GAAKC,EAAWD,EAAM,QAAQ,CAChG,CAEA,SAASE,GAAgBF,EAAU,CACjC,OAAQA,GAASA,aAAiBG,GAAgBJ,GAAWC,CAAK,GAAKI,GAAeJ,CAAK,CAC7F,CC1eM,SAAUK,GAAQC,EAAW,CACjC,OAAOC,EAAWD,GAAM,KAAA,OAANA,EAAQ,IAAI,CAChC,CAMM,SAAUE,EACdC,EAAqF,CAErF,OAAO,SAACH,EAAqB,CAC3B,GAAID,GAAQC,CAAM,EAChB,OAAOA,EAAO,KAAK,SAA+BI,EAA2B,CAC3E,GAAI,CACF,OAAOD,EAAKC,EAAc,IAAI,QACvBC,EAAP,CACA,KAAK,MAAMA,CAAG,EAElB,CAAC,EAEH,MAAM,IAAI,UAAU,wCAAwC,CAC9D,CACF,CCjBM,SAAUC,EACdC,EACAC,EACAC,EACAC,EACAC,EAAuB,CAEvB,OAAO,IAAIC,GAAmBL,EAAaC,EAAQC,EAAYC,EAASC,CAAU,CACpF,CAMA,IAAAC,GAAA,SAAAC,EAAA,CAA2CC,EAAAF,EAAAC,CAAA,EAiBzC,SAAAD,EACEL,EACAC,EACAC,EACAC,EACQC,EACAI,EAAiC,CAN3C,IAAAC,EAoBEH,EAAA,KAAA,KAAMN,CAAW,GAAC,KAfV,OAAAS,EAAA,WAAAL,EACAK,EAAA,kBAAAD,EAeRC,EAAK,MAAQR,EACT,SAAuCS,EAAQ,CAC7C,GAAI,CACFT,EAAOS,CAAK,QACLC,EAAP,CACAX,EAAY,MAAMW,CAAG,EAEzB,EACAL,EAAA,UAAM,MACVG,EAAK,OAASN,EACV,SAAuCQ,EAAQ,CAC7C,GAAI,CACFR,EAAQQ,CAAG,QACJA,EAAP,CAEAX,EAAY,MAAMW,CAAG,UAGrB,KAAK,YAAW,EAEpB,EACAL,EAAA,UAAM,OACVG,EAAK,UAAYP,EACb,UAAA,CACE,GAAI,CACFA,EAAU,QACHS,EAAP,CAEAX,EAAY,MAAMW,CAAG,UAGrB,KAAK,YAAW,EAEpB,EACAL,EAAA,UAAM,WACZ,CAEA,OAAAD,EAAA,UAAA,YAAA,UAAA,OACE,GAAI,CAAC,KAAK,mBAAqB,KAAK,kBAAiB,EAAI,CAC/C,IAAAO,EAAW,KAAI,OACvBN,EAAA,UAAM,YAAW,KAAA,IAAA,EAEjB,CAACM,KAAUC,EAAA,KAAK,cAAU,MAAAA,IAAA,QAAAA,EAAA,KAAf,IAAI,GAEnB,EACFR,CAAA,E
AnF2CS,CAAU,ECP9C,IAAMC,GAAuDC,GAClE,SAACC,EAAM,CACL,OAAA,UAAoC,CAClCA,EAAO,IAAI,EACX,KAAK,KAAO,0BACZ,KAAK,QAAU,qBACjB,CAJA,CAIC,ECXL,IAAAC,GAAA,SAAAC,EAAA,CAAgCC,EAAAF,EAAAC,CAAA,EAwB9B,SAAAD,GAAA,CAAA,IAAAG,EAEEF,EAAA,KAAA,IAAA,GAAO,KAzBT,OAAAE,EAAA,OAAS,GAEDA,EAAA,iBAAyC,KAGjDA,EAAA,UAA2B,CAAA,EAE3BA,EAAA,UAAY,GAEZA,EAAA,SAAW,GAEXA,EAAA,YAAmB,MAenB,CAGA,OAAAH,EAAA,UAAA,KAAA,SAAQI,EAAwB,CAC9B,IAAMC,EAAU,IAAIC,GAAiB,KAAM,IAAI,EAC/C,OAAAD,EAAQ,SAAWD,EACZC,CACT,EAGUL,EAAA,UAAA,eAAV,UAAA,CACE,GAAI,KAAK,OACP,MAAM,IAAIO,EAEd,EAEAP,EAAA,UAAA,KAAA,SAAKQ,EAAQ,CAAb,IAAAL,EAAA,KACEM,EAAa,UAAA,SAEX,GADAN,EAAK,eAAc,EACf,CAACA,EAAK,UAAW,CACdA,EAAK,mBACRA,EAAK,iBAAmB,MAAM,KAAKA,EAAK,SAAS,OAEnD,QAAuBO,EAAAC,EAAAR,EAAK,gBAAgB,EAAAS,EAAAF,EAAA,KAAA,EAAA,CAAAE,EAAA,KAAAA,EAAAF,EAAA,KAAA,EAAE,CAAzC,IAAMG,EAAQD,EAAA,MACjBC,EAAS,KAAKL,CAAK,qGAGzB,CAAC,CACH,EAEAR,EAAA,UAAA,MAAA,SAAMc,EAAQ,CAAd,IAAAX,EAAA,KACEM,EAAa,UAAA,CAEX,GADAN,EAAK,eAAc,EACf,CAACA,EAAK,UAAW,CACnBA,EAAK,SAAWA,EAAK,UAAY,GACjCA,EAAK,YAAcW,EAEnB,QADQC,EAAcZ,EAAI,UACnBY,EAAU,QACfA,EAAU,MAAK,EAAI,MAAMD,CAAG,EAGlC,CAAC,CACH,EAEAd,EAAA,UAAA,SAAA,UAAA,CAAA,IAAAG,EAAA,KACEM,EAAa,UAAA,CAEX,GADAN,EAAK,eAAc,EACf,CAACA,EAAK,UAAW,CACnBA,EAAK,UAAY,GAEjB,QADQY,EAAcZ,EAAI,UACnBY,EAAU,QACfA,EAAU,MAAK,EAAI,SAAQ,EAGjC,CAAC,CACH,EAEAf,EAAA,UAAA,YAAA,UAAA,CACE,KAAK,UAAY,KAAK,OAAS,GAC/B,KAAK,UAAY,KAAK,iBAAmB,IAC3C,EAEA,OAAA,eAAIA,EAAA,UAAA,WAAQ,KAAZ,UAAA,OACE,QAAOgB,EAAA,KAAK,aAAS,MAAAA,IAAA,OAAA,OAAAA,EAAE,QAAS,CAClC,kCAGUhB,EAAA,UAAA,cAAV,SAAwBiB,EAAyB,CAC/C,YAAK,eAAc,EACZhB,EAAA,UAAM,cAAa,KAAA,KAACgB,CAAU,CACvC,EAGUjB,EAAA,UAAA,WAAV,SAAqBiB,EAAyB,CAC5C,YAAK,eAAc,EACnB,KAAK,wBAAwBA,CAAU,EAChC,KAAK,gBAAgBA,CAAU,CACxC,EAGUjB,EAAA,UAAA,gBAAV,SAA0BiB,EAA2B,CAArD,IAAAd,EAAA,KACQa,EAAqC,KAAnCE,EAAQF,EAAA,SAAEG,EAASH,EAAA,UAAED,EAASC,EAAA,UACtC,OAAIE,GAAYC,EACPC,IAET,KAAK,iBAAmB,KACxBL,EAAU,KAAKE,CAAU,EAClB,IAAII,EAAa,UAAA,CACtBlB,EAAK,iBAAmB,KACxBmB,EAAUP,EAAWE,CAAU,CACjC,CAAC,EACH,EAGUjB,EAAA,UAAA,wBAAV,SAAkCiB,EAA2B,CACrD,IAAAD,EAAuC,KAArCE,EAAQF,EAAA,SAAEO,EAAWP,EAAA,YAAEG,EAASH,EAAA,UACpCE,EACFD,EAAW,MAAMM,CAAW,EACnBJ,GACTF,EAAW,SAAQ,CAEvB,EAQAjB,EAAA,UAAA,aAAA,UAAA,CACE,IAAMwB,EAAkB,IAAIC,EAC5B,OAAAD,EAAW,OAAS,KACbA,CACT,EAxHOxB,EAAA,OAAkC,SAAI0B,EAA0BC,EAAqB,CAC1F,OAAO,IAAIrB,GAAoBoB,EAAaC,CAAM,CACpD,EAuHF3B,GA7IgCyB,CAAU,EAkJ1C,IAAAG,GAAA,SAAAC,EAAA,CAAyCC,EAAAF,EAAAC,CAAA,EACvC,SAAAD,EAESG,EACPC,EAAsB,CAHxB,IAAAC,EAKEJ,EAAA,KAAA,IAAA,GAAO,KAHA,OAAAI,EAAA,YAAAF,EAIPE,EAAK,OAASD,GAChB,CAEA,OAAAJ,EAAA,UAAA,KAAA,SAAKM,EAAQ,UACXC,GAAAC,EAAA,KAAK,eAAW,MAAAA,IAAA,OAAA,OAAAA,EAAE,QAAI,MAAAD,IAAA,QAAAA,EAAA,KAAAC,EAAGF,CAAK,CAChC,EAEAN,EAAA,UAAA,MAAA,SAAMS,EAAQ,UACZF,GAAAC,EAAA,KAAK,eAAW,MAAAA,IAAA,OAAA,OAAAA,EAAE,SAAK,MAAAD,IAAA,QAAAA,EAAA,KAAAC,EAAGC,CAAG,CAC/B,EAEAT,EAAA,UAAA,SAAA,UAAA,UACEO,GAAAC,EAAA,KAAK,eAAW,MAAAA,IAAA,OAAA,OAAAA,EAAE,YAAQ,MAAAD,IAAA,QAAAA,EAAA,KAAAC,CAAA,CAC5B,EAGUR,EAAA,UAAA,WAAV,SAAqBU,EAAyB,SAC5C,OAAOH,GAAAC,EAAA,KAAK,UAAM,MAAAA,IAAA,OAAA,OAAAA,EAAE,UAAUE,CAAU,KAAC,MAAAH,IAAA,OAAAA,EAAII,EAC/C,EACFX,CAAA,EA1ByCY,EAAO,EC5JzC,IAAMC,EAA+C,CAC1D,IAAG,UAAA,CAGD,OAAQA,EAAsB,UAAY,MAAM,IAAG,CACrD,EACA,SAAU,QCwBZ,IAAAC,GAAA,SAAAC,EAAA,CAAsCC,EAAAF,EAAAC,CAAA,EAUpC,SAAAD,EACUG,EACAC,EACAC,EAA6D,CAF7DF,IAAA,SAAAA,EAAA,KACAC,IAAA,SAAAA,EAAA,KACAC,IAAA,SAAAA,EAAAC,GAHV,IAAAC,EAKEN,EAAA,KAAA,IAAA,GAAO,KAJC,OAAAM,EAAA,YAAAJ,EACAI,EAAA,YAAAH,EACAG,EAAA,mBAAAF,EAZFE,EAAA,QAA0B,CAAA,EAC1BA,EAAA,oBAAsB,GAc5BA,EAAK,oBAAsBH,IAAgB,IAC3CG,EAAK,YAAc,KAAK,IAAI,EAAGJ,CAAW,EAC1CI,EAAK,YAAc,KAAK,IAAI,EAAGH,CAAW,GAC5C,CAEA,OAAAJ,EAAA,UAAA,KAAA,SAAKQ,EAAQ,CACL,IAAAC,EAA+
E,KAA7EC,EAASD,EAAA,UAAEE,EAAOF,EAAA,QAAEG,EAAmBH,EAAA,oBAAEJ,EAAkBI,EAAA,mBAAEL,EAAWK,EAAA,YAC3EC,IACHC,EAAQ,KAAKH,CAAK,EAClB,CAACI,GAAuBD,EAAQ,KAAKN,EAAmB,IAAG,EAAKD,CAAW,GAE7E,KAAK,YAAW,EAChBH,EAAA,UAAM,KAAI,KAAA,KAACO,CAAK,CAClB,EAGUR,EAAA,UAAA,WAAV,SAAqBa,EAAyB,CAC5C,KAAK,eAAc,EACnB,KAAK,YAAW,EAQhB,QANMC,EAAe,KAAK,gBAAgBD,CAAU,EAE9CJ,EAAmC,KAAjCG,EAAmBH,EAAA,oBAAEE,EAAOF,EAAA,QAG9BM,EAAOJ,EAAQ,MAAK,EACjBK,EAAI,EAAGA,EAAID,EAAK,QAAU,CAACF,EAAW,OAAQG,GAAKJ,EAAsB,EAAI,EACpFC,EAAW,KAAKE,EAAKC,EAAO,EAG9B,YAAK,wBAAwBH,CAAU,EAEhCC,CACT,EAEQd,EAAA,UAAA,YAAR,UAAA,CACQ,IAAAS,EAAoE,KAAlEN,EAAWM,EAAA,YAAEJ,EAAkBI,EAAA,mBAAEE,EAAOF,EAAA,QAAEG,EAAmBH,EAAA,oBAK/DQ,GAAsBL,EAAsB,EAAI,GAAKT,EAK3D,GAJAA,EAAc,KAAYc,EAAqBN,EAAQ,QAAUA,EAAQ,OAAO,EAAGA,EAAQ,OAASM,CAAkB,EAIlH,CAACL,EAAqB,CAKxB,QAJMM,EAAMb,EAAmB,IAAG,EAC9Bc,EAAO,EAGFH,EAAI,EAAGA,EAAIL,EAAQ,QAAWA,EAAQK,IAAiBE,EAAKF,GAAK,EACxEG,EAAOH,EAETG,GAAQR,EAAQ,OAAO,EAAGQ,EAAO,CAAC,EAEtC,EACFnB,CAAA,EAzEsCoB,EAAO,EClB7C,IAAAC,GAAA,SAAAC,EAAA,CAA+BC,EAAAF,EAAAC,CAAA,EAC7B,SAAAD,EAAYG,EAAsBC,EAAmD,QACnFH,EAAA,KAAA,IAAA,GAAO,IACT,CAWO,OAAAD,EAAA,UAAA,SAAP,SAAgBK,EAAWC,EAAiB,CAAjB,OAAAA,IAAA,SAAAA,EAAA,GAClB,IACT,EACFN,CAAA,EAjB+BO,CAAY,ECHpC,IAAMC,EAAqC,CAGhD,YAAA,SAAYC,EAAqBC,EAAgB,SAAEC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,EAAA,GAAA,UAAAA,GACzC,IAAAC,EAAaL,EAAgB,SACrC,OAAIK,GAAQ,MAARA,EAAU,YACLA,EAAS,YAAW,MAApBA,EAAQC,EAAA,CAAaL,EAASC,CAAO,EAAAK,EAAKJ,CAAI,CAAA,CAAA,EAEhD,YAAW,MAAA,OAAAG,EAAA,CAACL,EAASC,CAAO,EAAAK,EAAKJ,CAAI,CAAA,CAAA,CAC9C,EACA,cAAA,SAAcK,EAAM,CACV,IAAAH,EAAaL,EAAgB,SACrC,QAAQK,GAAQ,KAAA,OAARA,EAAU,gBAAiB,eAAeG,CAAa,CACjE,EACA,SAAU,QCrBZ,IAAAC,GAAA,SAAAC,EAAA,CAAoCC,EAAAF,EAAAC,CAAA,EAOlC,SAAAD,EAAsBG,EAAqCC,EAAmD,CAA9G,IAAAC,EACEJ,EAAA,KAAA,KAAME,EAAWC,CAAI,GAAC,KADF,OAAAC,EAAA,UAAAF,EAAqCE,EAAA,KAAAD,EAFjDC,EAAA,QAAmB,IAI7B,CAEO,OAAAL,EAAA,UAAA,SAAP,SAAgBM,EAAWC,EAAiB,OAC1C,GADyBA,IAAA,SAAAA,EAAA,GACrB,KAAK,OACP,OAAO,KAIT,KAAK,MAAQD,EAEb,IAAME,EAAK,KAAK,GACVL,EAAY,KAAK,UAuBvB,OAAIK,GAAM,OACR,KAAK,GAAK,KAAK,eAAeL,EAAWK,EAAID,CAAK,GAKpD,KAAK,QAAU,GAEf,KAAK,MAAQA,EAEb,KAAK,IAAKE,EAAA,KAAK,MAAE,MAAAA,IAAA,OAAAA,EAAI,KAAK,eAAeN,EAAW,KAAK,GAAII,CAAK,EAE3D,IACT,EAEUP,EAAA,UAAA,eAAV,SAAyBG,EAA2BO,EAAmBH,EAAiB,CAAjB,OAAAA,IAAA,SAAAA,EAAA,GAC9DI,EAAiB,YAAYR,EAAU,MAAM,KAAKA,EAAW,IAAI,EAAGI,CAAK,CAClF,EAEUP,EAAA,UAAA,eAAV,SAAyBY,EAA4BJ,EAAkBD,EAAwB,CAE7F,GAFqEA,IAAA,SAAAA,EAAA,GAEjEA,GAAS,MAAQ,KAAK,QAAUA,GAAS,KAAK,UAAY,GAC5D,OAAOC,EAILA,GAAM,MACRG,EAAiB,cAAcH,CAAE,CAIrC,EAMOR,EAAA,UAAA,QAAP,SAAeM,EAAUC,EAAa,CACpC,GAAI,KAAK,OACP,OAAO,IAAI,MAAM,8BAA8B,EAGjD,KAAK,QAAU,GACf,IAAMM,EAAQ,KAAK,SAASP,EAAOC,CAAK,EACxC,GAAIM,EACF,OAAOA,EACE,KAAK,UAAY,IAAS,KAAK,IAAM,OAc9C,KAAK,GAAK,KAAK,eAAe,KAAK,UAAW,KAAK,GAAI,IAAI,EAE/D,EAEUb,EAAA,UAAA,SAAV,SAAmBM,EAAUQ,EAAc,CACzC,IAAIC,EAAmB,GACnBC,EACJ,GAAI,CACF,KAAK,KAAKV,CAAK,QACRW,EAAP,CACAF,EAAU,GAIVC,EAAaC,GAAQ,IAAI,MAAM,oCAAoC,EAErE,GAAIF,EACF,YAAK,YAAW,EACTC,CAEX,EAEAhB,EAAA,UAAA,YAAA,UAAA,CACE,GAAI,CAAC,KAAK,OAAQ,CACV,IAAAS,EAAoB,KAAlBD,EAAEC,EAAA,GAAEN,EAASM,EAAA,UACbS,EAAYf,EAAS,QAE7B,KAAK,KAAO,KAAK,MAAQ,KAAK,UAAY,KAC1C,KAAK,QAAU,GAEfgB,EAAUD,EAAS,IAAI,EACnBV,GAAM,OACR,KAAK,GAAK,KAAK,eAAeL,EAAWK,EAAI,IAAI,GAGnD,KAAK,MAAQ,KACbP,EAAA,UAAM,YAAW,KAAA,IAAA,EAErB,EACFD,CAAA,EA9IoCoB,EAAM,ECgB1C,IAAAC,GAAA,UAAA,CAGE,SAAAA,EAAoBC,EAAoCC,EAAiC,CAAjCA,IAAA,SAAAA,EAAoBF,EAAU,KAAlE,KAAA,oBAAAC,EAClB,KAAK,IAAMC,CACb,CA6BO,OAAAF,EAAA,UAAA,SAAP,SAAmBG,EAAqDC,EAAmBC,EAAS,CAA5B,OAAAD,IAAA,SAAAA,EAAA,GAC/D,IAAI,KAAK,oBAAuB,KAAMD,CAAI,EAAE,SAASE,EAAOD,CAAK,CAC1E,EAnCcJ,EAAA,IAAoBM,EAAsB,IAoC1DN,GArCA,ECnBA,IAAAO,GA
AA,SAAAC,EAAA,CAAoCC,EAAAF,EAAAC,CAAA,EAkBlC,SAAAD,EAAYG,EAAgCC,EAAiC,CAAjCA,IAAA,SAAAA,EAAoBC,GAAU,KAA1E,IAAAC,EACEL,EAAA,KAAA,KAAME,EAAiBC,CAAG,GAAC,KAlBtB,OAAAE,EAAA,QAAmC,CAAA,EAOnCA,EAAA,QAAmB,IAY1B,CAEO,OAAAN,EAAA,UAAA,MAAP,SAAaO,EAAwB,CAC3B,IAAAC,EAAY,KAAI,QAExB,GAAI,KAAK,QAAS,CAChBA,EAAQ,KAAKD,CAAM,EACnB,OAGF,IAAIE,EACJ,KAAK,QAAU,GAEf,EACE,IAAKA,EAAQF,EAAO,QAAQA,EAAO,MAAOA,EAAO,KAAK,EACpD,YAEMA,EAASC,EAAQ,MAAK,GAIhC,GAFA,KAAK,QAAU,GAEXC,EAAO,CACT,KAAQF,EAASC,EAAQ,MAAK,GAC5BD,EAAO,YAAW,EAEpB,MAAME,EAEV,EACFT,CAAA,EAhDoCK,EAAS,EC6CtC,IAAMK,EAAiB,IAAIC,GAAeC,EAAW,EAK/CC,GAAQH,ECUd,IAAMI,EAAQ,IAAIC,EAAkB,SAACC,EAAU,CAAK,OAAAA,EAAW,SAAQ,CAAnB,CAAqB,EC9D1E,SAAUC,GAAYC,EAAU,CACpC,OAAOA,GAASC,EAAWD,EAAM,QAAQ,CAC3C,CCDA,SAASE,GAAQC,EAAQ,CACvB,OAAOA,EAAIA,EAAI,OAAS,EAC1B,CAEM,SAAUC,GAAkBC,EAAW,CAC3C,OAAOC,EAAWJ,GAAKG,CAAI,CAAC,EAAIA,EAAK,IAAG,EAAK,MAC/C,CAEM,SAAUE,EAAaF,EAAW,CACtC,OAAOG,GAAYN,GAAKG,CAAI,CAAC,EAAIA,EAAK,IAAG,EAAK,MAChD,CAEM,SAAUI,GAAUJ,EAAaK,EAAoB,CACzD,OAAO,OAAOR,GAAKG,CAAI,GAAM,SAAWA,EAAK,IAAG,EAAMK,CACxD,CClBO,IAAMC,EAAe,SAAIC,EAAM,CAAwB,OAAAA,GAAK,OAAOA,EAAE,QAAW,UAAY,OAAOA,GAAM,UAAlD,ECMxD,SAAUC,GAAUC,EAAU,CAClC,OAAOC,EAAWD,GAAK,KAAA,OAALA,EAAO,IAAI,CAC/B,CCHM,SAAUE,GAAoBC,EAAU,CAC5C,OAAOC,EAAWD,EAAME,EAAkB,CAC5C,CCLM,SAAUC,GAAmBC,EAAQ,CACzC,OAAO,OAAO,eAAiBC,EAAWD,GAAG,KAAA,OAAHA,EAAM,OAAO,cAAc,CACvE,CCAM,SAAUE,GAAiCC,EAAU,CAEzD,OAAO,IAAI,UACT,iBACEA,IAAU,MAAQ,OAAOA,GAAU,SAAW,oBAAsB,IAAIA,EAAK,KAAG,0HACwC,CAE9H,CCXM,SAAUC,IAAiB,CAC/B,OAAI,OAAO,QAAW,YAAc,CAAC,OAAO,SACnC,aAGF,OAAO,QAChB,CAEO,IAAMC,GAAWD,GAAiB,ECJnC,SAAUE,GAAWC,EAAU,CACnC,OAAOC,EAAWD,GAAK,KAAA,OAALA,EAAQE,GAAgB,CAC5C,CCHM,SAAiBC,GAAsCC,EAAqC,mGAC1FC,EAASD,EAAe,UAAS,2DAGX,MAAA,CAAA,EAAAE,GAAMD,EAAO,KAAI,CAAE,CAAA,gBAArCE,EAAkBC,EAAA,KAAA,EAAhBC,EAAKF,EAAA,MAAEG,EAAIH,EAAA,KACfG,iBAAA,CAAA,EAAA,CAAA,SACF,MAAA,CAAA,EAAAF,EAAA,KAAA,CAAA,qBAEIC,CAAM,CAAA,SAAZ,MAAA,CAAA,EAAAD,EAAA,KAAA,CAAA,SAAA,OAAAA,EAAA,KAAA,mCAGF,OAAAH,EAAO,YAAW,6BAIhB,SAAUM,GAAwBC,EAAQ,CAG9C,OAAOC,EAAWD,GAAG,KAAA,OAAHA,EAAK,SAAS,CAClC,CCPM,SAAUE,EAAaC,EAAyB,CACpD,GAAIA,aAAiBC,EACnB,OAAOD,EAET,GAAIA,GAAS,KAAM,CACjB,GAAIE,GAAoBF,CAAK,EAC3B,OAAOG,GAAsBH,CAAK,EAEpC,GAAII,EAAYJ,CAAK,EACnB,OAAOK,GAAcL,CAAK,EAE5B,GAAIM,GAAUN,CAAK,EACjB,OAAOO,GAAYP,CAAK,EAE1B,GAAIQ,GAAgBR,CAAK,EACvB,OAAOS,GAAkBT,CAAK,EAEhC,GAAIU,GAAWV,CAAK,EAClB,OAAOW,GAAaX,CAAK,EAE3B,GAAIY,GAAqBZ,CAAK,EAC5B,OAAOa,GAAuBb,CAAK,EAIvC,MAAMc,GAAiCd,CAAK,CAC9C,CAMM,SAAUG,GAAyBY,EAAQ,CAC/C,OAAO,IAAId,EAAW,SAACe,EAAyB,CAC9C,IAAMC,EAAMF,EAAIG,GAAkB,EAClC,GAAIC,EAAWF,EAAI,SAAS,EAC1B,OAAOA,EAAI,UAAUD,CAAU,EAGjC,MAAM,IAAI,UAAU,gEAAgE,CACtF,CAAC,CACH,CASM,SAAUX,GAAiBe,EAAmB,CAClD,OAAO,IAAInB,EAAW,SAACe,EAAyB,CAU9C,QAASK,EAAI,EAAGA,EAAID,EAAM,QAAU,CAACJ,EAAW,OAAQK,IACtDL,EAAW,KAAKI,EAAMC,EAAE,EAE1BL,EAAW,SAAQ,CACrB,CAAC,CACH,CAEM,SAAUT,GAAee,EAAuB,CACpD,OAAO,IAAIrB,EAAW,SAACe,EAAyB,CAC9CM,EACG,KACC,SAACC,EAAK,CACCP,EAAW,SACdA,EAAW,KAAKO,CAAK,EACrBP,EAAW,SAAQ,EAEvB,EACA,SAACQ,EAAQ,CAAK,OAAAR,EAAW,MAAMQ,CAAG,CAApB,CAAqB,EAEpC,KAAK,KAAMC,EAAoB,CACpC,CAAC,CACH,CAEM,SAAUd,GAAgBe,EAAqB,CACnD,OAAO,IAAIzB,EAAW,SAACe,EAAyB,aAC9C,QAAoBW,EAAAC,EAAAF,CAAQ,EAAAG,EAAAF,EAAA,KAAA,EAAA,CAAAE,EAAA,KAAAA,EAAAF,EAAA,KAAA,EAAE,CAAzB,IAAMJ,EAAKM,EAAA,MAEd,GADAb,EAAW,KAAKO,CAAK,EACjBP,EAAW,OACb,yGAGJA,EAAW,SAAQ,CACrB,CAAC,CACH,CAEM,SAAUP,GAAqBqB,EAA+B,CAClE,OAAO,IAAI7B,EAAW,SAACe,EAAyB,CAC9Ce,GAAQD,EAAed,CAAU,EAAE,MAAM,SAACQ,EAAG,CAAK,OAAAR,EAAW,MAAMQ,CAAG,CAApB,CAAqB,CACzE,CAAC,CACH,CAEM,SAAUX,GAA0BmB,EAAqC,CAC7E,OAAOvB,GAAkBwB,GAAmCD,CAAc,CAAC,CAC7E,CAEA,SAAeD,GAAWD,EAAiCd,EAAyB,uIACxDkB,EAAAC,GAAAL,CAAa,gFAIrC,GAJe
P,EAAKa,EAAA,MACpBpB,EAAW,KAAKO,CAAK,EAGjBP,EAAW,OACb,MAAA,CAAA,CAAA,6RAGJ,OAAAA,EAAW,SAAQ,WChHf,SAAUqB,EACdC,EACAC,EACAC,EACAC,EACAC,EAAc,CADdD,IAAA,SAAAA,EAAA,GACAC,IAAA,SAAAA,EAAA,IAEA,IAAMC,EAAuBJ,EAAU,SAAS,UAAA,CAC9CC,EAAI,EACAE,EACFJ,EAAmB,IAAI,KAAK,SAAS,KAAMG,CAAK,CAAC,EAEjD,KAAK,YAAW,CAEpB,EAAGA,CAAK,EAIR,GAFAH,EAAmB,IAAIK,CAAoB,EAEvC,CAACD,EAKH,OAAOC,CAEX,CCeM,SAAUC,GAAaC,EAA0BC,EAAS,CAAT,OAAAA,IAAA,SAAAA,EAAA,GAC9CC,EAAQ,SAACC,EAAQC,EAAU,CAChCD,EAAO,UACLE,EACED,EACA,SAACE,EAAK,CAAK,OAAAC,EAAgBH,EAAYJ,EAAW,UAAA,CAAM,OAAAI,EAAW,KAAKE,CAAK,CAArB,EAAwBL,CAAK,CAA1E,EACX,UAAA,CAAM,OAAAM,EAAgBH,EAAYJ,EAAW,UAAA,CAAM,OAAAI,EAAW,SAAQ,CAAnB,EAAuBH,CAAK,CAAzE,EACN,SAACO,EAAG,CAAK,OAAAD,EAAgBH,EAAYJ,EAAW,UAAA,CAAM,OAAAI,EAAW,MAAMI,CAAG,CAApB,EAAuBP,CAAK,CAAzE,CAA0E,CACpF,CAEL,CAAC,CACH,CCPM,SAAUQ,GAAeC,EAA0BC,EAAiB,CAAjB,OAAAA,IAAA,SAAAA,EAAA,GAChDC,EAAQ,SAACC,EAAQC,EAAU,CAChCA,EAAW,IAAIJ,EAAU,SAAS,UAAA,CAAM,OAAAG,EAAO,UAAUC,CAAU,CAA3B,EAA8BH,CAAK,CAAC,CAC9E,CAAC,CACH,CC7DM,SAAUI,GAAsBC,EAA6BC,EAAwB,CACzF,OAAOC,EAAUF,CAAK,EAAE,KAAKG,GAAYF,CAAS,EAAGG,GAAUH,CAAS,CAAC,CAC3E,CCFM,SAAUI,GAAmBC,EAAuBC,EAAwB,CAChF,OAAOC,EAAUF,CAAK,EAAE,KAAKG,GAAYF,CAAS,EAAGG,GAAUH,CAAS,CAAC,CAC3E,CCJM,SAAUI,GAAiBC,EAAqBC,EAAwB,CAC5E,OAAO,IAAIC,EAAc,SAACC,EAAU,CAElC,IAAIC,EAAI,EAER,OAAOH,EAAU,SAAS,UAAA,CACpBG,IAAMJ,EAAM,OAGdG,EAAW,SAAQ,GAInBA,EAAW,KAAKH,EAAMI,IAAI,EAIrBD,EAAW,QACd,KAAK,SAAQ,EAGnB,CAAC,CACH,CAAC,CACH,CCfM,SAAUE,GAAoBC,EAAoBC,EAAwB,CAC9E,OAAO,IAAIC,EAAc,SAACC,EAAU,CAClC,IAAIC,EAKJ,OAAAC,EAAgBF,EAAYF,EAAW,UAAA,CAErCG,EAAYJ,EAAcI,IAAgB,EAE1CC,EACEF,EACAF,EACA,UAAA,OACMK,EACAC,EACJ,GAAI,CAEDC,EAAkBJ,EAAS,KAAI,EAA7BE,EAAKE,EAAA,MAAED,EAAIC,EAAA,WACPC,EAAP,CAEAN,EAAW,MAAMM,CAAG,EACpB,OAGEF,EAKFJ,EAAW,SAAQ,EAGnBA,EAAW,KAAKG,CAAK,CAEzB,EACA,EACA,EAAI,CAER,CAAC,EAMM,UAAA,CAAM,OAAAI,EAAWN,GAAQ,KAAA,OAARA,EAAU,MAAM,GAAKA,EAAS,OAAM,CAA/C,CACf,CAAC,CACH,CCvDM,SAAUO,GAAyBC,EAAyBC,EAAwB,CACxF,GAAI,CAACD,EACH,MAAM,IAAI,MAAM,yBAAyB,EAE3C,OAAO,IAAIE,EAAc,SAACC,EAAU,CAClCC,EAAgBD,EAAYF,EAAW,UAAA,CACrC,IAAMI,EAAWL,EAAM,OAAO,eAAc,EAC5CI,EACED,EACAF,EACA,UAAA,CACEI,EAAS,KAAI,EAAG,KAAK,SAACC,EAAM,CACtBA,EAAO,KAGTH,EAAW,SAAQ,EAEnBA,EAAW,KAAKG,EAAO,KAAK,CAEhC,CAAC,CACH,EACA,EACA,EAAI,CAER,CAAC,CACH,CAAC,CACH,CCzBM,SAAUC,GAA8BC,EAA8BC,EAAwB,CAClG,OAAOC,GAAsBC,GAAmCH,CAAK,EAAGC,CAAS,CACnF,CCoBM,SAAUG,GAAaC,EAA2BC,EAAwB,CAC9E,GAAID,GAAS,KAAM,CACjB,GAAIE,GAAoBF,CAAK,EAC3B,OAAOG,GAAmBH,EAAOC,CAAS,EAE5C,GAAIG,EAAYJ,CAAK,EACnB,OAAOK,GAAcL,EAAOC,CAAS,EAEvC,GAAIK,GAAUN,CAAK,EACjB,OAAOO,GAAgBP,EAAOC,CAAS,EAEzC,GAAIO,GAAgBR,CAAK,EACvB,OAAOS,GAAsBT,EAAOC,CAAS,EAE/C,GAAIS,GAAWV,CAAK,EAClB,OAAOW,GAAiBX,EAAOC,CAAS,EAE1C,GAAIW,GAAqBZ,CAAK,EAC5B,OAAOa,GAA2Bb,EAAOC,CAAS,EAGtD,MAAMa,GAAiCd,CAAK,CAC9C,CCoDM,SAAUe,EAAQC,EAA2BC,EAAyB,CAC1E,OAAOA,EAAYC,GAAUF,EAAOC,CAAS,EAAIE,EAAUH,CAAK,CAClE,CCxBM,SAAUI,IAAE,SAAIC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACpB,IAAMC,EAAYC,EAAaH,CAAI,EACnC,OAAOI,EAAKJ,EAAaE,CAAS,CACpC,CC3EM,SAAUG,GAAYC,EAAU,CACpC,OAAOA,aAAiB,MAAQ,CAAC,MAAMA,CAAY,CACrD,CCsCM,SAAUC,EAAUC,EAAyCC,EAAa,CAC9E,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAEhC,IAAIC,EAAQ,EAGZF,EAAO,UACLG,EAAyBF,EAAY,SAACG,EAAQ,CAG5CH,EAAW,KAAKJ,EAAQ,KAAKC,EAASM,EAAOF,GAAO,CAAC,CACvD,CAAC,CAAC,CAEN,CAAC,CACH,CC1DQ,IAAAG,GAAY,MAAK,QAEzB,SAASC,GAAkBC,EAA6BC,EAAW,CAC/D,OAAOH,GAAQG,CAAI,EAAID,EAAE,MAAA,OAAAE,EAAA,CAAA,EAAAC,EAAIF,CAAI,CAAA,CAAA,EAAID,EAAGC,CAAI,CAChD,CAMM,SAAUG,GAAuBJ,EAA2B,CAC9D,OAAOK,EAAI,SAAAJ,EAAI,CAAI,OAAAF,GAAYC,EAAIC,CAAI,CAApB,CAAqB,CAC5C,CCKM,SAAUK,GACdC,EACAC,EACAC,EACAC,EACAC,EACAC,EACAC,EACAC,EAAgC,CAGhC,IAAMC,EAAc,CAAA,EAEhB
C,EAAS,EAETC,EAAQ,EAERC,EAAa,GAKXC,EAAgB,UAAA,CAIhBD,GAAc,CAACH,EAAO,QAAU,CAACC,GACnCR,EAAW,SAAQ,CAEvB,EAGMY,EAAY,SAACC,EAAQ,CAAK,OAACL,EAASN,EAAaY,EAAWD,CAAK,EAAIN,EAAO,KAAKM,CAAK,CAA5D,EAE1BC,EAAa,SAACD,EAAQ,CAI1BT,GAAUJ,EAAW,KAAKa,CAAY,EAItCL,IAKA,IAAIO,EAAgB,GAGpBC,EAAUf,EAAQY,EAAOJ,GAAO,CAAC,EAAE,UACjCQ,EACEjB,EACA,SAACkB,EAAU,CAGTf,GAAY,MAAZA,EAAee,CAAU,EAErBd,EAGFQ,EAAUM,CAAiB,EAG3BlB,EAAW,KAAKkB,CAAU,CAE9B,EACA,UAAA,CAGEH,EAAgB,EAClB,EAEA,OACA,UAAA,CAIE,GAAIA,EAKF,GAAI,CAIFP,IAKA,qBACE,IAAMW,EAAgBZ,EAAO,MAAK,EAI9BF,EACFe,EAAgBpB,EAAYK,EAAmB,UAAA,CAAM,OAAAS,EAAWK,CAAa,CAAxB,CAAyB,EAE9EL,EAAWK,CAAa,GARrBZ,EAAO,QAAUC,EAASN,OAYjCS,EAAa,QACNU,EAAP,CACArB,EAAW,MAAMqB,CAAG,EAG1B,CAAC,CACF,CAEL,EAGA,OAAAtB,EAAO,UACLkB,EAAyBjB,EAAYY,EAAW,UAAA,CAE9CF,EAAa,GACbC,EAAa,CACf,CAAC,CAAC,EAKG,UAAA,CACLL,GAAmB,MAAnBA,EAAmB,CACrB,CACF,CClEM,SAAUgB,EACdC,EACAC,EACAC,EAA6B,CAE7B,OAFAA,IAAA,SAAAA,EAAA,KAEIC,EAAWF,CAAc,EAEpBF,EAAS,SAACK,EAAGC,EAAC,CAAK,OAAAC,EAAI,SAACC,EAAQC,EAAU,CAAK,OAAAP,EAAeG,EAAGG,EAAGF,EAAGG,CAAE,CAA1B,CAA2B,EAAEC,EAAUT,EAAQI,EAAGC,CAAC,CAAC,CAAC,CAAjF,EAAoFH,CAAU,GAC/G,OAAOD,GAAmB,WACnCC,EAAaD,GAGRS,EAAQ,SAACC,EAAQC,EAAU,CAAK,OAAAC,GAAeF,EAAQC,EAAYZ,EAASE,CAAU,CAAtD,CAAuD,EAChG,CChCM,SAAUY,GAAyCC,EAA6B,CAA7B,OAAAA,IAAA,SAAAA,EAAA,KAChDC,EAASC,EAAUF,CAAU,CACtC,CCNM,SAAUG,IAAS,CACvB,OAAOC,GAAS,CAAC,CACnB,CCmDM,SAAUC,IAAM,SAACC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACrB,OAAOC,GAAS,EAAGC,EAAKH,EAAMI,EAAaJ,CAAI,CAAC,CAAC,CACnD,CC1GA,IAAMK,GAA0B,CAAC,cAAe,gBAAgB,EAC1DC,GAAqB,CAAC,mBAAoB,qBAAqB,EAC/DC,GAAgB,CAAC,KAAM,KAAK,EA8N5B,SAAUC,EACdC,EACAC,EACAC,EACAC,EAAsC,CAMtC,GAJIC,EAAWF,CAAO,IACpBC,EAAiBD,EACjBA,EAAU,QAERC,EACF,OAAOJ,EAAaC,EAAQC,EAAWC,CAA+B,EAAE,KAAKG,GAAiBF,CAAc,CAAC,EAUzG,IAAAG,EAAAC,EAEJC,GAAcR,CAAM,EAChBH,GAAmB,IAAI,SAACY,EAAU,CAAK,OAAA,SAACC,EAAY,CAAK,OAAAV,EAAOS,GAAYR,EAAWS,EAASR,CAA+B,CAAtE,CAAlB,CAAyF,EAElIS,GAAwBX,CAAM,EAC5BJ,GAAwB,IAAIgB,GAAwBZ,EAAQC,CAAS,CAAC,EACtEY,GAA0Bb,CAAM,EAChCF,GAAc,IAAIc,GAAwBZ,EAAQC,CAAS,CAAC,EAC5D,CAAA,EAAE,CAAA,EATDa,EAAGR,EAAA,GAAES,EAAMT,EAAA,GAgBlB,GAAI,CAACQ,GACCE,EAAYhB,CAAM,EACpB,OAAOiB,EAAS,SAACC,EAAc,CAAK,OAAAnB,EAAUmB,EAAWjB,EAAWC,CAA+B,CAA/D,CAAgE,EAClGiB,EAAUnB,CAAM,CAAC,EAOvB,GAAI,CAACc,EACH,MAAM,IAAI,UAAU,sBAAsB,EAG5C,OAAO,IAAIM,EAAc,SAACC,EAAU,CAIlC,IAAMX,EAAU,UAAA,SAACY,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GAAmB,OAAAF,EAAW,KAAK,EAAIC,EAAK,OAASA,EAAOA,EAAK,EAAE,CAAhD,EAEpC,OAAAR,EAAIJ,CAAO,EAEJ,UAAA,CAAM,OAAAK,EAAQL,CAAO,CAAf,CACf,CAAC,CACH,CASA,SAASE,GAAwBZ,EAAaC,EAAiB,CAC7D,OAAO,SAACQ,EAAkB,CAAK,OAAA,SAACC,EAAY,CAAK,OAAAV,EAAOS,GAAYR,EAAWS,CAAO,CAArC,CAAlB,CACjC,CAOA,SAASC,GAAwBX,EAAW,CAC1C,OAAOI,EAAWJ,EAAO,WAAW,GAAKI,EAAWJ,EAAO,cAAc,CAC3E,CAOA,SAASa,GAA0Bb,EAAW,CAC5C,OAAOI,EAAWJ,EAAO,EAAE,GAAKI,EAAWJ,EAAO,GAAG,CACvD,CAOA,SAASQ,GAAcR,EAAW,CAChC,OAAOI,EAAWJ,EAAO,gBAAgB,GAAKI,EAAWJ,EAAO,mBAAmB,CACrF,CCvMM,SAAUwB,EACdC,EACAC,EACAC,EAAyC,CAFzCF,IAAA,SAAAA,EAAA,GAEAE,IAAA,SAAAA,EAAAC,IAIA,IAAIC,EAAmB,GAEvB,OAAIH,GAAuB,OAIrBI,GAAYJ,CAAmB,EACjCC,EAAYD,EAIZG,EAAmBH,GAIhB,IAAIK,EAAW,SAACC,EAAU,CAI/B,IAAIC,EAAMC,GAAYT,CAAO,EAAI,CAACA,EAAUE,EAAW,IAAG,EAAKF,EAE3DQ,EAAM,IAERA,EAAM,GAIR,IAAIE,EAAI,EAGR,OAAOR,EAAU,SAAS,UAAA,CACnBK,EAAW,SAEdA,EAAW,KAAKG,GAAG,EAEf,GAAKN,EAGP,KAAK,SAAS,OAAWA,CAAgB,EAGzCG,EAAW,SAAQ,EAGzB,EAAGC,CAAG,CACR,CAAC,CACH,CCvIM,SAAUG,GAASC,EAAYC,EAAyC,CAArD,OAAAD,IAAA,SAAAA,EAAA,GAAYC,IAAA,SAAAA,EAAAC,GAC/BF,EAAS,IAEXA,EAAS,GAGJG,EAAMH,EAAQA,EAAQC,CAAS,CACxC,CCgCM,SAAUG,IAAK,SAACC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACpB,IAAMC,EA
AYC,EAAaH,CAAI,EAC7BI,EAAaC,GAAUL,EAAM,GAAQ,EACrCM,EAAUN,EAChB,OAAQM,EAAQ,OAGZA,EAAQ,SAAW,EAEnBC,EAAUD,EAAQ,EAAE,EAEpBE,GAASJ,CAAU,EAAEK,EAAKH,EAASJ,CAAS,CAAC,EAL7CQ,CAMN,CCjEO,IAAMC,GAAQ,IAAIC,EAAkBC,CAAI,ECwBzC,SAAUC,EAAUC,EAAiDC,EAAa,CACtF,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAEhC,IAAIC,EAAQ,EAIZF,EAAO,UAILG,EAAyBF,EAAY,SAACG,EAAK,CAAK,OAAAP,EAAU,KAAKC,EAASM,EAAOF,GAAO,GAAKD,EAAW,KAAKG,CAAK,CAAhE,CAAiE,CAAC,CAEtH,CAAC,CACH,CC3BM,SAAUC,EAAQC,EAAa,CACnC,OAAOA,GAAS,EAEZ,UAAA,CAAM,OAAAC,CAAA,EACNC,EAAQ,SAACC,EAAQC,EAAU,CACzB,IAAIC,EAAO,EACXF,EAAO,UACLG,EAAyBF,EAAY,SAACG,EAAK,CAIrC,EAAEF,GAAQL,IACZI,EAAW,KAAKG,CAAK,EAIjBP,GAASK,GACXD,EAAW,SAAQ,EAGzB,CAAC,CAAC,CAEN,CAAC,CACP,CC9BM,SAAUI,IAAc,CAC5B,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAChCD,EAAO,UAAUE,EAAyBD,EAAYE,CAAI,CAAC,CAC7D,CAAC,CACH,CCCM,SAAUC,GAASC,EAAQ,CAC/B,OAAOC,EAAI,UAAA,CAAM,OAAAD,CAAA,CAAK,CACxB,CCyCM,SAAUE,GACdC,EACAC,EAAmC,CAEnC,OAAIA,EAEK,SAACC,EAAqB,CAC3B,OAAAC,GAAOF,EAAkB,KAAKG,EAAK,CAAC,EAAGC,GAAc,CAAE,EAAGH,EAAO,KAAKH,GAAUC,CAAqB,CAAC,CAAC,CAAvG,EAGGM,EAAS,SAACC,EAAOC,EAAK,CAAK,OAAAR,EAAsBO,EAAOC,CAAK,EAAE,KAAKJ,EAAK,CAAC,EAAGK,GAAMF,CAAK,CAAC,CAA9D,CAA+D,CACnG,CCtCM,SAAUG,GAASC,EAAoBC,EAAyC,CAAzCA,IAAA,SAAAA,EAAAC,GAC3C,IAAMC,EAAWC,EAAMJ,EAAKC,CAAS,EACrC,OAAOI,GAAU,UAAA,CAAM,OAAAF,CAAA,CAAQ,CACjC,CC0EM,SAAUG,GACdC,EACAC,EAA0D,CAA1D,OAAAA,IAAA,SAAAA,EAA+BC,GAK/BF,EAAaA,GAAU,KAAVA,EAAcG,GAEpBC,EAAQ,SAACC,EAAQC,EAAU,CAGhC,IAAIC,EAEAC,EAAQ,GAEZH,EAAO,UACLI,EAAyBH,EAAY,SAACI,EAAK,CAEzC,IAAMC,EAAaV,EAAYS,CAAK,GAKhCF,GAAS,CAACR,EAAYO,EAAaI,CAAU,KAM/CH,EAAQ,GACRD,EAAcI,EAGdL,EAAW,KAAKI,CAAK,EAEzB,CAAC,CAAC,CAEN,CAAC,CACH,CAEA,SAASP,GAAeS,EAAQC,EAAM,CACpC,OAAOD,IAAMC,CACf,CCrHM,SAAUC,GAAYC,EAAoB,CAC9C,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAGhC,GAAI,CACFD,EAAO,UAAUC,CAAU,UAE3BA,EAAW,IAAIH,CAAQ,EAE3B,CAAC,CACH,CCyCM,SAAUI,GAAUC,EAAqC,OACzDC,EAAQ,IACRC,EAEJ,OAAIF,GAAiB,OACf,OAAOA,GAAkB,UACxBG,EAA4BH,EAAa,MAAzCC,EAAKE,IAAA,OAAG,IAAQA,EAAED,EAAUF,EAAa,OAE5CC,EAAQD,GAILC,GAAS,EACZ,UAAA,CAAM,OAAAG,CAAA,EACNC,EAAQ,SAACC,EAAQC,EAAU,CACzB,IAAIC,EAAQ,EACRC,EAEEC,EAAc,UAAA,CAGlB,GAFAD,GAAS,MAATA,EAAW,YAAW,EACtBA,EAAY,KACRP,GAAS,KAAM,CACjB,IAAMS,EAAW,OAAOT,GAAU,SAAWU,EAAMV,CAAK,EAAIW,EAAUX,EAAMM,CAAK,CAAC,EAC5EM,EAAqBC,EAAyBR,EAAY,UAAA,CAC9DO,EAAmB,YAAW,EAC9BE,EAAiB,CACnB,CAAC,EACDL,EAAS,UAAUG,CAAkB,OAErCE,EAAiB,CAErB,EAEMA,EAAoB,UAAA,CACxB,IAAIC,EAAY,GAChBR,EAAYH,EAAO,UACjBS,EAAyBR,EAAY,OAAW,UAAA,CAC1C,EAAEC,EAAQP,EACRQ,EACFC,EAAW,EAEXO,EAAY,GAGdV,EAAW,SAAQ,CAEvB,CAAC,CAAC,EAGAU,GACFP,EAAW,CAEf,EAEAM,EAAiB,CACnB,CAAC,CACP,CCtFM,SAAUE,GACdC,EACAC,EAA6G,CAE7G,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAChC,IAAIC,EAAyD,KACzDC,EAAQ,EAERC,EAAa,GAIXC,EAAgB,UAAA,CAAM,OAAAD,GAAc,CAACF,GAAmBD,EAAW,SAAQ,CAArD,EAE5BD,EAAO,UACLM,EACEL,EACA,SAACM,EAAK,CAEJL,GAAe,MAAfA,EAAiB,YAAW,EAC5B,IAAIM,EAAa,EACXC,EAAaN,IAEnBO,EAAUb,EAAQU,EAAOE,CAAU,CAAC,EAAE,UACnCP,EAAkBI,EACjBL,EAIA,SAACU,EAAU,CAAK,OAAAV,EAAW,KAAKH,EAAiBA,EAAeS,EAAOI,EAAYF,EAAYD,GAAY,EAAIG,CAAU,CAAzG,EAChB,UAAA,CAIET,EAAkB,KAClBG,EAAa,CACf,CAAC,CACD,CAEN,EACA,UAAA,CACED,EAAa,GACbC,EAAa,CACf,CAAC,CACF,CAEL,CAAC,CACH,CCvFM,SAAUO,GAAaC,EAA8B,CACzD,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAChCC,EAAUJ,CAAQ,EAAE,UAAUK,EAAyBF,EAAY,UAAA,CAAM,OAAAA,EAAW,SAAQ,CAAnB,EAAuBG,CAAI,CAAC,EACrG,CAACH,EAAW,QAAUD,EAAO,UAAUC,CAAU,CACnD,CAAC,CACH,CCwDM,SAAUI,GACdC,EACAC,EACAC,EAA8B,CAK9B,IAAMC,EACJC,EAAWJ,CAAc,GAAKC,GAASC,EAElC,CAAE,KAAMF,EAA2E,MAAKC,EAAE,SAAQC,CAAA,EACnGF,EAEN,OAAOG,EACHE,EAAQ,SAACC,EAAQC,EAAU,QACzBC,EAAAL,EAAY,aAAS,MAAAK,IAAA,QAAAA,EAAA,KAArBL,CAAW,EACX,IAAIM,EAAU,GACdH,EAAO,UACLI,EACEH,EACA,SAACI,EAAK,QACJH,EAAAL,EAAY,QAAI,MAAAK,IAAA,QAAAA,EAAA,KAAhBL
,EAAmBQ,CAAK,EACxBJ,EAAW,KAAKI,CAAK,CACvB,EACA,UAAA,OACEF,EAAU,IACVD,EAAAL,EAAY,YAAQ,MAAAK,IAAA,QAAAA,EAAA,KAApBL,CAAW,EACXI,EAAW,SAAQ,CACrB,EACA,SAACK,EAAG,OACFH,EAAU,IACVD,EAAAL,EAAY,SAAK,MAAAK,IAAA,QAAAA,EAAA,KAAjBL,EAAoBS,CAAG,EACvBL,EAAW,MAAMK,CAAG,CACtB,EACA,UAAA,SACMH,KACFD,EAAAL,EAAY,eAAW,MAAAK,IAAA,QAAAA,EAAA,KAAvBL,CAAW,IAEbU,EAAAV,EAAY,YAAQ,MAAAU,IAAA,QAAAA,EAAA,KAApBV,CAAW,CACb,CAAC,CACF,CAEL,CAAC,EAIDW,CACN,CCjGM,SAAUC,IAAc,SAAOC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACnC,IAAMC,EAAUC,GAAkBH,CAAM,EAExC,OAAOI,EAAQ,SAACC,EAAQC,EAAU,CAehC,QAdMC,EAAMP,EAAO,OACbQ,EAAc,IAAI,MAAMD,CAAG,EAI7BE,EAAWT,EAAO,IAAI,UAAA,CAAM,MAAA,EAAA,CAAK,EAGjCU,EAAQ,cAMHC,EAAC,CACRC,EAAUZ,EAAOW,EAAE,EAAE,UACnBE,EACEP,EACA,SAACQ,EAAK,CACJN,EAAYG,GAAKG,EACb,CAACJ,GAAS,CAACD,EAASE,KAEtBF,EAASE,GAAK,IAKbD,EAAQD,EAAS,MAAMM,CAAQ,KAAON,EAAW,MAEtD,EAGAO,CAAI,CACL,GAnBIL,EAAI,EAAGA,EAAIJ,EAAKI,MAAhBA,CAAC,EAwBVN,EAAO,UACLQ,EAAyBP,EAAY,SAACQ,EAAK,CACzC,GAAIJ,EAAO,CAET,IAAMO,EAAMC,EAAA,CAAIJ,CAAK,EAAAK,EAAKX,CAAW,CAAA,EACrCF,EAAW,KAAKJ,EAAUA,EAAO,MAAA,OAAAgB,EAAA,CAAA,EAAAC,EAAIF,CAAM,CAAA,CAAA,EAAIA,CAAM,EAEzD,CAAC,CAAC,CAEN,CAAC,CACH,CC9DA,IAAMG,GAAY,SAAS,cAAc,KAAK,EAC9C,SAAS,KAAK,YAAYA,EAAS,EAGnC,IAAMC,GAAS,SAAS,cAAc,oBAAoB,EAC1D,GAAIA,GAAQ,CACV,IAAMC,EAAS,SAAS,cAAc,QAAQ,EAC9CA,EAAO,UAAY,yEACfD,GAAO,eACTA,GAAO,cAAc,aAAaC,EAAQD,EAAM,EAGlD,IAAME,EAAM,IAAIC,GAAuB,CAAC,EACxCD,EACG,KACCE,GAAqB,CACvB,EACG,UAAUC,GAAM,CACf,eAAe,QAAQ,uCAAU,GAAGA,GAAI,EACxCJ,EAAO,OAAS,CAACI,CACnB,CAAC,EAGLH,EAAI,KAAK,KAAK,MAAM,eAAe,QAAQ,sCAAQ,GAAK,MAAM,CAAC,EAC/DI,EAAUL,EAAQ,OAAO,EACtB,KACCM,GAAeL,CAAG,CACpB,EACG,UAAU,CAAC,CAAC,CAAEG,CAAE,IAAMH,EAAI,KAAK,CAACG,CAAE,CAAC,EAGxCG,GAAS,GAAG,EACT,KACCC,GAAUP,EAAI,KAAKQ,EAAOL,GAAM,CAACA,CAAE,CAAC,CAAC,EACrCM,EAAK,EAAE,EACPC,GAAO,CAAE,MAAO,IAAMV,EAAI,KAAKQ,EAAOL,GAAMA,CAAE,CAAC,CAAE,CAAC,EAClDQ,EAAS,IAAM,CACb,IAAMC,EAAW,SAAS,cAAc,KAAK,EAC7C,OAAAA,EAAS,UAAY,uCACrBA,EAAS,WAAa,OACtBf,GAAU,YAAYe,CAAQ,EACvBC,GAAMC,GAAOC,GAAGH,CAAQ,CAAC,EAC7B,KACCI,GAAS,IAAMJ,EAAS,OAAO,CAAC,EAChCL,GAAUP,EAAI,KAAKQ,EAAOL,GAAM,CAACA,CAAE,CAAC,CAAC,EACrCc,GAAUC,GAAMd,EAAUc,EAAI,OAAO,EAClC,KACCC,GAAI,IAAMD,EAAG,UAAU,IAAI,4EAAgB,CAAC,EAC5CE,GAAM,GAAI,EACVD,GAAI,IAAMD,EAAG,UAAU,OAAO,4EAAgB,CAAC,CACjD,CACF,CACF,CACJ,CAAC,CACH,EACG,UAAU,CACjB", + "names": ["require_tslib", "__commonJSMin", "exports", "module", "__extends", "__assign", "__rest", "__decorate", "__param", "__metadata", "__awaiter", "__generator", "__exportStar", "__values", "__read", "__spread", "__spreadArrays", "__spreadArray", "__await", "__asyncGenerator", "__asyncDelegator", "__asyncValues", "__makeTemplateObject", "__importStar", "__importDefault", "__classPrivateFieldGet", "__classPrivateFieldSet", "__createBinding", "factory", "root", "createExporter", "previous", "id", "v", "exporter", "extendStatics", "d", "b", "p", "__", "t", "s", "n", "e", "i", "decorators", "target", "key", "desc", "c", "r", "paramIndex", "decorator", "metadataKey", "metadataValue", "thisArg", "_arguments", "P", "generator", "adopt", "value", "resolve", "reject", "fulfilled", "step", "rejected", "result", "body", "_", "y", "g", "verb", "op", "m", "o", "k", "k2", "ar", "error", "il", "j", "jl", "to", "from", "pack", "l", "q", "a", "resume", "settle", "fulfill", "f", "cooked", "raw", "__setModuleDefault", "mod", "receiver", "state", "kind", "import_tslib", "__extends", "__assign", "__rest", "__decorate", "__param", "__metadata", "__awaiter", "__generator", "__exportStar", "__createBinding", "__values", "__read", "__spread", "__spreadArrays", "__spreadArray", 
"__await", "__asyncGenerator", "__asyncDelegator", "__asyncValues", "__makeTemplateObject", "__importStar", "__importDefault", "__classPrivateFieldGet", "__classPrivateFieldSet", "tslib", "isFunction", "value", "createErrorClass", "createImpl", "_super", "instance", "ctorFunc", "UnsubscriptionError", "createErrorClass", "_super", "errors", "err", "i", "arrRemove", "arr", "item", "index", "Subscription", "initialTeardown", "errors", "_parentage", "_parentage_1", "__values", "_parentage_1_1", "parent_1", "initialFinalizer", "isFunction", "e", "UnsubscriptionError", "_finalizers", "_finalizers_1", "_finalizers_1_1", "finalizer", "execFinalizer", "err", "__spreadArray", "__read", "teardown", "_a", "parent", "arrRemove", "empty", "EMPTY_SUBSCRIPTION", "Subscription", "isSubscription", "value", "isFunction", "execFinalizer", "finalizer", "config", "timeoutProvider", "handler", "timeout", "args", "_i", "delegate", "__spreadArray", "__read", "handle", "reportUnhandledError", "err", "timeoutProvider", "onUnhandledError", "config", "noop", "COMPLETE_NOTIFICATION", "createNotification", "errorNotification", "error", "nextNotification", "value", "kind", "context", "errorContext", "cb", "config", "isRoot", "_a", "errorThrown", "error", "captureError", "err", "Subscriber", "_super", "__extends", "destination", "_this", "isSubscription", "EMPTY_OBSERVER", "next", "error", "complete", "SafeSubscriber", "value", "handleStoppedNotification", "nextNotification", "err", "errorNotification", "COMPLETE_NOTIFICATION", "Subscription", "_bind", "bind", "fn", "thisArg", "ConsumerObserver", "partialObserver", "value", "error", "handleUnhandledError", "err", "SafeSubscriber", "_super", "__extends", "observerOrNext", "complete", "_this", "isFunction", "context_1", "config", "Subscriber", "handleUnhandledError", "error", "config", "captureError", "reportUnhandledError", "defaultErrorHandler", "err", "handleStoppedNotification", "notification", "subscriber", "onStoppedNotification", "timeoutProvider", "EMPTY_OBSERVER", "noop", "observable", "identity", "x", "pipeFromArray", "fns", "identity", "input", "prev", "fn", "Observable", "subscribe", "operator", "observable", "observerOrNext", "error", "complete", "_this", "subscriber", "isSubscriber", "SafeSubscriber", "errorContext", "_a", "source", "sink", "err", "next", "promiseCtor", "getPromiseCtor", "resolve", "reject", "value", "operations", "_i", "pipeFromArray", "x", "getPromiseCtor", "promiseCtor", "_a", "config", "isObserver", "value", "isFunction", "isSubscriber", "Subscriber", "isSubscription", "hasLift", "source", "isFunction", "operate", "init", "liftedSource", "err", "createOperatorSubscriber", "destination", "onNext", "onComplete", "onError", "onFinalize", "OperatorSubscriber", "_super", "__extends", "shouldUnsubscribe", "_this", "value", "err", "closed_1", "_a", "Subscriber", "ObjectUnsubscribedError", "createErrorClass", "_super", "Subject", "_super", "__extends", "_this", "operator", "subject", "AnonymousSubject", "ObjectUnsubscribedError", "value", "errorContext", "_b", "__values", "_c", "observer", "err", "observers", "_a", "subscriber", "hasError", "isStopped", "EMPTY_SUBSCRIPTION", "Subscription", "arrRemove", "thrownError", "observable", "Observable", "destination", "source", "AnonymousSubject", "_super", "__extends", "destination", "source", "_this", "value", "_b", "_a", "err", "subscriber", "EMPTY_SUBSCRIPTION", "Subject", "dateTimestampProvider", "ReplaySubject", "_super", "__extends", "_bufferSize", "_windowTime", "_timestampProvider", 
"dateTimestampProvider", "_this", "value", "_a", "isStopped", "_buffer", "_infiniteTimeWindow", "subscriber", "subscription", "copy", "i", "adjustedBufferSize", "now", "last", "Subject", "Action", "_super", "__extends", "scheduler", "work", "state", "delay", "Subscription", "intervalProvider", "handler", "timeout", "args", "_i", "delegate", "__spreadArray", "__read", "handle", "AsyncAction", "_super", "__extends", "scheduler", "work", "_this", "state", "delay", "id", "_a", "_id", "intervalProvider", "_scheduler", "error", "_delay", "errored", "errorValue", "e", "actions", "arrRemove", "Action", "Scheduler", "schedulerActionCtor", "now", "work", "delay", "state", "dateTimestampProvider", "AsyncScheduler", "_super", "__extends", "SchedulerAction", "now", "Scheduler", "_this", "action", "actions", "error", "asyncScheduler", "AsyncScheduler", "AsyncAction", "async", "EMPTY", "Observable", "subscriber", "isScheduler", "value", "isFunction", "last", "arr", "popResultSelector", "args", "isFunction", "popScheduler", "isScheduler", "popNumber", "defaultValue", "isArrayLike", "x", "isPromise", "value", "isFunction", "isInteropObservable", "input", "isFunction", "observable", "isAsyncIterable", "obj", "isFunction", "createInvalidObservableTypeError", "input", "getSymbolIterator", "iterator", "isIterable", "input", "isFunction", "iterator", "readableStreamLikeToAsyncGenerator", "readableStream", "reader", "__await", "_a", "_b", "value", "done", "isReadableStreamLike", "obj", "isFunction", "innerFrom", "input", "Observable", "isInteropObservable", "fromInteropObservable", "isArrayLike", "fromArrayLike", "isPromise", "fromPromise", "isAsyncIterable", "fromAsyncIterable", "isIterable", "fromIterable", "isReadableStreamLike", "fromReadableStreamLike", "createInvalidObservableTypeError", "obj", "subscriber", "obs", "observable", "isFunction", "array", "i", "promise", "value", "err", "reportUnhandledError", "iterable", "iterable_1", "__values", "iterable_1_1", "asyncIterable", "process", "readableStream", "readableStreamLikeToAsyncGenerator", "asyncIterable_1", "__asyncValues", "asyncIterable_1_1", "executeSchedule", "parentSubscription", "scheduler", "work", "delay", "repeat", "scheduleSubscription", "observeOn", "scheduler", "delay", "operate", "source", "subscriber", "createOperatorSubscriber", "value", "executeSchedule", "err", "subscribeOn", "scheduler", "delay", "operate", "source", "subscriber", "scheduleObservable", "input", "scheduler", "innerFrom", "subscribeOn", "observeOn", "schedulePromise", "input", "scheduler", "innerFrom", "subscribeOn", "observeOn", "scheduleArray", "input", "scheduler", "Observable", "subscriber", "i", "scheduleIterable", "input", "scheduler", "Observable", "subscriber", "iterator", "executeSchedule", "value", "done", "_a", "err", "isFunction", "scheduleAsyncIterable", "input", "scheduler", "Observable", "subscriber", "executeSchedule", "iterator", "result", "scheduleReadableStreamLike", "input", "scheduler", "scheduleAsyncIterable", "readableStreamLikeToAsyncGenerator", "scheduled", "input", "scheduler", "isInteropObservable", "scheduleObservable", "isArrayLike", "scheduleArray", "isPromise", "schedulePromise", "isAsyncIterable", "scheduleAsyncIterable", "isIterable", "scheduleIterable", "isReadableStreamLike", "scheduleReadableStreamLike", "createInvalidObservableTypeError", "from", "input", "scheduler", "scheduled", "innerFrom", "of", "args", "_i", "scheduler", "popScheduler", "from", "isValidDate", "value", "map", "project", "thisArg", "operate", "source", 
"subscriber", "index", "createOperatorSubscriber", "value", "isArray", "callOrApply", "fn", "args", "__spreadArray", "__read", "mapOneOrManyArgs", "map", "mergeInternals", "source", "subscriber", "project", "concurrent", "onBeforeNext", "expand", "innerSubScheduler", "additionalFinalizer", "buffer", "active", "index", "isComplete", "checkComplete", "outerNext", "value", "doInnerSub", "innerComplete", "innerFrom", "createOperatorSubscriber", "innerValue", "bufferedValue", "executeSchedule", "err", "mergeMap", "project", "resultSelector", "concurrent", "isFunction", "a", "i", "map", "b", "ii", "innerFrom", "operate", "source", "subscriber", "mergeInternals", "mergeAll", "concurrent", "mergeMap", "identity", "concatAll", "mergeAll", "concat", "args", "_i", "concatAll", "from", "popScheduler", "nodeEventEmitterMethods", "eventTargetMethods", "jqueryMethods", "fromEvent", "target", "eventName", "options", "resultSelector", "isFunction", "mapOneOrManyArgs", "_a", "__read", "isEventTarget", "methodName", "handler", "isNodeStyleEventEmitter", "toCommonHandlerRegistry", "isJQueryStyleEventEmitter", "add", "remove", "isArrayLike", "mergeMap", "subTarget", "innerFrom", "Observable", "subscriber", "args", "_i", "timer", "dueTime", "intervalOrScheduler", "scheduler", "async", "intervalDuration", "isScheduler", "Observable", "subscriber", "due", "isValidDate", "n", "interval", "period", "scheduler", "asyncScheduler", "timer", "merge", "args", "_i", "scheduler", "popScheduler", "concurrent", "popNumber", "sources", "innerFrom", "mergeAll", "from", "EMPTY", "NEVER", "Observable", "noop", "filter", "predicate", "thisArg", "operate", "source", "subscriber", "index", "createOperatorSubscriber", "value", "take", "count", "EMPTY", "operate", "source", "subscriber", "seen", "createOperatorSubscriber", "value", "ignoreElements", "operate", "source", "subscriber", "createOperatorSubscriber", "noop", "mapTo", "value", "map", "delayWhen", "delayDurationSelector", "subscriptionDelay", "source", "concat", "take", "ignoreElements", "mergeMap", "value", "index", "mapTo", "delay", "due", "scheduler", "asyncScheduler", "duration", "timer", "delayWhen", "distinctUntilChanged", "comparator", "keySelector", "identity", "defaultCompare", "operate", "source", "subscriber", "previousKey", "first", "createOperatorSubscriber", "value", "currentKey", "a", "b", "finalize", "callback", "operate", "source", "subscriber", "repeat", "countOrConfig", "count", "delay", "_a", "EMPTY", "operate", "source", "subscriber", "soFar", "sourceSub", "resubscribe", "notifier", "timer", "innerFrom", "notifierSubscriber_1", "createOperatorSubscriber", "subscribeToSource", "syncUnsub", "switchMap", "project", "resultSelector", "operate", "source", "subscriber", "innerSubscriber", "index", "isComplete", "checkComplete", "createOperatorSubscriber", "value", "innerIndex", "outerIndex", "innerFrom", "innerValue", "takeUntil", "notifier", "operate", "source", "subscriber", "innerFrom", "createOperatorSubscriber", "noop", "tap", "observerOrNext", "error", "complete", "tapObserver", "isFunction", "operate", "source", "subscriber", "_a", "isUnsub", "createOperatorSubscriber", "value", "err", "_b", "identity", "withLatestFrom", "inputs", "_i", "project", "popResultSelector", "operate", "source", "subscriber", "len", "otherValues", "hasValue", "ready", "i", "innerFrom", "createOperatorSubscriber", "value", "identity", "noop", "values", "__spreadArray", "__read", "container", "header", "button", "on$", "ReplaySubject", "distinctUntilChanged", "on", "fromEvent", 
"withLatestFrom", "interval", "takeUntil", "filter", "take", "repeat", "mergeMap", "instance", "merge", "NEVER", "of", "finalize", "switchMap", "el", "tap", "delay"] +} diff --git a/assets/javascripts/lunr/min/lunr.ar.min.js b/assets/javascripts/lunr/min/lunr.ar.min.js new file mode 100644 index 0000000..9b06c26 --- /dev/null +++ b/assets/javascripts/lunr/min/lunr.ar.min.js @@ -0,0 +1 @@ +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.ar=function(){this.pipeline.reset(),this.pipeline.add(e.ar.trimmer,e.ar.stopWordFilter,e.ar.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.ar.stemmer))},e.ar.wordCharacters="ء-ٛٱـ",e.ar.trimmer=e.trimmerSupport.generateTrimmer(e.ar.wordCharacters),e.Pipeline.registerFunction(e.ar.trimmer,"trimmer-ar"),e.ar.stemmer=function(){var e=this;return e.result=!1,e.preRemoved=!1,e.sufRemoved=!1,e.pre={pre1:"ف ك ب و س ل ن ا ي ت",pre2:"ال لل",pre3:"بال وال فال تال كال ولل",pre4:"فبال كبال وبال وكال"},e.suf={suf1:"ه ك ت ن ا ي",suf2:"نك نه ها وك يا اه ون ين تن تم نا وا ان كم كن ني نن ما هم هن تك ته ات يه",suf3:"تين كهم نيه نهم ونه وها يهم ونا ونك وني وهم تكم تنا تها تني تهم كما كها ناه نكم هنا تان يها",suf4:"كموه ناها ونني ونهم تكما تموه تكاه كماه ناكم ناهم نيها وننا"},e.patterns=JSON.parse('{"pt43":[{"pt":[{"c":"ا","l":1}]},{"pt":[{"c":"ا,ت,ن,ي","l":0}],"mPt":[{"c":"ف","l":0,"m":1},{"c":"ع","l":1,"m":2},{"c":"ل","l":2,"m":3}]},{"pt":[{"c":"و","l":2}],"mPt":[{"c":"ف","l":0,"m":0},{"c":"ع","l":1,"m":1},{"c":"ل","l":2,"m":3}]},{"pt":[{"c":"ا","l":2}]},{"pt":[{"c":"ي","l":2}],"mPt":[{"c":"ف","l":0,"m":0},{"c":"ع","l":1,"m":1},{"c":"ا","l":2},{"c":"ل","l":3,"m":3}]},{"pt":[{"c":"م","l":0}]}],"pt53":[{"pt":[{"c":"ت","l":0},{"c":"ا","l":2}]},{"pt":[{"c":"ا,ن,ت,ي","l":0},{"c":"ت","l":2}],"mPt":[{"c":"ا","l":0},{"c":"ف","l":1,"m":1},{"c":"ت","l":2},{"c":"ع","l":3,"m":3},{"c":"ا","l":4},{"c":"ل","l":5,"m":4}]},{"pt":[{"c":"ا","l":0},{"c":"ا","l":2}],"mPt":[{"c":"ا","l":0},{"c":"ف","l":1,"m":1},{"c":"ع","l":2,"m":3},{"c":"ل","l":3,"m":4},{"c":"ا","l":4},{"c":"ل","l":5,"m":4}]},{"pt":[{"c":"ا","l":0},{"c":"ا","l":3}],"mPt":[{"c":"ف","l":0,"m":1},{"c":"ع","l":1,"m":2},{"c":"ل","l":2,"m":4}]},{"pt":[{"c":"ا","l":3},{"c":"ن","l":4}]},{"pt":[{"c":"ت","l":0},{"c":"ي","l":3}]},{"pt":[{"c":"م","l":0},{"c":"و","l":3}]},{"pt":[{"c":"ا","l":1},{"c":"و","l":3}]},{"pt":[{"c":"و","l":1},{"c":"ا","l":2}]},{"pt":[{"c":"م","l":0},{"c":"ا","l":3}]},{"pt":[{"c":"م","l":0},{"c":"ي","l":3}]},{"pt":[{"c":"ا","l":2},{"c":"ن","l":3}]},{"pt":[{"c":"م","l":0},{"c":"ن","l":1}],"mPt":[{"c":"ا","l":0},{"c":"ن","l":1},{"c":"ف","l":2,"m":2},{"c":"ع","l":3,"m":3},{"c":"ا","l":4},{"c":"ل","l":5,"m":4}]},{"pt":[{"c":"م","l":0},{"c":"ت","l":2}],"mPt":[{"c":"ا","l":0},{"c":"ف","l":1,"m":1},{"c":"ت","l":2},{"c":"ع","l":3,"m":3},{"c":"ا","l":4},{"c":"ل","l":5,"m":4}]},{"pt":[{"c":"م","l":0},{"c":"ا","l":2}]},{"pt":[{"c":"م","l":1},{"c":"ا","l":3}]},{"pt":[{"c":"ي,ت,ا,ن","l":0},{"c":"ت","l":1}],"mPt":[{"c":"ف","l":0,"m":2},{"c":"ع","l":1,"m":3},{"c":"ا","l":2},{"c":"ل","l":3,"m":4}]},{"pt":[{"c":"ت,ي,ا,ن","l":0},{"c":"ت","l":2}],"mPt":[{"c":"ا","l":0},{"c":"ف","l":1,"m":1},{"c":"ت","l":2},{"c":"ع","l":3,"m":3},{"c":"ا","l":4}
,{"c":"ل","l":5,"m":4}]},{"pt":[{"c":"ا","l":2},{"c":"ي","l":3}]},{"pt":[{"c":"ا,ي,ت,ن","l":0},{"c":"ن","l":1}],"mPt":[{"c":"ا","l":0},{"c":"ن","l":1},{"c":"ف","l":2,"m":2},{"c":"ع","l":3,"m":3},{"c":"ا","l":4},{"c":"ل","l":5,"m":4}]},{"pt":[{"c":"ا","l":3},{"c":"ء","l":4}]}],"pt63":[{"pt":[{"c":"ا","l":0},{"c":"ت","l":2},{"c":"ا","l":4}]},{"pt":[{"c":"ا,ت,ن,ي","l":0},{"c":"س","l":1},{"c":"ت","l":2}],"mPt":[{"c":"ا","l":0},{"c":"س","l":1},{"c":"ت","l":2},{"c":"ف","l":3,"m":3},{"c":"ع","l":4,"m":4},{"c":"ا","l":5},{"c":"ل","l":6,"m":5}]},{"pt":[{"c":"ا,ن,ت,ي","l":0},{"c":"و","l":3}]},{"pt":[{"c":"م","l":0},{"c":"س","l":1},{"c":"ت","l":2}],"mPt":[{"c":"ا","l":0},{"c":"س","l":1},{"c":"ت","l":2},{"c":"ف","l":3,"m":3},{"c":"ع","l":4,"m":4},{"c":"ا","l":5},{"c":"ل","l":6,"m":5}]},{"pt":[{"c":"ي","l":1},{"c":"ي","l":3},{"c":"ا","l":4},{"c":"ء","l":5}]},{"pt":[{"c":"ا","l":0},{"c":"ن","l":1},{"c":"ا","l":4}]}],"pt54":[{"pt":[{"c":"ت","l":0}]},{"pt":[{"c":"ا,ي,ت,ن","l":0}],"mPt":[{"c":"ا","l":0},{"c":"ف","l":1,"m":1},{"c":"ع","l":2,"m":2},{"c":"ل","l":3,"m":3},{"c":"ر","l":4,"m":4},{"c":"ا","l":5},{"c":"ر","l":6,"m":4}]},{"pt":[{"c":"م","l":0}],"mPt":[{"c":"ا","l":0},{"c":"ف","l":1,"m":1},{"c":"ع","l":2,"m":2},{"c":"ل","l":3,"m":3},{"c":"ر","l":4,"m":4},{"c":"ا","l":5},{"c":"ر","l":6,"m":4}]},{"pt":[{"c":"ا","l":2}]},{"pt":[{"c":"ا","l":0},{"c":"ن","l":2}]}],"pt64":[{"pt":[{"c":"ا","l":0},{"c":"ا","l":4}]},{"pt":[{"c":"م","l":0},{"c":"ت","l":1}]}],"pt73":[{"pt":[{"c":"ا","l":0},{"c":"س","l":1},{"c":"ت","l":2},{"c":"ا","l":5}]}],"pt75":[{"pt":[{"c":"ا","l":0},{"c":"ا","l":5}]}]}'),e.execArray=["cleanWord","removeDiacritics","cleanAlef","removeStopWords","normalizeHamzaAndAlef","removeStartWaw","removePre432","removeEndTaa","wordCheck"],e.stem=function(){var r=0;for(e.result=!1,e.preRemoved=!1,e.sufRemoved=!1;r=0)return!0},e.normalizeHamzaAndAlef=function(){return e.word=e.word.replace("ؤ","ء"),e.word=e.word.replace("ئ","ء"),e.word=e.word.replace(/([\u0627])\1+/gi,"ا"),!1},e.removeEndTaa=function(){return!(e.word.length>2)||(e.word=e.word.replace(/[\u0627]$/,""),e.word=e.word.replace("ة",""),!1)},e.removeStartWaw=function(){return e.word.length>3&&"و"==e.word[0]&&"و"==e.word[1]&&(e.word=e.word.slice(1)),!1},e.removePre432=function(){var r=e.word;if(e.word.length>=7){var t=new RegExp("^("+e.pre.pre4.split(" ").join("|")+")");e.word=e.word.replace(t,"")}if(e.word==r&&e.word.length>=6){var c=new RegExp("^("+e.pre.pre3.split(" ").join("|")+")");e.word=e.word.replace(c,"")}if(e.word==r&&e.word.length>=5){var l=new RegExp("^("+e.pre.pre2.split(" ").join("|")+")");e.word=e.word.replace(l,"")}return r!=e.word&&(e.preRemoved=!0),!1},e.patternCheck=function(r){for(var t=0;t3){var t=new RegExp("^("+e.pre.pre1.split(" ").join("|")+")");e.word=e.word.replace(t,"")}return r!=e.word&&(e.preRemoved=!0),!1},e.removeSuf1=function(){var r=e.word;if(0==e.sufRemoved&&e.word.length>3){var t=new RegExp("("+e.suf.suf1.split(" ").join("|")+")$");e.word=e.word.replace(t,"")}return r!=e.word&&(e.sufRemoved=!0),!1},e.removeSuf432=function(){var r=e.word;if(e.word.length>=6){var t=new RegExp("("+e.suf.suf4.split(" ").join("|")+")$");e.word=e.word.replace(t,"")}if(e.word==r&&e.word.length>=5){var c=new RegExp("("+e.suf.suf3.split(" ").join("|")+")$");e.word=e.word.replace(c,"")}if(e.word==r&&e.word.length>=4){var l=new RegExp("("+e.suf.suf2.split(" ").join("|")+")$");e.word=e.word.replace(l,"")}return r!=e.word&&(e.sufRemoved=!0),!1},e.wordCheck=function(){for(var 
r=(e.word,[e.removeSuf432,e.removeSuf1,e.removePre1]),t=0,c=!1;e.word.length>=7&&!e.result&&t=f.limit)return;f.cursor++}for(;!f.out_grouping(w,97,248);){if(f.cursor>=f.limit)return;f.cursor++}d=f.cursor,d=d&&(r=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,e=f.find_among_b(c,32),f.limit_backward=r,e))switch(f.bra=f.cursor,e){case 1:f.slice_del();break;case 2:f.in_grouping_b(p,97,229)&&f.slice_del()}}function t(){var e,r=f.limit-f.cursor;f.cursor>=d&&(e=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,f.find_among_b(l,4)?(f.bra=f.cursor,f.limit_backward=e,f.cursor=f.limit-r,f.cursor>f.limit_backward&&(f.cursor--,f.bra=f.cursor,f.slice_del())):f.limit_backward=e)}function s(){var e,r,i,n=f.limit-f.cursor;if(f.ket=f.cursor,f.eq_s_b(2,"st")&&(f.bra=f.cursor,f.eq_s_b(2,"ig")&&f.slice_del()),f.cursor=f.limit-n,f.cursor>=d&&(r=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,e=f.find_among_b(m,5),f.limit_backward=r,e))switch(f.bra=f.cursor,e){case 1:f.slice_del(),i=f.limit-f.cursor,t(),f.cursor=f.limit-i;break;case 2:f.slice_from("løs")}}function o(){var e;f.cursor>=d&&(e=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,f.out_grouping_b(w,97,248)?(f.bra=f.cursor,u=f.slice_to(u),f.limit_backward=e,f.eq_v_b(u)&&f.slice_del()):f.limit_backward=e)}var a,d,u,c=[new r("hed",-1,1),new r("ethed",0,1),new r("ered",-1,1),new r("e",-1,1),new r("erede",3,1),new r("ende",3,1),new r("erende",5,1),new r("ene",3,1),new r("erne",3,1),new r("ere",3,1),new r("en",-1,1),new r("heden",10,1),new r("eren",10,1),new r("er",-1,1),new r("heder",13,1),new r("erer",13,1),new r("s",-1,2),new r("heds",16,1),new r("es",16,1),new r("endes",18,1),new r("erendes",19,1),new r("enes",18,1),new r("ernes",18,1),new r("eres",18,1),new r("ens",16,1),new r("hedens",24,1),new r("erens",24,1),new r("ers",16,1),new r("ets",16,1),new r("erets",28,1),new r("et",-1,1),new r("eret",30,1)],l=[new r("gd",-1,-1),new r("dt",-1,-1),new r("gt",-1,-1),new r("kt",-1,-1)],m=[new r("ig",-1,1),new r("lig",0,1),new r("elig",1,1),new r("els",-1,1),new r("løst",-1,2)],w=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,48,0,128],p=[239,254,42,3,0,0,0,0,0,0,0,0,0,0,0,0,16],f=new i;this.setCurrent=function(e){f.setCurrent(e)},this.getCurrent=function(){return f.getCurrent()},this.stem=function(){var r=f.cursor;return e(),f.limit_backward=r,f.cursor=f.limit,n(),f.cursor=f.limit,t(),f.cursor=f.limit,s(),f.cursor=f.limit,o(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}}(),e.Pipeline.registerFunction(e.da.stemmer,"stemmer-da"),e.da.stopWordFilter=e.generateStopWordFilter("ad af alle alt anden at blev blive bliver da de dem den denne der deres det dette dig din disse dog du efter eller en end er et for fra ham han hans har havde have hende hendes her hos hun hvad hvis hvor i ikke ind jeg jer jo kunne man mange med meget men mig min mine mit mod ned noget nogle nu når og også om op os over på selv sig sin sine sit skal skulle som sådan thi til ud under var vi vil ville vor være været".split(" ")),e.Pipeline.registerFunction(e.da.stopWordFilter,"stopWordFilter-da")}}); \ No newline at end of file diff --git a/assets/javascripts/lunr/min/lunr.de.min.js b/assets/javascripts/lunr/min/lunr.de.min.js new file mode 100644 index 0000000..f3b5c10 --- /dev/null +++ b/assets/javascripts/lunr/min/lunr.de.min.js @@ -0,0 +1,18 @@ +/*! 
+ * Lunr languages, `German` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.de=function(){this.pipeline.reset(),this.pipeline.add(e.de.trimmer,e.de.stopWordFilter,e.de.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.de.stemmer))},e.de.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.de.trimmer=e.trimmerSupport.generateTrimmer(e.de.wordCharacters),e.Pipeline.registerFunction(e.de.trimmer,"trimmer-de"),e.de.stemmer=function(){var r=e.stemmerSupport.Among,n=e.stemmerSupport.SnowballProgram,i=new function(){function e(e,r,n){return!(!v.eq_s(1,e)||(v.ket=v.cursor,!v.in_grouping(p,97,252)))&&(v.slice_from(r),v.cursor=n,!0)}function i(){for(var r,n,i,s,t=v.cursor;;)if(r=v.cursor,v.bra=r,v.eq_s(1,"ß"))v.ket=v.cursor,v.slice_from("ss");else{if(r>=v.limit)break;v.cursor=r+1}for(v.cursor=t;;)for(n=v.cursor;;){if(i=v.cursor,v.in_grouping(p,97,252)){if(s=v.cursor,v.bra=s,e("u","U",i))break;if(v.cursor=s,e("y","Y",i))break}if(i>=v.limit)return void(v.cursor=n);v.cursor=i+1}}function s(){for(;!v.in_grouping(p,97,252);){if(v.cursor>=v.limit)return!0;v.cursor++}for(;!v.out_grouping(p,97,252);){if(v.cursor>=v.limit)return!0;v.cursor++}return!1}function t(){m=v.limit,l=m;var e=v.cursor+3;0<=e&&e<=v.limit&&(d=e,s()||(m=v.cursor,m=v.limit)return;v.cursor++}}}function c(){return m<=v.cursor}function u(){return l<=v.cursor}function a(){var e,r,n,i,s=v.limit-v.cursor;if(v.ket=v.cursor,(e=v.find_among_b(w,7))&&(v.bra=v.cursor,c()))switch(e){case 1:v.slice_del();break;case 2:v.slice_del(),v.ket=v.cursor,v.eq_s_b(1,"s")&&(v.bra=v.cursor,v.eq_s_b(3,"nis")&&v.slice_del());break;case 3:v.in_grouping_b(g,98,116)&&v.slice_del()}if(v.cursor=v.limit-s,v.ket=v.cursor,(e=v.find_among_b(f,4))&&(v.bra=v.cursor,c()))switch(e){case 1:v.slice_del();break;case 2:if(v.in_grouping_b(k,98,116)){var t=v.cursor-3;v.limit_backward<=t&&t<=v.limit&&(v.cursor=t,v.slice_del())}}if(v.cursor=v.limit-s,v.ket=v.cursor,(e=v.find_among_b(_,8))&&(v.bra=v.cursor,u()))switch(e){case 1:v.slice_del(),v.ket=v.cursor,v.eq_s_b(2,"ig")&&(v.bra=v.cursor,r=v.limit-v.cursor,v.eq_s_b(1,"e")||(v.cursor=v.limit-r,u()&&v.slice_del()));break;case 2:n=v.limit-v.cursor,v.eq_s_b(1,"e")||(v.cursor=v.limit-n,v.slice_del());break;case 3:if(v.slice_del(),v.ket=v.cursor,i=v.limit-v.cursor,!v.eq_s_b(2,"er")&&(v.cursor=v.limit-i,!v.eq_s_b(2,"en")))break;v.bra=v.cursor,c()&&v.slice_del();break;case 4:v.slice_del(),v.ket=v.cursor,e=v.find_among_b(b,2),e&&(v.bra=v.cursor,u()&&1==e&&v.slice_del())}}var d,l,m,h=[new r("",-1,6),new r("U",0,2),new r("Y",0,1),new r("ä",0,3),new r("ö",0,4),new r("ü",0,5)],w=[new r("e",-1,2),new r("em",-1,1),new r("en",-1,2),new r("ern",-1,1),new r("er",-1,1),new r("s",-1,3),new r("es",5,2)],f=[new r("en",-1,1),new r("er",-1,1),new r("st",-1,2),new r("est",2,1)],b=[new 
r("ig",-1,1),new r("lich",-1,1)],_=[new r("end",-1,1),new r("ig",-1,2),new r("ung",-1,1),new r("lich",-1,3),new r("isch",-1,2),new r("ik",-1,2),new r("heit",-1,3),new r("keit",-1,4)],p=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32,8],g=[117,30,5],k=[117,30,4],v=new n;this.setCurrent=function(e){v.setCurrent(e)},this.getCurrent=function(){return v.getCurrent()},this.stem=function(){var e=v.cursor;return i(),v.cursor=e,t(),v.limit_backward=e,v.cursor=v.limit,a(),v.cursor=v.limit_backward,o(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return i.setCurrent(e),i.stem(),i.getCurrent()}):(i.setCurrent(e),i.stem(),i.getCurrent())}}(),e.Pipeline.registerFunction(e.de.stemmer,"stemmer-de"),e.de.stopWordFilter=e.generateStopWordFilter("aber alle allem allen aller alles als also am an ander andere anderem anderen anderer anderes anderm andern anderr anders auch auf aus bei bin bis bist da damit dann das dasselbe dazu daß dein deine deinem deinen deiner deines dem demselben den denn denselben der derer derselbe derselben des desselben dessen dich die dies diese dieselbe dieselben diesem diesen dieser dieses dir doch dort du durch ein eine einem einen einer eines einig einige einigem einigen einiger einiges einmal er es etwas euch euer eure eurem euren eurer eures für gegen gewesen hab habe haben hat hatte hatten hier hin hinter ich ihm ihn ihnen ihr ihre ihrem ihren ihrer ihres im in indem ins ist jede jedem jeden jeder jedes jene jenem jenen jener jenes jetzt kann kein keine keinem keinen keiner keines können könnte machen man manche manchem manchen mancher manches mein meine meinem meinen meiner meines mich mir mit muss musste nach nicht nichts noch nun nur ob oder ohne sehr sein seine seinem seinen seiner seines selbst sich sie sind so solche solchem solchen solcher solches soll sollte sondern sonst um und uns unse unsem unsen unser unses unter viel vom von vor war waren warst was weg weil weiter welche welchem welchen welcher welches wenn werde werden wie wieder will wir wird wirst wo wollen wollte während würde würden zu zum zur zwar zwischen über".split(" ")),e.Pipeline.registerFunction(e.de.stopWordFilter,"stopWordFilter-de")}}); \ No newline at end of file diff --git a/assets/javascripts/lunr/min/lunr.du.min.js b/assets/javascripts/lunr/min/lunr.du.min.js new file mode 100644 index 0000000..49a0f3f --- /dev/null +++ b/assets/javascripts/lunr/min/lunr.du.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Dutch` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");console.warn('[Lunr Languages] Please use the "nl" instead of the "du". 
The "nl" code is the standard code for Dutch language, and "du" will be removed in the next major versions.'),e.du=function(){this.pipeline.reset(),this.pipeline.add(e.du.trimmer,e.du.stopWordFilter,e.du.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.du.stemmer))},e.du.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.du.trimmer=e.trimmerSupport.generateTrimmer(e.du.wordCharacters),e.Pipeline.registerFunction(e.du.trimmer,"trimmer-du"),e.du.stemmer=function(){var r=e.stemmerSupport.Among,i=e.stemmerSupport.SnowballProgram,n=new function(){function e(){for(var e,r,i,o=C.cursor;;){if(C.bra=C.cursor,e=C.find_among(b,11))switch(C.ket=C.cursor,e){case 1:C.slice_from("a");continue;case 2:C.slice_from("e");continue;case 3:C.slice_from("i");continue;case 4:C.slice_from("o");continue;case 5:C.slice_from("u");continue;case 6:if(C.cursor>=C.limit)break;C.cursor++;continue}break}for(C.cursor=o,C.bra=o,C.eq_s(1,"y")?(C.ket=C.cursor,C.slice_from("Y")):C.cursor=o;;)if(r=C.cursor,C.in_grouping(q,97,232)){if(i=C.cursor,C.bra=i,C.eq_s(1,"i"))C.ket=C.cursor,C.in_grouping(q,97,232)&&(C.slice_from("I"),C.cursor=r);else if(C.cursor=i,C.eq_s(1,"y"))C.ket=C.cursor,C.slice_from("Y"),C.cursor=r;else if(n(r))break}else if(n(r))break}function n(e){return C.cursor=e,e>=C.limit||(C.cursor++,!1)}function o(){_=C.limit,f=_,t()||(_=C.cursor,_<3&&(_=3),t()||(f=C.cursor))}function t(){for(;!C.in_grouping(q,97,232);){if(C.cursor>=C.limit)return!0;C.cursor++}for(;!C.out_grouping(q,97,232);){if(C.cursor>=C.limit)return!0;C.cursor++}return!1}function s(){for(var e;;)if(C.bra=C.cursor,e=C.find_among(p,3))switch(C.ket=C.cursor,e){case 1:C.slice_from("y");break;case 2:C.slice_from("i");break;case 3:if(C.cursor>=C.limit)return;C.cursor++}}function u(){return _<=C.cursor}function c(){return f<=C.cursor}function a(){var e=C.limit-C.cursor;C.find_among_b(g,3)&&(C.cursor=C.limit-e,C.ket=C.cursor,C.cursor>C.limit_backward&&(C.cursor--,C.bra=C.cursor,C.slice_del()))}function l(){var e;w=!1,C.ket=C.cursor,C.eq_s_b(1,"e")&&(C.bra=C.cursor,u()&&(e=C.limit-C.cursor,C.out_grouping_b(q,97,232)&&(C.cursor=C.limit-e,C.slice_del(),w=!0,a())))}function m(){var e;u()&&(e=C.limit-C.cursor,C.out_grouping_b(q,97,232)&&(C.cursor=C.limit-e,C.eq_s_b(3,"gem")||(C.cursor=C.limit-e,C.slice_del(),a())))}function d(){var e,r,i,n,o,t,s=C.limit-C.cursor;if(C.ket=C.cursor,e=C.find_among_b(h,5))switch(C.bra=C.cursor,e){case 1:u()&&C.slice_from("heid");break;case 2:m();break;case 3:u()&&C.out_grouping_b(z,97,232)&&C.slice_del()}if(C.cursor=C.limit-s,l(),C.cursor=C.limit-s,C.ket=C.cursor,C.eq_s_b(4,"heid")&&(C.bra=C.cursor,c()&&(r=C.limit-C.cursor,C.eq_s_b(1,"c")||(C.cursor=C.limit-r,C.slice_del(),C.ket=C.cursor,C.eq_s_b(2,"en")&&(C.bra=C.cursor,m())))),C.cursor=C.limit-s,C.ket=C.cursor,e=C.find_among_b(k,6))switch(C.bra=C.cursor,e){case 1:if(c()){if(C.slice_del(),i=C.limit-C.cursor,C.ket=C.cursor,C.eq_s_b(2,"ig")&&(C.bra=C.cursor,c()&&(n=C.limit-C.cursor,!C.eq_s_b(1,"e")))){C.cursor=C.limit-n,C.slice_del();break}C.cursor=C.limit-i,a()}break;case 2:c()&&(o=C.limit-C.cursor,C.eq_s_b(1,"e")||(C.cursor=C.limit-o,C.slice_del()));break;case 3:c()&&(C.slice_del(),l());break;case 4:c()&&C.slice_del();break;case 5:c()&&w&&C.slice_del()}C.cursor=C.limit-s,C.out_grouping_b(j,73,232)&&(t=C.limit-C.cursor,C.find_among_b(v,4)&&C.out_grouping_b(q,97,232)&&(C.cursor=C.limit-t,C.ket=C.cursor,C.cursor>C.limit_backward&&(C.cursor--,C.bra=C.cursor,C.slice_del())))}var f,_,w,b=[new r("",-1,6),new 
r("á",0,1),new r("ä",0,1),new r("é",0,2),new r("ë",0,2),new r("í",0,3),new r("ï",0,3),new r("ó",0,4),new r("ö",0,4),new r("ú",0,5),new r("ü",0,5)],p=[new r("",-1,3),new r("I",0,2),new r("Y",0,1)],g=[new r("dd",-1,-1),new r("kk",-1,-1),new r("tt",-1,-1)],h=[new r("ene",-1,2),new r("se",-1,3),new r("en",-1,2),new r("heden",2,1),new r("s",-1,3)],k=[new r("end",-1,1),new r("ig",-1,2),new r("ing",-1,1),new r("lijk",-1,3),new r("baar",-1,4),new r("bar",-1,5)],v=[new r("aa",-1,-1),new r("ee",-1,-1),new r("oo",-1,-1),new r("uu",-1,-1)],q=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],j=[1,0,0,17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],z=[17,67,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],C=new i;this.setCurrent=function(e){C.setCurrent(e)},this.getCurrent=function(){return C.getCurrent()},this.stem=function(){var r=C.cursor;return e(),C.cursor=r,o(),C.limit_backward=r,C.cursor=C.limit,d(),C.cursor=C.limit_backward,s(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}}(),e.Pipeline.registerFunction(e.du.stemmer,"stemmer-du"),e.du.stopWordFilter=e.generateStopWordFilter(" aan al alles als altijd andere ben bij daar dan dat de der deze die dit doch doen door dus een eens en er ge geen geweest haar had heb hebben heeft hem het hier hij hoe hun iemand iets ik in is ja je kan kon kunnen maar me meer men met mij mijn moet na naar niet niets nog nu of om omdat onder ons ook op over reeds te tegen toch toen tot u uit uw van veel voor want waren was wat werd wezen wie wil worden wordt zal ze zelf zich zij zijn zo zonder zou".split(" ")),e.Pipeline.registerFunction(e.du.stopWordFilter,"stopWordFilter-du")}}); \ No newline at end of file diff --git a/assets/javascripts/lunr/min/lunr.es.min.js b/assets/javascripts/lunr/min/lunr.es.min.js new file mode 100644 index 0000000..2989d34 --- /dev/null +++ b/assets/javascripts/lunr/min/lunr.es.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Spanish` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,s){"function"==typeof define&&define.amd?define(s):"object"==typeof exports?module.exports=s():s()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.es=function(){this.pipeline.reset(),this.pipeline.add(e.es.trimmer,e.es.stopWordFilter,e.es.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.es.stemmer))},e.es.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.es.trimmer=e.trimmerSupport.generateTrimmer(e.es.wordCharacters),e.Pipeline.registerFunction(e.es.trimmer,"trimmer-es"),e.es.stemmer=function(){var s=e.stemmerSupport.Among,r=e.stemmerSupport.SnowballProgram,n=new function(){function e(){if(A.out_grouping(x,97,252)){for(;!A.in_grouping(x,97,252);){if(A.cursor>=A.limit)return!0;A.cursor++}return!1}return!0}function n(){if(A.in_grouping(x,97,252)){var s=A.cursor;if(e()){if(A.cursor=s,!A.in_grouping(x,97,252))return!0;for(;!A.out_grouping(x,97,252);){if(A.cursor>=A.limit)return!0;A.cursor++}}return!1}return!0}function i(){var s,r=A.cursor;if(n()){if(A.cursor=r,!A.out_grouping(x,97,252))return;if(s=A.cursor,e()){if(A.cursor=s,!A.in_grouping(x,97,252)||A.cursor>=A.limit)return;A.cursor++}}g=A.cursor}function a(){for(;!A.in_grouping(x,97,252);){if(A.cursor>=A.limit)return!1;A.cursor++}for(;!A.out_grouping(x,97,252);){if(A.cursor>=A.limit)return!1;A.cursor++}return!0}function t(){var e=A.cursor;g=A.limit,p=g,v=g,i(),A.cursor=e,a()&&(p=A.cursor,a()&&(v=A.cursor))}function o(){for(var e;;){if(A.bra=A.cursor,e=A.find_among(k,6))switch(A.ket=A.cursor,e){case 1:A.slice_from("a");continue;case 2:A.slice_from("e");continue;case 3:A.slice_from("i");continue;case 4:A.slice_from("o");continue;case 5:A.slice_from("u");continue;case 6:if(A.cursor>=A.limit)break;A.cursor++;continue}break}}function u(){return g<=A.cursor}function w(){return p<=A.cursor}function c(){return v<=A.cursor}function m(){var e;if(A.ket=A.cursor,A.find_among_b(y,13)&&(A.bra=A.cursor,(e=A.find_among_b(q,11))&&u()))switch(e){case 1:A.bra=A.cursor,A.slice_from("iendo");break;case 2:A.bra=A.cursor,A.slice_from("ando");break;case 3:A.bra=A.cursor,A.slice_from("ar");break;case 4:A.bra=A.cursor,A.slice_from("er");break;case 5:A.bra=A.cursor,A.slice_from("ir");break;case 6:A.slice_del();break;case 7:A.eq_s_b(1,"u")&&A.slice_del()}}function l(e,s){if(!c())return!0;A.slice_del(),A.ket=A.cursor;var r=A.find_among_b(e,s);return r&&(A.bra=A.cursor,1==r&&c()&&A.slice_del()),!1}function d(e){return!c()||(A.slice_del(),A.ket=A.cursor,A.eq_s_b(2,e)&&(A.bra=A.cursor,c()&&A.slice_del()),!1)}function b(){var e;if(A.ket=A.cursor,e=A.find_among_b(S,46)){switch(A.bra=A.cursor,e){case 1:if(!c())return!1;A.slice_del();break;case 2:if(d("ic"))return!1;break;case 3:if(!c())return!1;A.slice_from("log");break;case 4:if(!c())return!1;A.slice_from("u");break;case 5:if(!c())return!1;A.slice_from("ente");break;case 6:if(!w())return!1;A.slice_del(),A.ket=A.cursor,e=A.find_among_b(C,4),e&&(A.bra=A.cursor,c()&&(A.slice_del(),1==e&&(A.ket=A.cursor,A.eq_s_b(2,"at")&&(A.bra=A.cursor,c()&&A.slice_del()))));break;case 7:if(l(P,3))return!1;break;case 8:if(l(F,3))return!1;break;case 9:if(d("at"))return!1}return!0}return!1}function f(){var e,s;if(A.cursor>=g&&(s=A.limit_backward,A.limit_backward=g,A.ket=A.cursor,e=A.find_among_b(W,12),A.limit_backward=s,e)){if(A.bra=A.cursor,1==e){if(!A.eq_s_b(1,"u"))return!1;A.slice_del()}return!0}return!1}function _(){var e,s,r,n;if(A.cursor>=g&&(s=A.limit_backward,A.limit_backward=g,A.ket=A.cursor,e=A.find_among_b(L,96),A.limit_backward=s,e))switch(A.bra=A.cursor,e){case 
1:r=A.limit-A.cursor,A.eq_s_b(1,"u")?(n=A.limit-A.cursor,A.eq_s_b(1,"g")?A.cursor=A.limit-n:A.cursor=A.limit-r):A.cursor=A.limit-r,A.bra=A.cursor;case 2:A.slice_del()}}function h(){var e,s;if(A.ket=A.cursor,e=A.find_among_b(z,8))switch(A.bra=A.cursor,e){case 1:u()&&A.slice_del();break;case 2:u()&&(A.slice_del(),A.ket=A.cursor,A.eq_s_b(1,"u")&&(A.bra=A.cursor,s=A.limit-A.cursor,A.eq_s_b(1,"g")&&(A.cursor=A.limit-s,u()&&A.slice_del())))}}var v,p,g,k=[new s("",-1,6),new s("á",0,1),new s("é",0,2),new s("í",0,3),new s("ó",0,4),new s("ú",0,5)],y=[new s("la",-1,-1),new s("sela",0,-1),new s("le",-1,-1),new s("me",-1,-1),new s("se",-1,-1),new s("lo",-1,-1),new s("selo",5,-1),new s("las",-1,-1),new s("selas",7,-1),new s("les",-1,-1),new s("los",-1,-1),new s("selos",10,-1),new s("nos",-1,-1)],q=[new s("ando",-1,6),new s("iendo",-1,6),new s("yendo",-1,7),new s("ándo",-1,2),new s("iéndo",-1,1),new s("ar",-1,6),new s("er",-1,6),new s("ir",-1,6),new s("ár",-1,3),new s("ér",-1,4),new s("ír",-1,5)],C=[new s("ic",-1,-1),new s("ad",-1,-1),new s("os",-1,-1),new s("iv",-1,1)],P=[new s("able",-1,1),new s("ible",-1,1),new s("ante",-1,1)],F=[new s("ic",-1,1),new s("abil",-1,1),new s("iv",-1,1)],S=[new s("ica",-1,1),new s("ancia",-1,2),new s("encia",-1,5),new s("adora",-1,2),new s("osa",-1,1),new s("ista",-1,1),new s("iva",-1,9),new s("anza",-1,1),new s("logía",-1,3),new s("idad",-1,8),new s("able",-1,1),new s("ible",-1,1),new s("ante",-1,2),new s("mente",-1,7),new s("amente",13,6),new s("ación",-1,2),new s("ución",-1,4),new s("ico",-1,1),new s("ismo",-1,1),new s("oso",-1,1),new s("amiento",-1,1),new s("imiento",-1,1),new s("ivo",-1,9),new s("ador",-1,2),new s("icas",-1,1),new s("ancias",-1,2),new s("encias",-1,5),new s("adoras",-1,2),new s("osas",-1,1),new s("istas",-1,1),new s("ivas",-1,9),new s("anzas",-1,1),new s("logías",-1,3),new s("idades",-1,8),new s("ables",-1,1),new s("ibles",-1,1),new s("aciones",-1,2),new s("uciones",-1,4),new s("adores",-1,2),new s("antes",-1,2),new s("icos",-1,1),new s("ismos",-1,1),new s("osos",-1,1),new s("amientos",-1,1),new s("imientos",-1,1),new s("ivos",-1,9)],W=[new s("ya",-1,1),new s("ye",-1,1),new s("yan",-1,1),new s("yen",-1,1),new s("yeron",-1,1),new s("yendo",-1,1),new s("yo",-1,1),new s("yas",-1,1),new s("yes",-1,1),new s("yais",-1,1),new s("yamos",-1,1),new s("yó",-1,1)],L=[new s("aba",-1,2),new s("ada",-1,2),new s("ida",-1,2),new s("ara",-1,2),new s("iera",-1,2),new s("ía",-1,2),new s("aría",5,2),new s("ería",5,2),new s("iría",5,2),new s("ad",-1,2),new s("ed",-1,2),new s("id",-1,2),new s("ase",-1,2),new s("iese",-1,2),new s("aste",-1,2),new s("iste",-1,2),new s("an",-1,2),new s("aban",16,2),new s("aran",16,2),new s("ieran",16,2),new s("ían",16,2),new s("arían",20,2),new s("erían",20,2),new s("irían",20,2),new s("en",-1,1),new s("asen",24,2),new s("iesen",24,2),new s("aron",-1,2),new s("ieron",-1,2),new s("arán",-1,2),new s("erán",-1,2),new s("irán",-1,2),new s("ado",-1,2),new s("ido",-1,2),new s("ando",-1,2),new s("iendo",-1,2),new s("ar",-1,2),new s("er",-1,2),new s("ir",-1,2),new s("as",-1,2),new s("abas",39,2),new s("adas",39,2),new s("idas",39,2),new s("aras",39,2),new s("ieras",39,2),new s("ías",39,2),new s("arías",45,2),new s("erías",45,2),new s("irías",45,2),new s("es",-1,1),new s("ases",49,2),new s("ieses",49,2),new s("abais",-1,2),new s("arais",-1,2),new s("ierais",-1,2),new s("íais",-1,2),new s("aríais",55,2),new s("eríais",55,2),new s("iríais",55,2),new s("aseis",-1,2),new s("ieseis",-1,2),new s("asteis",-1,2),new s("isteis",-1,2),new s("áis",-1,2),new 
s("éis",-1,1),new s("aréis",64,2),new s("eréis",64,2),new s("iréis",64,2),new s("ados",-1,2),new s("idos",-1,2),new s("amos",-1,2),new s("ábamos",70,2),new s("áramos",70,2),new s("iéramos",70,2),new s("íamos",70,2),new s("aríamos",74,2),new s("eríamos",74,2),new s("iríamos",74,2),new s("emos",-1,1),new s("aremos",78,2),new s("eremos",78,2),new s("iremos",78,2),new s("ásemos",78,2),new s("iésemos",78,2),new s("imos",-1,2),new s("arás",-1,2),new s("erás",-1,2),new s("irás",-1,2),new s("ís",-1,2),new s("ará",-1,2),new s("erá",-1,2),new s("irá",-1,2),new s("aré",-1,2),new s("eré",-1,2),new s("iré",-1,2),new s("ió",-1,2)],z=[new s("a",-1,1),new s("e",-1,2),new s("o",-1,1),new s("os",-1,1),new s("á",-1,1),new s("é",-1,2),new s("í",-1,1),new s("ó",-1,1)],x=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,1,17,4,10],A=new r;this.setCurrent=function(e){A.setCurrent(e)},this.getCurrent=function(){return A.getCurrent()},this.stem=function(){var e=A.cursor;return t(),A.limit_backward=e,A.cursor=A.limit,m(),A.cursor=A.limit,b()||(A.cursor=A.limit,f()||(A.cursor=A.limit,_())),A.cursor=A.limit,h(),A.cursor=A.limit_backward,o(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}}(),e.Pipeline.registerFunction(e.es.stemmer,"stemmer-es"),e.es.stopWordFilter=e.generateStopWordFilter("a al algo algunas algunos ante antes como con contra cual cuando de del desde donde durante e el ella ellas ellos en entre era erais eran eras eres es esa esas ese eso esos esta estaba estabais estaban estabas estad estada estadas estado estados estamos estando estar estaremos estará estarán estarás estaré estaréis estaría estaríais estaríamos estarían estarías estas este estemos esto estos estoy estuve estuviera estuvierais estuvieran estuvieras estuvieron estuviese estuvieseis estuviesen estuvieses estuvimos estuviste estuvisteis estuviéramos estuviésemos estuvo está estábamos estáis están estás esté estéis estén estés fue fuera fuerais fueran fueras fueron fuese fueseis fuesen fueses fui fuimos fuiste fuisteis fuéramos fuésemos ha habida habidas habido habidos habiendo habremos habrá habrán habrás habré habréis habría habríais habríamos habrían habrías habéis había habíais habíamos habían habías han has hasta hay haya hayamos hayan hayas hayáis he hemos hube hubiera hubierais hubieran hubieras hubieron hubiese hubieseis hubiesen hubieses hubimos hubiste hubisteis hubiéramos hubiésemos hubo la las le les lo los me mi mis mucho muchos muy más mí mía mías mío míos nada ni no nos nosotras nosotros nuestra nuestras nuestro nuestros o os otra otras otro otros para pero poco por porque que quien quienes qué se sea seamos sean seas seremos será serán serás seré seréis sería seríais seríamos serían serías seáis sido siendo sin sobre sois somos son soy su sus suya suyas suyo suyos sí también tanto te tendremos tendrá tendrán tendrás tendré tendréis tendría tendríais tendríamos tendrían tendrías tened tenemos tenga tengamos tengan tengas tengo tengáis tenida tenidas tenido tenidos teniendo tenéis tenía teníais teníamos tenían tenías ti tiene tienen tienes todo todos tu tus tuve tuviera tuvierais tuvieran tuvieras tuvieron tuviese tuvieseis tuviesen tuvieses tuvimos tuviste tuvisteis tuviéramos tuviésemos tuvo tuya tuyas tuyo tuyos tú un una uno unos vosotras vosotros vuestra vuestras vuestro vuestros y ya yo él éramos".split(" ")),e.Pipeline.registerFunction(e.es.stopWordFilter,"stopWordFilter-es")}}); \ No newline at end of file 
diff --git a/assets/javascripts/lunr/min/lunr.fi.min.js b/assets/javascripts/lunr/min/lunr.fi.min.js new file mode 100644 index 0000000..29f5dfc --- /dev/null +++ b/assets/javascripts/lunr/min/lunr.fi.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Finnish` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(i,e){"function"==typeof define&&define.amd?define(e):"object"==typeof exports?module.exports=e():e()(i.lunr)}(this,function(){return function(i){if(void 0===i)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===i.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");i.fi=function(){this.pipeline.reset(),this.pipeline.add(i.fi.trimmer,i.fi.stopWordFilter,i.fi.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(i.fi.stemmer))},i.fi.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",i.fi.trimmer=i.trimmerSupport.generateTrimmer(i.fi.wordCharacters),i.Pipeline.registerFunction(i.fi.trimmer,"trimmer-fi"),i.fi.stemmer=function(){var e=i.stemmerSupport.Among,r=i.stemmerSupport.SnowballProgram,n=new function(){function i(){f=A.limit,d=f,n()||(f=A.cursor,n()||(d=A.cursor))}function n(){for(var i;;){if(i=A.cursor,A.in_grouping(W,97,246))break;if(A.cursor=i,i>=A.limit)return!0;A.cursor++}for(A.cursor=i;!A.out_grouping(W,97,246);){if(A.cursor>=A.limit)return!0;A.cursor++}return!1}function t(){return d<=A.cursor}function s(){var i,e;if(A.cursor>=f)if(e=A.limit_backward,A.limit_backward=f,A.ket=A.cursor,i=A.find_among_b(h,10)){switch(A.bra=A.cursor,A.limit_backward=e,i){case 1:if(!A.in_grouping_b(x,97,246))return;break;case 2:if(!t())return}A.slice_del()}else A.limit_backward=e}function o(){var i,e,r;if(A.cursor>=f)if(e=A.limit_backward,A.limit_backward=f,A.ket=A.cursor,i=A.find_among_b(v,9))switch(A.bra=A.cursor,A.limit_backward=e,i){case 1:r=A.limit-A.cursor,A.eq_s_b(1,"k")||(A.cursor=A.limit-r,A.slice_del());break;case 2:A.slice_del(),A.ket=A.cursor,A.eq_s_b(3,"kse")&&(A.bra=A.cursor,A.slice_from("ksi"));break;case 3:A.slice_del();break;case 4:A.find_among_b(p,6)&&A.slice_del();break;case 5:A.find_among_b(g,6)&&A.slice_del();break;case 6:A.find_among_b(j,2)&&A.slice_del()}else A.limit_backward=e}function l(){return A.find_among_b(q,7)}function a(){return A.eq_s_b(1,"i")&&A.in_grouping_b(L,97,246)}function u(){var i,e,r;if(A.cursor>=f)if(e=A.limit_backward,A.limit_backward=f,A.ket=A.cursor,i=A.find_among_b(C,30)){switch(A.bra=A.cursor,A.limit_backward=e,i){case 1:if(!A.eq_s_b(1,"a"))return;break;case 2:case 9:if(!A.eq_s_b(1,"e"))return;break;case 3:if(!A.eq_s_b(1,"i"))return;break;case 4:if(!A.eq_s_b(1,"o"))return;break;case 5:if(!A.eq_s_b(1,"ä"))return;break;case 6:if(!A.eq_s_b(1,"ö"))return;break;case 7:if(r=A.limit-A.cursor,!l()&&(A.cursor=A.limit-r,!A.eq_s_b(2,"ie"))){A.cursor=A.limit-r;break}if(A.cursor=A.limit-r,A.cursor<=A.limit_backward){A.cursor=A.limit-r;break}A.cursor--,A.bra=A.cursor;break;case 8:if(!A.in_grouping_b(W,97,246)||!A.out_grouping_b(W,97,246))return}A.slice_del(),k=!0}else A.limit_backward=e}function c(){var 
i,e,r;if(A.cursor>=d)if(e=A.limit_backward,A.limit_backward=d,A.ket=A.cursor,i=A.find_among_b(P,14)){if(A.bra=A.cursor,A.limit_backward=e,1==i){if(r=A.limit-A.cursor,A.eq_s_b(2,"po"))return;A.cursor=A.limit-r}A.slice_del()}else A.limit_backward=e}function m(){var i;A.cursor>=f&&(i=A.limit_backward,A.limit_backward=f,A.ket=A.cursor,A.find_among_b(F,2)?(A.bra=A.cursor,A.limit_backward=i,A.slice_del()):A.limit_backward=i)}function w(){var i,e,r,n,t,s;if(A.cursor>=f){if(e=A.limit_backward,A.limit_backward=f,A.ket=A.cursor,A.eq_s_b(1,"t")&&(A.bra=A.cursor,r=A.limit-A.cursor,A.in_grouping_b(W,97,246)&&(A.cursor=A.limit-r,A.slice_del(),A.limit_backward=e,n=A.limit-A.cursor,A.cursor>=d&&(A.cursor=d,t=A.limit_backward,A.limit_backward=A.cursor,A.cursor=A.limit-n,A.ket=A.cursor,i=A.find_among_b(S,2))))){if(A.bra=A.cursor,A.limit_backward=t,1==i){if(s=A.limit-A.cursor,A.eq_s_b(2,"po"))return;A.cursor=A.limit-s}return void A.slice_del()}A.limit_backward=e}}function _(){var i,e,r,n;if(A.cursor>=f){for(i=A.limit_backward,A.limit_backward=f,e=A.limit-A.cursor,l()&&(A.cursor=A.limit-e,A.ket=A.cursor,A.cursor>A.limit_backward&&(A.cursor--,A.bra=A.cursor,A.slice_del())),A.cursor=A.limit-e,A.ket=A.cursor,A.in_grouping_b(y,97,228)&&(A.bra=A.cursor,A.out_grouping_b(W,97,246)&&A.slice_del()),A.cursor=A.limit-e,A.ket=A.cursor,A.eq_s_b(1,"j")&&(A.bra=A.cursor,r=A.limit-A.cursor,A.eq_s_b(1,"o")?A.slice_del():(A.cursor=A.limit-r,A.eq_s_b(1,"u")&&A.slice_del())),A.cursor=A.limit-e,A.ket=A.cursor,A.eq_s_b(1,"o")&&(A.bra=A.cursor,A.eq_s_b(1,"j")&&A.slice_del()),A.cursor=A.limit-e,A.limit_backward=i;;){if(n=A.limit-A.cursor,A.out_grouping_b(W,97,246)){A.cursor=A.limit-n;break}if(A.cursor=A.limit-n,A.cursor<=A.limit_backward)return;A.cursor--}A.ket=A.cursor,A.cursor>A.limit_backward&&(A.cursor--,A.bra=A.cursor,b=A.slice_to(),A.eq_v_b(b)&&A.slice_del())}}var k,b,d,f,h=[new e("pa",-1,1),new e("sti",-1,2),new e("kaan",-1,1),new e("han",-1,1),new e("kin",-1,1),new e("hän",-1,1),new e("kään",-1,1),new e("ko",-1,1),new e("pä",-1,1),new e("kö",-1,1)],p=[new e("lla",-1,-1),new e("na",-1,-1),new e("ssa",-1,-1),new e("ta",-1,-1),new e("lta",3,-1),new e("sta",3,-1)],g=[new e("llä",-1,-1),new e("nä",-1,-1),new e("ssä",-1,-1),new e("tä",-1,-1),new e("ltä",3,-1),new e("stä",3,-1)],j=[new e("lle",-1,-1),new e("ine",-1,-1)],v=[new e("nsa",-1,3),new e("mme",-1,3),new e("nne",-1,3),new e("ni",-1,2),new e("si",-1,1),new e("an",-1,4),new e("en",-1,6),new e("än",-1,5),new e("nsä",-1,3)],q=[new e("aa",-1,-1),new e("ee",-1,-1),new e("ii",-1,-1),new e("oo",-1,-1),new e("uu",-1,-1),new e("ää",-1,-1),new e("öö",-1,-1)],C=[new e("a",-1,8),new e("lla",0,-1),new e("na",0,-1),new e("ssa",0,-1),new e("ta",0,-1),new e("lta",4,-1),new e("sta",4,-1),new e("tta",4,9),new e("lle",-1,-1),new e("ine",-1,-1),new e("ksi",-1,-1),new e("n",-1,7),new e("han",11,1),new e("den",11,-1,a),new e("seen",11,-1,l),new e("hen",11,2),new e("tten",11,-1,a),new e("hin",11,3),new e("siin",11,-1,a),new e("hon",11,4),new e("hän",11,5),new e("hön",11,6),new e("ä",-1,8),new e("llä",22,-1),new e("nä",22,-1),new e("ssä",22,-1),new e("tä",22,-1),new e("ltä",26,-1),new e("stä",26,-1),new e("ttä",26,9)],P=[new e("eja",-1,-1),new e("mma",-1,1),new e("imma",1,-1),new e("mpa",-1,1),new e("impa",3,-1),new e("mmi",-1,1),new e("immi",5,-1),new e("mpi",-1,1),new e("impi",7,-1),new e("ejä",-1,-1),new e("mmä",-1,1),new e("immä",10,-1),new e("mpä",-1,1),new e("impä",12,-1)],F=[new e("i",-1,-1),new e("j",-1,-1)],S=[new e("mma",-1,1),new 
e("imma",0,-1)],y=[17,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8],W=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32],L=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32],x=[17,97,24,1,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32],A=new r;this.setCurrent=function(i){A.setCurrent(i)},this.getCurrent=function(){return A.getCurrent()},this.stem=function(){var e=A.cursor;return i(),k=!1,A.limit_backward=e,A.cursor=A.limit,s(),A.cursor=A.limit,o(),A.cursor=A.limit,u(),A.cursor=A.limit,c(),A.cursor=A.limit,k?(m(),A.cursor=A.limit):(A.cursor=A.limit,w(),A.cursor=A.limit),_(),!0}};return function(i){return"function"==typeof i.update?i.update(function(i){return n.setCurrent(i),n.stem(),n.getCurrent()}):(n.setCurrent(i),n.stem(),n.getCurrent())}}(),i.Pipeline.registerFunction(i.fi.stemmer,"stemmer-fi"),i.fi.stopWordFilter=i.generateStopWordFilter("ei eivät emme en et ette että he heidän heidät heihin heille heillä heiltä heissä heistä heitä hän häneen hänelle hänellä häneltä hänen hänessä hänestä hänet häntä itse ja johon joiden joihin joiksi joilla joille joilta joina joissa joista joita joka joksi jolla jolle jolta jona jonka jos jossa josta jota jotka kanssa keiden keihin keiksi keille keillä keiltä keinä keissä keistä keitä keneen keneksi kenelle kenellä keneltä kenen kenenä kenessä kenestä kenet ketkä ketkä ketä koska kuin kuka kun me meidän meidät meihin meille meillä meiltä meissä meistä meitä mihin miksi mikä mille millä miltä minkä minkä minua minulla minulle minulta minun minussa minusta minut minuun minä minä missä mistä mitkä mitä mukaan mutta ne niiden niihin niiksi niille niillä niiltä niin niin niinä niissä niistä niitä noiden noihin noiksi noilla noille noilta noin noina noissa noista noita nuo nyt näiden näihin näiksi näille näillä näiltä näinä näissä näistä näitä nämä ole olemme olen olet olette oli olimme olin olisi olisimme olisin olisit olisitte olisivat olit olitte olivat olla olleet ollut on ovat poikki se sekä sen siihen siinä siitä siksi sille sillä sillä siltä sinua sinulla sinulle sinulta sinun sinussa sinusta sinut sinuun sinä sinä sitä tai te teidän teidät teihin teille teillä teiltä teissä teistä teitä tuo tuohon tuoksi tuolla tuolle tuolta tuon tuona tuossa tuosta tuota tähän täksi tälle tällä tältä tämä tämän tänä tässä tästä tätä vaan vai vaikka yli".split(" ")),i.Pipeline.registerFunction(i.fi.stopWordFilter,"stopWordFilter-fi")}}); \ No newline at end of file diff --git a/assets/javascripts/lunr/min/lunr.fr.min.js b/assets/javascripts/lunr/min/lunr.fr.min.js new file mode 100644 index 0000000..68cd009 --- /dev/null +++ b/assets/javascripts/lunr/min/lunr.fr.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `French` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.fr=function(){this.pipeline.reset(),this.pipeline.add(e.fr.trimmer,e.fr.stopWordFilter,e.fr.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.fr.stemmer))},e.fr.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.fr.trimmer=e.trimmerSupport.generateTrimmer(e.fr.wordCharacters),e.Pipeline.registerFunction(e.fr.trimmer,"trimmer-fr"),e.fr.stemmer=function(){var r=e.stemmerSupport.Among,s=e.stemmerSupport.SnowballProgram,i=new function(){function e(e,r,s){return!(!W.eq_s(1,e)||(W.ket=W.cursor,!W.in_grouping(F,97,251)))&&(W.slice_from(r),W.cursor=s,!0)}function i(e,r,s){return!!W.eq_s(1,e)&&(W.ket=W.cursor,W.slice_from(r),W.cursor=s,!0)}function n(){for(var r,s;;){if(r=W.cursor,W.in_grouping(F,97,251)){if(W.bra=W.cursor,s=W.cursor,e("u","U",r))continue;if(W.cursor=s,e("i","I",r))continue;if(W.cursor=s,i("y","Y",r))continue}if(W.cursor=r,W.bra=r,!e("y","Y",r)){if(W.cursor=r,W.eq_s(1,"q")&&(W.bra=W.cursor,i("u","U",r)))continue;if(W.cursor=r,r>=W.limit)return;W.cursor++}}}function t(){for(;!W.in_grouping(F,97,251);){if(W.cursor>=W.limit)return!0;W.cursor++}for(;!W.out_grouping(F,97,251);){if(W.cursor>=W.limit)return!0;W.cursor++}return!1}function u(){var e=W.cursor;if(q=W.limit,g=q,p=q,W.in_grouping(F,97,251)&&W.in_grouping(F,97,251)&&W.cursor=W.limit){W.cursor=q;break}W.cursor++}while(!W.in_grouping(F,97,251))}q=W.cursor,W.cursor=e,t()||(g=W.cursor,t()||(p=W.cursor))}function o(){for(var e,r;;){if(r=W.cursor,W.bra=r,!(e=W.find_among(h,4)))break;switch(W.ket=W.cursor,e){case 1:W.slice_from("i");break;case 2:W.slice_from("u");break;case 3:W.slice_from("y");break;case 4:if(W.cursor>=W.limit)return;W.cursor++}}}function c(){return q<=W.cursor}function a(){return g<=W.cursor}function l(){return p<=W.cursor}function w(){var e,r;if(W.ket=W.cursor,e=W.find_among_b(C,43)){switch(W.bra=W.cursor,e){case 1:if(!l())return!1;W.slice_del();break;case 2:if(!l())return!1;W.slice_del(),W.ket=W.cursor,W.eq_s_b(2,"ic")&&(W.bra=W.cursor,l()?W.slice_del():W.slice_from("iqU"));break;case 3:if(!l())return!1;W.slice_from("log");break;case 4:if(!l())return!1;W.slice_from("u");break;case 5:if(!l())return!1;W.slice_from("ent");break;case 6:if(!c())return!1;if(W.slice_del(),W.ket=W.cursor,e=W.find_among_b(z,6))switch(W.bra=W.cursor,e){case 1:l()&&(W.slice_del(),W.ket=W.cursor,W.eq_s_b(2,"at")&&(W.bra=W.cursor,l()&&W.slice_del()));break;case 2:l()?W.slice_del():a()&&W.slice_from("eux");break;case 3:l()&&W.slice_del();break;case 4:c()&&W.slice_from("i")}break;case 7:if(!l())return!1;if(W.slice_del(),W.ket=W.cursor,e=W.find_among_b(y,3))switch(W.bra=W.cursor,e){case 1:l()?W.slice_del():W.slice_from("abl");break;case 2:l()?W.slice_del():W.slice_from("iqU");break;case 3:l()&&W.slice_del()}break;case 8:if(!l())return!1;if(W.slice_del(),W.ket=W.cursor,W.eq_s_b(2,"at")&&(W.bra=W.cursor,l()&&(W.slice_del(),W.ket=W.cursor,W.eq_s_b(2,"ic")))){W.bra=W.cursor,l()?W.slice_del():W.slice_from("iqU");break}break;case 9:W.slice_from("eau");break;case 10:if(!a())return!1;W.slice_from("al");break;case 11:if(l())W.slice_del();else{if(!a())return!1;W.slice_from("eux")}break;case 12:if(!a()||!W.out_grouping_b(F,97,251))return!1;W.slice_del();break;case 13:return c()&&W.slice_from("ant"),!1;case 14:return c()&&W.slice_from("ent"),!1;case 15:return r=W.limit-W.cursor,W.in_grouping_b(F,97,251)&&c()&&(W.cursor=W.limit-r,W.slice_del()),!1}return!0}return!1}function f(){var 
e,r;if(W.cursor=q){if(s=W.limit_backward,W.limit_backward=q,W.ket=W.cursor,e=W.find_among_b(P,7))switch(W.bra=W.cursor,e){case 1:if(l()){if(i=W.limit-W.cursor,!W.eq_s_b(1,"s")&&(W.cursor=W.limit-i,!W.eq_s_b(1,"t")))break;W.slice_del()}break;case 2:W.slice_from("i");break;case 3:W.slice_del();break;case 4:W.eq_s_b(2,"gu")&&W.slice_del()}W.limit_backward=s}}function b(){var e=W.limit-W.cursor;W.find_among_b(U,5)&&(W.cursor=W.limit-e,W.ket=W.cursor,W.cursor>W.limit_backward&&(W.cursor--,W.bra=W.cursor,W.slice_del()))}function d(){for(var e,r=1;W.out_grouping_b(F,97,251);)r--;if(r<=0){if(W.ket=W.cursor,e=W.limit-W.cursor,!W.eq_s_b(1,"é")&&(W.cursor=W.limit-e,!W.eq_s_b(1,"è")))return;W.bra=W.cursor,W.slice_from("e")}}function k(){if(!w()&&(W.cursor=W.limit,!f()&&(W.cursor=W.limit,!m())))return W.cursor=W.limit,void _();W.cursor=W.limit,W.ket=W.cursor,W.eq_s_b(1,"Y")?(W.bra=W.cursor,W.slice_from("i")):(W.cursor=W.limit,W.eq_s_b(1,"ç")&&(W.bra=W.cursor,W.slice_from("c")))}var p,g,q,v=[new r("col",-1,-1),new r("par",-1,-1),new r("tap",-1,-1)],h=[new r("",-1,4),new r("I",0,1),new r("U",0,2),new r("Y",0,3)],z=[new r("iqU",-1,3),new r("abl",-1,3),new r("Ièr",-1,4),new r("ièr",-1,4),new r("eus",-1,2),new r("iv",-1,1)],y=[new r("ic",-1,2),new r("abil",-1,1),new r("iv",-1,3)],C=[new r("iqUe",-1,1),new r("atrice",-1,2),new r("ance",-1,1),new r("ence",-1,5),new r("logie",-1,3),new r("able",-1,1),new r("isme",-1,1),new r("euse",-1,11),new r("iste",-1,1),new r("ive",-1,8),new r("if",-1,8),new r("usion",-1,4),new r("ation",-1,2),new r("ution",-1,4),new r("ateur",-1,2),new r("iqUes",-1,1),new r("atrices",-1,2),new r("ances",-1,1),new r("ences",-1,5),new r("logies",-1,3),new r("ables",-1,1),new r("ismes",-1,1),new r("euses",-1,11),new r("istes",-1,1),new r("ives",-1,8),new r("ifs",-1,8),new r("usions",-1,4),new r("ations",-1,2),new r("utions",-1,4),new r("ateurs",-1,2),new r("ments",-1,15),new r("ements",30,6),new r("issements",31,12),new r("ités",-1,7),new r("ment",-1,15),new r("ement",34,6),new r("issement",35,12),new r("amment",34,13),new r("emment",34,14),new r("aux",-1,10),new r("eaux",39,9),new r("eux",-1,1),new r("ité",-1,7)],x=[new r("ira",-1,1),new r("ie",-1,1),new r("isse",-1,1),new r("issante",-1,1),new r("i",-1,1),new r("irai",4,1),new r("ir",-1,1),new r("iras",-1,1),new r("ies",-1,1),new r("îmes",-1,1),new r("isses",-1,1),new r("issantes",-1,1),new r("îtes",-1,1),new r("is",-1,1),new r("irais",13,1),new r("issais",13,1),new r("irions",-1,1),new r("issions",-1,1),new r("irons",-1,1),new r("issons",-1,1),new r("issants",-1,1),new r("it",-1,1),new r("irait",21,1),new r("issait",21,1),new r("issant",-1,1),new r("iraIent",-1,1),new r("issaIent",-1,1),new r("irent",-1,1),new r("issent",-1,1),new r("iront",-1,1),new r("ît",-1,1),new r("iriez",-1,1),new r("issiez",-1,1),new r("irez",-1,1),new r("issez",-1,1)],I=[new r("a",-1,3),new r("era",0,2),new r("asse",-1,3),new r("ante",-1,3),new r("ée",-1,2),new r("ai",-1,3),new r("erai",5,2),new r("er",-1,2),new r("as",-1,3),new r("eras",8,2),new r("âmes",-1,3),new r("asses",-1,3),new r("antes",-1,3),new r("âtes",-1,3),new r("ées",-1,2),new r("ais",-1,3),new r("erais",15,2),new r("ions",-1,1),new r("erions",17,2),new r("assions",17,3),new r("erons",-1,2),new r("ants",-1,3),new r("és",-1,2),new r("ait",-1,3),new r("erait",23,2),new r("ant",-1,3),new r("aIent",-1,3),new r("eraIent",26,2),new r("èrent",-1,2),new r("assent",-1,3),new r("eront",-1,2),new r("ât",-1,3),new r("ez",-1,2),new r("iez",32,2),new r("eriez",33,2),new r("assiez",33,3),new r("erez",32,2),new 
r("é",-1,2)],P=[new r("e",-1,3),new r("Ière",0,2),new r("ière",0,2),new r("ion",-1,1),new r("Ier",-1,2),new r("ier",-1,2),new r("ë",-1,4)],U=[new r("ell",-1,-1),new r("eill",-1,-1),new r("enn",-1,-1),new r("onn",-1,-1),new r("ett",-1,-1)],F=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,128,130,103,8,5],S=[1,65,20,0,0,0,0,0,0,0,0,0,0,0,0,0,128],W=new s;this.setCurrent=function(e){W.setCurrent(e)},this.getCurrent=function(){return W.getCurrent()},this.stem=function(){var e=W.cursor;return n(),W.cursor=e,u(),W.limit_backward=e,W.cursor=W.limit,k(),W.cursor=W.limit,b(),W.cursor=W.limit,d(),W.cursor=W.limit_backward,o(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return i.setCurrent(e),i.stem(),i.getCurrent()}):(i.setCurrent(e),i.stem(),i.getCurrent())}}(),e.Pipeline.registerFunction(e.fr.stemmer,"stemmer-fr"),e.fr.stopWordFilter=e.generateStopWordFilter("ai aie aient aies ait as au aura aurai auraient aurais aurait auras aurez auriez aurions aurons auront aux avaient avais avait avec avez aviez avions avons ayant ayez ayons c ce ceci celà ces cet cette d dans de des du elle en es est et eu eue eues eurent eus eusse eussent eusses eussiez eussions eut eux eûmes eût eûtes furent fus fusse fussent fusses fussiez fussions fut fûmes fût fûtes ici il ils j je l la le les leur leurs lui m ma mais me mes moi mon même n ne nos notre nous on ont ou par pas pour qu que quel quelle quelles quels qui s sa sans se sera serai seraient serais serait seras serez seriez serions serons seront ses soi soient sois soit sommes son sont soyez soyons suis sur t ta te tes toi ton tu un une vos votre vous y à étaient étais était étant étiez étions été étée étées étés êtes".split(" ")),e.Pipeline.registerFunction(e.fr.stopWordFilter,"stopWordFilter-fr")}}); \ No newline at end of file diff --git a/assets/javascripts/lunr/min/lunr.hi.min.js b/assets/javascripts/lunr/min/lunr.hi.min.js new file mode 100644 index 0000000..7dbc414 --- /dev/null +++ b/assets/javascripts/lunr/min/lunr.hi.min.js @@ -0,0 +1 @@ +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.hi=function(){this.pipeline.reset(),this.pipeline.add(e.hi.trimmer,e.hi.stopWordFilter,e.hi.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.hi.stemmer))},e.hi.wordCharacters="ऀ-ःऄ-एऐ-टठ-यर-िी-ॏॐ-य़ॠ-९॰-ॿa-zA-Za-zA-Z0-90-9",e.hi.trimmer=e.trimmerSupport.generateTrimmer(e.hi.wordCharacters),e.Pipeline.registerFunction(e.hi.trimmer,"trimmer-hi"),e.hi.stopWordFilter=e.generateStopWordFilter("अत अपना अपनी अपने अभी अंदर आदि आप इत्यादि इन इनका इन्हीं इन्हें इन्हों इस इसका इसकी इसके इसमें इसी इसे उन उनका उनकी उनके उनको उन्हीं उन्हें उन्हों उस उसके उसी उसे एक एवं एस ऐसे और कई कर करता करते करना करने करें कहते कहा का काफ़ी कि कितना किन्हें किन्हों किया किर किस किसी किसे की कुछ कुल के को कोई कौन कौनसा गया घर जब जहाँ जा जितना जिन जिन्हें जिन्हों जिस जिसे जीधर जैसा जैसे जो तक तब तरह तिन तिन्हें तिन्हों तिस तिसे तो था थी थे दबारा दिया दुसरा दूसरे दो द्वारा न नके नहीं ना निहायत नीचे ने पर पहले पूरा पे फिर बनी बही बहुत बाद बाला बिलकुल भी भीतर मगर मानो मे में यदि यह यहाँ यही या यिह ये रखें रहा रहे ऱ्वासा लिए लिये लेकिन व वग़ैरह वर्ग वह वहाँ वहीं वाले वुह वे वो सकता सकते सबसे सभी साथ साबुत साभ सारा से सो संग ही हुआ हुई हुए है हैं हो होता होती होते होना होने".split(" ")),e.hi.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}();var r=e.wordcut;r.init(),e.hi.tokenizer=function(i){if(!arguments.length||null==i||void 0==i)return[];if(Array.isArray(i))return i.map(function(r){return isLunr2?new e.Token(r.toLowerCase()):r.toLowerCase()});var t=i.toString().toLowerCase().replace(/^\s+/,"");return r.cut(t).split("|")},e.Pipeline.registerFunction(e.hi.stemmer,"stemmer-hi"),e.Pipeline.registerFunction(e.hi.stopWordFilter,"stopWordFilter-hi")}}); \ No newline at end of file diff --git a/assets/javascripts/lunr/min/lunr.hu.min.js b/assets/javascripts/lunr/min/lunr.hu.min.js new file mode 100644 index 0000000..ed9d909 --- /dev/null +++ b/assets/javascripts/lunr/min/lunr.hu.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Hungarian` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,n){"function"==typeof define&&define.amd?define(n):"object"==typeof exports?module.exports=n():n()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.hu=function(){this.pipeline.reset(),this.pipeline.add(e.hu.trimmer,e.hu.stopWordFilter,e.hu.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.hu.stemmer))},e.hu.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.hu.trimmer=e.trimmerSupport.generateTrimmer(e.hu.wordCharacters),e.Pipeline.registerFunction(e.hu.trimmer,"trimmer-hu"),e.hu.stemmer=function(){var n=e.stemmerSupport.Among,r=e.stemmerSupport.SnowballProgram,i=new function(){function e(){var e,n=L.cursor;if(d=L.limit,L.in_grouping(W,97,252))for(;;){if(e=L.cursor,L.out_grouping(W,97,252))return L.cursor=e,L.find_among(g,8)||(L.cursor=e,e=L.limit)return void(d=e);L.cursor++}if(L.cursor=n,L.out_grouping(W,97,252)){for(;!L.in_grouping(W,97,252);){if(L.cursor>=L.limit)return;L.cursor++}d=L.cursor}}function i(){return d<=L.cursor}function a(){var e;if(L.ket=L.cursor,(e=L.find_among_b(h,2))&&(L.bra=L.cursor,i()))switch(e){case 1:L.slice_from("a");break;case 2:L.slice_from("e")}}function t(){var e=L.limit-L.cursor;return!!L.find_among_b(p,23)&&(L.cursor=L.limit-e,!0)}function s(){if(L.cursor>L.limit_backward){L.cursor--,L.ket=L.cursor;var e=L.cursor-1;L.limit_backward<=e&&e<=L.limit&&(L.cursor=e,L.bra=e,L.slice_del())}}function c(){var e;if(L.ket=L.cursor,(e=L.find_among_b(_,2))&&(L.bra=L.cursor,i())){if((1==e||2==e)&&!t())return;L.slice_del(),s()}}function o(){L.ket=L.cursor,L.find_among_b(v,44)&&(L.bra=L.cursor,i()&&(L.slice_del(),a()))}function w(){var e;if(L.ket=L.cursor,(e=L.find_among_b(z,3))&&(L.bra=L.cursor,i()))switch(e){case 1:L.slice_from("e");break;case 2:case 3:L.slice_from("a")}}function l(){var e;if(L.ket=L.cursor,(e=L.find_among_b(y,6))&&(L.bra=L.cursor,i()))switch(e){case 1:case 2:L.slice_del();break;case 3:L.slice_from("a");break;case 4:L.slice_from("e")}}function u(){var e;if(L.ket=L.cursor,(e=L.find_among_b(j,2))&&(L.bra=L.cursor,i())){if((1==e||2==e)&&!t())return;L.slice_del(),s()}}function m(){var e;if(L.ket=L.cursor,(e=L.find_among_b(C,7))&&(L.bra=L.cursor,i()))switch(e){case 1:L.slice_from("a");break;case 2:L.slice_from("e");break;case 3:case 4:case 5:case 6:case 7:L.slice_del()}}function k(){var e;if(L.ket=L.cursor,(e=L.find_among_b(P,12))&&(L.bra=L.cursor,i()))switch(e){case 1:case 4:case 7:case 9:L.slice_del();break;case 2:case 5:case 8:L.slice_from("e");break;case 3:case 6:L.slice_from("a")}}function f(){var e;if(L.ket=L.cursor,(e=L.find_among_b(F,31))&&(L.bra=L.cursor,i()))switch(e){case 1:case 4:case 7:case 8:case 9:case 12:case 13:case 16:case 17:case 18:L.slice_del();break;case 2:case 5:case 10:case 14:case 19:L.slice_from("a");break;case 3:case 6:case 11:case 15:case 20:L.slice_from("e")}}function b(){var e;if(L.ket=L.cursor,(e=L.find_among_b(S,42))&&(L.bra=L.cursor,i()))switch(e){case 1:case 4:case 5:case 6:case 9:case 10:case 11:case 14:case 15:case 16:case 17:case 20:case 21:case 24:case 25:case 26:case 29:L.slice_del();break;case 2:case 7:case 12:case 18:case 22:case 27:L.slice_from("a");break;case 3:case 8:case 13:case 19:case 23:case 28:L.slice_from("e")}}var d,g=[new n("cs",-1,-1),new n("dzs",-1,-1),new n("gy",-1,-1),new n("ly",-1,-1),new n("ny",-1,-1),new n("sz",-1,-1),new n("ty",-1,-1),new n("zs",-1,-1)],h=[new n("á",-1,1),new n("é",-1,2)],p=[new n("bb",-1,-1),new n("cc",-1,-1),new n("dd",-1,-1),new n("ff",-1,-1),new n("gg",-1,-1),new n("jj",-1,-1),new n("kk",-1,-1),new n("ll",-1,-1),new n("mm",-1,-1),new n("nn",-1,-1),new n("pp",-1,-1),new 
n("rr",-1,-1),new n("ccs",-1,-1),new n("ss",-1,-1),new n("zzs",-1,-1),new n("tt",-1,-1),new n("vv",-1,-1),new n("ggy",-1,-1),new n("lly",-1,-1),new n("nny",-1,-1),new n("tty",-1,-1),new n("ssz",-1,-1),new n("zz",-1,-1)],_=[new n("al",-1,1),new n("el",-1,2)],v=[new n("ba",-1,-1),new n("ra",-1,-1),new n("be",-1,-1),new n("re",-1,-1),new n("ig",-1,-1),new n("nak",-1,-1),new n("nek",-1,-1),new n("val",-1,-1),new n("vel",-1,-1),new n("ul",-1,-1),new n("nál",-1,-1),new n("nél",-1,-1),new n("ból",-1,-1),new n("ról",-1,-1),new n("tól",-1,-1),new n("bõl",-1,-1),new n("rõl",-1,-1),new n("tõl",-1,-1),new n("ül",-1,-1),new n("n",-1,-1),new n("an",19,-1),new n("ban",20,-1),new n("en",19,-1),new n("ben",22,-1),new n("képpen",22,-1),new n("on",19,-1),new n("ön",19,-1),new n("képp",-1,-1),new n("kor",-1,-1),new n("t",-1,-1),new n("at",29,-1),new n("et",29,-1),new n("ként",29,-1),new n("anként",32,-1),new n("enként",32,-1),new n("onként",32,-1),new n("ot",29,-1),new n("ért",29,-1),new n("öt",29,-1),new n("hez",-1,-1),new n("hoz",-1,-1),new n("höz",-1,-1),new n("vá",-1,-1),new n("vé",-1,-1)],z=[new n("án",-1,2),new n("én",-1,1),new n("ánként",-1,3)],y=[new n("stul",-1,2),new n("astul",0,1),new n("ástul",0,3),new n("stül",-1,2),new n("estül",3,1),new n("éstül",3,4)],j=[new n("á",-1,1),new n("é",-1,2)],C=[new n("k",-1,7),new n("ak",0,4),new n("ek",0,6),new n("ok",0,5),new n("ák",0,1),new n("ék",0,2),new n("ök",0,3)],P=[new n("éi",-1,7),new n("áéi",0,6),new n("ééi",0,5),new n("é",-1,9),new n("ké",3,4),new n("aké",4,1),new n("eké",4,1),new n("oké",4,1),new n("áké",4,3),new n("éké",4,2),new n("öké",4,1),new n("éé",3,8)],F=[new n("a",-1,18),new n("ja",0,17),new n("d",-1,16),new n("ad",2,13),new n("ed",2,13),new n("od",2,13),new n("ád",2,14),new n("éd",2,15),new n("öd",2,13),new n("e",-1,18),new n("je",9,17),new n("nk",-1,4),new n("unk",11,1),new n("ánk",11,2),new n("énk",11,3),new n("ünk",11,1),new n("uk",-1,8),new n("juk",16,7),new n("ájuk",17,5),new n("ük",-1,8),new n("jük",19,7),new n("éjük",20,6),new n("m",-1,12),new n("am",22,9),new n("em",22,9),new n("om",22,9),new n("ám",22,10),new n("ém",22,11),new n("o",-1,18),new n("á",-1,19),new n("é",-1,20)],S=[new n("id",-1,10),new n("aid",0,9),new n("jaid",1,6),new n("eid",0,9),new n("jeid",3,6),new n("áid",0,7),new n("éid",0,8),new n("i",-1,15),new n("ai",7,14),new n("jai",8,11),new n("ei",7,14),new n("jei",10,11),new n("ái",7,12),new n("éi",7,13),new n("itek",-1,24),new n("eitek",14,21),new n("jeitek",15,20),new n("éitek",14,23),new n("ik",-1,29),new n("aik",18,26),new n("jaik",19,25),new n("eik",18,26),new n("jeik",21,25),new n("áik",18,27),new n("éik",18,28),new n("ink",-1,20),new n("aink",25,17),new n("jaink",26,16),new n("eink",25,17),new n("jeink",28,16),new n("áink",25,18),new n("éink",25,19),new n("aitok",-1,21),new n("jaitok",32,20),new n("áitok",-1,22),new n("im",-1,5),new n("aim",35,4),new n("jaim",36,1),new n("eim",35,4),new n("jeim",38,1),new n("áim",35,2),new n("éim",35,3)],W=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,1,17,52,14],L=new r;this.setCurrent=function(e){L.setCurrent(e)},this.getCurrent=function(){return L.getCurrent()},this.stem=function(){var n=L.cursor;return e(),L.limit_backward=n,L.cursor=L.limit,c(),L.cursor=L.limit,o(),L.cursor=L.limit,w(),L.cursor=L.limit,l(),L.cursor=L.limit,u(),L.cursor=L.limit,k(),L.cursor=L.limit,f(),L.cursor=L.limit,b(),L.cursor=L.limit,m(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return 
i.setCurrent(e),i.stem(),i.getCurrent()}):(i.setCurrent(e),i.stem(),i.getCurrent())}}(),e.Pipeline.registerFunction(e.hu.stemmer,"stemmer-hu"),e.hu.stopWordFilter=e.generateStopWordFilter("a abban ahhoz ahogy ahol aki akik akkor alatt amely amelyek amelyekben amelyeket amelyet amelynek ami amikor amit amolyan amíg annak arra arról az azok azon azonban azt aztán azután azzal azért be belül benne bár cikk cikkek cikkeket csak de e ebben eddig egy egyes egyetlen egyik egyre egyéb egész ehhez ekkor el ellen elsõ elég elõ elõször elõtt emilyen ennek erre ez ezek ezen ezt ezzel ezért fel felé hanem hiszen hogy hogyan igen ill ill. illetve ilyen ilyenkor ismét ison itt jobban jó jól kell kellett keressünk keresztül ki kívül között közül legalább legyen lehet lehetett lenne lenni lesz lett maga magát majd majd meg mellett mely melyek mert mi mikor milyen minden mindenki mindent mindig mint mintha mit mivel miért most már más másik még míg nagy nagyobb nagyon ne nekem neki nem nincs néha néhány nélkül olyan ott pedig persze rá s saját sem semmi sok sokat sokkal szemben szerint szinte számára talán tehát teljes tovább továbbá több ugyanis utolsó után utána vagy vagyis vagyok valaki valami valamint való van vannak vele vissza viszont volna volt voltak voltam voltunk által általában át én éppen és így õ õk õket össze úgy új újabb újra".split(" ")),e.Pipeline.registerFunction(e.hu.stopWordFilter,"stopWordFilter-hu")}}); \ No newline at end of file diff --git a/assets/javascripts/lunr/min/lunr.it.min.js b/assets/javascripts/lunr/min/lunr.it.min.js new file mode 100644 index 0000000..344b6a3 --- /dev/null +++ b/assets/javascripts/lunr/min/lunr.it.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Italian` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.it=function(){this.pipeline.reset(),this.pipeline.add(e.it.trimmer,e.it.stopWordFilter,e.it.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.it.stemmer))},e.it.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.it.trimmer=e.trimmerSupport.generateTrimmer(e.it.wordCharacters),e.Pipeline.registerFunction(e.it.trimmer,"trimmer-it"),e.it.stemmer=function(){var r=e.stemmerSupport.Among,n=e.stemmerSupport.SnowballProgram,i=new function(){function e(e,r,n){return!(!x.eq_s(1,e)||(x.ket=x.cursor,!x.in_grouping(L,97,249)))&&(x.slice_from(r),x.cursor=n,!0)}function i(){for(var r,n,i,o,t=x.cursor;;){if(x.bra=x.cursor,r=x.find_among(h,7))switch(x.ket=x.cursor,r){case 1:x.slice_from("à");continue;case 2:x.slice_from("è");continue;case 3:x.slice_from("ì");continue;case 4:x.slice_from("ò");continue;case 5:x.slice_from("ù");continue;case 6:x.slice_from("qU");continue;case 7:if(x.cursor>=x.limit)break;x.cursor++;continue}break}for(x.cursor=t;;)for(n=x.cursor;;){if(i=x.cursor,x.in_grouping(L,97,249)){if(x.bra=x.cursor,o=x.cursor,e("u","U",i))break;if(x.cursor=o,e("i","I",i))break}if(x.cursor=i,x.cursor>=x.limit)return void(x.cursor=n);x.cursor++}}function o(e){if(x.cursor=e,!x.in_grouping(L,97,249))return!1;for(;!x.out_grouping(L,97,249);){if(x.cursor>=x.limit)return!1;x.cursor++}return!0}function t(){if(x.in_grouping(L,97,249)){var e=x.cursor;if(x.out_grouping(L,97,249)){for(;!x.in_grouping(L,97,249);){if(x.cursor>=x.limit)return o(e);x.cursor++}return!0}return o(e)}return!1}function s(){var e,r=x.cursor;if(!t()){if(x.cursor=r,!x.out_grouping(L,97,249))return;if(e=x.cursor,x.out_grouping(L,97,249)){for(;!x.in_grouping(L,97,249);){if(x.cursor>=x.limit)return x.cursor=e,void(x.in_grouping(L,97,249)&&x.cursor=x.limit)return;x.cursor++}k=x.cursor}function a(){for(;!x.in_grouping(L,97,249);){if(x.cursor>=x.limit)return!1;x.cursor++}for(;!x.out_grouping(L,97,249);){if(x.cursor>=x.limit)return!1;x.cursor++}return!0}function u(){var e=x.cursor;k=x.limit,p=k,g=k,s(),x.cursor=e,a()&&(p=x.cursor,a()&&(g=x.cursor))}function c(){for(var e;;){if(x.bra=x.cursor,!(e=x.find_among(q,3)))break;switch(x.ket=x.cursor,e){case 1:x.slice_from("i");break;case 2:x.slice_from("u");break;case 3:if(x.cursor>=x.limit)return;x.cursor++}}}function w(){return k<=x.cursor}function l(){return p<=x.cursor}function m(){return g<=x.cursor}function f(){var e;if(x.ket=x.cursor,x.find_among_b(C,37)&&(x.bra=x.cursor,(e=x.find_among_b(z,5))&&w()))switch(e){case 1:x.slice_del();break;case 2:x.slice_from("e")}}function v(){var e;if(x.ket=x.cursor,!(e=x.find_among_b(S,51)))return!1;switch(x.bra=x.cursor,e){case 1:if(!m())return!1;x.slice_del();break;case 2:if(!m())return!1;x.slice_del(),x.ket=x.cursor,x.eq_s_b(2,"ic")&&(x.bra=x.cursor,m()&&x.slice_del());break;case 3:if(!m())return!1;x.slice_from("log");break;case 4:if(!m())return!1;x.slice_from("u");break;case 5:if(!m())return!1;x.slice_from("ente");break;case 6:if(!w())return!1;x.slice_del();break;case 7:if(!l())return!1;x.slice_del(),x.ket=x.cursor,e=x.find_among_b(P,4),e&&(x.bra=x.cursor,m()&&(x.slice_del(),1==e&&(x.ket=x.cursor,x.eq_s_b(2,"at")&&(x.bra=x.cursor,m()&&x.slice_del()))));break;case 8:if(!m())return!1;x.slice_del(),x.ket=x.cursor,e=x.find_among_b(F,3),e&&(x.bra=x.cursor,1==e&&m()&&x.slice_del());break;case 
9:if(!m())return!1;x.slice_del(),x.ket=x.cursor,x.eq_s_b(2,"at")&&(x.bra=x.cursor,m()&&(x.slice_del(),x.ket=x.cursor,x.eq_s_b(2,"ic")&&(x.bra=x.cursor,m()&&x.slice_del())))}return!0}function b(){var e,r;x.cursor>=k&&(r=x.limit_backward,x.limit_backward=k,x.ket=x.cursor,e=x.find_among_b(W,87),e&&(x.bra=x.cursor,1==e&&x.slice_del()),x.limit_backward=r)}function d(){var e=x.limit-x.cursor;if(x.ket=x.cursor,x.in_grouping_b(y,97,242)&&(x.bra=x.cursor,w()&&(x.slice_del(),x.ket=x.cursor,x.eq_s_b(1,"i")&&(x.bra=x.cursor,w()))))return void x.slice_del();x.cursor=x.limit-e}function _(){d(),x.ket=x.cursor,x.eq_s_b(1,"h")&&(x.bra=x.cursor,x.in_grouping_b(U,99,103)&&w()&&x.slice_del())}var g,p,k,h=[new r("",-1,7),new r("qu",0,6),new r("á",0,1),new r("é",0,2),new r("í",0,3),new r("ó",0,4),new r("ú",0,5)],q=[new r("",-1,3),new r("I",0,1),new r("U",0,2)],C=[new r("la",-1,-1),new r("cela",0,-1),new r("gliela",0,-1),new r("mela",0,-1),new r("tela",0,-1),new r("vela",0,-1),new r("le",-1,-1),new r("cele",6,-1),new r("gliele",6,-1),new r("mele",6,-1),new r("tele",6,-1),new r("vele",6,-1),new r("ne",-1,-1),new r("cene",12,-1),new r("gliene",12,-1),new r("mene",12,-1),new r("sene",12,-1),new r("tene",12,-1),new r("vene",12,-1),new r("ci",-1,-1),new r("li",-1,-1),new r("celi",20,-1),new r("glieli",20,-1),new r("meli",20,-1),new r("teli",20,-1),new r("veli",20,-1),new r("gli",20,-1),new r("mi",-1,-1),new r("si",-1,-1),new r("ti",-1,-1),new r("vi",-1,-1),new r("lo",-1,-1),new r("celo",31,-1),new r("glielo",31,-1),new r("melo",31,-1),new r("telo",31,-1),new r("velo",31,-1)],z=[new r("ando",-1,1),new r("endo",-1,1),new r("ar",-1,2),new r("er",-1,2),new r("ir",-1,2)],P=[new r("ic",-1,-1),new r("abil",-1,-1),new r("os",-1,-1),new r("iv",-1,1)],F=[new r("ic",-1,1),new r("abil",-1,1),new r("iv",-1,1)],S=[new r("ica",-1,1),new r("logia",-1,3),new r("osa",-1,1),new r("ista",-1,1),new r("iva",-1,9),new r("anza",-1,1),new r("enza",-1,5),new r("ice",-1,1),new r("atrice",7,1),new r("iche",-1,1),new r("logie",-1,3),new r("abile",-1,1),new r("ibile",-1,1),new r("usione",-1,4),new r("azione",-1,2),new r("uzione",-1,4),new r("atore",-1,2),new r("ose",-1,1),new r("ante",-1,1),new r("mente",-1,1),new r("amente",19,7),new r("iste",-1,1),new r("ive",-1,9),new r("anze",-1,1),new r("enze",-1,5),new r("ici",-1,1),new r("atrici",25,1),new r("ichi",-1,1),new r("abili",-1,1),new r("ibili",-1,1),new r("ismi",-1,1),new r("usioni",-1,4),new r("azioni",-1,2),new r("uzioni",-1,4),new r("atori",-1,2),new r("osi",-1,1),new r("anti",-1,1),new r("amenti",-1,6),new r("imenti",-1,6),new r("isti",-1,1),new r("ivi",-1,9),new r("ico",-1,1),new r("ismo",-1,1),new r("oso",-1,1),new r("amento",-1,6),new r("imento",-1,6),new r("ivo",-1,9),new r("ità",-1,8),new r("istà",-1,1),new r("istè",-1,1),new r("istì",-1,1)],W=[new r("isca",-1,1),new r("enda",-1,1),new r("ata",-1,1),new r("ita",-1,1),new r("uta",-1,1),new r("ava",-1,1),new r("eva",-1,1),new r("iva",-1,1),new r("erebbe",-1,1),new r("irebbe",-1,1),new r("isce",-1,1),new r("ende",-1,1),new r("are",-1,1),new r("ere",-1,1),new r("ire",-1,1),new r("asse",-1,1),new r("ate",-1,1),new r("avate",16,1),new r("evate",16,1),new r("ivate",16,1),new r("ete",-1,1),new r("erete",20,1),new r("irete",20,1),new r("ite",-1,1),new r("ereste",-1,1),new r("ireste",-1,1),new r("ute",-1,1),new r("erai",-1,1),new r("irai",-1,1),new r("isci",-1,1),new r("endi",-1,1),new r("erei",-1,1),new r("irei",-1,1),new r("assi",-1,1),new r("ati",-1,1),new r("iti",-1,1),new r("eresti",-1,1),new r("iresti",-1,1),new r("uti",-1,1),new 
r("avi",-1,1),new r("evi",-1,1),new r("ivi",-1,1),new r("isco",-1,1),new r("ando",-1,1),new r("endo",-1,1),new r("Yamo",-1,1),new r("iamo",-1,1),new r("avamo",-1,1),new r("evamo",-1,1),new r("ivamo",-1,1),new r("eremo",-1,1),new r("iremo",-1,1),new r("assimo",-1,1),new r("ammo",-1,1),new r("emmo",-1,1),new r("eremmo",54,1),new r("iremmo",54,1),new r("immo",-1,1),new r("ano",-1,1),new r("iscano",58,1),new r("avano",58,1),new r("evano",58,1),new r("ivano",58,1),new r("eranno",-1,1),new r("iranno",-1,1),new r("ono",-1,1),new r("iscono",65,1),new r("arono",65,1),new r("erono",65,1),new r("irono",65,1),new r("erebbero",-1,1),new r("irebbero",-1,1),new r("assero",-1,1),new r("essero",-1,1),new r("issero",-1,1),new r("ato",-1,1),new r("ito",-1,1),new r("uto",-1,1),new r("avo",-1,1),new r("evo",-1,1),new r("ivo",-1,1),new r("ar",-1,1),new r("ir",-1,1),new r("erà",-1,1),new r("irà",-1,1),new r("erò",-1,1),new r("irò",-1,1)],L=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,128,128,8,2,1],y=[17,65,0,0,0,0,0,0,0,0,0,0,0,0,0,128,128,8,2],U=[17],x=new n;this.setCurrent=function(e){x.setCurrent(e)},this.getCurrent=function(){return x.getCurrent()},this.stem=function(){var e=x.cursor;return i(),x.cursor=e,u(),x.limit_backward=e,x.cursor=x.limit,f(),x.cursor=x.limit,v()||(x.cursor=x.limit,b()),x.cursor=x.limit,_(),x.cursor=x.limit_backward,c(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return i.setCurrent(e),i.stem(),i.getCurrent()}):(i.setCurrent(e),i.stem(),i.getCurrent())}}(),e.Pipeline.registerFunction(e.it.stemmer,"stemmer-it"),e.it.stopWordFilter=e.generateStopWordFilter("a abbia abbiamo abbiano abbiate ad agl agli ai al all alla alle allo anche avemmo avendo avesse avessero avessi avessimo aveste avesti avete aveva avevamo avevano avevate avevi avevo avrai avranno avrebbe avrebbero avrei avremmo avremo avreste avresti avrete avrà avrò avuta avute avuti avuto c che chi ci coi col come con contro cui da dagl dagli dai dal dall dalla dalle dallo degl degli dei del dell della delle dello di dov dove e ebbe ebbero ebbi ed era erano eravamo eravate eri ero essendo faccia facciamo facciano facciate faccio facemmo facendo facesse facessero facessi facessimo faceste facesti faceva facevamo facevano facevate facevi facevo fai fanno farai faranno farebbe farebbero farei faremmo faremo fareste faresti farete farà farò fece fecero feci fosse fossero fossi fossimo foste fosti fu fui fummo furono gli ha hai hanno ho i il in io l la le lei li lo loro lui ma mi mia mie miei mio ne negl negli nei nel nell nella nelle nello noi non nostra nostre nostri nostro o per perché più quale quanta quante quanti quanto quella quelle quelli quello questa queste questi questo sarai saranno sarebbe sarebbero sarei saremmo saremo sareste saresti sarete sarà sarò se sei si sia siamo siano siate siete sono sta stai stando stanno starai staranno starebbe starebbero starei staremmo staremo stareste staresti starete starà starò stava stavamo stavano stavate stavi stavo stemmo stesse stessero stessi stessimo steste stesti stette stettero stetti stia stiamo stiano stiate sto su sua sue sugl sugli sui sul sull sulla sulle sullo suo suoi ti tra tu tua tue tuo tuoi tutti tutto un una uno vi voi vostra vostre vostri vostro è".split(" ")),e.Pipeline.registerFunction(e.it.stopWordFilter,"stopWordFilter-it")}}); \ No newline at end of file diff --git a/assets/javascripts/lunr/min/lunr.ja.min.js b/assets/javascripts/lunr/min/lunr.ja.min.js new file mode 100644 index 0000000..5f254eb --- /dev/null +++ 
b/assets/javascripts/lunr/min/lunr.ja.min.js @@ -0,0 +1 @@ +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var r="2"==e.version[0];e.ja=function(){this.pipeline.reset(),this.pipeline.add(e.ja.trimmer,e.ja.stopWordFilter,e.ja.stemmer),r?this.tokenizer=e.ja.tokenizer:(e.tokenizer&&(e.tokenizer=e.ja.tokenizer),this.tokenizerFn&&(this.tokenizerFn=e.ja.tokenizer))};var t=new e.TinySegmenter;e.ja.tokenizer=function(i){var n,o,s,p,a,u,m,l,c,f;if(!arguments.length||null==i||void 0==i)return[];if(Array.isArray(i))return i.map(function(t){return r?new e.Token(t.toLowerCase()):t.toLowerCase()});for(o=i.toString().toLowerCase().replace(/^\s+/,""),n=o.length-1;n>=0;n--)if(/\S/.test(o.charAt(n))){o=o.substring(0,n+1);break}for(a=[],s=o.length,c=0,l=0;c<=s;c++)if(u=o.charAt(c),m=c-l,u.match(/\s/)||c==s){if(m>0)for(p=t.segment(o.slice(l,c)).filter(function(e){return!!e}),f=l,n=0;n=C.limit)break;C.cursor++;continue}break}for(C.cursor=o,C.bra=o,C.eq_s(1,"y")?(C.ket=C.cursor,C.slice_from("Y")):C.cursor=o;;)if(e=C.cursor,C.in_grouping(q,97,232)){if(i=C.cursor,C.bra=i,C.eq_s(1,"i"))C.ket=C.cursor,C.in_grouping(q,97,232)&&(C.slice_from("I"),C.cursor=e);else if(C.cursor=i,C.eq_s(1,"y"))C.ket=C.cursor,C.slice_from("Y"),C.cursor=e;else if(n(e))break}else if(n(e))break}function n(r){return C.cursor=r,r>=C.limit||(C.cursor++,!1)}function o(){_=C.limit,d=_,t()||(_=C.cursor,_<3&&(_=3),t()||(d=C.cursor))}function t(){for(;!C.in_grouping(q,97,232);){if(C.cursor>=C.limit)return!0;C.cursor++}for(;!C.out_grouping(q,97,232);){if(C.cursor>=C.limit)return!0;C.cursor++}return!1}function s(){for(var r;;)if(C.bra=C.cursor,r=C.find_among(p,3))switch(C.ket=C.cursor,r){case 1:C.slice_from("y");break;case 2:C.slice_from("i");break;case 3:if(C.cursor>=C.limit)return;C.cursor++}}function u(){return _<=C.cursor}function c(){return d<=C.cursor}function a(){var r=C.limit-C.cursor;C.find_among_b(g,3)&&(C.cursor=C.limit-r,C.ket=C.cursor,C.cursor>C.limit_backward&&(C.cursor--,C.bra=C.cursor,C.slice_del()))}function l(){var r;w=!1,C.ket=C.cursor,C.eq_s_b(1,"e")&&(C.bra=C.cursor,u()&&(r=C.limit-C.cursor,C.out_grouping_b(q,97,232)&&(C.cursor=C.limit-r,C.slice_del(),w=!0,a())))}function m(){var r;u()&&(r=C.limit-C.cursor,C.out_grouping_b(q,97,232)&&(C.cursor=C.limit-r,C.eq_s_b(3,"gem")||(C.cursor=C.limit-r,C.slice_del(),a())))}function f(){var r,e,i,n,o,t,s=C.limit-C.cursor;if(C.ket=C.cursor,r=C.find_among_b(h,5))switch(C.bra=C.cursor,r){case 1:u()&&C.slice_from("heid");break;case 2:m();break;case 3:u()&&C.out_grouping_b(j,97,232)&&C.slice_del()}if(C.cursor=C.limit-s,l(),C.cursor=C.limit-s,C.ket=C.cursor,C.eq_s_b(4,"heid")&&(C.bra=C.cursor,c()&&(e=C.limit-C.cursor,C.eq_s_b(1,"c")||(C.cursor=C.limit-e,C.slice_del(),C.ket=C.cursor,C.eq_s_b(2,"en")&&(C.bra=C.cursor,m())))),C.cursor=C.limit-s,C.ket=C.cursor,r=C.find_among_b(k,6))switch(C.bra=C.cursor,r){case 1:if(c()){if(C.slice_del(),i=C.limit-C.cursor,C.ket=C.cursor,C.eq_s_b(2,"ig")&&(C.bra=C.cursor,c()&&(n=C.limit-C.cursor,!C.eq_s_b(1,"e")))){C.cursor=C.limit-n,C.slice_del();break}C.cursor=C.limit-i,a()}break;case 2:c()&&(o=C.limit-C.cursor,C.eq_s_b(1,"e")||(C.cursor=C.limit-o,C.slice_del()));break;case 
3:c()&&(C.slice_del(),l());break;case 4:c()&&C.slice_del();break;case 5:c()&&w&&C.slice_del()}C.cursor=C.limit-s,C.out_grouping_b(z,73,232)&&(t=C.limit-C.cursor,C.find_among_b(v,4)&&C.out_grouping_b(q,97,232)&&(C.cursor=C.limit-t,C.ket=C.cursor,C.cursor>C.limit_backward&&(C.cursor--,C.bra=C.cursor,C.slice_del())))}var d,_,w,b=[new e("",-1,6),new e("á",0,1),new e("ä",0,1),new e("é",0,2),new e("ë",0,2),new e("í",0,3),new e("ï",0,3),new e("ó",0,4),new e("ö",0,4),new e("ú",0,5),new e("ü",0,5)],p=[new e("",-1,3),new e("I",0,2),new e("Y",0,1)],g=[new e("dd",-1,-1),new e("kk",-1,-1),new e("tt",-1,-1)],h=[new e("ene",-1,2),new e("se",-1,3),new e("en",-1,2),new e("heden",2,1),new e("s",-1,3)],k=[new e("end",-1,1),new e("ig",-1,2),new e("ing",-1,1),new e("lijk",-1,3),new e("baar",-1,4),new e("bar",-1,5)],v=[new e("aa",-1,-1),new e("ee",-1,-1),new e("oo",-1,-1),new e("uu",-1,-1)],q=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],z=[1,0,0,17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],j=[17,67,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],C=new i;this.setCurrent=function(r){C.setCurrent(r)},this.getCurrent=function(){return C.getCurrent()},this.stem=function(){var e=C.cursor;return r(),C.cursor=e,o(),C.limit_backward=e,C.cursor=C.limit,f(),C.cursor=C.limit_backward,s(),!0}};return function(r){return"function"==typeof r.update?r.update(function(r){return n.setCurrent(r),n.stem(),n.getCurrent()}):(n.setCurrent(r),n.stem(),n.getCurrent())}}(),r.Pipeline.registerFunction(r.nl.stemmer,"stemmer-nl"),r.nl.stopWordFilter=r.generateStopWordFilter(" aan al alles als altijd andere ben bij daar dan dat de der deze die dit doch doen door dus een eens en er ge geen geweest haar had heb hebben heeft hem het hier hij hoe hun iemand iets ik in is ja je kan kon kunnen maar me meer men met mij mijn moet na naar niet niets nog nu of om omdat onder ons ook op over reeds te tegen toch toen tot u uit uw van veel voor want waren was wat werd wezen wie wil worden wordt zal ze zelf zich zij zijn zo zonder zou".split(" ")),r.Pipeline.registerFunction(r.nl.stopWordFilter,"stopWordFilter-nl")}}); \ No newline at end of file diff --git a/assets/javascripts/lunr/min/lunr.no.min.js b/assets/javascripts/lunr/min/lunr.no.min.js new file mode 100644 index 0000000..92bc7e4 --- /dev/null +++ b/assets/javascripts/lunr/min/lunr.no.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Norwegian` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.no=function(){this.pipeline.reset(),this.pipeline.add(e.no.trimmer,e.no.stopWordFilter,e.no.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.no.stemmer))},e.no.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.no.trimmer=e.trimmerSupport.generateTrimmer(e.no.wordCharacters),e.Pipeline.registerFunction(e.no.trimmer,"trimmer-no"),e.no.stemmer=function(){var r=e.stemmerSupport.Among,n=e.stemmerSupport.SnowballProgram,i=new function(){function e(){var e,r=w.cursor+3;if(a=w.limit,0<=r||r<=w.limit){for(s=r;;){if(e=w.cursor,w.in_grouping(d,97,248)){w.cursor=e;break}if(e>=w.limit)return;w.cursor=e+1}for(;!w.out_grouping(d,97,248);){if(w.cursor>=w.limit)return;w.cursor++}a=w.cursor,a=a&&(r=w.limit_backward,w.limit_backward=a,w.ket=w.cursor,e=w.find_among_b(m,29),w.limit_backward=r,e))switch(w.bra=w.cursor,e){case 1:w.slice_del();break;case 2:n=w.limit-w.cursor,w.in_grouping_b(c,98,122)?w.slice_del():(w.cursor=w.limit-n,w.eq_s_b(1,"k")&&w.out_grouping_b(d,97,248)&&w.slice_del());break;case 3:w.slice_from("er")}}function t(){var e,r=w.limit-w.cursor;w.cursor>=a&&(e=w.limit_backward,w.limit_backward=a,w.ket=w.cursor,w.find_among_b(u,2)?(w.bra=w.cursor,w.limit_backward=e,w.cursor=w.limit-r,w.cursor>w.limit_backward&&(w.cursor--,w.bra=w.cursor,w.slice_del())):w.limit_backward=e)}function o(){var e,r;w.cursor>=a&&(r=w.limit_backward,w.limit_backward=a,w.ket=w.cursor,e=w.find_among_b(l,11),e?(w.bra=w.cursor,w.limit_backward=r,1==e&&w.slice_del()):w.limit_backward=r)}var s,a,m=[new r("a",-1,1),new r("e",-1,1),new r("ede",1,1),new r("ande",1,1),new r("ende",1,1),new r("ane",1,1),new r("ene",1,1),new r("hetene",6,1),new r("erte",1,3),new r("en",-1,1),new r("heten",9,1),new r("ar",-1,1),new r("er",-1,1),new r("heter",12,1),new r("s",-1,2),new r("as",14,1),new r("es",14,1),new r("edes",16,1),new r("endes",16,1),new r("enes",16,1),new r("hetenes",19,1),new r("ens",14,1),new r("hetens",21,1),new r("ers",14,1),new r("ets",14,1),new r("et",-1,1),new r("het",25,1),new r("ert",-1,3),new r("ast",-1,1)],u=[new r("dt",-1,-1),new r("vt",-1,-1)],l=[new r("leg",-1,1),new r("eleg",0,1),new r("ig",-1,1),new r("eig",2,1),new r("lig",2,1),new r("elig",4,1),new r("els",-1,1),new r("lov",-1,1),new r("elov",7,1),new r("slov",7,1),new r("hetslov",9,1)],d=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,48,0,128],c=[119,125,149,1],w=new n;this.setCurrent=function(e){w.setCurrent(e)},this.getCurrent=function(){return w.getCurrent()},this.stem=function(){var r=w.cursor;return e(),w.limit_backward=r,w.cursor=w.limit,i(),w.cursor=w.limit,t(),w.cursor=w.limit,o(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return i.setCurrent(e),i.stem(),i.getCurrent()}):(i.setCurrent(e),i.stem(),i.getCurrent())}}(),e.Pipeline.registerFunction(e.no.stemmer,"stemmer-no"),e.no.stopWordFilter=e.generateStopWordFilter("alle at av bare begge ble blei bli blir blitt både båe da de deg dei deim deira deires dem den denne der dere deres det dette di din disse ditt du dykk dykkar då eg ein eit eitt eller elles en enn er et ett etter for fordi fra før ha hadde han hans har hennar henne hennes her hjå ho hoe honom hoss hossen hun hva hvem hver hvilke hvilken hvis hvor hvordan hvorfor i ikke ikkje ikkje ingen ingi inkje inn inni ja jeg kan kom korleis korso kun kunne kva kvar kvarhelst kven kvi kvifor man mange me med medan meg meget mellom men mi min mine mitt mot mykje ned no noe noen 
noka noko nokon nokor nokre nå når og også om opp oss over på samme seg selv si si sia sidan siden sin sine sitt sjøl skal skulle slik so som som somme somt så sånn til um upp ut uten var vart varte ved vere verte vi vil ville vore vors vort vår være være vært å".split(" ")),e.Pipeline.registerFunction(e.no.stopWordFilter,"stopWordFilter-no")}}); \ No newline at end of file diff --git a/assets/javascripts/lunr/min/lunr.pt.min.js b/assets/javascripts/lunr/min/lunr.pt.min.js new file mode 100644 index 0000000..6c16996 --- /dev/null +++ b/assets/javascripts/lunr/min/lunr.pt.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Portuguese` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.pt=function(){this.pipeline.reset(),this.pipeline.add(e.pt.trimmer,e.pt.stopWordFilter,e.pt.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.pt.stemmer))},e.pt.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.pt.trimmer=e.trimmerSupport.generateTrimmer(e.pt.wordCharacters),e.Pipeline.registerFunction(e.pt.trimmer,"trimmer-pt"),e.pt.stemmer=function(){var r=e.stemmerSupport.Among,s=e.stemmerSupport.SnowballProgram,n=new function(){function e(){for(var e;;){if(z.bra=z.cursor,e=z.find_among(k,3))switch(z.ket=z.cursor,e){case 1:z.slice_from("a~");continue;case 2:z.slice_from("o~");continue;case 3:if(z.cursor>=z.limit)break;z.cursor++;continue}break}}function n(){if(z.out_grouping(y,97,250)){for(;!z.in_grouping(y,97,250);){if(z.cursor>=z.limit)return!0;z.cursor++}return!1}return!0}function i(){if(z.in_grouping(y,97,250))for(;!z.out_grouping(y,97,250);){if(z.cursor>=z.limit)return!1;z.cursor++}return g=z.cursor,!0}function o(){var e,r,s=z.cursor;if(z.in_grouping(y,97,250))if(e=z.cursor,n()){if(z.cursor=e,i())return}else g=z.cursor;if(z.cursor=s,z.out_grouping(y,97,250)){if(r=z.cursor,n()){if(z.cursor=r,!z.in_grouping(y,97,250)||z.cursor>=z.limit)return;z.cursor++}g=z.cursor}}function t(){for(;!z.in_grouping(y,97,250);){if(z.cursor>=z.limit)return!1;z.cursor++}for(;!z.out_grouping(y,97,250);){if(z.cursor>=z.limit)return!1;z.cursor++}return!0}function a(){var e=z.cursor;g=z.limit,b=g,h=g,o(),z.cursor=e,t()&&(b=z.cursor,t()&&(h=z.cursor))}function u(){for(var e;;){if(z.bra=z.cursor,e=z.find_among(q,3))switch(z.ket=z.cursor,e){case 1:z.slice_from("ã");continue;case 2:z.slice_from("õ");continue;case 3:if(z.cursor>=z.limit)break;z.cursor++;continue}break}}function w(){return g<=z.cursor}function m(){return b<=z.cursor}function c(){return h<=z.cursor}function l(){var e;if(z.ket=z.cursor,!(e=z.find_among_b(F,45)))return!1;switch(z.bra=z.cursor,e){case 1:if(!c())return!1;z.slice_del();break;case 2:if(!c())return!1;z.slice_from("log");break;case 3:if(!c())return!1;z.slice_from("u");break;case 4:if(!c())return!1;z.slice_from("ente");break;case 
5:if(!m())return!1;z.slice_del(),z.ket=z.cursor,e=z.find_among_b(j,4),e&&(z.bra=z.cursor,c()&&(z.slice_del(),1==e&&(z.ket=z.cursor,z.eq_s_b(2,"at")&&(z.bra=z.cursor,c()&&z.slice_del()))));break;case 6:if(!c())return!1;z.slice_del(),z.ket=z.cursor,e=z.find_among_b(C,3),e&&(z.bra=z.cursor,1==e&&c()&&z.slice_del());break;case 7:if(!c())return!1;z.slice_del(),z.ket=z.cursor,e=z.find_among_b(P,3),e&&(z.bra=z.cursor,1==e&&c()&&z.slice_del());break;case 8:if(!c())return!1;z.slice_del(),z.ket=z.cursor,z.eq_s_b(2,"at")&&(z.bra=z.cursor,c()&&z.slice_del());break;case 9:if(!w()||!z.eq_s_b(1,"e"))return!1;z.slice_from("ir")}return!0}function f(){var e,r;if(z.cursor>=g){if(r=z.limit_backward,z.limit_backward=g,z.ket=z.cursor,e=z.find_among_b(S,120))return z.bra=z.cursor,1==e&&z.slice_del(),z.limit_backward=r,!0;z.limit_backward=r}return!1}function d(){var e;z.ket=z.cursor,(e=z.find_among_b(W,7))&&(z.bra=z.cursor,1==e&&w()&&z.slice_del())}function v(e,r){if(z.eq_s_b(1,e)){z.bra=z.cursor;var s=z.limit-z.cursor;if(z.eq_s_b(1,r))return z.cursor=z.limit-s,w()&&z.slice_del(),!1}return!0}function p(){var e;if(z.ket=z.cursor,e=z.find_among_b(L,4))switch(z.bra=z.cursor,e){case 1:w()&&(z.slice_del(),z.ket=z.cursor,z.limit-z.cursor,v("u","g")&&v("i","c"));break;case 2:z.slice_from("c")}}function _(){if(!l()&&(z.cursor=z.limit,!f()))return z.cursor=z.limit,void d();z.cursor=z.limit,z.ket=z.cursor,z.eq_s_b(1,"i")&&(z.bra=z.cursor,z.eq_s_b(1,"c")&&(z.cursor=z.limit,w()&&z.slice_del()))}var h,b,g,k=[new r("",-1,3),new r("ã",0,1),new r("õ",0,2)],q=[new r("",-1,3),new r("a~",0,1),new r("o~",0,2)],j=[new r("ic",-1,-1),new r("ad",-1,-1),new r("os",-1,-1),new r("iv",-1,1)],C=[new r("ante",-1,1),new r("avel",-1,1),new r("ível",-1,1)],P=[new r("ic",-1,1),new r("abil",-1,1),new r("iv",-1,1)],F=[new r("ica",-1,1),new r("ância",-1,1),new r("ência",-1,4),new r("ira",-1,9),new r("adora",-1,1),new r("osa",-1,1),new r("ista",-1,1),new r("iva",-1,8),new r("eza",-1,1),new r("logía",-1,2),new r("idade",-1,7),new r("ante",-1,1),new r("mente",-1,6),new r("amente",12,5),new r("ável",-1,1),new r("ível",-1,1),new r("ución",-1,3),new r("ico",-1,1),new r("ismo",-1,1),new r("oso",-1,1),new r("amento",-1,1),new r("imento",-1,1),new r("ivo",-1,8),new r("aça~o",-1,1),new r("ador",-1,1),new r("icas",-1,1),new r("ências",-1,4),new r("iras",-1,9),new r("adoras",-1,1),new r("osas",-1,1),new r("istas",-1,1),new r("ivas",-1,8),new r("ezas",-1,1),new r("logías",-1,2),new r("idades",-1,7),new r("uciones",-1,3),new r("adores",-1,1),new r("antes",-1,1),new r("aço~es",-1,1),new r("icos",-1,1),new r("ismos",-1,1),new r("osos",-1,1),new r("amentos",-1,1),new r("imentos",-1,1),new r("ivos",-1,8)],S=[new r("ada",-1,1),new r("ida",-1,1),new r("ia",-1,1),new r("aria",2,1),new r("eria",2,1),new r("iria",2,1),new r("ara",-1,1),new r("era",-1,1),new r("ira",-1,1),new r("ava",-1,1),new r("asse",-1,1),new r("esse",-1,1),new r("isse",-1,1),new r("aste",-1,1),new r("este",-1,1),new r("iste",-1,1),new r("ei",-1,1),new r("arei",16,1),new r("erei",16,1),new r("irei",16,1),new r("am",-1,1),new r("iam",20,1),new r("ariam",21,1),new r("eriam",21,1),new r("iriam",21,1),new r("aram",20,1),new r("eram",20,1),new r("iram",20,1),new r("avam",20,1),new r("em",-1,1),new r("arem",29,1),new r("erem",29,1),new r("irem",29,1),new r("assem",29,1),new r("essem",29,1),new r("issem",29,1),new r("ado",-1,1),new r("ido",-1,1),new r("ando",-1,1),new r("endo",-1,1),new r("indo",-1,1),new r("ara~o",-1,1),new r("era~o",-1,1),new r("ira~o",-1,1),new r("ar",-1,1),new r("er",-1,1),new 
r("ir",-1,1),new r("as",-1,1),new r("adas",47,1),new r("idas",47,1),new r("ias",47,1),new r("arias",50,1),new r("erias",50,1),new r("irias",50,1),new r("aras",47,1),new r("eras",47,1),new r("iras",47,1),new r("avas",47,1),new r("es",-1,1),new r("ardes",58,1),new r("erdes",58,1),new r("irdes",58,1),new r("ares",58,1),new r("eres",58,1),new r("ires",58,1),new r("asses",58,1),new r("esses",58,1),new r("isses",58,1),new r("astes",58,1),new r("estes",58,1),new r("istes",58,1),new r("is",-1,1),new r("ais",71,1),new r("eis",71,1),new r("areis",73,1),new r("ereis",73,1),new r("ireis",73,1),new r("áreis",73,1),new r("éreis",73,1),new r("íreis",73,1),new r("ásseis",73,1),new r("ésseis",73,1),new r("ísseis",73,1),new r("áveis",73,1),new r("íeis",73,1),new r("aríeis",84,1),new r("eríeis",84,1),new r("iríeis",84,1),new r("ados",-1,1),new r("idos",-1,1),new r("amos",-1,1),new r("áramos",90,1),new r("éramos",90,1),new r("íramos",90,1),new r("ávamos",90,1),new r("íamos",90,1),new r("aríamos",95,1),new r("eríamos",95,1),new r("iríamos",95,1),new r("emos",-1,1),new r("aremos",99,1),new r("eremos",99,1),new r("iremos",99,1),new r("ássemos",99,1),new r("êssemos",99,1),new r("íssemos",99,1),new r("imos",-1,1),new r("armos",-1,1),new r("ermos",-1,1),new r("irmos",-1,1),new r("ámos",-1,1),new r("arás",-1,1),new r("erás",-1,1),new r("irás",-1,1),new r("eu",-1,1),new r("iu",-1,1),new r("ou",-1,1),new r("ará",-1,1),new r("erá",-1,1),new r("irá",-1,1)],W=[new r("a",-1,1),new r("i",-1,1),new r("o",-1,1),new r("os",-1,1),new r("á",-1,1),new r("í",-1,1),new r("ó",-1,1)],L=[new r("e",-1,1),new r("ç",-1,2),new r("é",-1,1),new r("ê",-1,1)],y=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,3,19,12,2],z=new s;this.setCurrent=function(e){z.setCurrent(e)},this.getCurrent=function(){return z.getCurrent()},this.stem=function(){var r=z.cursor;return e(),z.cursor=r,a(),z.limit_backward=r,z.cursor=z.limit,_(),z.cursor=z.limit,p(),z.cursor=z.limit_backward,u(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}}(),e.Pipeline.registerFunction(e.pt.stemmer,"stemmer-pt"),e.pt.stopWordFilter=e.generateStopWordFilter("a ao aos aquela aquelas aquele aqueles aquilo as até com como da das de dela delas dele deles depois do dos e ela elas ele eles em entre era eram essa essas esse esses esta estamos estas estava estavam este esteja estejam estejamos estes esteve estive estivemos estiver estivera estiveram estiverem estivermos estivesse estivessem estivéramos estivéssemos estou está estávamos estão eu foi fomos for fora foram forem formos fosse fossem fui fôramos fôssemos haja hajam hajamos havemos hei houve houvemos houver houvera houveram houverei houverem houveremos houveria houveriam houvermos houverá houverão houveríamos houvesse houvessem houvéramos houvéssemos há hão isso isto já lhe lhes mais mas me mesmo meu meus minha minhas muito na nas nem no nos nossa nossas nosso nossos num numa não nós o os ou para pela pelas pelo pelos por qual quando que quem se seja sejam sejamos sem serei seremos seria seriam será serão seríamos seu seus somos sou sua suas são só também te tem temos tenha tenham tenhamos tenho terei teremos teria teriam terá terão teríamos teu teus teve tinha tinham tive tivemos tiver tivera tiveram tiverem tivermos tivesse tivessem tivéramos tivéssemos tu tua tuas tém tínhamos um uma você vocês vos à às éramos".split(" ")),e.Pipeline.registerFunction(e.pt.stopWordFilter,"stopWordFilter-pt")}}); \ No newline at end of 
file diff --git a/assets/javascripts/lunr/min/lunr.ro.min.js b/assets/javascripts/lunr/min/lunr.ro.min.js new file mode 100644 index 0000000..7277140 --- /dev/null +++ b/assets/javascripts/lunr/min/lunr.ro.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Romanian` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,i){"function"==typeof define&&define.amd?define(i):"object"==typeof exports?module.exports=i():i()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.ro=function(){this.pipeline.reset(),this.pipeline.add(e.ro.trimmer,e.ro.stopWordFilter,e.ro.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.ro.stemmer))},e.ro.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.ro.trimmer=e.trimmerSupport.generateTrimmer(e.ro.wordCharacters),e.Pipeline.registerFunction(e.ro.trimmer,"trimmer-ro"),e.ro.stemmer=function(){var i=e.stemmerSupport.Among,r=e.stemmerSupport.SnowballProgram,n=new function(){function e(e,i){L.eq_s(1,e)&&(L.ket=L.cursor,L.in_grouping(W,97,259)&&L.slice_from(i))}function n(){for(var i,r;;){if(i=L.cursor,L.in_grouping(W,97,259)&&(r=L.cursor,L.bra=r,e("u","U"),L.cursor=r,e("i","I")),L.cursor=i,L.cursor>=L.limit)break;L.cursor++}}function t(){if(L.out_grouping(W,97,259)){for(;!L.in_grouping(W,97,259);){if(L.cursor>=L.limit)return!0;L.cursor++}return!1}return!0}function a(){if(L.in_grouping(W,97,259))for(;!L.out_grouping(W,97,259);){if(L.cursor>=L.limit)return!0;L.cursor++}return!1}function o(){var e,i,r=L.cursor;if(L.in_grouping(W,97,259)){if(e=L.cursor,!t())return void(h=L.cursor);if(L.cursor=e,!a())return void(h=L.cursor)}L.cursor=r,L.out_grouping(W,97,259)&&(i=L.cursor,t()&&(L.cursor=i,L.in_grouping(W,97,259)&&L.cursor=L.limit)return!1;L.cursor++}for(;!L.out_grouping(W,97,259);){if(L.cursor>=L.limit)return!1;L.cursor++}return!0}function c(){var e=L.cursor;h=L.limit,k=h,g=h,o(),L.cursor=e,u()&&(k=L.cursor,u()&&(g=L.cursor))}function s(){for(var e;;){if(L.bra=L.cursor,e=L.find_among(z,3))switch(L.ket=L.cursor,e){case 1:L.slice_from("i");continue;case 2:L.slice_from("u");continue;case 3:if(L.cursor>=L.limit)break;L.cursor++;continue}break}}function w(){return h<=L.cursor}function m(){return k<=L.cursor}function l(){return g<=L.cursor}function f(){var e,i;if(L.ket=L.cursor,(e=L.find_among_b(C,16))&&(L.bra=L.cursor,m()))switch(e){case 1:L.slice_del();break;case 2:L.slice_from("a");break;case 3:L.slice_from("e");break;case 4:L.slice_from("i");break;case 5:i=L.limit-L.cursor,L.eq_s_b(2,"ab")||(L.cursor=L.limit-i,L.slice_from("i"));break;case 6:L.slice_from("at");break;case 7:L.slice_from("aţi")}}function p(){var e,i=L.limit-L.cursor;if(L.ket=L.cursor,(e=L.find_among_b(P,46))&&(L.bra=L.cursor,m())){switch(e){case 1:L.slice_from("abil");break;case 2:L.slice_from("ibil");break;case 3:L.slice_from("iv");break;case 4:L.slice_from("ic");break;case 5:L.slice_from("at");break;case 6:L.slice_from("it")}return _=!0,L.cursor=L.limit-i,!0}return!1}function d(){var 
e,i;for(_=!1;;)if(i=L.limit-L.cursor,!p()){L.cursor=L.limit-i;break}if(L.ket=L.cursor,(e=L.find_among_b(F,62))&&(L.bra=L.cursor,l())){switch(e){case 1:L.slice_del();break;case 2:L.eq_s_b(1,"ţ")&&(L.bra=L.cursor,L.slice_from("t"));break;case 3:L.slice_from("ist")}_=!0}}function b(){var e,i,r;if(L.cursor>=h){if(i=L.limit_backward,L.limit_backward=h,L.ket=L.cursor,e=L.find_among_b(q,94))switch(L.bra=L.cursor,e){case 1:if(r=L.limit-L.cursor,!L.out_grouping_b(W,97,259)&&(L.cursor=L.limit-r,!L.eq_s_b(1,"u")))break;case 2:L.slice_del()}L.limit_backward=i}}function v(){var e;L.ket=L.cursor,(e=L.find_among_b(S,5))&&(L.bra=L.cursor,w()&&1==e&&L.slice_del())}var _,g,k,h,z=[new i("",-1,3),new i("I",0,1),new i("U",0,2)],C=[new i("ea",-1,3),new i("aţia",-1,7),new i("aua",-1,2),new i("iua",-1,4),new i("aţie",-1,7),new i("ele",-1,3),new i("ile",-1,5),new i("iile",6,4),new i("iei",-1,4),new i("atei",-1,6),new i("ii",-1,4),new i("ului",-1,1),new i("ul",-1,1),new i("elor",-1,3),new i("ilor",-1,4),new i("iilor",14,4)],P=[new i("icala",-1,4),new i("iciva",-1,4),new i("ativa",-1,5),new i("itiva",-1,6),new i("icale",-1,4),new i("aţiune",-1,5),new i("iţiune",-1,6),new i("atoare",-1,5),new i("itoare",-1,6),new i("ătoare",-1,5),new i("icitate",-1,4),new i("abilitate",-1,1),new i("ibilitate",-1,2),new i("ivitate",-1,3),new i("icive",-1,4),new i("ative",-1,5),new i("itive",-1,6),new i("icali",-1,4),new i("atori",-1,5),new i("icatori",18,4),new i("itori",-1,6),new i("ători",-1,5),new i("icitati",-1,4),new i("abilitati",-1,1),new i("ivitati",-1,3),new i("icivi",-1,4),new i("ativi",-1,5),new i("itivi",-1,6),new i("icităi",-1,4),new i("abilităi",-1,1),new i("ivităi",-1,3),new i("icităţi",-1,4),new i("abilităţi",-1,1),new i("ivităţi",-1,3),new i("ical",-1,4),new i("ator",-1,5),new i("icator",35,4),new i("itor",-1,6),new i("ător",-1,5),new i("iciv",-1,4),new i("ativ",-1,5),new i("itiv",-1,6),new i("icală",-1,4),new i("icivă",-1,4),new i("ativă",-1,5),new i("itivă",-1,6)],F=[new i("ica",-1,1),new i("abila",-1,1),new i("ibila",-1,1),new i("oasa",-1,1),new i("ata",-1,1),new i("ita",-1,1),new i("anta",-1,1),new i("ista",-1,3),new i("uta",-1,1),new i("iva",-1,1),new i("ic",-1,1),new i("ice",-1,1),new i("abile",-1,1),new i("ibile",-1,1),new i("isme",-1,3),new i("iune",-1,2),new i("oase",-1,1),new i("ate",-1,1),new i("itate",17,1),new i("ite",-1,1),new i("ante",-1,1),new i("iste",-1,3),new i("ute",-1,1),new i("ive",-1,1),new i("ici",-1,1),new i("abili",-1,1),new i("ibili",-1,1),new i("iuni",-1,2),new i("atori",-1,1),new i("osi",-1,1),new i("ati",-1,1),new i("itati",30,1),new i("iti",-1,1),new i("anti",-1,1),new i("isti",-1,3),new i("uti",-1,1),new i("işti",-1,3),new i("ivi",-1,1),new i("ităi",-1,1),new i("oşi",-1,1),new i("ităţi",-1,1),new i("abil",-1,1),new i("ibil",-1,1),new i("ism",-1,3),new i("ator",-1,1),new i("os",-1,1),new i("at",-1,1),new i("it",-1,1),new i("ant",-1,1),new i("ist",-1,3),new i("ut",-1,1),new i("iv",-1,1),new i("ică",-1,1),new i("abilă",-1,1),new i("ibilă",-1,1),new i("oasă",-1,1),new i("ată",-1,1),new i("ită",-1,1),new i("antă",-1,1),new i("istă",-1,3),new i("ută",-1,1),new i("ivă",-1,1)],q=[new i("ea",-1,1),new i("ia",-1,1),new i("esc",-1,1),new i("ăsc",-1,1),new i("ind",-1,1),new i("ând",-1,1),new i("are",-1,1),new i("ere",-1,1),new i("ire",-1,1),new i("âre",-1,1),new i("se",-1,2),new i("ase",10,1),new i("sese",10,2),new i("ise",10,1),new i("use",10,1),new i("âse",10,1),new i("eşte",-1,1),new i("ăşte",-1,1),new i("eze",-1,1),new i("ai",-1,1),new i("eai",19,1),new i("iai",19,1),new i("sei",-1,2),new 
i("eşti",-1,1),new i("ăşti",-1,1),new i("ui",-1,1),new i("ezi",-1,1),new i("âi",-1,1),new i("aşi",-1,1),new i("seşi",-1,2),new i("aseşi",29,1),new i("seseşi",29,2),new i("iseşi",29,1),new i("useşi",29,1),new i("âseşi",29,1),new i("işi",-1,1),new i("uşi",-1,1),new i("âşi",-1,1),new i("aţi",-1,2),new i("eaţi",38,1),new i("iaţi",38,1),new i("eţi",-1,2),new i("iţi",-1,2),new i("âţi",-1,2),new i("arăţi",-1,1),new i("serăţi",-1,2),new i("aserăţi",45,1),new i("seserăţi",45,2),new i("iserăţi",45,1),new i("userăţi",45,1),new i("âserăţi",45,1),new i("irăţi",-1,1),new i("urăţi",-1,1),new i("ârăţi",-1,1),new i("am",-1,1),new i("eam",54,1),new i("iam",54,1),new i("em",-1,2),new i("asem",57,1),new i("sesem",57,2),new i("isem",57,1),new i("usem",57,1),new i("âsem",57,1),new i("im",-1,2),new i("âm",-1,2),new i("ăm",-1,2),new i("arăm",65,1),new i("serăm",65,2),new i("aserăm",67,1),new i("seserăm",67,2),new i("iserăm",67,1),new i("userăm",67,1),new i("âserăm",67,1),new i("irăm",65,1),new i("urăm",65,1),new i("ârăm",65,1),new i("au",-1,1),new i("eau",76,1),new i("iau",76,1),new i("indu",-1,1),new i("ându",-1,1),new i("ez",-1,1),new i("ească",-1,1),new i("ară",-1,1),new i("seră",-1,2),new i("aseră",84,1),new i("seseră",84,2),new i("iseră",84,1),new i("useră",84,1),new i("âseră",84,1),new i("iră",-1,1),new i("ură",-1,1),new i("âră",-1,1),new i("ează",-1,1)],S=[new i("a",-1,1),new i("e",-1,1),new i("ie",1,1),new i("i",-1,1),new i("ă",-1,1)],W=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,2,32,0,0,4],L=new r;this.setCurrent=function(e){L.setCurrent(e)},this.getCurrent=function(){return L.getCurrent()},this.stem=function(){var e=L.cursor;return n(),L.cursor=e,c(),L.limit_backward=e,L.cursor=L.limit,f(),L.cursor=L.limit,d(),L.cursor=L.limit,_||(L.cursor=L.limit,b(),L.cursor=L.limit),v(),L.cursor=L.limit_backward,s(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}}(),e.Pipeline.registerFunction(e.ro.stemmer,"stemmer-ro"),e.ro.stopWordFilter=e.generateStopWordFilter("acea aceasta această aceea acei aceia acel acela acele acelea acest acesta aceste acestea aceşti aceştia acolo acord acum ai aia aibă aici al ale alea altceva altcineva am ar are asemenea asta astea astăzi asupra au avea avem aveţi azi aş aşadar aţi bine bucur bună ca care caut ce cel ceva chiar cinci cine cineva contra cu cum cumva curând curînd când cât câte câtva câţi cînd cît cîte cîtva cîţi că căci cărei căror cărui către da dacă dar datorită dată dau de deci deja deoarece departe deşi din dinaintea dintr- dintre doi doilea două drept după dă ea ei el ele eram este eu eşti face fata fi fie fiecare fii fim fiu fiţi frumos fără graţie halbă iar ieri la le li lor lui lângă lîngă mai mea mei mele mereu meu mi mie mine mult multă mulţi mulţumesc mâine mîine mă ne nevoie nici nicăieri nimeni nimeri nimic nişte noastre noastră noi noroc nostru nouă noştri nu opt ori oricare orice oricine oricum oricând oricât oricînd oricît oriunde patra patru patrulea pe pentru peste pic poate pot prea prima primul prin puţin puţina puţină până pînă rog sa sale sau se spate spre sub sunt suntem sunteţi sută sînt sîntem sînteţi să săi său ta tale te timp tine toate toată tot totuşi toţi trei treia treilea tu tăi tău un una unde undeva unei uneia unele uneori unii unor unora unu unui unuia unul vi voastre voastră voi vostru vouă voştri vreme vreo vreun vă zece zero zi zice îi îl îmi împotriva în înainte înaintea încotro încât încît între întrucât întrucît îţi 
ăla ălea ăsta ăstea ăştia şapte şase şi ştiu ţi ţie".split(" ")),e.Pipeline.registerFunction(e.ro.stopWordFilter,"stopWordFilter-ro")}}); \ No newline at end of file diff --git a/assets/javascripts/lunr/min/lunr.ru.min.js b/assets/javascripts/lunr/min/lunr.ru.min.js new file mode 100644 index 0000000..186cc48 --- /dev/null +++ b/assets/javascripts/lunr/min/lunr.ru.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Russian` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,n){"function"==typeof define&&define.amd?define(n):"object"==typeof exports?module.exports=n():n()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.ru=function(){this.pipeline.reset(),this.pipeline.add(e.ru.trimmer,e.ru.stopWordFilter,e.ru.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.ru.stemmer))},e.ru.wordCharacters="Ѐ-҄҇-ԯᴫᵸⷠ-ⷿꙀ-ꚟ︮︯",e.ru.trimmer=e.trimmerSupport.generateTrimmer(e.ru.wordCharacters),e.Pipeline.registerFunction(e.ru.trimmer,"trimmer-ru"),e.ru.stemmer=function(){var n=e.stemmerSupport.Among,r=e.stemmerSupport.SnowballProgram,t=new function(){function e(){for(;!W.in_grouping(S,1072,1103);){if(W.cursor>=W.limit)return!1;W.cursor++}return!0}function t(){for(;!W.out_grouping(S,1072,1103);){if(W.cursor>=W.limit)return!1;W.cursor++}return!0}function w(){b=W.limit,_=b,e()&&(b=W.cursor,t()&&e()&&t()&&(_=W.cursor))}function i(){return _<=W.cursor}function u(e,n){var r,t;if(W.ket=W.cursor,r=W.find_among_b(e,n)){switch(W.bra=W.cursor,r){case 1:if(t=W.limit-W.cursor,!W.eq_s_b(1,"а")&&(W.cursor=W.limit-t,!W.eq_s_b(1,"я")))return!1;case 2:W.slice_del()}return!0}return!1}function o(){return u(h,9)}function s(e,n){var r;return W.ket=W.cursor,!!(r=W.find_among_b(e,n))&&(W.bra=W.cursor,1==r&&W.slice_del(),!0)}function c(){return s(g,26)}function m(){return!!c()&&(u(C,8),!0)}function f(){return s(k,2)}function l(){return u(P,46)}function a(){s(v,36)}function p(){var e;W.ket=W.cursor,(e=W.find_among_b(F,2))&&(W.bra=W.cursor,i()&&1==e&&W.slice_del())}function d(){var e;if(W.ket=W.cursor,e=W.find_among_b(q,4))switch(W.bra=W.cursor,e){case 1:if(W.slice_del(),W.ket=W.cursor,!W.eq_s_b(1,"н"))break;W.bra=W.cursor;case 2:if(!W.eq_s_b(1,"н"))break;case 3:W.slice_del()}}var _,b,h=[new n("в",-1,1),new n("ив",0,2),new n("ыв",0,2),new n("вши",-1,1),new n("ивши",3,2),new n("ывши",3,2),new n("вшись",-1,1),new n("ившись",6,2),new n("ывшись",6,2)],g=[new n("ее",-1,1),new n("ие",-1,1),new n("ое",-1,1),new n("ые",-1,1),new n("ими",-1,1),new n("ыми",-1,1),new n("ей",-1,1),new n("ий",-1,1),new n("ой",-1,1),new n("ый",-1,1),new n("ем",-1,1),new n("им",-1,1),new n("ом",-1,1),new n("ым",-1,1),new n("его",-1,1),new n("ого",-1,1),new n("ему",-1,1),new n("ому",-1,1),new n("их",-1,1),new n("ых",-1,1),new n("ею",-1,1),new n("ою",-1,1),new n("ую",-1,1),new n("юю",-1,1),new n("ая",-1,1),new n("яя",-1,1)],C=[new n("ем",-1,1),new n("нн",-1,1),new n("вш",-1,1),new n("ивш",2,2),new n("ывш",2,2),new n("щ",-1,1),new n("ющ",5,1),new n("ующ",6,2)],k=[new n("сь",-1,1),new 
n("ся",-1,1)],P=[new n("ла",-1,1),new n("ила",0,2),new n("ыла",0,2),new n("на",-1,1),new n("ена",3,2),new n("ете",-1,1),new n("ите",-1,2),new n("йте",-1,1),new n("ейте",7,2),new n("уйте",7,2),new n("ли",-1,1),new n("или",10,2),new n("ыли",10,2),new n("й",-1,1),new n("ей",13,2),new n("уй",13,2),new n("л",-1,1),new n("ил",16,2),new n("ыл",16,2),new n("ем",-1,1),new n("им",-1,2),new n("ым",-1,2),new n("н",-1,1),new n("ен",22,2),new n("ло",-1,1),new n("ило",24,2),new n("ыло",24,2),new n("но",-1,1),new n("ено",27,2),new n("нно",27,1),new n("ет",-1,1),new n("ует",30,2),new n("ит",-1,2),new n("ыт",-1,2),new n("ют",-1,1),new n("уют",34,2),new n("ят",-1,2),new n("ны",-1,1),new n("ены",37,2),new n("ть",-1,1),new n("ить",39,2),new n("ыть",39,2),new n("ешь",-1,1),new n("ишь",-1,2),new n("ю",-1,2),new n("ую",44,2)],v=[new n("а",-1,1),new n("ев",-1,1),new n("ов",-1,1),new n("е",-1,1),new n("ие",3,1),new n("ье",3,1),new n("и",-1,1),new n("еи",6,1),new n("ии",6,1),new n("ами",6,1),new n("ями",6,1),new n("иями",10,1),new n("й",-1,1),new n("ей",12,1),new n("ией",13,1),new n("ий",12,1),new n("ой",12,1),new n("ам",-1,1),new n("ем",-1,1),new n("ием",18,1),new n("ом",-1,1),new n("ям",-1,1),new n("иям",21,1),new n("о",-1,1),new n("у",-1,1),new n("ах",-1,1),new n("ях",-1,1),new n("иях",26,1),new n("ы",-1,1),new n("ь",-1,1),new n("ю",-1,1),new n("ию",30,1),new n("ью",30,1),new n("я",-1,1),new n("ия",33,1),new n("ья",33,1)],F=[new n("ост",-1,1),new n("ость",-1,1)],q=[new n("ейше",-1,1),new n("н",-1,2),new n("ейш",-1,1),new n("ь",-1,3)],S=[33,65,8,232],W=new r;this.setCurrent=function(e){W.setCurrent(e)},this.getCurrent=function(){return W.getCurrent()},this.stem=function(){return w(),W.cursor=W.limit,!(W.cursor=i&&(e-=i,t[e>>3]&1<<(7&e)))return this.cursor++,!0}return!1},in_grouping_b:function(t,i,s){if(this.cursor>this.limit_backward){var e=r.charCodeAt(this.cursor-1);if(e<=s&&e>=i&&(e-=i,t[e>>3]&1<<(7&e)))return this.cursor--,!0}return!1},out_grouping:function(t,i,s){if(this.cursors||e>3]&1<<(7&e)))return this.cursor++,!0}return!1},out_grouping_b:function(t,i,s){if(this.cursor>this.limit_backward){var e=r.charCodeAt(this.cursor-1);if(e>s||e>3]&1<<(7&e)))return this.cursor--,!0}return!1},eq_s:function(t,i){if(this.limit-this.cursor>1),f=0,l=o0||e==s||c)break;c=!0}}for(;;){var _=t[s];if(o>=_.s_size){if(this.cursor=n+_.s_size,!_.method)return _.result;var b=_.method();if(this.cursor=n+_.s_size,b)return _.result}if((s=_.substring_i)<0)return 0}},find_among_b:function(t,i){for(var s=0,e=i,n=this.cursor,u=this.limit_backward,o=0,h=0,c=!1;;){for(var a=s+(e-s>>1),f=0,l=o=0;m--){if(n-l==u){f=-1;break}if(f=r.charCodeAt(n-1-l)-_.s[m])break;l++}if(f<0?(e=a,h=l):(s=a,o=l),e-s<=1){if(s>0||e==s||c)break;c=!0}}for(;;){var _=t[s];if(o>=_.s_size){if(this.cursor=n-_.s_size,!_.method)return _.result;var b=_.method();if(this.cursor=n-_.s_size,b)return _.result}if((s=_.substring_i)<0)return 0}},replace_s:function(t,i,s){var e=s.length-(i-t),n=r.substring(0,t),u=r.substring(i);return r=n+s+u,this.limit+=e,this.cursor>=i?this.cursor+=e:this.cursor>t&&(this.cursor=t),e},slice_check:function(){if(this.bra<0||this.bra>this.ket||this.ket>this.limit||this.limit>r.length)throw"faulty slice operation"},slice_from:function(r){this.slice_check(),this.replace_s(this.bra,this.ket,r)},slice_del:function(){this.slice_from("")},insert:function(r,t,i){var s=this.replace_s(r,t,i);r<=this.bra&&(this.bra+=s),r<=this.ket&&(this.ket+=s)},slice_to:function(){return this.slice_check(),r.substring(this.bra,this.ket)},eq_v_b:function(r){return 
this.eq_s_b(r.length,r)}}}},r.trimmerSupport={generateTrimmer:function(r){var t=new RegExp("^[^"+r+"]+"),i=new RegExp("[^"+r+"]+$");return function(r){return"function"==typeof r.update?r.update(function(r){return r.replace(t,"").replace(i,"")}):r.replace(t,"").replace(i,"")}}}}}); \ No newline at end of file diff --git a/assets/javascripts/lunr/min/lunr.sv.min.js b/assets/javascripts/lunr/min/lunr.sv.min.js new file mode 100644 index 0000000..3e5eb64 --- /dev/null +++ b/assets/javascripts/lunr/min/lunr.sv.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Swedish` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.sv=function(){this.pipeline.reset(),this.pipeline.add(e.sv.trimmer,e.sv.stopWordFilter,e.sv.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.sv.stemmer))},e.sv.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.sv.trimmer=e.trimmerSupport.generateTrimmer(e.sv.wordCharacters),e.Pipeline.registerFunction(e.sv.trimmer,"trimmer-sv"),e.sv.stemmer=function(){var r=e.stemmerSupport.Among,n=e.stemmerSupport.SnowballProgram,t=new function(){function e(){var e,r=w.cursor+3;if(o=w.limit,0<=r||r<=w.limit){for(a=r;;){if(e=w.cursor,w.in_grouping(l,97,246)){w.cursor=e;break}if(w.cursor=e,w.cursor>=w.limit)return;w.cursor++}for(;!w.out_grouping(l,97,246);){if(w.cursor>=w.limit)return;w.cursor++}o=w.cursor,o=o&&(w.limit_backward=o,w.cursor=w.limit,w.ket=w.cursor,e=w.find_among_b(u,37),w.limit_backward=r,e))switch(w.bra=w.cursor,e){case 1:w.slice_del();break;case 2:w.in_grouping_b(d,98,121)&&w.slice_del()}}function i(){var e=w.limit_backward;w.cursor>=o&&(w.limit_backward=o,w.cursor=w.limit,w.find_among_b(c,7)&&(w.cursor=w.limit,w.ket=w.cursor,w.cursor>w.limit_backward&&(w.bra=--w.cursor,w.slice_del())),w.limit_backward=e)}function s(){var e,r;if(w.cursor>=o){if(r=w.limit_backward,w.limit_backward=o,w.cursor=w.limit,w.ket=w.cursor,e=w.find_among_b(m,5))switch(w.bra=w.cursor,e){case 1:w.slice_del();break;case 2:w.slice_from("lös");break;case 3:w.slice_from("full")}w.limit_backward=r}}var a,o,u=[new r("a",-1,1),new r("arna",0,1),new r("erna",0,1),new r("heterna",2,1),new r("orna",0,1),new r("ad",-1,1),new r("e",-1,1),new r("ade",6,1),new r("ande",6,1),new r("arne",6,1),new r("are",6,1),new r("aste",6,1),new r("en",-1,1),new r("anden",12,1),new r("aren",12,1),new r("heten",12,1),new r("ern",-1,1),new r("ar",-1,1),new r("er",-1,1),new r("heter",18,1),new r("or",-1,1),new r("s",-1,2),new r("as",21,1),new r("arnas",22,1),new r("ernas",22,1),new r("ornas",22,1),new r("es",21,1),new r("ades",26,1),new r("andes",26,1),new r("ens",21,1),new r("arens",29,1),new r("hetens",29,1),new r("erns",21,1),new r("at",-1,1),new r("andet",-1,1),new r("het",-1,1),new r("ast",-1,1)],c=[new r("dd",-1,-1),new r("gd",-1,-1),new r("nn",-1,-1),new 
r("dt",-1,-1),new r("gt",-1,-1),new r("kt",-1,-1),new r("tt",-1,-1)],m=[new r("ig",-1,1),new r("lig",0,1),new r("els",-1,1),new r("fullt",-1,3),new r("löst",-1,2)],l=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,24,0,32],d=[119,127,149],w=new n;this.setCurrent=function(e){w.setCurrent(e)},this.getCurrent=function(){return w.getCurrent()},this.stem=function(){var r=w.cursor;return e(),w.limit_backward=r,w.cursor=w.limit,t(),w.cursor=w.limit,i(),w.cursor=w.limit,s(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return t.setCurrent(e),t.stem(),t.getCurrent()}):(t.setCurrent(e),t.stem(),t.getCurrent())}}(),e.Pipeline.registerFunction(e.sv.stemmer,"stemmer-sv"),e.sv.stopWordFilter=e.generateStopWordFilter("alla allt att av blev bli blir blivit de dem den denna deras dess dessa det detta dig din dina ditt du där då efter ej eller en er era ert ett från för ha hade han hans har henne hennes hon honom hur här i icke ingen inom inte jag ju kan kunde man med mellan men mig min mina mitt mot mycket ni nu när någon något några och om oss på samma sedan sig sin sina sitta själv skulle som så sådan sådana sådant till under upp ut utan vad var vara varför varit varje vars vart vem vi vid vilka vilkas vilken vilket vår våra vårt än är åt över".split(" ")),e.Pipeline.registerFunction(e.sv.stopWordFilter,"stopWordFilter-sv")}}); \ No newline at end of file diff --git a/assets/javascripts/lunr/min/lunr.ta.min.js b/assets/javascripts/lunr/min/lunr.ta.min.js new file mode 100644 index 0000000..a644bed --- /dev/null +++ b/assets/javascripts/lunr/min/lunr.ta.min.js @@ -0,0 +1 @@ +!function(e,t){"function"==typeof define&&define.amd?define(t):"object"==typeof exports?module.exports=t():t()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.ta=function(){this.pipeline.reset(),this.pipeline.add(e.ta.trimmer,e.ta.stopWordFilter,e.ta.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.ta.stemmer))},e.ta.wordCharacters="஀-உஊ-ஏஐ-ஙச-ட஠-னப-யர-ஹ஺-ிீ-௉ொ-௏ௐ-௙௚-௟௠-௩௪-௯௰-௹௺-௿a-zA-Za-zA-Z0-90-9",e.ta.trimmer=e.trimmerSupport.generateTrimmer(e.ta.wordCharacters),e.Pipeline.registerFunction(e.ta.trimmer,"trimmer-ta"),e.ta.stopWordFilter=e.generateStopWordFilter("அங்கு அங்கே அது அதை அந்த அவர் அவர்கள் அவள் அவன் அவை ஆக ஆகவே ஆகையால் ஆதலால் ஆதலினால் ஆனாலும் ஆனால் இங்கு இங்கே இது இதை இந்த இப்படி இவர் இவர்கள் இவள் இவன் இவை இவ்வளவு உனக்கு உனது உன் உன்னால் எங்கு எங்கே எது எதை எந்த எப்படி எவர் எவர்கள் எவள் எவன் எவை எவ்வளவு எனக்கு எனது எனவே என் என்ன என்னால் ஏது ஏன் தனது தன்னால் தானே தான் நாங்கள் நாம் நான் நீ நீங்கள்".split(" ")),e.ta.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}();var t=e.wordcut;t.init(),e.ta.tokenizer=function(r){if(!arguments.length||null==r||void 0==r)return[];if(Array.isArray(r))return r.map(function(t){return isLunr2?new e.Token(t.toLowerCase()):t.toLowerCase()});var i=r.toString().toLowerCase().replace(/^\s+/,"");return t.cut(i).split("|")},e.Pipeline.registerFunction(e.ta.stemmer,"stemmer-ta"),e.Pipeline.registerFunction(e.ta.stopWordFilter,"stopWordFilter-ta")}}); \ No newline at end of file diff --git a/assets/javascripts/lunr/min/lunr.th.min.js b/assets/javascripts/lunr/min/lunr.th.min.js new file mode 100644 index 0000000..dee3aac --- /dev/null +++ b/assets/javascripts/lunr/min/lunr.th.min.js @@ -0,0 +1 @@ +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var r="2"==e.version[0];e.th=function(){this.pipeline.reset(),this.pipeline.add(e.th.trimmer),r?this.tokenizer=e.th.tokenizer:(e.tokenizer&&(e.tokenizer=e.th.tokenizer),this.tokenizerFn&&(this.tokenizerFn=e.th.tokenizer))},e.th.wordCharacters="[฀-๿]",e.th.trimmer=e.trimmerSupport.generateTrimmer(e.th.wordCharacters),e.Pipeline.registerFunction(e.th.trimmer,"trimmer-th");var t=e.wordcut;t.init(),e.th.tokenizer=function(i){if(!arguments.length||null==i||void 0==i)return[];if(Array.isArray(i))return i.map(function(t){return r?new e.Token(t):t});var n=i.toString().replace(/^\s+/,"");return t.cut(n).split("|")}}}); \ No newline at end of file diff --git a/assets/javascripts/lunr/min/lunr.tr.min.js b/assets/javascripts/lunr/min/lunr.tr.min.js new file mode 100644 index 0000000..563f6ec --- /dev/null +++ b/assets/javascripts/lunr/min/lunr.tr.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Turkish` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(r,i){"function"==typeof define&&define.amd?define(i):"object"==typeof exports?module.exports=i():i()(r.lunr)}(this,function(){return function(r){if(void 0===r)throw new Error("Lunr is not present. 
Please include / require Lunr before this script.");if(void 0===r.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");r.tr=function(){this.pipeline.reset(),this.pipeline.add(r.tr.trimmer,r.tr.stopWordFilter,r.tr.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(r.tr.stemmer))},r.tr.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",r.tr.trimmer=r.trimmerSupport.generateTrimmer(r.tr.wordCharacters),r.Pipeline.registerFunction(r.tr.trimmer,"trimmer-tr"),r.tr.stemmer=function(){var i=r.stemmerSupport.Among,e=r.stemmerSupport.SnowballProgram,n=new function(){function r(r,i,e){for(;;){var n=Dr.limit-Dr.cursor;if(Dr.in_grouping_b(r,i,e)){Dr.cursor=Dr.limit-n;break}if(Dr.cursor=Dr.limit-n,Dr.cursor<=Dr.limit_backward)return!1;Dr.cursor--}return!0}function n(){var i,e;i=Dr.limit-Dr.cursor,r(Wr,97,305);for(var n=0;nDr.limit_backward&&(Dr.cursor--,e=Dr.limit-Dr.cursor,i()))?(Dr.cursor=Dr.limit-e,!0):(Dr.cursor=Dr.limit-n,r()?(Dr.cursor=Dr.limit-n,!1):(Dr.cursor=Dr.limit-n,!(Dr.cursor<=Dr.limit_backward)&&(Dr.cursor--,!!i()&&(Dr.cursor=Dr.limit-n,!0))))}function u(r){return t(r,function(){return Dr.in_grouping_b(Wr,97,305)})}function o(){return u(function(){return Dr.eq_s_b(1,"n")})}function s(){return u(function(){return Dr.eq_s_b(1,"s")})}function c(){return u(function(){return Dr.eq_s_b(1,"y")})}function l(){return t(function(){return Dr.in_grouping_b(Lr,105,305)},function(){return Dr.out_grouping_b(Wr,97,305)})}function a(){return Dr.find_among_b(ur,10)&&l()}function m(){return n()&&Dr.in_grouping_b(Lr,105,305)&&s()}function d(){return Dr.find_among_b(or,2)}function f(){return n()&&Dr.in_grouping_b(Lr,105,305)&&c()}function b(){return n()&&Dr.find_among_b(sr,4)}function w(){return n()&&Dr.find_among_b(cr,4)&&o()}function _(){return n()&&Dr.find_among_b(lr,2)&&c()}function k(){return n()&&Dr.find_among_b(ar,2)}function p(){return n()&&Dr.find_among_b(mr,4)}function g(){return n()&&Dr.find_among_b(dr,2)}function y(){return n()&&Dr.find_among_b(fr,4)}function z(){return n()&&Dr.find_among_b(br,2)}function v(){return n()&&Dr.find_among_b(wr,2)&&c()}function h(){return Dr.eq_s_b(2,"ki")}function q(){return n()&&Dr.find_among_b(_r,2)&&o()}function C(){return n()&&Dr.find_among_b(kr,4)&&c()}function P(){return n()&&Dr.find_among_b(pr,4)}function F(){return n()&&Dr.find_among_b(gr,4)&&c()}function S(){return Dr.find_among_b(yr,4)}function W(){return n()&&Dr.find_among_b(zr,2)}function L(){return n()&&Dr.find_among_b(vr,4)}function x(){return n()&&Dr.find_among_b(hr,8)}function A(){return Dr.find_among_b(qr,2)}function E(){return n()&&Dr.find_among_b(Cr,32)&&c()}function j(){return Dr.find_among_b(Pr,8)&&c()}function T(){return n()&&Dr.find_among_b(Fr,4)&&c()}function Z(){return Dr.eq_s_b(3,"ken")&&c()}function B(){var r=Dr.limit-Dr.cursor;return!(T()||(Dr.cursor=Dr.limit-r,E()||(Dr.cursor=Dr.limit-r,j()||(Dr.cursor=Dr.limit-r,Z()))))}function D(){if(A()){var r=Dr.limit-Dr.cursor;if(S()||(Dr.cursor=Dr.limit-r,W()||(Dr.cursor=Dr.limit-r,C()||(Dr.cursor=Dr.limit-r,P()||(Dr.cursor=Dr.limit-r,F()||(Dr.cursor=Dr.limit-r))))),T())return!1}return!0}function G(){if(W()){Dr.bra=Dr.cursor,Dr.slice_del();var r=Dr.limit-Dr.cursor;return Dr.ket=Dr.cursor,x()||(Dr.cursor=Dr.limit-r,E()||(Dr.cursor=Dr.limit-r,j()||(Dr.cursor=Dr.limit-r,T()||(Dr.cursor=Dr.limit-r)))),nr=!1,!1}return!0}function H(){if(!L())return!0;var 
r=Dr.limit-Dr.cursor;return!E()&&(Dr.cursor=Dr.limit-r,!j())}function I(){var r,i=Dr.limit-Dr.cursor;return!(S()||(Dr.cursor=Dr.limit-i,F()||(Dr.cursor=Dr.limit-i,P()||(Dr.cursor=Dr.limit-i,C()))))||(Dr.bra=Dr.cursor,Dr.slice_del(),r=Dr.limit-Dr.cursor,Dr.ket=Dr.cursor,T()||(Dr.cursor=Dr.limit-r),!1)}function J(){var r,i=Dr.limit-Dr.cursor;if(Dr.ket=Dr.cursor,nr=!0,B()&&(Dr.cursor=Dr.limit-i,D()&&(Dr.cursor=Dr.limit-i,G()&&(Dr.cursor=Dr.limit-i,H()&&(Dr.cursor=Dr.limit-i,I()))))){if(Dr.cursor=Dr.limit-i,!x())return;Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,r=Dr.limit-Dr.cursor,S()||(Dr.cursor=Dr.limit-r,W()||(Dr.cursor=Dr.limit-r,C()||(Dr.cursor=Dr.limit-r,P()||(Dr.cursor=Dr.limit-r,F()||(Dr.cursor=Dr.limit-r))))),T()||(Dr.cursor=Dr.limit-r)}Dr.bra=Dr.cursor,Dr.slice_del()}function K(){var r,i,e,n;if(Dr.ket=Dr.cursor,h()){if(r=Dr.limit-Dr.cursor,p())return Dr.bra=Dr.cursor,Dr.slice_del(),i=Dr.limit-Dr.cursor,Dr.ket=Dr.cursor,W()?(Dr.bra=Dr.cursor,Dr.slice_del(),K()):(Dr.cursor=Dr.limit-i,a()&&(Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,W()&&(Dr.bra=Dr.cursor,Dr.slice_del(),K()))),!0;if(Dr.cursor=Dr.limit-r,w()){if(Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,e=Dr.limit-Dr.cursor,d())Dr.bra=Dr.cursor,Dr.slice_del();else{if(Dr.cursor=Dr.limit-e,Dr.ket=Dr.cursor,!a()&&(Dr.cursor=Dr.limit-e,!m()&&(Dr.cursor=Dr.limit-e,!K())))return!0;Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,W()&&(Dr.bra=Dr.cursor,Dr.slice_del(),K())}return!0}if(Dr.cursor=Dr.limit-r,g()){if(n=Dr.limit-Dr.cursor,d())Dr.bra=Dr.cursor,Dr.slice_del();else if(Dr.cursor=Dr.limit-n,m())Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,W()&&(Dr.bra=Dr.cursor,Dr.slice_del(),K());else if(Dr.cursor=Dr.limit-n,!K())return!1;return!0}}return!1}function M(r){if(Dr.ket=Dr.cursor,!g()&&(Dr.cursor=Dr.limit-r,!k()))return!1;var i=Dr.limit-Dr.cursor;if(d())Dr.bra=Dr.cursor,Dr.slice_del();else if(Dr.cursor=Dr.limit-i,m())Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,W()&&(Dr.bra=Dr.cursor,Dr.slice_del(),K());else if(Dr.cursor=Dr.limit-i,!K())return!1;return!0}function N(r){if(Dr.ket=Dr.cursor,!z()&&(Dr.cursor=Dr.limit-r,!b()))return!1;var i=Dr.limit-Dr.cursor;return!(!m()&&(Dr.cursor=Dr.limit-i,!d()))&&(Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,W()&&(Dr.bra=Dr.cursor,Dr.slice_del(),K()),!0)}function O(){var r,i=Dr.limit-Dr.cursor;return Dr.ket=Dr.cursor,!(!w()&&(Dr.cursor=Dr.limit-i,!v()))&&(Dr.bra=Dr.cursor,Dr.slice_del(),r=Dr.limit-Dr.cursor,Dr.ket=Dr.cursor,!(!W()||(Dr.bra=Dr.cursor,Dr.slice_del(),!K()))||(Dr.cursor=Dr.limit-r,Dr.ket=Dr.cursor,!(a()||(Dr.cursor=Dr.limit-r,m()||(Dr.cursor=Dr.limit-r,K())))||(Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,W()&&(Dr.bra=Dr.cursor,Dr.slice_del(),K()),!0)))}function Q(){var r,i,e=Dr.limit-Dr.cursor;if(Dr.ket=Dr.cursor,!p()&&(Dr.cursor=Dr.limit-e,!f()&&(Dr.cursor=Dr.limit-e,!_())))return!1;if(Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,r=Dr.limit-Dr.cursor,a())Dr.bra=Dr.cursor,Dr.slice_del(),i=Dr.limit-Dr.cursor,Dr.ket=Dr.cursor,W()||(Dr.cursor=Dr.limit-i);else if(Dr.cursor=Dr.limit-r,!W())return!0;return Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,K(),!0}function R(){var r,i,e=Dr.limit-Dr.cursor;if(Dr.ket=Dr.cursor,W())return Dr.bra=Dr.cursor,Dr.slice_del(),void 
K();if(Dr.cursor=Dr.limit-e,Dr.ket=Dr.cursor,q())if(Dr.bra=Dr.cursor,Dr.slice_del(),r=Dr.limit-Dr.cursor,Dr.ket=Dr.cursor,d())Dr.bra=Dr.cursor,Dr.slice_del();else{if(Dr.cursor=Dr.limit-r,Dr.ket=Dr.cursor,!a()&&(Dr.cursor=Dr.limit-r,!m())){if(Dr.cursor=Dr.limit-r,Dr.ket=Dr.cursor,!W())return;if(Dr.bra=Dr.cursor,Dr.slice_del(),!K())return}Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,W()&&(Dr.bra=Dr.cursor,Dr.slice_del(),K())}else if(Dr.cursor=Dr.limit-e,!M(e)&&(Dr.cursor=Dr.limit-e,!N(e))){if(Dr.cursor=Dr.limit-e,Dr.ket=Dr.cursor,y())return Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,i=Dr.limit-Dr.cursor,void(a()?(Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,W()&&(Dr.bra=Dr.cursor,Dr.slice_del(),K())):(Dr.cursor=Dr.limit-i,W()?(Dr.bra=Dr.cursor,Dr.slice_del(),K()):(Dr.cursor=Dr.limit-i,K())));if(Dr.cursor=Dr.limit-e,!O()){if(Dr.cursor=Dr.limit-e,d())return Dr.bra=Dr.cursor,void Dr.slice_del();Dr.cursor=Dr.limit-e,K()||(Dr.cursor=Dr.limit-e,Q()||(Dr.cursor=Dr.limit-e,Dr.ket=Dr.cursor,(a()||(Dr.cursor=Dr.limit-e,m()))&&(Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,W()&&(Dr.bra=Dr.cursor,Dr.slice_del(),K()))))}}}function U(){var r;if(Dr.ket=Dr.cursor,r=Dr.find_among_b(Sr,4))switch(Dr.bra=Dr.cursor,r){case 1:Dr.slice_from("p");break;case 2:Dr.slice_from("ç");break;case 3:Dr.slice_from("t");break;case 4:Dr.slice_from("k")}}function V(){for(;;){var r=Dr.limit-Dr.cursor;if(Dr.in_grouping_b(Wr,97,305)){Dr.cursor=Dr.limit-r;break}if(Dr.cursor=Dr.limit-r,Dr.cursor<=Dr.limit_backward)return!1;Dr.cursor--}return!0}function X(r,i,e){if(Dr.cursor=Dr.limit-r,V()){var n=Dr.limit-Dr.cursor;if(!Dr.eq_s_b(1,i)&&(Dr.cursor=Dr.limit-n,!Dr.eq_s_b(1,e)))return!0;Dr.cursor=Dr.limit-r;var t=Dr.cursor;return Dr.insert(Dr.cursor,Dr.cursor,e),Dr.cursor=t,!1}return!0}function Y(){var r=Dr.limit-Dr.cursor;(Dr.eq_s_b(1,"d")||(Dr.cursor=Dr.limit-r,Dr.eq_s_b(1,"g")))&&X(r,"a","ı")&&X(r,"e","i")&&X(r,"o","u")&&X(r,"ö","ü")}function $(){for(var r,i=Dr.cursor,e=2;;){for(r=Dr.cursor;!Dr.in_grouping(Wr,97,305);){if(Dr.cursor>=Dr.limit)return Dr.cursor=r,!(e>0)&&(Dr.cursor=i,!0);Dr.cursor++}e--}}function rr(r,i,e){for(;!Dr.eq_s(i,e);){if(Dr.cursor>=Dr.limit)return!0;Dr.cursor++}return(tr=i)!=Dr.limit||(Dr.cursor=r,!1)}function ir(){var r=Dr.cursor;return!rr(r,2,"ad")||(Dr.cursor=r,!rr(r,5,"soyad"))}function er(){var r=Dr.cursor;return!ir()&&(Dr.limit_backward=r,Dr.cursor=Dr.limit,Y(),Dr.cursor=Dr.limit,U(),!0)}var nr,tr,ur=[new i("m",-1,-1),new i("n",-1,-1),new i("miz",-1,-1),new i("niz",-1,-1),new i("muz",-1,-1),new i("nuz",-1,-1),new i("müz",-1,-1),new i("nüz",-1,-1),new i("mız",-1,-1),new i("nız",-1,-1)],or=[new i("leri",-1,-1),new i("ları",-1,-1)],sr=[new i("ni",-1,-1),new i("nu",-1,-1),new i("nü",-1,-1),new i("nı",-1,-1)],cr=[new i("in",-1,-1),new i("un",-1,-1),new i("ün",-1,-1),new i("ın",-1,-1)],lr=[new i("a",-1,-1),new i("e",-1,-1)],ar=[new i("na",-1,-1),new i("ne",-1,-1)],mr=[new i("da",-1,-1),new i("ta",-1,-1),new i("de",-1,-1),new i("te",-1,-1)],dr=[new i("nda",-1,-1),new i("nde",-1,-1)],fr=[new i("dan",-1,-1),new i("tan",-1,-1),new i("den",-1,-1),new i("ten",-1,-1)],br=[new i("ndan",-1,-1),new i("nden",-1,-1)],wr=[new i("la",-1,-1),new i("le",-1,-1)],_r=[new i("ca",-1,-1),new i("ce",-1,-1)],kr=[new i("im",-1,-1),new i("um",-1,-1),new i("üm",-1,-1),new i("ım",-1,-1)],pr=[new i("sin",-1,-1),new i("sun",-1,-1),new i("sün",-1,-1),new i("sın",-1,-1)],gr=[new i("iz",-1,-1),new i("uz",-1,-1),new i("üz",-1,-1),new i("ız",-1,-1)],yr=[new i("siniz",-1,-1),new i("sunuz",-1,-1),new i("sünüz",-1,-1),new 
i("sınız",-1,-1)],zr=[new i("lar",-1,-1),new i("ler",-1,-1)],vr=[new i("niz",-1,-1),new i("nuz",-1,-1),new i("nüz",-1,-1),new i("nız",-1,-1)],hr=[new i("dir",-1,-1),new i("tir",-1,-1),new i("dur",-1,-1),new i("tur",-1,-1),new i("dür",-1,-1),new i("tür",-1,-1),new i("dır",-1,-1),new i("tır",-1,-1)],qr=[new i("casına",-1,-1),new i("cesine",-1,-1)],Cr=[new i("di",-1,-1),new i("ti",-1,-1),new i("dik",-1,-1),new i("tik",-1,-1),new i("duk",-1,-1),new i("tuk",-1,-1),new i("dük",-1,-1),new i("tük",-1,-1),new i("dık",-1,-1),new i("tık",-1,-1),new i("dim",-1,-1),new i("tim",-1,-1),new i("dum",-1,-1),new i("tum",-1,-1),new i("düm",-1,-1),new i("tüm",-1,-1),new i("dım",-1,-1),new i("tım",-1,-1),new i("din",-1,-1),new i("tin",-1,-1),new i("dun",-1,-1),new i("tun",-1,-1),new i("dün",-1,-1),new i("tün",-1,-1),new i("dın",-1,-1),new i("tın",-1,-1),new i("du",-1,-1),new i("tu",-1,-1),new i("dü",-1,-1),new i("tü",-1,-1),new i("dı",-1,-1),new i("tı",-1,-1)],Pr=[new i("sa",-1,-1),new i("se",-1,-1),new i("sak",-1,-1),new i("sek",-1,-1),new i("sam",-1,-1),new i("sem",-1,-1),new i("san",-1,-1),new i("sen",-1,-1)],Fr=[new i("miş",-1,-1),new i("muş",-1,-1),new i("müş",-1,-1),new i("mış",-1,-1)],Sr=[new i("b",-1,1),new i("c",-1,2),new i("d",-1,3),new i("ğ",-1,4)],Wr=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,8,0,0,0,0,0,0,1],Lr=[1,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0,0,0,0,0,1],xr=[1,64,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1],Ar=[17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,130],Er=[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1],jr=[17],Tr=[65],Zr=[65],Br=[["a",xr,97,305],["e",Ar,101,252],["ı",Er,97,305],["i",jr,101,105],["o",Tr,111,117],["ö",Zr,246,252],["u",Tr,111,117]],Dr=new e;this.setCurrent=function(r){Dr.setCurrent(r)},this.getCurrent=function(){return Dr.getCurrent()},this.stem=function(){return!!($()&&(Dr.limit_backward=Dr.cursor,Dr.cursor=Dr.limit,J(),Dr.cursor=Dr.limit,nr&&(R(),Dr.cursor=Dr.limit_backward,er())))}};return function(r){return"function"==typeof r.update?r.update(function(r){return n.setCurrent(r),n.stem(),n.getCurrent()}):(n.setCurrent(r),n.stem(),n.getCurrent())}}(),r.Pipeline.registerFunction(r.tr.stemmer,"stemmer-tr"),r.tr.stopWordFilter=r.generateStopWordFilter("acaba altmış altı ama ancak arada aslında ayrıca bana bazı belki ben benden beni benim beri beş bile bin bir biri birkaç birkez birçok birşey birşeyi biz bizden bize bizi bizim bu buna bunda bundan bunlar bunları bunların bunu bunun burada böyle böylece da daha dahi de defa değil diye diğer doksan dokuz dolayı dolayısıyla dört edecek eden ederek edilecek ediliyor edilmesi ediyor elli en etmesi etti ettiği ettiğini eğer gibi göre halen hangi hatta hem henüz hep hepsi her herhangi herkesin hiç hiçbir iki ile ilgili ise itibaren itibariyle için işte kadar karşın katrilyon kendi kendilerine kendini kendisi kendisine kendisini kez ki kim kimden kime kimi kimse kırk milyar milyon mu mü mı nasıl ne neden nedenle nerde nerede nereye niye niçin o olan olarak oldu olduklarını olduğu olduğunu olmadı olmadığı olmak olması olmayan olmaz olsa olsun olup olur olursa oluyor on ona ondan onlar onlardan onları onların onu onun otuz oysa pek rağmen sadece sanki sekiz seksen sen senden seni senin siz sizden sizi sizin tarafından trilyon tüm var vardı ve veya ya yani yapacak yapmak yaptı yaptıkları yaptığı yaptığını yapılan yapılması yapıyor yedi yerine yetmiş yine yirmi yoksa yüz zaten çok çünkü öyle üzere üç şey şeyden şeyi şeyler şu şuna şunda şundan şunları şunu şöyle".split(" 
")),r.Pipeline.registerFunction(r.tr.stopWordFilter,"stopWordFilter-tr")}}); \ No newline at end of file diff --git a/assets/javascripts/lunr/min/lunr.vi.min.js b/assets/javascripts/lunr/min/lunr.vi.min.js new file mode 100644 index 0000000..22aed28 --- /dev/null +++ b/assets/javascripts/lunr/min/lunr.vi.min.js @@ -0,0 +1 @@ +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.vi=function(){this.pipeline.reset(),this.pipeline.add(e.vi.stopWordFilter,e.vi.trimmer)},e.vi.wordCharacters="[A-Za-ẓ̀͐́͑̉̃̓ÂâÊêÔôĂ-ăĐ-đƠ-ơƯ-ư]",e.vi.trimmer=e.trimmerSupport.generateTrimmer(e.vi.wordCharacters),e.Pipeline.registerFunction(e.vi.trimmer,"trimmer-vi"),e.vi.stopWordFilter=e.generateStopWordFilter("là cái nhưng mà".split(" "))}}); \ No newline at end of file diff --git a/assets/javascripts/lunr/min/lunr.zh.min.js b/assets/javascripts/lunr/min/lunr.zh.min.js new file mode 100644 index 0000000..9838ef9 --- /dev/null +++ b/assets/javascripts/lunr/min/lunr.zh.min.js @@ -0,0 +1 @@ +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r(require("@node-rs/jieba")):r()(e.lunr)}(this,function(e){return function(r,t){if(void 0===r)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===r.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var i="2"==r.version[0];r.zh=function(){this.pipeline.reset(),this.pipeline.add(r.zh.trimmer,r.zh.stopWordFilter,r.zh.stemmer),i?this.tokenizer=r.zh.tokenizer:(r.tokenizer&&(r.tokenizer=r.zh.tokenizer),this.tokenizerFn&&(this.tokenizerFn=r.zh.tokenizer))},r.zh.tokenizer=function(n){if(!arguments.length||null==n||void 0==n)return[];if(Array.isArray(n))return n.map(function(e){return i?new r.Token(e.toLowerCase()):e.toLowerCase()});t&&e.load(t);var o=n.toString().trim().toLowerCase(),s=[];e.cut(o,!0).forEach(function(e){s=s.concat(e.split(" "))}),s=s.filter(function(e){return!!e});var u=0;return s.map(function(e,t){if(i){var n=o.indexOf(e,u),s={};return s.position=[n,e.length],s.index=t,u=n,new r.Token(e,s)}return e})},r.zh.wordCharacters="\\w一-龥",r.zh.trimmer=r.trimmerSupport.generateTrimmer(r.zh.wordCharacters),r.Pipeline.registerFunction(r.zh.trimmer,"trimmer-zh"),r.zh.stemmer=function(){return function(e){return e}}(),r.Pipeline.registerFunction(r.zh.stemmer,"stemmer-zh"),r.zh.stopWordFilter=r.generateStopWordFilter("的 一 不 在 人 有 是 为 以 于 上 他 而 后 之 来 及 了 因 下 可 到 由 这 与 也 此 但 并 个 其 已 无 小 我 们 起 最 再 今 去 好 只 又 或 很 亦 某 把 那 你 乃 它 吧 被 比 别 趁 当 从 到 得 打 凡 儿 尔 该 各 给 跟 和 何 还 即 几 既 看 据 距 靠 啦 了 另 么 每 们 嘛 拿 哪 那 您 凭 且 却 让 仍 啥 如 若 使 谁 虽 随 同 所 她 哇 嗡 往 哪 些 向 沿 哟 用 于 咱 则 怎 曾 至 致 着 诸 自".split(" ")),r.Pipeline.registerFunction(r.zh.stopWordFilter,"stopWordFilter-zh")}}); \ No newline at end of file diff --git a/assets/javascripts/lunr/tinyseg.js b/assets/javascripts/lunr/tinyseg.js new file mode 100644 index 0000000..167fa6d --- /dev/null +++ b/assets/javascripts/lunr/tinyseg.js @@ -0,0 +1,206 @@ +/** + * export the module via AMD, CommonJS or as a browser global + * Export code from https://github.com/umdjs/umd/blob/master/returnExports.js + */ +;(function (root, 
factory) { + if (typeof define === 'function' && define.amd) { + // AMD. Register as an anonymous module. + define(factory) + } else if (typeof exports === 'object') { + /** + * Node. Does not work with strict CommonJS, but + * only CommonJS-like environments that support module.exports, + * like Node. + */ + module.exports = factory() + } else { + // Browser globals (root is window) + factory()(root.lunr); + } +}(this, function () { + /** + * Just return a value to define the module export. + * This example returns an object, but the module + * can return a function as the exported value. + */ + + return function(lunr) { + // TinySegmenter 0.1 -- Super compact Japanese tokenizer in Javascript + // (c) 2008 Taku Kudo + // TinySegmenter is freely distributable under the terms of a new BSD licence. + // For details, see http://chasen.org/~taku/software/TinySegmenter/LICENCE.txt + + function TinySegmenter() { + var patterns = { + "[一二三四五六七八九十百千万億兆]":"M", + "[一-龠々〆ヵヶ]":"H", + "[ぁ-ん]":"I", + "[ァ-ヴーア-ン゙ー]":"K", + "[a-zA-Za-zA-Z]":"A", + "[0-90-9]":"N" + } + this.chartype_ = []; + for (var i in patterns) { + var regexp = new RegExp(i); + this.chartype_.push([regexp, patterns[i]]); + } + + this.BIAS__ = -332 + this.BC1__ = {"HH":6,"II":2461,"KH":406,"OH":-1378}; + this.BC2__ = {"AA":-3267,"AI":2744,"AN":-878,"HH":-4070,"HM":-1711,"HN":4012,"HO":3761,"IA":1327,"IH":-1184,"II":-1332,"IK":1721,"IO":5492,"KI":3831,"KK":-8741,"MH":-3132,"MK":3334,"OO":-2920}; + this.BC3__ = {"HH":996,"HI":626,"HK":-721,"HN":-1307,"HO":-836,"IH":-301,"KK":2762,"MK":1079,"MM":4034,"OA":-1652,"OH":266}; + this.BP1__ = {"BB":295,"OB":304,"OO":-125,"UB":352}; + this.BP2__ = {"BO":60,"OO":-1762}; + this.BQ1__ = {"BHH":1150,"BHM":1521,"BII":-1158,"BIM":886,"BMH":1208,"BNH":449,"BOH":-91,"BOO":-2597,"OHI":451,"OIH":-296,"OKA":1851,"OKH":-1020,"OKK":904,"OOO":2965}; + this.BQ2__ = {"BHH":118,"BHI":-1159,"BHM":466,"BIH":-919,"BKK":-1720,"BKO":864,"OHH":-1139,"OHM":-181,"OIH":153,"UHI":-1146}; + this.BQ3__ = {"BHH":-792,"BHI":2664,"BII":-299,"BKI":419,"BMH":937,"BMM":8335,"BNN":998,"BOH":775,"OHH":2174,"OHM":439,"OII":280,"OKH":1798,"OKI":-793,"OKO":-2242,"OMH":-2402,"OOO":11699}; + this.BQ4__ = {"BHH":-3895,"BIH":3761,"BII":-4654,"BIK":1348,"BKK":-1806,"BMI":-3385,"BOO":-12396,"OAH":926,"OHH":266,"OHK":-2036,"ONN":-973}; + this.BW1__ = {",と":660,",同":727,"B1あ":1404,"B1同":542,"、と":660,"、同":727,"」と":1682,"あっ":1505,"いう":1743,"いっ":-2055,"いる":672,"うし":-4817,"うん":665,"から":3472,"がら":600,"こう":-790,"こと":2083,"こん":-1262,"さら":-4143,"さん":4573,"した":2641,"して":1104,"すで":-3399,"そこ":1977,"それ":-871,"たち":1122,"ため":601,"った":3463,"つい":-802,"てい":805,"てき":1249,"でき":1127,"です":3445,"では":844,"とい":-4915,"とみ":1922,"どこ":3887,"ない":5713,"なっ":3015,"など":7379,"なん":-1113,"にし":2468,"には":1498,"にも":1671,"に対":-912,"の一":-501,"の中":741,"ませ":2448,"まで":1711,"まま":2600,"まる":-2155,"やむ":-1947,"よっ":-2565,"れた":2369,"れで":-913,"をし":1860,"を見":731,"亡く":-1886,"京都":2558,"取り":-2784,"大き":-2604,"大阪":1497,"平方":-2314,"引き":-1336,"日本":-195,"本当":-2423,"毎日":-2113,"目指":-724,"B1あ":1404,"B1同":542,"」と":1682}; + this.BW2__ = 
{"..":-11822,"11":-669,"――":-5730,"−−":-13175,"いう":-1609,"うか":2490,"かし":-1350,"かも":-602,"から":-7194,"かれ":4612,"がい":853,"がら":-3198,"きた":1941,"くな":-1597,"こと":-8392,"この":-4193,"させ":4533,"され":13168,"さん":-3977,"しい":-1819,"しか":-545,"した":5078,"して":972,"しな":939,"その":-3744,"たい":-1253,"たた":-662,"ただ":-3857,"たち":-786,"たと":1224,"たは":-939,"った":4589,"って":1647,"っと":-2094,"てい":6144,"てき":3640,"てく":2551,"ては":-3110,"ても":-3065,"でい":2666,"でき":-1528,"でし":-3828,"です":-4761,"でも":-4203,"とい":1890,"とこ":-1746,"とと":-2279,"との":720,"とみ":5168,"とも":-3941,"ない":-2488,"なが":-1313,"など":-6509,"なの":2614,"なん":3099,"にお":-1615,"にし":2748,"にな":2454,"によ":-7236,"に対":-14943,"に従":-4688,"に関":-11388,"のか":2093,"ので":-7059,"のに":-6041,"のの":-6125,"はい":1073,"はが":-1033,"はず":-2532,"ばれ":1813,"まし":-1316,"まで":-6621,"まれ":5409,"めて":-3153,"もい":2230,"もの":-10713,"らか":-944,"らし":-1611,"らに":-1897,"りし":651,"りま":1620,"れた":4270,"れて":849,"れば":4114,"ろう":6067,"われ":7901,"を通":-11877,"んだ":728,"んな":-4115,"一人":602,"一方":-1375,"一日":970,"一部":-1051,"上が":-4479,"会社":-1116,"出て":2163,"分の":-7758,"同党":970,"同日":-913,"大阪":-2471,"委員":-1250,"少な":-1050,"年度":-8669,"年間":-1626,"府県":-2363,"手権":-1982,"新聞":-4066,"日新":-722,"日本":-7068,"日米":3372,"曜日":-601,"朝鮮":-2355,"本人":-2697,"東京":-1543,"然と":-1384,"社会":-1276,"立て":-990,"第に":-1612,"米国":-4268,"11":-669}; + this.BW3__ = {"あた":-2194,"あり":719,"ある":3846,"い.":-1185,"い。":-1185,"いい":5308,"いえ":2079,"いく":3029,"いた":2056,"いっ":1883,"いる":5600,"いわ":1527,"うち":1117,"うと":4798,"えと":1454,"か.":2857,"か。":2857,"かけ":-743,"かっ":-4098,"かに":-669,"から":6520,"かり":-2670,"が,":1816,"が、":1816,"がき":-4855,"がけ":-1127,"がっ":-913,"がら":-4977,"がり":-2064,"きた":1645,"けど":1374,"こと":7397,"この":1542,"ころ":-2757,"さい":-714,"さを":976,"し,":1557,"し、":1557,"しい":-3714,"した":3562,"して":1449,"しな":2608,"しま":1200,"す.":-1310,"す。":-1310,"する":6521,"ず,":3426,"ず、":3426,"ずに":841,"そう":428,"た.":8875,"た。":8875,"たい":-594,"たの":812,"たり":-1183,"たる":-853,"だ.":4098,"だ。":4098,"だっ":1004,"った":-4748,"って":300,"てい":6240,"てお":855,"ても":302,"です":1437,"でに":-1482,"では":2295,"とう":-1387,"とし":2266,"との":541,"とも":-3543,"どう":4664,"ない":1796,"なく":-903,"など":2135,"に,":-1021,"に、":-1021,"にし":1771,"にな":1906,"には":2644,"の,":-724,"の、":-724,"の子":-1000,"は,":1337,"は、":1337,"べき":2181,"まし":1113,"ます":6943,"まっ":-1549,"まで":6154,"まれ":-793,"らし":1479,"られ":6820,"るる":3818,"れ,":854,"れ、":854,"れた":1850,"れて":1375,"れば":-3246,"れる":1091,"われ":-605,"んだ":606,"んで":798,"カ月":990,"会議":860,"入り":1232,"大会":2217,"始め":1681,"市":965,"新聞":-5055,"日,":974,"日、":974,"社会":2024,"カ月":990}; + this.TC1__ = {"AAA":1093,"HHH":1029,"HHM":580,"HII":998,"HOH":-390,"HOM":-331,"IHI":1169,"IOH":-142,"IOI":-1015,"IOM":467,"MMH":187,"OOI":-1832}; + this.TC2__ = {"HHO":2088,"HII":-1023,"HMM":-1154,"IHI":-1965,"KKH":703,"OII":-2649}; + this.TC3__ = {"AAA":-294,"HHH":346,"HHI":-341,"HII":-1088,"HIK":731,"HOH":-1486,"IHH":128,"IHI":-3041,"IHO":-1935,"IIH":-825,"IIM":-1035,"IOI":-542,"KHH":-1216,"KKA":491,"KKH":-1217,"KOK":-1009,"MHH":-2694,"MHM":-457,"MHO":123,"MMH":-471,"NNH":-1689,"NNO":662,"OHO":-3393}; + this.TC4__ = {"HHH":-203,"HHI":1344,"HHK":365,"HHM":-122,"HHN":182,"HHO":669,"HIH":804,"HII":679,"HOH":446,"IHH":695,"IHO":-2324,"IIH":321,"III":1497,"IIO":656,"IOO":54,"KAK":4845,"KKA":3386,"KKK":3065,"MHH":-405,"MHI":201,"MMH":-241,"MMM":661,"MOM":841}; + this.TQ1__ = {"BHHH":-227,"BHHI":316,"BHIH":-132,"BIHH":60,"BIII":1595,"BNHH":-744,"BOHH":225,"BOOO":-908,"OAKK":482,"OHHH":281,"OHIH":249,"OIHI":200,"OIIH":-68}; + this.TQ2__ = {"BIHH":-1401,"BIII":-1033,"BKAK":-543,"BOOO":-5591}; + this.TQ3__ = 
{"BHHH":478,"BHHM":-1073,"BHIH":222,"BHII":-504,"BIIH":-116,"BIII":-105,"BMHI":-863,"BMHM":-464,"BOMH":620,"OHHH":346,"OHHI":1729,"OHII":997,"OHMH":481,"OIHH":623,"OIIH":1344,"OKAK":2792,"OKHH":587,"OKKA":679,"OOHH":110,"OOII":-685}; + this.TQ4__ = {"BHHH":-721,"BHHM":-3604,"BHII":-966,"BIIH":-607,"BIII":-2181,"OAAA":-2763,"OAKK":180,"OHHH":-294,"OHHI":2446,"OHHO":480,"OHIH":-1573,"OIHH":1935,"OIHI":-493,"OIIH":626,"OIII":-4007,"OKAK":-8156}; + this.TW1__ = {"につい":-4681,"東京都":2026}; + this.TW2__ = {"ある程":-2049,"いった":-1256,"ころが":-2434,"しょう":3873,"その後":-4430,"だって":-1049,"ていた":1833,"として":-4657,"ともに":-4517,"もので":1882,"一気に":-792,"初めて":-1512,"同時に":-8097,"大きな":-1255,"対して":-2721,"社会党":-3216}; + this.TW3__ = {"いただ":-1734,"してい":1314,"として":-4314,"につい":-5483,"にとっ":-5989,"に当た":-6247,"ので,":-727,"ので、":-727,"のもの":-600,"れから":-3752,"十二月":-2287}; + this.TW4__ = {"いう.":8576,"いう。":8576,"からな":-2348,"してい":2958,"たが,":1516,"たが、":1516,"ている":1538,"という":1349,"ました":5543,"ません":1097,"ようと":-4258,"よると":5865}; + this.UC1__ = {"A":484,"K":93,"M":645,"O":-505}; + this.UC2__ = {"A":819,"H":1059,"I":409,"M":3987,"N":5775,"O":646}; + this.UC3__ = {"A":-1370,"I":2311}; + this.UC4__ = {"A":-2643,"H":1809,"I":-1032,"K":-3450,"M":3565,"N":3876,"O":6646}; + this.UC5__ = {"H":313,"I":-1238,"K":-799,"M":539,"O":-831}; + this.UC6__ = {"H":-506,"I":-253,"K":87,"M":247,"O":-387}; + this.UP1__ = {"O":-214}; + this.UP2__ = {"B":69,"O":935}; + this.UP3__ = {"B":189}; + this.UQ1__ = {"BH":21,"BI":-12,"BK":-99,"BN":142,"BO":-56,"OH":-95,"OI":477,"OK":410,"OO":-2422}; + this.UQ2__ = {"BH":216,"BI":113,"OK":1759}; + this.UQ3__ = {"BA":-479,"BH":42,"BI":1913,"BK":-7198,"BM":3160,"BN":6427,"BO":14761,"OI":-827,"ON":-3212}; + this.UW1__ = {",":156,"、":156,"「":-463,"あ":-941,"う":-127,"が":-553,"き":121,"こ":505,"で":-201,"と":-547,"ど":-123,"に":-789,"の":-185,"は":-847,"も":-466,"や":-470,"よ":182,"ら":-292,"り":208,"れ":169,"を":-446,"ん":-137,"・":-135,"主":-402,"京":-268,"区":-912,"午":871,"国":-460,"大":561,"委":729,"市":-411,"日":-141,"理":361,"生":-408,"県":-386,"都":-718,"「":-463,"・":-135}; + this.UW2__ = {",":-829,"、":-829,"〇":892,"「":-645,"」":3145,"あ":-538,"い":505,"う":134,"お":-502,"か":1454,"が":-856,"く":-412,"こ":1141,"さ":878,"ざ":540,"し":1529,"す":-675,"せ":300,"そ":-1011,"た":188,"だ":1837,"つ":-949,"て":-291,"で":-268,"と":-981,"ど":1273,"な":1063,"に":-1764,"の":130,"は":-409,"ひ":-1273,"べ":1261,"ま":600,"も":-1263,"や":-402,"よ":1639,"り":-579,"る":-694,"れ":571,"を":-2516,"ん":2095,"ア":-587,"カ":306,"キ":568,"ッ":831,"三":-758,"不":-2150,"世":-302,"中":-968,"主":-861,"事":492,"人":-123,"会":978,"保":362,"入":548,"初":-3025,"副":-1566,"北":-3414,"区":-422,"大":-1769,"天":-865,"太":-483,"子":-1519,"学":760,"実":1023,"小":-2009,"市":-813,"年":-1060,"強":1067,"手":-1519,"揺":-1033,"政":1522,"文":-1355,"新":-1682,"日":-1815,"明":-1462,"最":-630,"朝":-1843,"本":-1650,"東":-931,"果":-665,"次":-2378,"民":-180,"気":-1740,"理":752,"発":529,"目":-1584,"相":-242,"県":-1165,"立":-763,"第":810,"米":509,"自":-1353,"行":838,"西":-744,"見":-3874,"調":1010,"議":1198,"込":3041,"開":1758,"間":-1257,"「":-645,"」":3145,"ッ":831,"ア":-587,"カ":306,"キ":568}; + this.UW3__ = 
{",":4889,"1":-800,"−":-1723,"、":4889,"々":-2311,"〇":5827,"」":2670,"〓":-3573,"あ":-2696,"い":1006,"う":2342,"え":1983,"お":-4864,"か":-1163,"が":3271,"く":1004,"け":388,"げ":401,"こ":-3552,"ご":-3116,"さ":-1058,"し":-395,"す":584,"せ":3685,"そ":-5228,"た":842,"ち":-521,"っ":-1444,"つ":-1081,"て":6167,"で":2318,"と":1691,"ど":-899,"な":-2788,"に":2745,"の":4056,"は":4555,"ひ":-2171,"ふ":-1798,"へ":1199,"ほ":-5516,"ま":-4384,"み":-120,"め":1205,"も":2323,"や":-788,"よ":-202,"ら":727,"り":649,"る":5905,"れ":2773,"わ":-1207,"を":6620,"ん":-518,"ア":551,"グ":1319,"ス":874,"ッ":-1350,"ト":521,"ム":1109,"ル":1591,"ロ":2201,"ン":278,"・":-3794,"一":-1619,"下":-1759,"世":-2087,"両":3815,"中":653,"主":-758,"予":-1193,"二":974,"人":2742,"今":792,"他":1889,"以":-1368,"低":811,"何":4265,"作":-361,"保":-2439,"元":4858,"党":3593,"全":1574,"公":-3030,"六":755,"共":-1880,"円":5807,"再":3095,"分":457,"初":2475,"別":1129,"前":2286,"副":4437,"力":365,"動":-949,"務":-1872,"化":1327,"北":-1038,"区":4646,"千":-2309,"午":-783,"協":-1006,"口":483,"右":1233,"各":3588,"合":-241,"同":3906,"和":-837,"員":4513,"国":642,"型":1389,"場":1219,"外":-241,"妻":2016,"学":-1356,"安":-423,"実":-1008,"家":1078,"小":-513,"少":-3102,"州":1155,"市":3197,"平":-1804,"年":2416,"広":-1030,"府":1605,"度":1452,"建":-2352,"当":-3885,"得":1905,"思":-1291,"性":1822,"戸":-488,"指":-3973,"政":-2013,"教":-1479,"数":3222,"文":-1489,"新":1764,"日":2099,"旧":5792,"昨":-661,"時":-1248,"曜":-951,"最":-937,"月":4125,"期":360,"李":3094,"村":364,"東":-805,"核":5156,"森":2438,"業":484,"氏":2613,"民":-1694,"決":-1073,"法":1868,"海":-495,"無":979,"物":461,"特":-3850,"生":-273,"用":914,"町":1215,"的":7313,"直":-1835,"省":792,"県":6293,"知":-1528,"私":4231,"税":401,"立":-960,"第":1201,"米":7767,"系":3066,"約":3663,"級":1384,"統":-4229,"総":1163,"線":1255,"者":6457,"能":725,"自":-2869,"英":785,"見":1044,"調":-562,"財":-733,"費":1777,"車":1835,"軍":1375,"込":-1504,"通":-1136,"選":-681,"郎":1026,"郡":4404,"部":1200,"金":2163,"長":421,"開":-1432,"間":1302,"関":-1282,"雨":2009,"電":-1045,"非":2066,"駅":1620,"1":-800,"」":2670,"・":-3794,"ッ":-1350,"ア":551,"グ":1319,"ス":874,"ト":521,"ム":1109,"ル":1591,"ロ":2201,"ン":278}; + this.UW4__ = 
{",":3930,".":3508,"―":-4841,"、":3930,"。":3508,"〇":4999,"「":1895,"」":3798,"〓":-5156,"あ":4752,"い":-3435,"う":-640,"え":-2514,"お":2405,"か":530,"が":6006,"き":-4482,"ぎ":-3821,"く":-3788,"け":-4376,"げ":-4734,"こ":2255,"ご":1979,"さ":2864,"し":-843,"じ":-2506,"す":-731,"ず":1251,"せ":181,"そ":4091,"た":5034,"だ":5408,"ち":-3654,"っ":-5882,"つ":-1659,"て":3994,"で":7410,"と":4547,"な":5433,"に":6499,"ぬ":1853,"ね":1413,"の":7396,"は":8578,"ば":1940,"ひ":4249,"び":-4134,"ふ":1345,"へ":6665,"べ":-744,"ほ":1464,"ま":1051,"み":-2082,"む":-882,"め":-5046,"も":4169,"ゃ":-2666,"や":2795,"ょ":-1544,"よ":3351,"ら":-2922,"り":-9726,"る":-14896,"れ":-2613,"ろ":-4570,"わ":-1783,"を":13150,"ん":-2352,"カ":2145,"コ":1789,"セ":1287,"ッ":-724,"ト":-403,"メ":-1635,"ラ":-881,"リ":-541,"ル":-856,"ン":-3637,"・":-4371,"ー":-11870,"一":-2069,"中":2210,"予":782,"事":-190,"井":-1768,"人":1036,"以":544,"会":950,"体":-1286,"作":530,"側":4292,"先":601,"党":-2006,"共":-1212,"内":584,"円":788,"初":1347,"前":1623,"副":3879,"力":-302,"動":-740,"務":-2715,"化":776,"区":4517,"協":1013,"参":1555,"合":-1834,"和":-681,"員":-910,"器":-851,"回":1500,"国":-619,"園":-1200,"地":866,"場":-1410,"塁":-2094,"士":-1413,"多":1067,"大":571,"子":-4802,"学":-1397,"定":-1057,"寺":-809,"小":1910,"屋":-1328,"山":-1500,"島":-2056,"川":-2667,"市":2771,"年":374,"庁":-4556,"後":456,"性":553,"感":916,"所":-1566,"支":856,"改":787,"政":2182,"教":704,"文":522,"方":-856,"日":1798,"時":1829,"最":845,"月":-9066,"木":-485,"来":-442,"校":-360,"業":-1043,"氏":5388,"民":-2716,"気":-910,"沢":-939,"済":-543,"物":-735,"率":672,"球":-1267,"生":-1286,"産":-1101,"田":-2900,"町":1826,"的":2586,"目":922,"省":-3485,"県":2997,"空":-867,"立":-2112,"第":788,"米":2937,"系":786,"約":2171,"経":1146,"統":-1169,"総":940,"線":-994,"署":749,"者":2145,"能":-730,"般":-852,"行":-792,"規":792,"警":-1184,"議":-244,"谷":-1000,"賞":730,"車":-1481,"軍":1158,"輪":-1433,"込":-3370,"近":929,"道":-1291,"選":2596,"郎":-4866,"都":1192,"野":-1100,"銀":-2213,"長":357,"間":-2344,"院":-2297,"際":-2604,"電":-878,"領":-1659,"題":-792,"館":-1984,"首":1749,"高":2120,"「":1895,"」":3798,"・":-4371,"ッ":-724,"ー":-11870,"カ":2145,"コ":1789,"セ":1287,"ト":-403,"メ":-1635,"ラ":-881,"リ":-541,"ル":-856,"ン":-3637}; + this.UW5__ = {",":465,".":-299,"1":-514,"E2":-32768,"]":-2762,"、":465,"。":-299,"「":363,"あ":1655,"い":331,"う":-503,"え":1199,"お":527,"か":647,"が":-421,"き":1624,"ぎ":1971,"く":312,"げ":-983,"さ":-1537,"し":-1371,"す":-852,"だ":-1186,"ち":1093,"っ":52,"つ":921,"て":-18,"で":-850,"と":-127,"ど":1682,"な":-787,"に":-1224,"の":-635,"は":-578,"べ":1001,"み":502,"め":865,"ゃ":3350,"ょ":854,"り":-208,"る":429,"れ":504,"わ":419,"を":-1264,"ん":327,"イ":241,"ル":451,"ン":-343,"中":-871,"京":722,"会":-1153,"党":-654,"務":3519,"区":-901,"告":848,"員":2104,"大":-1296,"学":-548,"定":1785,"嵐":-1304,"市":-2991,"席":921,"年":1763,"思":872,"所":-814,"挙":1618,"新":-1682,"日":218,"月":-4353,"査":932,"格":1356,"機":-1508,"氏":-1347,"田":240,"町":-3912,"的":-3149,"相":1319,"省":-1052,"県":-4003,"研":-997,"社":-278,"空":-813,"統":1955,"者":-2233,"表":663,"語":-1073,"議":1219,"選":-1018,"郎":-368,"長":786,"間":1191,"題":2368,"館":-689,"1":-514,"E2":-32768,"「":363,"イ":241,"ル":451,"ン":-343}; + this.UW6__ = {",":227,".":808,"1":-270,"E1":306,"、":227,"。":808,"あ":-307,"う":189,"か":241,"が":-73,"く":-121,"こ":-200,"じ":1782,"す":383,"た":-428,"っ":573,"て":-1014,"で":101,"と":-105,"な":-253,"に":-149,"の":-417,"は":-236,"も":-206,"り":187,"る":-135,"を":195,"ル":-673,"ン":-496,"一":-277,"中":201,"件":-800,"会":624,"前":302,"区":1792,"員":-1212,"委":798,"学":-960,"市":887,"広":-695,"後":535,"業":-697,"相":753,"社":-507,"福":974,"空":-822,"者":1811,"連":463,"郎":1082,"1":-270,"E1":306,"ル":-673,"ン":-496}; + + return this; + } + TinySegmenter.prototype.ctype_ = function(str) { + for (var i in this.chartype_) { + if 
(str.match(this.chartype_[i][0])) { + return this.chartype_[i][1]; + } + } + return "O"; + } + + TinySegmenter.prototype.ts_ = function(v) { + if (v) { return v; } + return 0; + } + + TinySegmenter.prototype.segment = function(input) { + if (input == null || input == undefined || input == "") { + return []; + } + var result = []; + var seg = ["B3","B2","B1"]; + var ctype = ["O","O","O"]; + var o = input.split(""); + for (i = 0; i < o.length; ++i) { + seg.push(o[i]); + ctype.push(this.ctype_(o[i])) + } + seg.push("E1"); + seg.push("E2"); + seg.push("E3"); + ctype.push("O"); + ctype.push("O"); + ctype.push("O"); + var word = seg[3]; + var p1 = "U"; + var p2 = "U"; + var p3 = "U"; + for (var i = 4; i < seg.length - 3; ++i) { + var score = this.BIAS__; + var w1 = seg[i-3]; + var w2 = seg[i-2]; + var w3 = seg[i-1]; + var w4 = seg[i]; + var w5 = seg[i+1]; + var w6 = seg[i+2]; + var c1 = ctype[i-3]; + var c2 = ctype[i-2]; + var c3 = ctype[i-1]; + var c4 = ctype[i]; + var c5 = ctype[i+1]; + var c6 = ctype[i+2]; + score += this.ts_(this.UP1__[p1]); + score += this.ts_(this.UP2__[p2]); + score += this.ts_(this.UP3__[p3]); + score += this.ts_(this.BP1__[p1 + p2]); + score += this.ts_(this.BP2__[p2 + p3]); + score += this.ts_(this.UW1__[w1]); + score += this.ts_(this.UW2__[w2]); + score += this.ts_(this.UW3__[w3]); + score += this.ts_(this.UW4__[w4]); + score += this.ts_(this.UW5__[w5]); + score += this.ts_(this.UW6__[w6]); + score += this.ts_(this.BW1__[w2 + w3]); + score += this.ts_(this.BW2__[w3 + w4]); + score += this.ts_(this.BW3__[w4 + w5]); + score += this.ts_(this.TW1__[w1 + w2 + w3]); + score += this.ts_(this.TW2__[w2 + w3 + w4]); + score += this.ts_(this.TW3__[w3 + w4 + w5]); + score += this.ts_(this.TW4__[w4 + w5 + w6]); + score += this.ts_(this.UC1__[c1]); + score += this.ts_(this.UC2__[c2]); + score += this.ts_(this.UC3__[c3]); + score += this.ts_(this.UC4__[c4]); + score += this.ts_(this.UC5__[c5]); + score += this.ts_(this.UC6__[c6]); + score += this.ts_(this.BC1__[c2 + c3]); + score += this.ts_(this.BC2__[c3 + c4]); + score += this.ts_(this.BC3__[c4 + c5]); + score += this.ts_(this.TC1__[c1 + c2 + c3]); + score += this.ts_(this.TC2__[c2 + c3 + c4]); + score += this.ts_(this.TC3__[c3 + c4 + c5]); + score += this.ts_(this.TC4__[c4 + c5 + c6]); + // score += this.ts_(this.TC5__[c4 + c5 + c6]); + score += this.ts_(this.UQ1__[p1 + c1]); + score += this.ts_(this.UQ2__[p2 + c2]); + score += this.ts_(this.UQ3__[p3 + c3]); + score += this.ts_(this.BQ1__[p2 + c2 + c3]); + score += this.ts_(this.BQ2__[p2 + c3 + c4]); + score += this.ts_(this.BQ3__[p3 + c2 + c3]); + score += this.ts_(this.BQ4__[p3 + c3 + c4]); + score += this.ts_(this.TQ1__[p2 + c1 + c2 + c3]); + score += this.ts_(this.TQ2__[p2 + c2 + c3 + c4]); + score += this.ts_(this.TQ3__[p3 + c1 + c2 + c3]); + score += this.ts_(this.TQ4__[p3 + c2 + c3 + c4]); + var p = "O"; + if (score > 0) { + result.push(word); + word = ""; + p = "B"; + } + p1 = p2; + p2 = p3; + p3 = p; + word += seg[i]; + } + result.push(word); + + return result; + } + + lunr.TinySegmenter = TinySegmenter; + }; + +})); \ No newline at end of file diff --git a/assets/javascripts/lunr/wordcut.js b/assets/javascripts/lunr/wordcut.js new file mode 100644 index 0000000..146f4b4 --- /dev/null +++ b/assets/javascripts/lunr/wordcut.js @@ -0,0 +1,6708 @@ +(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof 
global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}(g.lunr || (g.lunr = {})).wordcut = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o 1; + }) + this.addWords(words, false) + } + if(finalize){ + this.finalizeDict(); + } + }, + + dictSeek: function (l, r, ch, strOffset, pos) { + var ans = null; + while (l <= r) { + var m = Math.floor((l + r) / 2), + dict_item = this.dict[m], + len = dict_item.length; + if (len <= strOffset) { + l = m + 1; + } else { + var ch_ = dict_item[strOffset]; + if (ch_ < ch) { + l = m + 1; + } else if (ch_ > ch) { + r = m - 1; + } else { + ans = m; + if (pos == LEFT) { + r = m - 1; + } else { + l = m + 1; + } + } + } + } + return ans; + }, + + isFinal: function (acceptor) { + return this.dict[acceptor.l].length == acceptor.strOffset; + }, + + createAcceptor: function () { + return { + l: 0, + r: this.dict.length - 1, + strOffset: 0, + isFinal: false, + dict: this, + transit: function (ch) { + return this.dict.transit(this, ch); + }, + isError: false, + tag: "DICT", + w: 1, + type: "DICT" + }; + }, + + transit: function (acceptor, ch) { + var l = this.dictSeek(acceptor.l, + acceptor.r, + ch, + acceptor.strOffset, + LEFT); + if (l !== null) { + var r = this.dictSeek(l, + acceptor.r, + ch, + acceptor.strOffset, + RIGHT); + acceptor.l = l; + acceptor.r = r; + acceptor.strOffset++; + acceptor.isFinal = this.isFinal(acceptor); + } else { + acceptor.isError = true; + } + return acceptor; + }, + + sortuniq: function(a){ + return a.sort().filter(function(item, pos, arr){ + return !pos || item != arr[pos - 1]; + }) + }, + + flatten: function(a){ + //[[1,2],[3]] -> [1,2,3] + return [].concat.apply([], a); + } +}; +module.exports = WordcutDict; + +}).call(this,"/dist/tmp") +},{"glob":16,"path":22}],3:[function(require,module,exports){ +var WordRule = { + createAcceptor: function(tag) { + if (tag["WORD_RULE"]) + return null; + + return {strOffset: 0, + isFinal: false, + transit: function(ch) { + var lch = ch.toLowerCase(); + if (lch >= "a" && lch <= "z") { + this.isFinal = true; + this.strOffset++; + } else { + this.isError = true; + } + return this; + }, + isError: false, + tag: "WORD_RULE", + type: "WORD_RULE", + w: 1}; + } +}; + +var NumberRule = { + createAcceptor: function(tag) { + if (tag["NUMBER_RULE"]) + return null; + + return {strOffset: 0, + isFinal: false, + transit: function(ch) { + if (ch >= "0" && ch <= "9") { + this.isFinal = true; + this.strOffset++; + } else { + this.isError = true; + } + return this; + }, + isError: false, + tag: "NUMBER_RULE", + type: "NUMBER_RULE", + w: 1}; + } +}; + +var SpaceRule = { + tag: "SPACE_RULE", + createAcceptor: function(tag) { + + if (tag["SPACE_RULE"]) + return null; + + return {strOffset: 0, + isFinal: false, + transit: function(ch) { + if (ch == " " || ch == "\t" || ch == "\r" || ch == "\n" || + ch == "\u00A0" || ch=="\u2003"//nbsp and emsp + ) { + this.isFinal = true; + this.strOffset++; + } else { + this.isError = true; + } + return this; + }, + isError: false, + tag: SpaceRule.tag, + w: 1, + type: "SPACE_RULE"}; + } +} + +var SingleSymbolRule = { + tag: "SINSYM", + createAcceptor: 
function(tag) { + return {strOffset: 0, + isFinal: false, + transit: function(ch) { + if (this.strOffset == 0 && ch.match(/^[\@\(\)\/\,\-\."`]$/)) { + this.isFinal = true; + this.strOffset++; + } else { + this.isError = true; + } + return this; + }, + isError: false, + tag: "SINSYM", + w: 1, + type: "SINSYM"}; + } +} + + +var LatinRules = [WordRule, SpaceRule, SingleSymbolRule, NumberRule]; + +module.exports = LatinRules; + +},{}],4:[function(require,module,exports){ +var _ = require("underscore") + , WordcutCore = require("./wordcut_core"); +var PathInfoBuilder = { + + /* + buildByPartAcceptors: function(path, acceptors, i) { + var + var genInfos = partAcceptors.reduce(function(genInfos, acceptor) { + + }, []); + + return genInfos; + } + */ + + buildByAcceptors: function(path, finalAcceptors, i) { + var self = this; + var infos = finalAcceptors.map(function(acceptor) { + var p = i - acceptor.strOffset + 1 + , _info = path[p]; + + var info = {p: p, + mw: _info.mw + (acceptor.mw === undefined ? 0 : acceptor.mw), + w: acceptor.w + _info.w, + unk: (acceptor.unk ? acceptor.unk : 0) + _info.unk, + type: acceptor.type}; + + if (acceptor.type == "PART") { + for(var j = p + 1; j <= i; j++) { + path[j].merge = p; + } + info.merge = p; + } + + return info; + }); + return infos.filter(function(info) { return info; }); + }, + + fallback: function(path, leftBoundary, text, i) { + var _info = path[leftBoundary]; + if (text[i].match(/[\u0E48-\u0E4E]/)) { + if (leftBoundary != 0) + leftBoundary = path[leftBoundary].p; + return {p: leftBoundary, + mw: 0, + w: 1 + _info.w, + unk: 1 + _info.unk, + type: "UNK"}; +/* } else if(leftBoundary > 0 && path[leftBoundary].type !== "UNK") { + leftBoundary = path[leftBoundary].p; + return {p: leftBoundary, + w: 1 + _info.w, + unk: 1 + _info.unk, + type: "UNK"}; */ + } else { + return {p: leftBoundary, + mw: _info.mw, + w: 1 + _info.w, + unk: 1 + _info.unk, + type: "UNK"}; + } + }, + + build: function(path, finalAcceptors, i, leftBoundary, text) { + var basicPathInfos = this.buildByAcceptors(path, finalAcceptors, i); + if (basicPathInfos.length > 0) { + return basicPathInfos; + } else { + return [this.fallback(path, leftBoundary, text, i)]; + } + } +}; + +module.exports = function() { + return _.clone(PathInfoBuilder); +} + +},{"./wordcut_core":8,"underscore":25}],5:[function(require,module,exports){ +var _ = require("underscore"); + + +var PathSelector = { + selectPath: function(paths) { + var path = paths.reduce(function(selectedPath, path) { + if (selectedPath == null) { + return path; + } else { + if (path.unk < selectedPath.unk) + return path; + if (path.unk == selectedPath.unk) { + if (path.mw < selectedPath.mw) + return path + if (path.mw == selectedPath.mw) { + if (path.w < selectedPath.w) + return path; + } + } + return selectedPath; + } + }, null); + return path; + }, + + createPath: function() { + return [{p:null, w:0, unk:0, type: "INIT", mw:0}]; + } +}; + +module.exports = function() { + return _.clone(PathSelector); +}; + +},{"underscore":25}],6:[function(require,module,exports){ +function isMatch(pat, offset, ch) { + if (pat.length <= offset) + return false; + var _ch = pat[offset]; + return _ch == ch || + (_ch.match(/[กข]/) && ch.match(/[ก-ฮ]/)) || + (_ch.match(/[มบ]/) && ch.match(/[ก-ฮ]/)) || + (_ch.match(/\u0E49/) && ch.match(/[\u0E48-\u0E4B]/)); +} + +var Rule0 = { + pat: "เหก็ม", + createAcceptor: function(tag) { + return {strOffset: 0, + isFinal: false, + transit: function(ch) { + if (isMatch(Rule0.pat, this.strOffset,ch)) { + this.isFinal = 
(this.strOffset + 1 == Rule0.pat.length); + this.strOffset++; + } else { + this.isError = true; + } + return this; + }, + isError: false, + tag: "THAI_RULE", + type: "THAI_RULE", + w: 1}; + } +}; + +var PartRule = { + createAcceptor: function(tag) { + return {strOffset: 0, + patterns: [ + "แก", "เก", "ก้", "กก์", "กา", "กี", "กิ", "กืก" + ], + isFinal: false, + transit: function(ch) { + var offset = this.strOffset; + this.patterns = this.patterns.filter(function(pat) { + return isMatch(pat, offset, ch); + }); + + if (this.patterns.length > 0) { + var len = 1 + offset; + this.isFinal = this.patterns.some(function(pat) { + return pat.length == len; + }); + this.strOffset++; + } else { + this.isError = true; + } + return this; + }, + isError: false, + tag: "PART", + type: "PART", + unk: 1, + w: 1}; + } +}; + +var ThaiRules = [Rule0, PartRule]; + +module.exports = ThaiRules; + +},{}],7:[function(require,module,exports){ +var sys = require("sys") + , WordcutDict = require("./dict") + , WordcutCore = require("./wordcut_core") + , PathInfoBuilder = require("./path_info_builder") + , PathSelector = require("./path_selector") + , Acceptors = require("./acceptors") + , latinRules = require("./latin_rules") + , thaiRules = require("./thai_rules") + , _ = require("underscore"); + + +var Wordcut = Object.create(WordcutCore); +Wordcut.defaultPathInfoBuilder = PathInfoBuilder; +Wordcut.defaultPathSelector = PathSelector; +Wordcut.defaultAcceptors = Acceptors; +Wordcut.defaultLatinRules = latinRules; +Wordcut.defaultThaiRules = thaiRules; +Wordcut.defaultDict = WordcutDict; + + +Wordcut.initNoDict = function(dict_path) { + var self = this; + self.pathInfoBuilder = new self.defaultPathInfoBuilder; + self.pathSelector = new self.defaultPathSelector; + self.acceptors = new self.defaultAcceptors; + self.defaultLatinRules.forEach(function(rule) { + self.acceptors.creators.push(rule); + }); + self.defaultThaiRules.forEach(function(rule) { + self.acceptors.creators.push(rule); + }); +}; + +Wordcut.init = function(dict_path, withDefault, additionalWords) { + withDefault = withDefault || false; + this.initNoDict(); + var dict = _.clone(this.defaultDict); + dict.init(dict_path, withDefault, additionalWords); + this.acceptors.creators.push(dict); +}; + +module.exports = Wordcut; + +},{"./acceptors":1,"./dict":2,"./latin_rules":3,"./path_info_builder":4,"./path_selector":5,"./thai_rules":6,"./wordcut_core":8,"sys":28,"underscore":25}],8:[function(require,module,exports){ +var WordcutCore = { + + buildPath: function(text) { + var self = this + , path = self.pathSelector.createPath() + , leftBoundary = 0; + self.acceptors.reset(); + for (var i = 0; i < text.length; i++) { + var ch = text[i]; + self.acceptors.transit(ch); + + var possiblePathInfos = self + .pathInfoBuilder + .build(path, + self.acceptors.getFinalAcceptors(), + i, + leftBoundary, + text); + var selectedPath = self.pathSelector.selectPath(possiblePathInfos) + + path.push(selectedPath); + if (selectedPath.type !== "UNK") { + leftBoundary = i; + } + } + return path; + }, + + pathToRanges: function(path) { + var e = path.length - 1 + , ranges = []; + + while (e > 0) { + var info = path[e] + , s = info.p; + + if (info.merge !== undefined && ranges.length > 0) { + var r = ranges[ranges.length - 1]; + r.s = info.merge; + s = r.s; + } else { + ranges.push({s:s, e:e}); + } + e = s; + } + return ranges.reverse(); + }, + + rangesToText: function(text, ranges, delimiter) { + return ranges.map(function(r) { + return text.substring(r.s, r.e); + }).join(delimiter); + 
}, + + cut: function(text, delimiter) { + var path = this.buildPath(text) + , ranges = this.pathToRanges(path); + return this + .rangesToText(text, ranges, + (delimiter === undefined ? "|" : delimiter)); + }, + + cutIntoRanges: function(text, noText) { + var path = this.buildPath(text) + , ranges = this.pathToRanges(path); + + if (!noText) { + ranges.forEach(function(r) { + r.text = text.substring(r.s, r.e); + }); + } + return ranges; + }, + + cutIntoArray: function(text) { + var path = this.buildPath(text) + , ranges = this.pathToRanges(path); + + return ranges.map(function(r) { + return text.substring(r.s, r.e) + }); + } +}; + +module.exports = WordcutCore; + +},{}],9:[function(require,module,exports){ +// http://wiki.commonjs.org/wiki/Unit_Testing/1.0 +// +// THIS IS NOT TESTED NOR LIKELY TO WORK OUTSIDE V8! +// +// Originally from narwhal.js (http://narwhaljs.org) +// Copyright (c) 2009 Thomas Robinson <280north.com> +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the 'Software'), to +// deal in the Software without restriction, including without limitation the +// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +// sell copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +// ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +// when used in node, this will actually load the util module we depend on +// versus loading the builtin util module as happens otherwise +// this is a bug in node module loading as far as I am concerned +var util = require('util/'); + +var pSlice = Array.prototype.slice; +var hasOwn = Object.prototype.hasOwnProperty; + +// 1. The assert module provides functions that throw +// AssertionError's when particular conditions are not met. The +// assert module must conform to the following interface. + +var assert = module.exports = ok; + +// 2. The AssertionError is defined in assert. 
+// new assert.AssertionError({ message: message, +// actual: actual, +// expected: expected }) + +assert.AssertionError = function AssertionError(options) { + this.name = 'AssertionError'; + this.actual = options.actual; + this.expected = options.expected; + this.operator = options.operator; + if (options.message) { + this.message = options.message; + this.generatedMessage = false; + } else { + this.message = getMessage(this); + this.generatedMessage = true; + } + var stackStartFunction = options.stackStartFunction || fail; + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, stackStartFunction); + } + else { + // non v8 browsers so we can have a stacktrace + var err = new Error(); + if (err.stack) { + var out = err.stack; + + // try to strip useless frames + var fn_name = stackStartFunction.name; + var idx = out.indexOf('\n' + fn_name); + if (idx >= 0) { + // once we have located the function frame + // we need to strip out everything before it (and its line) + var next_line = out.indexOf('\n', idx + 1); + out = out.substring(next_line + 1); + } + + this.stack = out; + } + } +}; + +// assert.AssertionError instanceof Error +util.inherits(assert.AssertionError, Error); + +function replacer(key, value) { + if (util.isUndefined(value)) { + return '' + value; + } + if (util.isNumber(value) && !isFinite(value)) { + return value.toString(); + } + if (util.isFunction(value) || util.isRegExp(value)) { + return value.toString(); + } + return value; +} + +function truncate(s, n) { + if (util.isString(s)) { + return s.length < n ? s : s.slice(0, n); + } else { + return s; + } +} + +function getMessage(self) { + return truncate(JSON.stringify(self.actual, replacer), 128) + ' ' + + self.operator + ' ' + + truncate(JSON.stringify(self.expected, replacer), 128); +} + +// At present only the three keys mentioned above are used and +// understood by the spec. Implementations or sub modules can pass +// other keys to the AssertionError's constructor - they will be +// ignored. + +// 3. All of the following functions must throw an AssertionError +// when a corresponding condition is not met, with a message that +// may be undefined if not provided. All assertion methods provide +// both the actual and expected values to the assertion error for +// display purposes. + +function fail(actual, expected, message, operator, stackStartFunction) { + throw new assert.AssertionError({ + message: message, + actual: actual, + expected: expected, + operator: operator, + stackStartFunction: stackStartFunction + }); +} + +// EXTENSION! allows for well behaved errors defined elsewhere. +assert.fail = fail; + +// 4. Pure assertion tests whether a value is truthy, as determined +// by !!guard. +// assert.ok(guard, message_opt); +// This statement is equivalent to assert.equal(true, !!guard, +// message_opt);. To test strictly for the value true, use +// assert.strictEqual(true, guard, message_opt);. + +function ok(value, message) { + if (!value) fail(value, true, message, '==', assert.ok); +} +assert.ok = ok; + +// 5. The equality assertion tests shallow, coercive equality with +// ==. +// assert.equal(actual, expected, message_opt); + +assert.equal = function equal(actual, expected, message) { + if (actual != expected) fail(actual, expected, message, '==', assert.equal); +}; + +// 6. 
The non-equality assertion tests for whether two objects are not equal +// with != assert.notEqual(actual, expected, message_opt); + +assert.notEqual = function notEqual(actual, expected, message) { + if (actual == expected) { + fail(actual, expected, message, '!=', assert.notEqual); + } +}; + +// 7. The equivalence assertion tests a deep equality relation. +// assert.deepEqual(actual, expected, message_opt); + +assert.deepEqual = function deepEqual(actual, expected, message) { + if (!_deepEqual(actual, expected)) { + fail(actual, expected, message, 'deepEqual', assert.deepEqual); + } +}; + +function _deepEqual(actual, expected) { + // 7.1. All identical values are equivalent, as determined by ===. + if (actual === expected) { + return true; + + } else if (util.isBuffer(actual) && util.isBuffer(expected)) { + if (actual.length != expected.length) return false; + + for (var i = 0; i < actual.length; i++) { + if (actual[i] !== expected[i]) return false; + } + + return true; + + // 7.2. If the expected value is a Date object, the actual value is + // equivalent if it is also a Date object that refers to the same time. + } else if (util.isDate(actual) && util.isDate(expected)) { + return actual.getTime() === expected.getTime(); + + // 7.3 If the expected value is a RegExp object, the actual value is + // equivalent if it is also a RegExp object with the same source and + // properties (`global`, `multiline`, `lastIndex`, `ignoreCase`). + } else if (util.isRegExp(actual) && util.isRegExp(expected)) { + return actual.source === expected.source && + actual.global === expected.global && + actual.multiline === expected.multiline && + actual.lastIndex === expected.lastIndex && + actual.ignoreCase === expected.ignoreCase; + + // 7.4. Other pairs that do not both pass typeof value == 'object', + // equivalence is determined by ==. + } else if (!util.isObject(actual) && !util.isObject(expected)) { + return actual == expected; + + // 7.5 For all other Object pairs, including Array objects, equivalence is + // determined by having the same number of owned properties (as verified + // with Object.prototype.hasOwnProperty.call), the same set of keys + // (although not necessarily the same order), equivalent values for every + // corresponding key, and an identical 'prototype' property. Note: this + // accounts for both named and indexed properties on Arrays. + } else { + return objEquiv(actual, expected); + } +} + +function isArguments(object) { + return Object.prototype.toString.call(object) == '[object Arguments]'; +} + +function objEquiv(a, b) { + if (util.isNullOrUndefined(a) || util.isNullOrUndefined(b)) + return false; + // an identical 'prototype' property. 
+ if (a.prototype !== b.prototype) return false; + // if one is a primitive, the other must be same + if (util.isPrimitive(a) || util.isPrimitive(b)) { + return a === b; + } + var aIsArgs = isArguments(a), + bIsArgs = isArguments(b); + if ((aIsArgs && !bIsArgs) || (!aIsArgs && bIsArgs)) + return false; + if (aIsArgs) { + a = pSlice.call(a); + b = pSlice.call(b); + return _deepEqual(a, b); + } + var ka = objectKeys(a), + kb = objectKeys(b), + key, i; + // having the same number of owned properties (keys incorporates + // hasOwnProperty) + if (ka.length != kb.length) + return false; + //the same set of keys (although not necessarily the same order), + ka.sort(); + kb.sort(); + //~~~cheap key test + for (i = ka.length - 1; i >= 0; i--) { + if (ka[i] != kb[i]) + return false; + } + //equivalent values for every corresponding key, and + //~~~possibly expensive deep test + for (i = ka.length - 1; i >= 0; i--) { + key = ka[i]; + if (!_deepEqual(a[key], b[key])) return false; + } + return true; +} + +// 8. The non-equivalence assertion tests for any deep inequality. +// assert.notDeepEqual(actual, expected, message_opt); + +assert.notDeepEqual = function notDeepEqual(actual, expected, message) { + if (_deepEqual(actual, expected)) { + fail(actual, expected, message, 'notDeepEqual', assert.notDeepEqual); + } +}; + +// 9. The strict equality assertion tests strict equality, as determined by ===. +// assert.strictEqual(actual, expected, message_opt); + +assert.strictEqual = function strictEqual(actual, expected, message) { + if (actual !== expected) { + fail(actual, expected, message, '===', assert.strictEqual); + } +}; + +// 10. The strict non-equality assertion tests for strict inequality, as +// determined by !==. assert.notStrictEqual(actual, expected, message_opt); + +assert.notStrictEqual = function notStrictEqual(actual, expected, message) { + if (actual === expected) { + fail(actual, expected, message, '!==', assert.notStrictEqual); + } +}; + +function expectedException(actual, expected) { + if (!actual || !expected) { + return false; + } + + if (Object.prototype.toString.call(expected) == '[object RegExp]') { + return expected.test(actual); + } else if (actual instanceof expected) { + return true; + } else if (expected.call({}, actual) === true) { + return true; + } + + return false; +} + +function _throws(shouldThrow, block, expected, message) { + var actual; + + if (util.isString(expected)) { + message = expected; + expected = null; + } + + try { + block(); + } catch (e) { + actual = e; + } + + message = (expected && expected.name ? ' (' + expected.name + ').' : '.') + + (message ? ' ' + message : '.'); + + if (shouldThrow && !actual) { + fail(actual, expected, 'Missing expected exception' + message); + } + + if (!shouldThrow && expectedException(actual, expected)) { + fail(actual, expected, 'Got unwanted exception' + message); + } + + if ((shouldThrow && actual && expected && + !expectedException(actual, expected)) || (!shouldThrow && actual)) { + throw actual; + } +} + +// 11. Expected to throw an error: +// assert.throws(block, Error_opt, message_opt); + +assert.throws = function(block, /*optional*/error, /*optional*/message) { + _throws.apply(this, [true].concat(pSlice.call(arguments))); +}; + +// EXTENSION! This is annoying to write outside this module. 
+assert.doesNotThrow = function(block, /*optional*/message) { + _throws.apply(this, [false].concat(pSlice.call(arguments))); +}; + +assert.ifError = function(err) { if (err) {throw err;}}; + +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + if (hasOwn.call(obj, key)) keys.push(key); + } + return keys; +}; + +},{"util/":28}],10:[function(require,module,exports){ +'use strict'; +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + + var r = range(a, b, str); + + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} + +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} + +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + + if (ai >= 0 && bi > 0) { + begs = []; + left = str.length; + + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + + bi = str.indexOf(b, i + 1); + } + + i = ai < bi && ai >= 0 ? ai : bi; + } + + if (begs.length) { + result = [ left, right ]; + } + } + + return result; +} + +},{}],11:[function(require,module,exports){ +var concatMap = require('concat-map'); +var balanced = require('balanced-match'); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. 
+ // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function identity(e) { + return e; +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + + return expansions; +} + + +},{"balanced-match":10,"concat-map":13}],12:[function(require,module,exports){ + +},{}],13:[function(require,module,exports){ +module.exports = function (xs, fn) { + var res = []; + for (var i = 0; i < xs.length; i++) { + var x = fn(xs[i], i); + if (isArray(x)) res.push.apply(res, x); + else res.push(x); + } + return res; +}; + +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; + +},{}],14:[function(require,module,exports){ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +function EventEmitter() { + this._events = this._events || {}; + this._maxListeners = this._maxListeners || undefined; +} +module.exports = EventEmitter; + +// Backwards-compat with node 0.10.x +EventEmitter.EventEmitter = EventEmitter; + +EventEmitter.prototype._events = undefined; +EventEmitter.prototype._maxListeners = undefined; + +// By default EventEmitters will print a warning if more than 10 listeners are +// added to it. This is a useful default which helps finding memory leaks. +EventEmitter.defaultMaxListeners = 10; + +// Obviously not all Emitters should be limited to 10. This function allows +// that to be increased. Set to zero for unlimited. +EventEmitter.prototype.setMaxListeners = function(n) { + if (!isNumber(n) || n < 0 || isNaN(n)) + throw TypeError('n must be a positive number'); + this._maxListeners = n; + return this; +}; + +EventEmitter.prototype.emit = function(type) { + var er, handler, len, args, i, listeners; + + if (!this._events) + this._events = {}; + + // If there is no 'error' event listener then throw. + if (type === 'error') { + if (!this._events.error || + (isObject(this._events.error) && !this._events.error.length)) { + er = arguments[1]; + if (er instanceof Error) { + throw er; // Unhandled 'error' event + } + throw TypeError('Uncaught, unspecified "error" event.'); + } + } + + handler = this._events[type]; + + if (isUndefined(handler)) + return false; + + if (isFunction(handler)) { + switch (arguments.length) { + // fast cases + case 1: + handler.call(this); + break; + case 2: + handler.call(this, arguments[1]); + break; + case 3: + handler.call(this, arguments[1], arguments[2]); + break; + // slower + default: + len = arguments.length; + args = new Array(len - 1); + for (i = 1; i < len; i++) + args[i - 1] = arguments[i]; + handler.apply(this, args); + } + } else if (isObject(handler)) { + len = arguments.length; + args = new Array(len - 1); + for (i = 1; i < len; i++) + args[i - 1] = arguments[i]; + + listeners = handler.slice(); + len = listeners.length; + for (i = 0; i < len; i++) + listeners[i].apply(this, args); + } + + return true; +}; + +EventEmitter.prototype.addListener = function(type, listener) { + var m; + + if (!isFunction(listener)) + throw TypeError('listener must be a function'); + + if (!this._events) + this._events = {}; + + // To avoid recursion in the case that type === "newListener"! Before + // adding it to the listeners, first emit "newListener". 
+ if (this._events.newListener) + this.emit('newListener', type, + isFunction(listener.listener) ? + listener.listener : listener); + + if (!this._events[type]) + // Optimize the case of one listener. Don't need the extra array object. + this._events[type] = listener; + else if (isObject(this._events[type])) + // If we've already got an array, just append. + this._events[type].push(listener); + else + // Adding the second element, need to change to array. + this._events[type] = [this._events[type], listener]; + + // Check for listener leak + if (isObject(this._events[type]) && !this._events[type].warned) { + var m; + if (!isUndefined(this._maxListeners)) { + m = this._maxListeners; + } else { + m = EventEmitter.defaultMaxListeners; + } + + if (m && m > 0 && this._events[type].length > m) { + this._events[type].warned = true; + console.error('(node) warning: possible EventEmitter memory ' + + 'leak detected. %d listeners added. ' + + 'Use emitter.setMaxListeners() to increase limit.', + this._events[type].length); + if (typeof console.trace === 'function') { + // not supported in IE 10 + console.trace(); + } + } + } + + return this; +}; + +EventEmitter.prototype.on = EventEmitter.prototype.addListener; + +EventEmitter.prototype.once = function(type, listener) { + if (!isFunction(listener)) + throw TypeError('listener must be a function'); + + var fired = false; + + function g() { + this.removeListener(type, g); + + if (!fired) { + fired = true; + listener.apply(this, arguments); + } + } + + g.listener = listener; + this.on(type, g); + + return this; +}; + +// emits a 'removeListener' event iff the listener was removed +EventEmitter.prototype.removeListener = function(type, listener) { + var list, position, length, i; + + if (!isFunction(listener)) + throw TypeError('listener must be a function'); + + if (!this._events || !this._events[type]) + return this; + + list = this._events[type]; + length = list.length; + position = -1; + + if (list === listener || + (isFunction(list.listener) && list.listener === listener)) { + delete this._events[type]; + if (this._events.removeListener) + this.emit('removeListener', type, listener); + + } else if (isObject(list)) { + for (i = length; i-- > 0;) { + if (list[i] === listener || + (list[i].listener && list[i].listener === listener)) { + position = i; + break; + } + } + + if (position < 0) + return this; + + if (list.length === 1) { + list.length = 0; + delete this._events[type]; + } else { + list.splice(position, 1); + } + + if (this._events.removeListener) + this.emit('removeListener', type, listener); + } + + return this; +}; + +EventEmitter.prototype.removeAllListeners = function(type) { + var key, listeners; + + if (!this._events) + return this; + + // not listening for removeListener, no need to emit + if (!this._events.removeListener) { + if (arguments.length === 0) + this._events = {}; + else if (this._events[type]) + delete this._events[type]; + return this; + } + + // emit removeListener for all listeners on all events + if (arguments.length === 0) { + for (key in this._events) { + if (key === 'removeListener') continue; + this.removeAllListeners(key); + } + this.removeAllListeners('removeListener'); + this._events = {}; + return this; + } + + listeners = this._events[type]; + + if (isFunction(listeners)) { + this.removeListener(type, listeners); + } else { + // LIFO order + while (listeners.length) + this.removeListener(type, listeners[listeners.length - 1]); + } + delete this._events[type]; + + return this; +}; + 
+EventEmitter.prototype.listeners = function(type) { + var ret; + if (!this._events || !this._events[type]) + ret = []; + else if (isFunction(this._events[type])) + ret = [this._events[type]]; + else + ret = this._events[type].slice(); + return ret; +}; + +EventEmitter.listenerCount = function(emitter, type) { + var ret; + if (!emitter._events || !emitter._events[type]) + ret = 0; + else if (isFunction(emitter._events[type])) + ret = 1; + else + ret = emitter._events[type].length; + return ret; +}; + +function isFunction(arg) { + return typeof arg === 'function'; +} + +function isNumber(arg) { + return typeof arg === 'number'; +} + +function isObject(arg) { + return typeof arg === 'object' && arg !== null; +} + +function isUndefined(arg) { + return arg === void 0; +} + +},{}],15:[function(require,module,exports){ +(function (process){ +exports.alphasort = alphasort +exports.alphasorti = alphasorti +exports.setopts = setopts +exports.ownProp = ownProp +exports.makeAbs = makeAbs +exports.finish = finish +exports.mark = mark +exports.isIgnored = isIgnored +exports.childrenIgnored = childrenIgnored + +function ownProp (obj, field) { + return Object.prototype.hasOwnProperty.call(obj, field) +} + +var path = require("path") +var minimatch = require("minimatch") +var isAbsolute = require("path-is-absolute") +var Minimatch = minimatch.Minimatch + +function alphasorti (a, b) { + return a.toLowerCase().localeCompare(b.toLowerCase()) +} + +function alphasort (a, b) { + return a.localeCompare(b) +} + +function setupIgnores (self, options) { + self.ignore = options.ignore || [] + + if (!Array.isArray(self.ignore)) + self.ignore = [self.ignore] + + if (self.ignore.length) { + self.ignore = self.ignore.map(ignoreMap) + } +} + +function ignoreMap (pattern) { + var gmatcher = null + if (pattern.slice(-3) === '/**') { + var gpattern = pattern.replace(/(\/\*\*)+$/, '') + gmatcher = new Minimatch(gpattern) + } + + return { + matcher: new Minimatch(pattern), + gmatcher: gmatcher + } +} + +function setopts (self, pattern, options) { + if (!options) + options = {} + + // base-matching: just use globstar for that. 
+ if (options.matchBase && -1 === pattern.indexOf("/")) { + if (options.noglobstar) { + throw new Error("base matching requires globstar") + } + pattern = "**/" + pattern + } + + self.silent = !!options.silent + self.pattern = pattern + self.strict = options.strict !== false + self.realpath = !!options.realpath + self.realpathCache = options.realpathCache || Object.create(null) + self.follow = !!options.follow + self.dot = !!options.dot + self.mark = !!options.mark + self.nodir = !!options.nodir + if (self.nodir) + self.mark = true + self.sync = !!options.sync + self.nounique = !!options.nounique + self.nonull = !!options.nonull + self.nosort = !!options.nosort + self.nocase = !!options.nocase + self.stat = !!options.stat + self.noprocess = !!options.noprocess + + self.maxLength = options.maxLength || Infinity + self.cache = options.cache || Object.create(null) + self.statCache = options.statCache || Object.create(null) + self.symlinks = options.symlinks || Object.create(null) + + setupIgnores(self, options) + + self.changedCwd = false + var cwd = process.cwd() + if (!ownProp(options, "cwd")) + self.cwd = cwd + else { + self.cwd = options.cwd + self.changedCwd = path.resolve(options.cwd) !== cwd + } + + self.root = options.root || path.resolve(self.cwd, "/") + self.root = path.resolve(self.root) + if (process.platform === "win32") + self.root = self.root.replace(/\\/g, "/") + + self.nomount = !!options.nomount + + // disable comments and negation unless the user explicitly + // passes in false as the option. + options.nonegate = options.nonegate === false ? false : true + options.nocomment = options.nocomment === false ? false : true + deprecationWarning(options) + + self.minimatch = new Minimatch(pattern, options) + self.options = self.minimatch.options +} + +// TODO(isaacs): remove entirely in v6 +// exported to reset in tests +exports.deprecationWarned +function deprecationWarning(options) { + if (!options.nonegate || !options.nocomment) { + if (process.noDeprecation !== true && !exports.deprecationWarned) { + var msg = 'glob WARNING: comments and negation will be disabled in v6' + if (process.throwDeprecation) + throw new Error(msg) + else if (process.traceDeprecation) + console.trace(msg) + else + console.error(msg) + + exports.deprecationWarned = true + } + } +} + +function finish (self) { + var nou = self.nounique + var all = nou ? [] : Object.create(null) + + for (var i = 0, l = self.matches.length; i < l; i ++) { + var matches = self.matches[i] + if (!matches || Object.keys(matches).length === 0) { + if (self.nonull) { + // do like the shell, and spit out the literal glob + var literal = self.minimatch.globSet[i] + if (nou) + all.push(literal) + else + all[literal] = true + } + } else { + // had matches + var m = Object.keys(matches) + if (nou) + all.push.apply(all, m) + else + m.forEach(function (m) { + all[m] = true + }) + } + } + + if (!nou) + all = Object.keys(all) + + if (!self.nosort) + all = all.sort(self.nocase ? 
alphasorti : alphasort) + + // at *some* point we statted all of these + if (self.mark) { + for (var i = 0; i < all.length; i++) { + all[i] = self._mark(all[i]) + } + if (self.nodir) { + all = all.filter(function (e) { + return !(/\/$/.test(e)) + }) + } + } + + if (self.ignore.length) + all = all.filter(function(m) { + return !isIgnored(self, m) + }) + + self.found = all +} + +function mark (self, p) { + var abs = makeAbs(self, p) + var c = self.cache[abs] + var m = p + if (c) { + var isDir = c === 'DIR' || Array.isArray(c) + var slash = p.slice(-1) === '/' + + if (isDir && !slash) + m += '/' + else if (!isDir && slash) + m = m.slice(0, -1) + + if (m !== p) { + var mabs = makeAbs(self, m) + self.statCache[mabs] = self.statCache[abs] + self.cache[mabs] = self.cache[abs] + } + } + + return m +} + +// lotta situps... +function makeAbs (self, f) { + var abs = f + if (f.charAt(0) === '/') { + abs = path.join(self.root, f) + } else if (isAbsolute(f) || f === '') { + abs = f + } else if (self.changedCwd) { + abs = path.resolve(self.cwd, f) + } else { + abs = path.resolve(f) + } + return abs +} + + +// Return true, if pattern ends with globstar '**', for the accompanying parent directory. +// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents +function isIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) + }) +} + +function childrenIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return !!(item.gmatcher && item.gmatcher.match(path)) + }) +} + +}).call(this,require('_process')) +},{"_process":24,"minimatch":20,"path":22,"path-is-absolute":23}],16:[function(require,module,exports){ +(function (process){ +// Approach: +// +// 1. Get the minimatch set +// 2. For each pattern in the set, PROCESS(pattern, false) +// 3. Store matches per-set, then uniq them +// +// PROCESS(pattern, inGlobStar) +// Get the first [n] items from pattern that are all strings +// Join these together. This is PREFIX. +// If there is no more remaining, then stat(PREFIX) and +// add to matches if it succeeds. END. +// +// If inGlobStar and PREFIX is symlink and points to dir +// set ENTRIES = [] +// else readdir(PREFIX) as ENTRIES +// If fail, END +// +// with ENTRIES +// If pattern[n] is GLOBSTAR +// // handle the case where the globstar match is empty +// // by pruning it out, and testing the resulting pattern +// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) +// // handle other cases. +// for ENTRY in ENTRIES (not dotfiles) +// // attach globstar + tail onto the entry +// // Mark that this entry is a globstar match +// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) +// +// else // not globstar +// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) +// Test ENTRY against pattern[n] +// If fails, continue +// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) +// +// Caveat: +// Cache all stats and readdirs results to minimize syscall. Since all +// we ever care about is existence and directory-ness, we can just keep +// `true` for files, and [children,...] for directories, or `false` for +// things that don't exist. 
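+// Illustrative usage sketch (comment only, hedged): given the PROCESS
+// algorithm and caching scheme described above, a typical call into the
+// API exported below could look like the following — the "**/*.js"
+// pattern and the { nodir: true } option are assumed purely for
+// illustration, not taken from this bundle:
+//
+//   glob('**/*.js', { nodir: true }, function (er, files) {
+//     if (er) throw er
+//     // `files` is the de-duplicated (and, unless nosort is set, sorted)
+//     // match list assembled by the PROCESS steps outlined above
+//   })
+//
+// Any other option consumed by setopts() above (dot, mark, ignore, ...)
+// could be passed in the same way.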
+ +module.exports = glob + +var fs = require('fs') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var inherits = require('inherits') +var EE = require('events').EventEmitter +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path-is-absolute') +var globSync = require('./sync.js') +var common = require('./common.js') +var alphasort = common.alphasort +var alphasorti = common.alphasorti +var setopts = common.setopts +var ownProp = common.ownProp +var inflight = require('inflight') +var util = require('util') +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +var once = require('once') + +function glob (pattern, options, cb) { + if (typeof options === 'function') cb = options, options = {} + if (!options) options = {} + + if (options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return globSync(pattern, options) + } + + return new Glob(pattern, options, cb) +} + +glob.sync = globSync +var GlobSync = glob.GlobSync = globSync.GlobSync + +// old api surface +glob.glob = glob + +glob.hasMagic = function (pattern, options_) { + var options = util._extend({}, options_) + options.noprocess = true + + var g = new Glob(pattern, options) + var set = g.minimatch.set + if (set.length > 1) + return true + + for (var j = 0; j < set[0].length; j++) { + if (typeof set[0][j] !== 'string') + return true + } + + return false +} + +glob.Glob = Glob +inherits(Glob, EE) +function Glob (pattern, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + + if (options && options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return new GlobSync(pattern, options) + } + + if (!(this instanceof Glob)) + return new Glob(pattern, options, cb) + + setopts(this, pattern, options) + this._didRealPath = false + + // process each pattern in the minimatch set + var n = this.minimatch.set.length + + // The matches are stored as {: true,...} so that + // duplicates are automagically pruned. + // Later, we do an Object.keys() on these. + // Keep them as a list so we can fill in when nonull is set. 
+ this.matches = new Array(n) + + if (typeof cb === 'function') { + cb = once(cb) + this.on('error', cb) + this.on('end', function (matches) { + cb(null, matches) + }) + } + + var self = this + var n = this.minimatch.set.length + this._processing = 0 + this.matches = new Array(n) + + this._emitQueue = [] + this._processQueue = [] + this.paused = false + + if (this.noprocess) + return this + + if (n === 0) + return done() + + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false, done) + } + + function done () { + --self._processing + if (self._processing <= 0) + self._finish() + } +} + +Glob.prototype._finish = function () { + assert(this instanceof Glob) + if (this.aborted) + return + + if (this.realpath && !this._didRealpath) + return this._realpath() + + common.finish(this) + this.emit('end', this.found) +} + +Glob.prototype._realpath = function () { + if (this._didRealpath) + return + + this._didRealpath = true + + var n = this.matches.length + if (n === 0) + return this._finish() + + var self = this + for (var i = 0; i < this.matches.length; i++) + this._realpathSet(i, next) + + function next () { + if (--n === 0) + self._finish() + } +} + +Glob.prototype._realpathSet = function (index, cb) { + var matchset = this.matches[index] + if (!matchset) + return cb() + + var found = Object.keys(matchset) + var self = this + var n = found.length + + if (n === 0) + return cb() + + var set = this.matches[index] = Object.create(null) + found.forEach(function (p, i) { + // If there's a problem with the stat, then it means that + // one or more of the links in the realpath couldn't be + // resolved. just return the abs value in that case. + p = self._makeAbs(p) + fs.realpath(p, self.realpathCache, function (er, real) { + if (!er) + set[real] = true + else if (er.syscall === 'stat') + set[p] = true + else + self.emit('error', er) // srsly wtf right here + + if (--n === 0) { + self.matches[index] = set + cb() + } + }) + }) +} + +Glob.prototype._mark = function (p) { + return common.mark(this, p) +} + +Glob.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} + +Glob.prototype.abort = function () { + this.aborted = true + this.emit('abort') +} + +Glob.prototype.pause = function () { + if (!this.paused) { + this.paused = true + this.emit('pause') + } +} + +Glob.prototype.resume = function () { + if (this.paused) { + this.emit('resume') + this.paused = false + if (this._emitQueue.length) { + var eq = this._emitQueue.slice(0) + this._emitQueue.length = 0 + for (var i = 0; i < eq.length; i ++) { + var e = eq[i] + this._emitMatch(e[0], e[1]) + } + } + if (this._processQueue.length) { + var pq = this._processQueue.slice(0) + this._processQueue.length = 0 + for (var i = 0; i < pq.length; i ++) { + var p = pq[i] + this._processing-- + this._process(p[0], p[1], p[2], p[3]) + } + } + } +} + +Glob.prototype._process = function (pattern, index, inGlobStar, cb) { + assert(this instanceof Glob) + assert(typeof cb === 'function') + + if (this.aborted) + return + + this._processing++ + if (this.paused) { + this._processQueue.push([pattern, index, inGlobStar, cb]) + return + } + + //console.error('PROCESS %d', this._processing, pattern) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. 
+ + // see if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index, cb) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip _processing + if (childrenIgnored(this, read)) + return cb() + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) +} + +Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + +Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return cb() + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return cb() + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return cb() + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + this._process([e].concat(remain), index, inGlobStar, cb) + } + cb() +} + +Glob.prototype._emitMatch = function (index, e) { + if (this.aborted) + return + + if (this.matches[index][e]) + return + + if (isIgnored(this, e)) + return + + if (this.paused) { + this._emitQueue.push([index, e]) + return + } + + var abs = this._makeAbs(e) + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + if (this.mark) + e = this._mark(e) + + this.matches[index][e] = true + + var st = this.statCache[abs] + if (st) + this.emit('stat', e, st) + + this.emit('match', e) +} + +Glob.prototype._readdirInGlobStar = function (abs, cb) { + if (this.aborted) + return + + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false, cb) + + var lstatkey = 'lstat\0' + abs + var self = this + var lstatcb = inflight(lstatkey, lstatcb_) + + if (lstatcb) + fs.lstat(abs, lstatcb) + + function lstatcb_ (er, lstat) { + if (er) + return cb() + + var isSym = lstat.isSymbolicLink() + self.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && !lstat.isDirectory()) { + self.cache[abs] = 'FILE' + cb() + } else + self._readdir(abs, false, cb) + } +} + +Glob.prototype._readdir = function (abs, inGlobStar, cb) { + if (this.aborted) + return + + cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) + if (!cb) + return + + //console.error('RD %j %j', +inGlobStar, abs) + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs, cb) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return cb() + + if (Array.isArray(c)) + return cb(null, c) + } + + var self = this + fs.readdir(abs, readdirCb(this, abs, cb)) +} + +function readdirCb (self, abs, cb) { + return function (er, entries) { + if (er) + self._readdirError(abs, er, cb) + else + self._readdirEntries(abs, entries, cb) + } +} + +Glob.prototype._readdirEntries = function (abs, entries, cb) { + if (this.aborted) + return + + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + return cb(null, entries) +} + +Glob.prototype._readdirError = function (f, er, cb) { + if (this.aborted) + return + + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + this.cache[this._makeAbs(f)] = 'FILE' + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. 
+ this.cache[this._makeAbs(f)] = false + if (this.strict) { + this.emit('error', er) + // If the error is handled, then we abort + // if not, we threw out of here + this.abort() + } + if (!this.silent) + console.error('glob error', er) + break + } + + return cb() +} + +Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + + +Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + //console.error('pgs2', prefix, remain[0], entries) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return cb() + + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false, cb) + + var isSym = this.symlinks[abs] + var len = entries.length + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return cb() + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true, cb) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true, cb) + } + + cb() +} + +Glob.prototype._processSimple = function (prefix, index, cb) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? + var self = this + this._stat(prefix, function (er, exists) { + self._processSimple2(prefix, index, er, exists, cb) + }) +} +Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { + + //console.error('ps2', prefix, exists) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return cb() + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) + cb() +} + +// Returns either 'DIR', 'FILE', or false +Glob.prototype._stat = function (f, cb) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return cb() + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return cb(null, c) + + if (needDir && c === 'FILE') + return cb() + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (stat !== undefined) { + if (stat === false) + return cb(null, stat) + else { + var type = stat.isDirectory() ? 
'DIR' : 'FILE' + if (needDir && type === 'FILE') + return cb() + else + return cb(null, type, stat) + } + } + + var self = this + var statcb = inflight('stat\0' + abs, lstatcb_) + if (statcb) + fs.lstat(abs, statcb) + + function lstatcb_ (er, lstat) { + if (lstat && lstat.isSymbolicLink()) { + // If it's a symlink, then treat it as the target, unless + // the target does not exist, then treat it as a file. + return fs.stat(abs, function (er, stat) { + if (er) + self._stat2(f, abs, null, lstat, cb) + else + self._stat2(f, abs, er, stat, cb) + }) + } else { + self._stat2(f, abs, er, lstat, cb) + } + } +} + +Glob.prototype._stat2 = function (f, abs, er, stat, cb) { + if (er) { + this.statCache[abs] = false + return cb() + } + + var needDir = f.slice(-1) === '/' + this.statCache[abs] = stat + + if (abs.slice(-1) === '/' && !stat.isDirectory()) + return cb(null, false, stat) + + var c = stat.isDirectory() ? 'DIR' : 'FILE' + this.cache[abs] = this.cache[abs] || c + + if (needDir && c !== 'DIR') + return cb() + + return cb(null, c, stat) +} + +}).call(this,require('_process')) +},{"./common.js":15,"./sync.js":17,"_process":24,"assert":9,"events":14,"fs":12,"inflight":18,"inherits":19,"minimatch":20,"once":21,"path":22,"path-is-absolute":23,"util":28}],17:[function(require,module,exports){ +(function (process){ +module.exports = globSync +globSync.GlobSync = GlobSync + +var fs = require('fs') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var Glob = require('./glob.js').Glob +var util = require('util') +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path-is-absolute') +var common = require('./common.js') +var alphasort = common.alphasort +var alphasorti = common.alphasorti +var setopts = common.setopts +var ownProp = common.ownProp +var childrenIgnored = common.childrenIgnored + +function globSync (pattern, options) { + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + return new GlobSync(pattern, options).found +} + +function GlobSync (pattern, options) { + if (!pattern) + throw new Error('must provide pattern') + + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + if (!(this instanceof GlobSync)) + return new GlobSync(pattern, options) + + setopts(this, pattern, options) + + if (this.noprocess) + return this + + var n = this.minimatch.set.length + this.matches = new Array(n) + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false) + } + this._finish() +} + +GlobSync.prototype._finish = function () { + assert(this instanceof GlobSync) + if (this.realpath) { + var self = this + this.matches.forEach(function (matchset, index) { + var set = self.matches[index] = Object.create(null) + for (var p in matchset) { + try { + p = self._makeAbs(p) + var real = fs.realpathSync(p, self.realpathCache) + set[real] = true + } catch (er) { + if (er.syscall === 'stat') + set[self._makeAbs(p)] = true + else + throw er + } + } + }) + } + common.finish(this) +} + + +GlobSync.prototype._process = function (pattern, index, inGlobStar) { + assert(this instanceof GlobSync) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. 
+ + // See if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip processing + if (childrenIgnored(this, read)) + return + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar) +} + + +GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { + var entries = this._readdir(abs, inGlobStar) + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix.slice(-1) !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this.matches[index][e] = true + } + // This was the last one, and no stats were needed + return + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) + newPattern = [prefix, e] + else + newPattern = [e] + this._process(newPattern.concat(remain), index, inGlobStar) + } +} + + +GlobSync.prototype._emitMatch = function (index, e) { + var abs = this._makeAbs(e) + if (this.mark) + e = this._mark(e) + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[this._makeAbs(e)] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + if (this.stat) + this._stat(e) +} + + +GlobSync.prototype._readdirInGlobStar = function (abs) { + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false) + + var entries + var lstat + var stat + try { + lstat = fs.lstatSync(abs) + } catch (er) { + // lstat failed, doesn't exist + return null + } + + var isSym = lstat.isSymbolicLink() + this.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && !lstat.isDirectory()) + this.cache[abs] = 'FILE' + else + entries = this._readdir(abs, false) + + return entries +} + +GlobSync.prototype._readdir = function (abs, inGlobStar) { + var entries + + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return null + + if (Array.isArray(c)) + return c + } + + try { + return this._readdirEntries(abs, fs.readdirSync(abs)) + } catch (er) { + this._readdirError(abs, er) + return null + } +} + +GlobSync.prototype._readdirEntries = function (abs, entries) { + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + + // mark and cache dir-ness + return entries +} + +GlobSync.prototype._readdirError = function (f, er) { + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + this.cache[this._makeAbs(f)] = 'FILE' + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) + throw er + if (!this.silent) + console.error('glob error', er) + break + } +} + +GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { + + var entries = this._readdir(abs, inGlobStar) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return + + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? 
[ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false) + + var len = entries.length + var isSym = this.symlinks[abs] + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true) + } +} + +GlobSync.prototype._processSimple = function (prefix, index) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? + var exists = this._stat(prefix) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this.matches[index][prefix] = true +} + +// Returns either 'DIR', 'FILE', or false +GlobSync.prototype._stat = function (f) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return false + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return c + + if (needDir && c === 'FILE') + return false + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (!stat) { + var lstat + try { + lstat = fs.lstatSync(abs) + } catch (er) { + return false + } + + if (lstat.isSymbolicLink()) { + try { + stat = fs.statSync(abs) + } catch (er) { + stat = lstat + } + } else { + stat = lstat + } + } + + this.statCache[abs] = stat + + var c = stat.isDirectory() ? 'DIR' : 'FILE' + this.cache[abs] = this.cache[abs] || c + + if (needDir && c !== 'DIR') + return false + + return c +} + +GlobSync.prototype._mark = function (p) { + return common.mark(this, p) +} + +GlobSync.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} + +}).call(this,require('_process')) +},{"./common.js":15,"./glob.js":16,"_process":24,"assert":9,"fs":12,"minimatch":20,"path":22,"path-is-absolute":23,"util":28}],18:[function(require,module,exports){ +(function (process){ +var wrappy = require('wrappy') +var reqs = Object.create(null) +var once = require('once') + +module.exports = wrappy(inflight) + +function inflight (key, cb) { + if (reqs[key]) { + reqs[key].push(cb) + return null + } else { + reqs[key] = [cb] + return makeres(key) + } +} + +function makeres (key) { + return once(function RES () { + var cbs = reqs[key] + var len = cbs.length + var args = slice(arguments) + + // XXX It's somewhat ambiguous whether a new callback added in this + // pass should be queued for later execution if something in the + // list of callbacks throws, or if it should just be discarded. 
+ // However, it's such an edge case that it hardly matters, and either + // choice is likely as surprising as the other. + // As it happens, we do go ahead and schedule it for later execution. + try { + for (var i = 0; i < len; i++) { + cbs[i].apply(null, args) + } + } finally { + if (cbs.length > len) { + // added more in the interim. + // de-zalgo, just in case, but don't call again. + cbs.splice(0, len) + process.nextTick(function () { + RES.apply(null, args) + }) + } else { + delete reqs[key] + } + } + }) +} + +function slice (args) { + var length = args.length + var array = [] + + for (var i = 0; i < length; i++) array[i] = args[i] + return array +} + +}).call(this,require('_process')) +},{"_process":24,"once":21,"wrappy":29}],19:[function(require,module,exports){ +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }); + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } +} + +},{}],20:[function(require,module,exports){ +module.exports = minimatch +minimatch.Minimatch = Minimatch + +var path = { sep: '/' } +try { + path = require('path') +} catch (er) {} + +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = require('brace-expansion') + +var plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' }, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} + +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' + +// * => any number of characters +var star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. +var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) +} + +// normalizes slashes. 
+var slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) + } +} + +function ext (a, b) { + a = a || {} + b = b || {} + var t = {} + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + return t +} + +minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return minimatch + + var orig = minimatch + + var m = function minimatch (p, pattern, options) { + return orig.minimatch(p, pattern, ext(def, options)) + } + + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + + return m +} + +Minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return Minimatch + return minimatch.defaults(def).Minimatch +} + +function minimatch (p, pattern, options) { + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } + + if (!options) options = {} + + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + // "" only matches "" + if (pattern.trim() === '') return p === '' + + return new Minimatch(pattern, options).match(p) +} + +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } + + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } + + if (!options) options = {} + pattern = pattern.trim() + + // windows support: need to use /, not \ + if (path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } + + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + + // make the set of regexps etc. + this.make() +} + +Minimatch.prototype.debug = function () {} + +Minimatch.prototype.make = make +function make () { + // don't do it more than once. + if (this._made) return + + var pattern = this.pattern + var options = this.options + + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + var set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = console.error + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. 
+ set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) + + this.debug(this.pattern, set) + + this.set = set +} + +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 + + if (options.nonegate) return + + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } + + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} + +Minimatch.prototype.braceExpand = braceExpand + +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} + } + } + + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern + + if (typeof pattern === 'undefined') { + throw new TypeError('undefined pattern') + } + + if (options.nobrace || + !pattern.match(/\{.*\}/)) { + // shortcut. no need to expand. + return [pattern] + } + + return expand(pattern) +} + +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + if (pattern.length > 1024 * 64) { + throw new TypeError('pattern is too long') + } + + var options = this.options + + // shortcuts + if (!options.noglobstar && pattern === '**') return GLOBSTAR + if (pattern === '') return '' + + var re = '' + var hasMagic = !!options.nocase + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? '' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' + var self = this + + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } + + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) + + // skip over any that are escaped. 
+ if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue + } + + switch (c) { + case '/': + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + + case '\\': + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. + case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue + + case '(': + if (inClass) { + re += '(' + continue + } + + if (!stateChar) { + re += '\\(' + continue + } + + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? '(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue + + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } + + clearStateChar() + hasMagic = true + var pl = patternListStack.pop() + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(pl) + } + pl.reEnd = re.length + continue + + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } + + clearStateChar() + re += '|' + continue + + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += '\\' + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } + + // handle the case where we left a class open. + // "[z-a]" is valid, equivalent to "\[z-a\]" + if (inClass) { + // split where the last [ was, make sure we don't have + // an invalid re. if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } + } + + // finish up the class. 
+ hasMagic = true + inClass = false + re += c + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } + + re += c + + } // switch + } // for + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) + + this.debug('tail=%j\n %s', tail, tail, pl, re) + var t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '.': + case '[': + case '(': addPatternStart = true + } + + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. + for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] + + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) + + nlLast += nlAfter + + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. 
+ var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter + + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' + } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re + } + + if (addPatternStart) { + re = patternStart + re + } + + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [re, hasMagic] + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + var flags = options.nocase ? 'i' : '' + try { + var regExp = new RegExp('^' + re + '$', flags) + } catch (er) { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') + } + + regExp._glob = pattern + regExp._src = re + + return regExp +} + +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} + +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. + var set = this.set + + if (!set.length) { + this.regexp = false + return this.regexp + } + var options = this.options + + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' + re + ').*$' + + try { + this.regexp = new RegExp(re, flags) + } catch (ex) { + this.regexp = false + } + return this.regexp +} + +minimatch.match = function (list, pattern, options) { + options = options || {} + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +Minimatch.prototype.match = match +function match (f, partial) { + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' + + if (f === '/' && partial) return true + + var options = this.options + + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') + } + + // treat the test path as a set of pathparts. 
+ f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. + + var set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } + + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate +} + +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) + + this.debug('matchOne', file.length, pattern.length) + + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] + + this.debug(pattern, p, f) + + // should be impossible. + // some invalid regexp stuff in the set. + if (p === false) return false + + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. + // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] + + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' 
|| + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } + + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } + + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + if (options.nocase) { + hit = f.toLowerCase() === p.toLowerCase() + } else { + hit = f === p + } + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + var emptyFileEnd = (fi === fl - 1) && (file[fi] === '') + return emptyFileEnd + } + + // should be unreachable. + throw new Error('wtf?') +} + +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') +} + +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +} + +},{"brace-expansion":11,"path":22}],21:[function(require,module,exports){ +var wrappy = require('wrappy') +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} + +},{"wrappy":29}],22:[function(require,module,exports){ +(function (process){ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// resolves . and .. elements in a path array with directory names there +// must be no slashes, empty elements, or device names (c:\) in the array +// (so also no leading and trailing slashes - it does not distinguish +// relative and absolute paths) +function normalizeArray(parts, allowAboveRoot) { + // if the path tries to go above the root, `up` ends up > 0 + var up = 0; + for (var i = parts.length - 1; i >= 0; i--) { + var last = parts[i]; + if (last === '.') { + parts.splice(i, 1); + } else if (last === '..') { + parts.splice(i, 1); + up++; + } else if (up) { + parts.splice(i, 1); + up--; + } + } + + // if the path is allowed to go above the root, restore leading ..s + if (allowAboveRoot) { + for (; up--; up) { + parts.unshift('..'); + } + } + + return parts; +} + +// Split a filename into [root, dir, basename, ext], unix version +// 'root' is just a slash, or nothing. +var splitPathRe = + /^(\/?|)([\s\S]*?)((?:\.{1,2}|[^\/]+?|)(\.[^.\/]*|))(?:[\/]*)$/; +var splitPath = function(filename) { + return splitPathRe.exec(filename).slice(1); +}; + +// path.resolve([from ...], to) +// posix version +exports.resolve = function() { + var resolvedPath = '', + resolvedAbsolute = false; + + for (var i = arguments.length - 1; i >= -1 && !resolvedAbsolute; i--) { + var path = (i >= 0) ? arguments[i] : process.cwd(); + + // Skip empty and invalid entries + if (typeof path !== 'string') { + throw new TypeError('Arguments to path.resolve must be strings'); + } else if (!path) { + continue; + } + + resolvedPath = path + '/' + resolvedPath; + resolvedAbsolute = path.charAt(0) === '/'; + } + + // At this point the path should be resolved to a full absolute path, but + // handle relative paths to be safe (might happen when process.cwd() fails) + + // Normalize the path + resolvedPath = normalizeArray(filter(resolvedPath.split('/'), function(p) { + return !!p; + }), !resolvedAbsolute).join('/'); + + return ((resolvedAbsolute ? '/' : '') + resolvedPath) || '.'; +}; + +// path.normalize(path) +// posix version +exports.normalize = function(path) { + var isAbsolute = exports.isAbsolute(path), + trailingSlash = substr(path, -1) === '/'; + + // Normalize the path + path = normalizeArray(filter(path.split('/'), function(p) { + return !!p; + }), !isAbsolute).join('/'); + + if (!path && !isAbsolute) { + path = '.'; + } + if (path && trailingSlash) { + path += '/'; + } + + return (isAbsolute ? 
'/' : '') + path; +}; + +// posix version +exports.isAbsolute = function(path) { + return path.charAt(0) === '/'; +}; + +// posix version +exports.join = function() { + var paths = Array.prototype.slice.call(arguments, 0); + return exports.normalize(filter(paths, function(p, index) { + if (typeof p !== 'string') { + throw new TypeError('Arguments to path.join must be strings'); + } + return p; + }).join('/')); +}; + + +// path.relative(from, to) +// posix version +exports.relative = function(from, to) { + from = exports.resolve(from).substr(1); + to = exports.resolve(to).substr(1); + + function trim(arr) { + var start = 0; + for (; start < arr.length; start++) { + if (arr[start] !== '') break; + } + + var end = arr.length - 1; + for (; end >= 0; end--) { + if (arr[end] !== '') break; + } + + if (start > end) return []; + return arr.slice(start, end - start + 1); + } + + var fromParts = trim(from.split('/')); + var toParts = trim(to.split('/')); + + var length = Math.min(fromParts.length, toParts.length); + var samePartsLength = length; + for (var i = 0; i < length; i++) { + if (fromParts[i] !== toParts[i]) { + samePartsLength = i; + break; + } + } + + var outputParts = []; + for (var i = samePartsLength; i < fromParts.length; i++) { + outputParts.push('..'); + } + + outputParts = outputParts.concat(toParts.slice(samePartsLength)); + + return outputParts.join('/'); +}; + +exports.sep = '/'; +exports.delimiter = ':'; + +exports.dirname = function(path) { + var result = splitPath(path), + root = result[0], + dir = result[1]; + + if (!root && !dir) { + // No dirname whatsoever + return '.'; + } + + if (dir) { + // It has a dirname, strip trailing slash + dir = dir.substr(0, dir.length - 1); + } + + return root + dir; +}; + + +exports.basename = function(path, ext) { + var f = splitPath(path)[2]; + // TODO: make this comparison case-insensitive on windows? + if (ext && f.substr(-1 * ext.length) === ext) { + f = f.substr(0, f.length - ext.length); + } + return f; +}; + + +exports.extname = function(path) { + return splitPath(path)[3]; +}; + +function filter (xs, f) { + if (xs.filter) return xs.filter(f); + var res = []; + for (var i = 0; i < xs.length; i++) { + if (f(xs[i], i, xs)) res.push(xs[i]); + } + return res; +} + +// String.prototype.substr - negative index don't work in IE8 +var substr = 'ab'.substr(-1) === 'b' + ? function (str, start, len) { return str.substr(start, len) } + : function (str, start, len) { + if (start < 0) start = str.length + start; + return str.substr(start, len); + } +; + +}).call(this,require('_process')) +},{"_process":24}],23:[function(require,module,exports){ +(function (process){ +'use strict'; + +function posix(path) { + return path.charAt(0) === '/'; +} + +function win32(path) { + // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 + var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; + var result = splitDeviceRe.exec(path); + var device = result[1] || ''; + var isUnc = Boolean(device && device.charAt(1) !== ':'); + + // UNC paths are always absolute + return Boolean(result[2] || isUnc); +} + +module.exports = process.platform === 'win32' ? 
win32 : posix; +module.exports.posix = posix; +module.exports.win32 = win32; + +}).call(this,require('_process')) +},{"_process":24}],24:[function(require,module,exports){ +// shim for using process in browser +var process = module.exports = {}; + +// cached from whatever global is present so that test runners that stub it +// don't break things. But we need to wrap it in a try catch in case it is +// wrapped in strict mode code which doesn't define any globals. It's inside a +// function because try/catches deoptimize in certain engines. + +var cachedSetTimeout; +var cachedClearTimeout; + +function defaultSetTimout() { + throw new Error('setTimeout has not been defined'); +} +function defaultClearTimeout () { + throw new Error('clearTimeout has not been defined'); +} +(function () { + try { + if (typeof setTimeout === 'function') { + cachedSetTimeout = setTimeout; + } else { + cachedSetTimeout = defaultSetTimout; + } + } catch (e) { + cachedSetTimeout = defaultSetTimout; + } + try { + if (typeof clearTimeout === 'function') { + cachedClearTimeout = clearTimeout; + } else { + cachedClearTimeout = defaultClearTimeout; + } + } catch (e) { + cachedClearTimeout = defaultClearTimeout; + } +} ()) +function runTimeout(fun) { + if (cachedSetTimeout === setTimeout) { + //normal enviroments in sane situations + return setTimeout(fun, 0); + } + // if setTimeout wasn't available but was latter defined + if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) { + cachedSetTimeout = setTimeout; + return setTimeout(fun, 0); + } + try { + // when when somebody has screwed with setTimeout but no I.E. maddness + return cachedSetTimeout(fun, 0); + } catch(e){ + try { + // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally + return cachedSetTimeout.call(null, fun, 0); + } catch(e){ + // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error + return cachedSetTimeout.call(this, fun, 0); + } + } + + +} +function runClearTimeout(marker) { + if (cachedClearTimeout === clearTimeout) { + //normal enviroments in sane situations + return clearTimeout(marker); + } + // if clearTimeout wasn't available but was latter defined + if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) { + cachedClearTimeout = clearTimeout; + return clearTimeout(marker); + } + try { + // when when somebody has screwed with setTimeout but no I.E. maddness + return cachedClearTimeout(marker); + } catch (e){ + try { + // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally + return cachedClearTimeout.call(null, marker); + } catch (e){ + // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error. + // Some versions of I.E. 
have different rules for clearTimeout vs setTimeout + return cachedClearTimeout.call(this, marker); + } + } + + + +} +var queue = []; +var draining = false; +var currentQueue; +var queueIndex = -1; + +function cleanUpNextTick() { + if (!draining || !currentQueue) { + return; + } + draining = false; + if (currentQueue.length) { + queue = currentQueue.concat(queue); + } else { + queueIndex = -1; + } + if (queue.length) { + drainQueue(); + } +} + +function drainQueue() { + if (draining) { + return; + } + var timeout = runTimeout(cleanUpNextTick); + draining = true; + + var len = queue.length; + while(len) { + currentQueue = queue; + queue = []; + while (++queueIndex < len) { + if (currentQueue) { + currentQueue[queueIndex].run(); + } + } + queueIndex = -1; + len = queue.length; + } + currentQueue = null; + draining = false; + runClearTimeout(timeout); +} + +process.nextTick = function (fun) { + var args = new Array(arguments.length - 1); + if (arguments.length > 1) { + for (var i = 1; i < arguments.length; i++) { + args[i - 1] = arguments[i]; + } + } + queue.push(new Item(fun, args)); + if (queue.length === 1 && !draining) { + runTimeout(drainQueue); + } +}; + +// v8 likes predictible objects +function Item(fun, array) { + this.fun = fun; + this.array = array; +} +Item.prototype.run = function () { + this.fun.apply(null, this.array); +}; +process.title = 'browser'; +process.browser = true; +process.env = {}; +process.argv = []; +process.version = ''; // empty string to avoid regexp issues +process.versions = {}; + +function noop() {} + +process.on = noop; +process.addListener = noop; +process.once = noop; +process.off = noop; +process.removeListener = noop; +process.removeAllListeners = noop; +process.emit = noop; +process.prependListener = noop; +process.prependOnceListener = noop; + +process.listeners = function (name) { return [] } + +process.binding = function (name) { + throw new Error('process.binding is not supported'); +}; + +process.cwd = function () { return '/' }; +process.chdir = function (dir) { + throw new Error('process.chdir is not supported'); +}; +process.umask = function() { return 0; }; + +},{}],25:[function(require,module,exports){ +// Underscore.js 1.8.3 +// http://underscorejs.org +// (c) 2009-2015 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors +// Underscore may be freely distributed under the MIT license. + +(function() { + + // Baseline setup + // -------------- + + // Establish the root object, `window` in the browser, or `exports` on the server. + var root = this; + + // Save the previous value of the `_` variable. + var previousUnderscore = root._; + + // Save bytes in the minified (but not gzipped) version: + var ArrayProto = Array.prototype, ObjProto = Object.prototype, FuncProto = Function.prototype; + + // Create quick reference variables for speed access to core prototypes. + var + push = ArrayProto.push, + slice = ArrayProto.slice, + toString = ObjProto.toString, + hasOwnProperty = ObjProto.hasOwnProperty; + + // All **ECMAScript 5** native function implementations that we hope to use + // are declared here. + var + nativeIsArray = Array.isArray, + nativeKeys = Object.keys, + nativeBind = FuncProto.bind, + nativeCreate = Object.create; + + // Naked function reference for surrogate-prototype-swapping. + var Ctor = function(){}; + + // Create a safe reference to the Underscore object for use below. 
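+  // Editor's illustration (not part of the bundled source): `_` can be used
+  // either functionally or by wrapping a value, e.g.
+  //   _.map([1, 2, 3], function (n) { return n * 2; });   // -> [2, 4, 6]
+  //   _([1, 2, 3]).map(function (n) { return n * 2; });   // same result, OO style
+  // The array literal is only a hypothetical example value.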
+ var _ = function(obj) { + if (obj instanceof _) return obj; + if (!(this instanceof _)) return new _(obj); + this._wrapped = obj; + }; + + // Export the Underscore object for **Node.js**, with + // backwards-compatibility for the old `require()` API. If we're in + // the browser, add `_` as a global object. + if (typeof exports !== 'undefined') { + if (typeof module !== 'undefined' && module.exports) { + exports = module.exports = _; + } + exports._ = _; + } else { + root._ = _; + } + + // Current version. + _.VERSION = '1.8.3'; + + // Internal function that returns an efficient (for current engines) version + // of the passed-in callback, to be repeatedly applied in other Underscore + // functions. + var optimizeCb = function(func, context, argCount) { + if (context === void 0) return func; + switch (argCount == null ? 3 : argCount) { + case 1: return function(value) { + return func.call(context, value); + }; + case 2: return function(value, other) { + return func.call(context, value, other); + }; + case 3: return function(value, index, collection) { + return func.call(context, value, index, collection); + }; + case 4: return function(accumulator, value, index, collection) { + return func.call(context, accumulator, value, index, collection); + }; + } + return function() { + return func.apply(context, arguments); + }; + }; + + // A mostly-internal function to generate callbacks that can be applied + // to each element in a collection, returning the desired result — either + // identity, an arbitrary callback, a property matcher, or a property accessor. + var cb = function(value, context, argCount) { + if (value == null) return _.identity; + if (_.isFunction(value)) return optimizeCb(value, context, argCount); + if (_.isObject(value)) return _.matcher(value); + return _.property(value); + }; + _.iteratee = function(value, context) { + return cb(value, context, Infinity); + }; + + // An internal function for creating assigner functions. + var createAssigner = function(keysFunc, undefinedOnly) { + return function(obj) { + var length = arguments.length; + if (length < 2 || obj == null) return obj; + for (var index = 1; index < length; index++) { + var source = arguments[index], + keys = keysFunc(source), + l = keys.length; + for (var i = 0; i < l; i++) { + var key = keys[i]; + if (!undefinedOnly || obj[key] === void 0) obj[key] = source[key]; + } + } + return obj; + }; + }; + + // An internal function for creating a new object that inherits from another. + var baseCreate = function(prototype) { + if (!_.isObject(prototype)) return {}; + if (nativeCreate) return nativeCreate(prototype); + Ctor.prototype = prototype; + var result = new Ctor; + Ctor.prototype = null; + return result; + }; + + var property = function(key) { + return function(obj) { + return obj == null ? void 0 : obj[key]; + }; + }; + + // Helper for collection methods to determine whether a collection + // should be iterated as an array or as an object + // Related: http://people.mozilla.org/~jorendorff/es6-draft.html#sec-tolength + // Avoids a very nasty iOS 8 JIT bug on ARM-64. #2094 + var MAX_ARRAY_INDEX = Math.pow(2, 53) - 1; + var getLength = property('length'); + var isArrayLike = function(collection) { + var length = getLength(collection); + return typeof length == 'number' && length >= 0 && length <= MAX_ARRAY_INDEX; + }; + + // Collection Functions + // -------------------- + + // The cornerstone, an `each` implementation, aka `forEach`. + // Handles raw objects in addition to array-likes. 
Treats all + // sparse array-likes as if they were dense. + _.each = _.forEach = function(obj, iteratee, context) { + iteratee = optimizeCb(iteratee, context); + var i, length; + if (isArrayLike(obj)) { + for (i = 0, length = obj.length; i < length; i++) { + iteratee(obj[i], i, obj); + } + } else { + var keys = _.keys(obj); + for (i = 0, length = keys.length; i < length; i++) { + iteratee(obj[keys[i]], keys[i], obj); + } + } + return obj; + }; + + // Return the results of applying the iteratee to each element. + _.map = _.collect = function(obj, iteratee, context) { + iteratee = cb(iteratee, context); + var keys = !isArrayLike(obj) && _.keys(obj), + length = (keys || obj).length, + results = Array(length); + for (var index = 0; index < length; index++) { + var currentKey = keys ? keys[index] : index; + results[index] = iteratee(obj[currentKey], currentKey, obj); + } + return results; + }; + + // Create a reducing function iterating left or right. + function createReduce(dir) { + // Optimized iterator function as using arguments.length + // in the main function will deoptimize the, see #1991. + function iterator(obj, iteratee, memo, keys, index, length) { + for (; index >= 0 && index < length; index += dir) { + var currentKey = keys ? keys[index] : index; + memo = iteratee(memo, obj[currentKey], currentKey, obj); + } + return memo; + } + + return function(obj, iteratee, memo, context) { + iteratee = optimizeCb(iteratee, context, 4); + var keys = !isArrayLike(obj) && _.keys(obj), + length = (keys || obj).length, + index = dir > 0 ? 0 : length - 1; + // Determine the initial value if none is provided. + if (arguments.length < 3) { + memo = obj[keys ? keys[index] : index]; + index += dir; + } + return iterator(obj, iteratee, memo, keys, index, length); + }; + } + + // **Reduce** builds up a single result from a list of values, aka `inject`, + // or `foldl`. + _.reduce = _.foldl = _.inject = createReduce(1); + + // The right-associative version of reduce, also known as `foldr`. + _.reduceRight = _.foldr = createReduce(-1); + + // Return the first value which passes a truth test. Aliased as `detect`. + _.find = _.detect = function(obj, predicate, context) { + var key; + if (isArrayLike(obj)) { + key = _.findIndex(obj, predicate, context); + } else { + key = _.findKey(obj, predicate, context); + } + if (key !== void 0 && key !== -1) return obj[key]; + }; + + // Return all the elements that pass a truth test. + // Aliased as `select`. + _.filter = _.select = function(obj, predicate, context) { + var results = []; + predicate = cb(predicate, context); + _.each(obj, function(value, index, list) { + if (predicate(value, index, list)) results.push(value); + }); + return results; + }; + + // Return all the elements for which a truth test fails. + _.reject = function(obj, predicate, context) { + return _.filter(obj, _.negate(cb(predicate)), context); + }; + + // Determine whether all of the elements match a truth test. + // Aliased as `all`. + _.every = _.all = function(obj, predicate, context) { + predicate = cb(predicate, context); + var keys = !isArrayLike(obj) && _.keys(obj), + length = (keys || obj).length; + for (var index = 0; index < length; index++) { + var currentKey = keys ? keys[index] : index; + if (!predicate(obj[currentKey], currentKey, obj)) return false; + } + return true; + }; + + // Determine if at least one element in the object matches a truth test. + // Aliased as `any`. 
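+  // Usage sketch (editor's illustration with hypothetical values):
+  //   _.every([2, 4, 6], function (n) { return n % 2 === 0; });  // true
+  //   _.some([1, 3, 5], function (n) { return n % 2 === 0; });   // false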
+ _.some = _.any = function(obj, predicate, context) { + predicate = cb(predicate, context); + var keys = !isArrayLike(obj) && _.keys(obj), + length = (keys || obj).length; + for (var index = 0; index < length; index++) { + var currentKey = keys ? keys[index] : index; + if (predicate(obj[currentKey], currentKey, obj)) return true; + } + return false; + }; + + // Determine if the array or object contains a given item (using `===`). + // Aliased as `includes` and `include`. + _.contains = _.includes = _.include = function(obj, item, fromIndex, guard) { + if (!isArrayLike(obj)) obj = _.values(obj); + if (typeof fromIndex != 'number' || guard) fromIndex = 0; + return _.indexOf(obj, item, fromIndex) >= 0; + }; + + // Invoke a method (with arguments) on every item in a collection. + _.invoke = function(obj, method) { + var args = slice.call(arguments, 2); + var isFunc = _.isFunction(method); + return _.map(obj, function(value) { + var func = isFunc ? method : value[method]; + return func == null ? func : func.apply(value, args); + }); + }; + + // Convenience version of a common use case of `map`: fetching a property. + _.pluck = function(obj, key) { + return _.map(obj, _.property(key)); + }; + + // Convenience version of a common use case of `filter`: selecting only objects + // containing specific `key:value` pairs. + _.where = function(obj, attrs) { + return _.filter(obj, _.matcher(attrs)); + }; + + // Convenience version of a common use case of `find`: getting the first object + // containing specific `key:value` pairs. + _.findWhere = function(obj, attrs) { + return _.find(obj, _.matcher(attrs)); + }; + + // Return the maximum element (or element-based computation). + _.max = function(obj, iteratee, context) { + var result = -Infinity, lastComputed = -Infinity, + value, computed; + if (iteratee == null && obj != null) { + obj = isArrayLike(obj) ? obj : _.values(obj); + for (var i = 0, length = obj.length; i < length; i++) { + value = obj[i]; + if (value > result) { + result = value; + } + } + } else { + iteratee = cb(iteratee, context); + _.each(obj, function(value, index, list) { + computed = iteratee(value, index, list); + if (computed > lastComputed || computed === -Infinity && result === -Infinity) { + result = value; + lastComputed = computed; + } + }); + } + return result; + }; + + // Return the minimum element (or element-based computation). + _.min = function(obj, iteratee, context) { + var result = Infinity, lastComputed = Infinity, + value, computed; + if (iteratee == null && obj != null) { + obj = isArrayLike(obj) ? obj : _.values(obj); + for (var i = 0, length = obj.length; i < length; i++) { + value = obj[i]; + if (value < result) { + result = value; + } + } + } else { + iteratee = cb(iteratee, context); + _.each(obj, function(value, index, list) { + computed = iteratee(value, index, list); + if (computed < lastComputed || computed === Infinity && result === Infinity) { + result = value; + lastComputed = computed; + } + }); + } + return result; + }; + + // Shuffle a collection, using the modern version of the + // [Fisher-Yates shuffle](http://en.wikipedia.org/wiki/Fisher–Yates_shuffle). + _.shuffle = function(obj) { + var set = isArrayLike(obj) ? obj : _.values(obj); + var length = set.length; + var shuffled = Array(length); + for (var index = 0, rand; index < length; index++) { + rand = _.random(0, index); + if (rand !== index) shuffled[index] = shuffled[rand]; + shuffled[rand] = set[index]; + } + return shuffled; + }; + + // Sample **n** random values from a collection. 
+ // If **n** is not specified, returns a single random element. + // The internal `guard` argument allows it to work with `map`. + _.sample = function(obj, n, guard) { + if (n == null || guard) { + if (!isArrayLike(obj)) obj = _.values(obj); + return obj[_.random(obj.length - 1)]; + } + return _.shuffle(obj).slice(0, Math.max(0, n)); + }; + + // Sort the object's values by a criterion produced by an iteratee. + _.sortBy = function(obj, iteratee, context) { + iteratee = cb(iteratee, context); + return _.pluck(_.map(obj, function(value, index, list) { + return { + value: value, + index: index, + criteria: iteratee(value, index, list) + }; + }).sort(function(left, right) { + var a = left.criteria; + var b = right.criteria; + if (a !== b) { + if (a > b || a === void 0) return 1; + if (a < b || b === void 0) return -1; + } + return left.index - right.index; + }), 'value'); + }; + + // An internal function used for aggregate "group by" operations. + var group = function(behavior) { + return function(obj, iteratee, context) { + var result = {}; + iteratee = cb(iteratee, context); + _.each(obj, function(value, index) { + var key = iteratee(value, index, obj); + behavior(result, value, key); + }); + return result; + }; + }; + + // Groups the object's values by a criterion. Pass either a string attribute + // to group by, or a function that returns the criterion. + _.groupBy = group(function(result, value, key) { + if (_.has(result, key)) result[key].push(value); else result[key] = [value]; + }); + + // Indexes the object's values by a criterion, similar to `groupBy`, but for + // when you know that your index values will be unique. + _.indexBy = group(function(result, value, key) { + result[key] = value; + }); + + // Counts instances of an object that group by a certain criterion. Pass + // either a string attribute to count by, or a function that returns the + // criterion. + _.countBy = group(function(result, value, key) { + if (_.has(result, key)) result[key]++; else result[key] = 1; + }); + + // Safely create a real, live array from anything iterable. + _.toArray = function(obj) { + if (!obj) return []; + if (_.isArray(obj)) return slice.call(obj); + if (isArrayLike(obj)) return _.map(obj, _.identity); + return _.values(obj); + }; + + // Return the number of elements in an object. + _.size = function(obj) { + if (obj == null) return 0; + return isArrayLike(obj) ? obj.length : _.keys(obj).length; + }; + + // Split a collection into two arrays: one whose elements all satisfy the given + // predicate, and one whose elements all do not satisfy the predicate. + _.partition = function(obj, predicate, context) { + predicate = cb(predicate, context); + var pass = [], fail = []; + _.each(obj, function(value, key, obj) { + (predicate(value, key, obj) ? pass : fail).push(value); + }); + return [pass, fail]; + }; + + // Array Functions + // --------------- + + // Get the first element of an array. Passing **n** will return the first N + // values in the array. Aliased as `head` and `take`. The **guard** check + // allows it to work with `_.map`. + _.first = _.head = _.take = function(array, n, guard) { + if (array == null) return void 0; + if (n == null || guard) return array[0]; + return _.initial(array, array.length - n); + }; + + // Returns everything but the last entry of the array. Especially useful on + // the arguments object. Passing **n** will return all the values in + // the array, excluding the last N. 
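+  // Usage sketch (editor's illustration with hypothetical values):
+  //   _.first([5, 4, 3, 2, 1], 2);   // -> [5, 4]
+  //   _.initial([5, 4, 3, 2, 1]);    // -> [5, 4, 3, 2]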
+ _.initial = function(array, n, guard) { + return slice.call(array, 0, Math.max(0, array.length - (n == null || guard ? 1 : n))); + }; + + // Get the last element of an array. Passing **n** will return the last N + // values in the array. + _.last = function(array, n, guard) { + if (array == null) return void 0; + if (n == null || guard) return array[array.length - 1]; + return _.rest(array, Math.max(0, array.length - n)); + }; + + // Returns everything but the first entry of the array. Aliased as `tail` and `drop`. + // Especially useful on the arguments object. Passing an **n** will return + // the rest N values in the array. + _.rest = _.tail = _.drop = function(array, n, guard) { + return slice.call(array, n == null || guard ? 1 : n); + }; + + // Trim out all falsy values from an array. + _.compact = function(array) { + return _.filter(array, _.identity); + }; + + // Internal implementation of a recursive `flatten` function. + var flatten = function(input, shallow, strict, startIndex) { + var output = [], idx = 0; + for (var i = startIndex || 0, length = getLength(input); i < length; i++) { + var value = input[i]; + if (isArrayLike(value) && (_.isArray(value) || _.isArguments(value))) { + //flatten current level of array or arguments object + if (!shallow) value = flatten(value, shallow, strict); + var j = 0, len = value.length; + output.length += len; + while (j < len) { + output[idx++] = value[j++]; + } + } else if (!strict) { + output[idx++] = value; + } + } + return output; + }; + + // Flatten out an array, either recursively (by default), or just one level. + _.flatten = function(array, shallow) { + return flatten(array, shallow, false); + }; + + // Return a version of the array that does not contain the specified value(s). + _.without = function(array) { + return _.difference(array, slice.call(arguments, 1)); + }; + + // Produce a duplicate-free version of the array. If the array has already + // been sorted, you have the option of using a faster algorithm. + // Aliased as `unique`. + _.uniq = _.unique = function(array, isSorted, iteratee, context) { + if (!_.isBoolean(isSorted)) { + context = iteratee; + iteratee = isSorted; + isSorted = false; + } + if (iteratee != null) iteratee = cb(iteratee, context); + var result = []; + var seen = []; + for (var i = 0, length = getLength(array); i < length; i++) { + var value = array[i], + computed = iteratee ? iteratee(value, i, array) : value; + if (isSorted) { + if (!i || seen !== computed) result.push(value); + seen = computed; + } else if (iteratee) { + if (!_.contains(seen, computed)) { + seen.push(computed); + result.push(value); + } + } else if (!_.contains(result, value)) { + result.push(value); + } + } + return result; + }; + + // Produce an array that contains the union: each distinct element from all of + // the passed-in arrays. + _.union = function() { + return _.uniq(flatten(arguments, true, true)); + }; + + // Produce an array that contains every item shared between all the + // passed-in arrays. + _.intersection = function(array) { + var result = []; + var argsLength = arguments.length; + for (var i = 0, length = getLength(array); i < length; i++) { + var item = array[i]; + if (_.contains(result, item)) continue; + for (var j = 1; j < argsLength; j++) { + if (!_.contains(arguments[j], item)) break; + } + if (j === argsLength) result.push(item); + } + return result; + }; + + // Take the difference between one array and a number of other arrays. + // Only the elements present in just the first array will remain. 
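+  // Usage sketch (editor's illustration with hypothetical values):
+  //   _.union([1, 2, 3], [2, 4]);         // -> [1, 2, 3, 4]
+  //   _.intersection([1, 2, 3], [2, 3]);  // -> [2, 3]
+  //   _.difference([1, 2, 3], [2]);       // -> [1, 3]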
+ _.difference = function(array) { + var rest = flatten(arguments, true, true, 1); + return _.filter(array, function(value){ + return !_.contains(rest, value); + }); + }; + + // Zip together multiple lists into a single array -- elements that share + // an index go together. + _.zip = function() { + return _.unzip(arguments); + }; + + // Complement of _.zip. Unzip accepts an array of arrays and groups + // each array's elements on shared indices + _.unzip = function(array) { + var length = array && _.max(array, getLength).length || 0; + var result = Array(length); + + for (var index = 0; index < length; index++) { + result[index] = _.pluck(array, index); + } + return result; + }; + + // Converts lists into objects. Pass either a single array of `[key, value]` + // pairs, or two parallel arrays of the same length -- one of keys, and one of + // the corresponding values. + _.object = function(list, values) { + var result = {}; + for (var i = 0, length = getLength(list); i < length; i++) { + if (values) { + result[list[i]] = values[i]; + } else { + result[list[i][0]] = list[i][1]; + } + } + return result; + }; + + // Generator function to create the findIndex and findLastIndex functions + function createPredicateIndexFinder(dir) { + return function(array, predicate, context) { + predicate = cb(predicate, context); + var length = getLength(array); + var index = dir > 0 ? 0 : length - 1; + for (; index >= 0 && index < length; index += dir) { + if (predicate(array[index], index, array)) return index; + } + return -1; + }; + } + + // Returns the first index on an array-like that passes a predicate test + _.findIndex = createPredicateIndexFinder(1); + _.findLastIndex = createPredicateIndexFinder(-1); + + // Use a comparator function to figure out the smallest index at which + // an object should be inserted so as to maintain order. Uses binary search. + _.sortedIndex = function(array, obj, iteratee, context) { + iteratee = cb(iteratee, context, 1); + var value = iteratee(obj); + var low = 0, high = getLength(array); + while (low < high) { + var mid = Math.floor((low + high) / 2); + if (iteratee(array[mid]) < value) low = mid + 1; else high = mid; + } + return low; + }; + + // Generator function to create the indexOf and lastIndexOf functions + function createIndexFinder(dir, predicateFind, sortedIndex) { + return function(array, item, idx) { + var i = 0, length = getLength(array); + if (typeof idx == 'number') { + if (dir > 0) { + i = idx >= 0 ? idx : Math.max(idx + length, i); + } else { + length = idx >= 0 ? Math.min(idx + 1, length) : idx + length + 1; + } + } else if (sortedIndex && idx && length) { + idx = sortedIndex(array, item); + return array[idx] === item ? idx : -1; + } + if (item !== item) { + idx = predicateFind(slice.call(array, i, length), _.isNaN); + return idx >= 0 ? idx + i : -1; + } + for (idx = dir > 0 ? i : length - 1; idx >= 0 && idx < length; idx += dir) { + if (array[idx] === item) return idx; + } + return -1; + }; + } + + // Return the position of the first occurrence of an item in an array, + // or -1 if the item is not included in the array. + // If the array is large and already in sort order, pass `true` + // for **isSorted** to use binary search. + _.indexOf = createIndexFinder(1, _.findIndex, _.sortedIndex); + _.lastIndexOf = createIndexFinder(-1, _.findLastIndex); + + // Generate an integer Array containing an arithmetic progression. A port of + // the native Python `range()` function. 
See + // [the Python documentation](http://docs.python.org/library/functions.html#range). + _.range = function(start, stop, step) { + if (stop == null) { + stop = start || 0; + start = 0; + } + step = step || 1; + + var length = Math.max(Math.ceil((stop - start) / step), 0); + var range = Array(length); + + for (var idx = 0; idx < length; idx++, start += step) { + range[idx] = start; + } + + return range; + }; + + // Function (ahem) Functions + // ------------------ + + // Determines whether to execute a function as a constructor + // or a normal function with the provided arguments + var executeBound = function(sourceFunc, boundFunc, context, callingContext, args) { + if (!(callingContext instanceof boundFunc)) return sourceFunc.apply(context, args); + var self = baseCreate(sourceFunc.prototype); + var result = sourceFunc.apply(self, args); + if (_.isObject(result)) return result; + return self; + }; + + // Create a function bound to a given object (assigning `this`, and arguments, + // optionally). Delegates to **ECMAScript 5**'s native `Function.bind` if + // available. + _.bind = function(func, context) { + if (nativeBind && func.bind === nativeBind) return nativeBind.apply(func, slice.call(arguments, 1)); + if (!_.isFunction(func)) throw new TypeError('Bind must be called on a function'); + var args = slice.call(arguments, 2); + var bound = function() { + return executeBound(func, bound, context, this, args.concat(slice.call(arguments))); + }; + return bound; + }; + + // Partially apply a function by creating a version that has had some of its + // arguments pre-filled, without changing its dynamic `this` context. _ acts + // as a placeholder, allowing any combination of arguments to be pre-filled. + _.partial = function(func) { + var boundArgs = slice.call(arguments, 1); + var bound = function() { + var position = 0, length = boundArgs.length; + var args = Array(length); + for (var i = 0; i < length; i++) { + args[i] = boundArgs[i] === _ ? arguments[position++] : boundArgs[i]; + } + while (position < arguments.length) args.push(arguments[position++]); + return executeBound(func, bound, this, this, args); + }; + return bound; + }; + + // Bind a number of an object's methods to that object. Remaining arguments + // are the method names to be bound. Useful for ensuring that all callbacks + // defined on an object belong to it. + _.bindAll = function(obj) { + var i, length = arguments.length, key; + if (length <= 1) throw new Error('bindAll must be passed function names'); + for (i = 1; i < length; i++) { + key = arguments[i]; + obj[key] = _.bind(obj[key], obj); + } + return obj; + }; + + // Memoize an expensive function by storing its results. + _.memoize = function(func, hasher) { + var memoize = function(key) { + var cache = memoize.cache; + var address = '' + (hasher ? hasher.apply(this, arguments) : key); + if (!_.has(cache, address)) cache[address] = func.apply(this, arguments); + return cache[address]; + }; + memoize.cache = {}; + return memoize; + }; + + // Delays a function for the given number of milliseconds, and then calls + // it with the arguments supplied. + _.delay = function(func, wait) { + var args = slice.call(arguments, 2); + return setTimeout(function(){ + return func.apply(null, args); + }, wait); + }; + + // Defers a function, scheduling it to run after the current call stack has + // cleared. + _.defer = _.partial(_.delay, _, 1); + + // Returns a function, that, when invoked, will only be triggered at most once + // during a given window of time. 
Normally, the throttled function will run + // as much as it can, without ever going more than once per `wait` duration; + // but if you'd like to disable the execution on the leading edge, pass + // `{leading: false}`. To disable execution on the trailing edge, ditto. + _.throttle = function(func, wait, options) { + var context, args, result; + var timeout = null; + var previous = 0; + if (!options) options = {}; + var later = function() { + previous = options.leading === false ? 0 : _.now(); + timeout = null; + result = func.apply(context, args); + if (!timeout) context = args = null; + }; + return function() { + var now = _.now(); + if (!previous && options.leading === false) previous = now; + var remaining = wait - (now - previous); + context = this; + args = arguments; + if (remaining <= 0 || remaining > wait) { + if (timeout) { + clearTimeout(timeout); + timeout = null; + } + previous = now; + result = func.apply(context, args); + if (!timeout) context = args = null; + } else if (!timeout && options.trailing !== false) { + timeout = setTimeout(later, remaining); + } + return result; + }; + }; + + // Returns a function, that, as long as it continues to be invoked, will not + // be triggered. The function will be called after it stops being called for + // N milliseconds. If `immediate` is passed, trigger the function on the + // leading edge, instead of the trailing. + _.debounce = function(func, wait, immediate) { + var timeout, args, context, timestamp, result; + + var later = function() { + var last = _.now() - timestamp; + + if (last < wait && last >= 0) { + timeout = setTimeout(later, wait - last); + } else { + timeout = null; + if (!immediate) { + result = func.apply(context, args); + if (!timeout) context = args = null; + } + } + }; + + return function() { + context = this; + args = arguments; + timestamp = _.now(); + var callNow = immediate && !timeout; + if (!timeout) timeout = setTimeout(later, wait); + if (callNow) { + result = func.apply(context, args); + context = args = null; + } + + return result; + }; + }; + + // Returns the first function passed as an argument to the second, + // allowing you to adjust arguments, run code before and after, and + // conditionally execute the original function. + _.wrap = function(func, wrapper) { + return _.partial(wrapper, func); + }; + + // Returns a negated version of the passed-in predicate. + _.negate = function(predicate) { + return function() { + return !predicate.apply(this, arguments); + }; + }; + + // Returns a function that is the composition of a list of functions, each + // consuming the return value of the function that follows. + _.compose = function() { + var args = arguments; + var start = args.length - 1; + return function() { + var i = start; + var result = args[start].apply(this, arguments); + while (i--) result = args[i].call(this, result); + return result; + }; + }; + + // Returns a function that will only be executed on and after the Nth call. + _.after = function(times, func) { + return function() { + if (--times < 1) { + return func.apply(this, arguments); + } + }; + }; + + // Returns a function that will only be executed up to (but not including) the Nth call. + _.before = function(times, func) { + var memo; + return function() { + if (--times > 0) { + memo = func.apply(this, arguments); + } + if (times <= 1) func = null; + return memo; + }; + }; + + // Returns a function that will be executed at most one time, no matter how + // often you call it. Useful for lazy initialization. 
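+  // Usage sketch (editor's illustration; `createConnection` is hypothetical):
+  //   var init = _.once(createConnection);
+  //   init(); init();   // the second call returns the first call's cached result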
+ _.once = _.partial(_.before, 2); + + // Object Functions + // ---------------- + + // Keys in IE < 9 that won't be iterated by `for key in ...` and thus missed. + var hasEnumBug = !{toString: null}.propertyIsEnumerable('toString'); + var nonEnumerableProps = ['valueOf', 'isPrototypeOf', 'toString', + 'propertyIsEnumerable', 'hasOwnProperty', 'toLocaleString']; + + function collectNonEnumProps(obj, keys) { + var nonEnumIdx = nonEnumerableProps.length; + var constructor = obj.constructor; + var proto = (_.isFunction(constructor) && constructor.prototype) || ObjProto; + + // Constructor is a special case. + var prop = 'constructor'; + if (_.has(obj, prop) && !_.contains(keys, prop)) keys.push(prop); + + while (nonEnumIdx--) { + prop = nonEnumerableProps[nonEnumIdx]; + if (prop in obj && obj[prop] !== proto[prop] && !_.contains(keys, prop)) { + keys.push(prop); + } + } + } + + // Retrieve the names of an object's own properties. + // Delegates to **ECMAScript 5**'s native `Object.keys` + _.keys = function(obj) { + if (!_.isObject(obj)) return []; + if (nativeKeys) return nativeKeys(obj); + var keys = []; + for (var key in obj) if (_.has(obj, key)) keys.push(key); + // Ahem, IE < 9. + if (hasEnumBug) collectNonEnumProps(obj, keys); + return keys; + }; + + // Retrieve all the property names of an object. + _.allKeys = function(obj) { + if (!_.isObject(obj)) return []; + var keys = []; + for (var key in obj) keys.push(key); + // Ahem, IE < 9. + if (hasEnumBug) collectNonEnumProps(obj, keys); + return keys; + }; + + // Retrieve the values of an object's properties. + _.values = function(obj) { + var keys = _.keys(obj); + var length = keys.length; + var values = Array(length); + for (var i = 0; i < length; i++) { + values[i] = obj[keys[i]]; + } + return values; + }; + + // Returns the results of applying the iteratee to each element of the object + // In contrast to _.map it returns an object + _.mapObject = function(obj, iteratee, context) { + iteratee = cb(iteratee, context); + var keys = _.keys(obj), + length = keys.length, + results = {}, + currentKey; + for (var index = 0; index < length; index++) { + currentKey = keys[index]; + results[currentKey] = iteratee(obj[currentKey], currentKey, obj); + } + return results; + }; + + // Convert an object into a list of `[key, value]` pairs. + _.pairs = function(obj) { + var keys = _.keys(obj); + var length = keys.length; + var pairs = Array(length); + for (var i = 0; i < length; i++) { + pairs[i] = [keys[i], obj[keys[i]]]; + } + return pairs; + }; + + // Invert the keys and values of an object. The values must be serializable. + _.invert = function(obj) { + var result = {}; + var keys = _.keys(obj); + for (var i = 0, length = keys.length; i < length; i++) { + result[obj[keys[i]]] = keys[i]; + } + return result; + }; + + // Return a sorted list of the function names available on the object. + // Aliased as `methods` + _.functions = _.methods = function(obj) { + var names = []; + for (var key in obj) { + if (_.isFunction(obj[key])) names.push(key); + } + return names.sort(); + }; + + // Extend a given object with all the properties in passed-in object(s). 
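+  // Usage sketch (editor's illustration with hypothetical objects):
+  //   _.extend({name: 'moe'}, {age: 50});
+  //   // -> {name: 'moe', age: 50}
+  //   _.defaults({flavor: 'chocolate'}, {flavor: 'vanilla', sprinkles: true});
+  //   // -> {flavor: 'chocolate', sprinkles: true}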
+ _.extend = createAssigner(_.allKeys); + + // Assigns a given object with all the own properties in the passed-in object(s) + // (https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object/assign) + _.extendOwn = _.assign = createAssigner(_.keys); + + // Returns the first key on an object that passes a predicate test + _.findKey = function(obj, predicate, context) { + predicate = cb(predicate, context); + var keys = _.keys(obj), key; + for (var i = 0, length = keys.length; i < length; i++) { + key = keys[i]; + if (predicate(obj[key], key, obj)) return key; + } + }; + + // Return a copy of the object only containing the whitelisted properties. + _.pick = function(object, oiteratee, context) { + var result = {}, obj = object, iteratee, keys; + if (obj == null) return result; + if (_.isFunction(oiteratee)) { + keys = _.allKeys(obj); + iteratee = optimizeCb(oiteratee, context); + } else { + keys = flatten(arguments, false, false, 1); + iteratee = function(value, key, obj) { return key in obj; }; + obj = Object(obj); + } + for (var i = 0, length = keys.length; i < length; i++) { + var key = keys[i]; + var value = obj[key]; + if (iteratee(value, key, obj)) result[key] = value; + } + return result; + }; + + // Return a copy of the object without the blacklisted properties. + _.omit = function(obj, iteratee, context) { + if (_.isFunction(iteratee)) { + iteratee = _.negate(iteratee); + } else { + var keys = _.map(flatten(arguments, false, false, 1), String); + iteratee = function(value, key) { + return !_.contains(keys, key); + }; + } + return _.pick(obj, iteratee, context); + }; + + // Fill in a given object with default properties. + _.defaults = createAssigner(_.allKeys, true); + + // Creates an object that inherits from the given prototype object. + // If additional properties are provided then they will be added to the + // created object. + _.create = function(prototype, props) { + var result = baseCreate(prototype); + if (props) _.extendOwn(result, props); + return result; + }; + + // Create a (shallow-cloned) duplicate of an object. + _.clone = function(obj) { + if (!_.isObject(obj)) return obj; + return _.isArray(obj) ? obj.slice() : _.extend({}, obj); + }; + + // Invokes interceptor with the obj, and then returns obj. + // The primary purpose of this method is to "tap into" a method chain, in + // order to perform operations on intermediate results within the chain. + _.tap = function(obj, interceptor) { + interceptor(obj); + return obj; + }; + + // Returns whether an object has a given set of `key:value` pairs. + _.isMatch = function(object, attrs) { + var keys = _.keys(attrs), length = keys.length; + if (object == null) return !length; + var obj = Object(object); + for (var i = 0; i < length; i++) { + var key = keys[i]; + if (attrs[key] !== obj[key] || !(key in obj)) return false; + } + return true; + }; + + + // Internal recursive comparison function for `isEqual`. + var eq = function(a, b, aStack, bStack) { + // Identical objects are equal. `0 === -0`, but they aren't identical. + // See the [Harmony `egal` proposal](http://wiki.ecmascript.org/doku.php?id=harmony:egal). + if (a === b) return a !== 0 || 1 / a === 1 / b; + // A strict comparison is necessary because `null == undefined`. + if (a == null || b == null) return a === b; + // Unwrap any wrapped objects. + if (a instanceof _) a = a._wrapped; + if (b instanceof _) b = b._wrapped; + // Compare `[[Class]]` names. 
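+    // Editor's note: toString.call(x) yields tags such as '[object Array]',
+    // '[object Date]', or '[object RegExp]'; the switch below dispatches on
+    // these tags to compare value types by their primitive representations.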
+ var className = toString.call(a); + if (className !== toString.call(b)) return false; + switch (className) { + // Strings, numbers, regular expressions, dates, and booleans are compared by value. + case '[object RegExp]': + // RegExps are coerced to strings for comparison (Note: '' + /a/i === '/a/i') + case '[object String]': + // Primitives and their corresponding object wrappers are equivalent; thus, `"5"` is + // equivalent to `new String("5")`. + return '' + a === '' + b; + case '[object Number]': + // `NaN`s are equivalent, but non-reflexive. + // Object(NaN) is equivalent to NaN + if (+a !== +a) return +b !== +b; + // An `egal` comparison is performed for other numeric values. + return +a === 0 ? 1 / +a === 1 / b : +a === +b; + case '[object Date]': + case '[object Boolean]': + // Coerce dates and booleans to numeric primitive values. Dates are compared by their + // millisecond representations. Note that invalid dates with millisecond representations + // of `NaN` are not equivalent. + return +a === +b; + } + + var areArrays = className === '[object Array]'; + if (!areArrays) { + if (typeof a != 'object' || typeof b != 'object') return false; + + // Objects with different constructors are not equivalent, but `Object`s or `Array`s + // from different frames are. + var aCtor = a.constructor, bCtor = b.constructor; + if (aCtor !== bCtor && !(_.isFunction(aCtor) && aCtor instanceof aCtor && + _.isFunction(bCtor) && bCtor instanceof bCtor) + && ('constructor' in a && 'constructor' in b)) { + return false; + } + } + // Assume equality for cyclic structures. The algorithm for detecting cyclic + // structures is adapted from ES 5.1 section 15.12.3, abstract operation `JO`. + + // Initializing stack of traversed objects. + // It's done here since we only need them for objects and arrays comparison. + aStack = aStack || []; + bStack = bStack || []; + var length = aStack.length; + while (length--) { + // Linear search. Performance is inversely proportional to the number of + // unique nested structures. + if (aStack[length] === a) return bStack[length] === b; + } + + // Add the first object to the stack of traversed objects. + aStack.push(a); + bStack.push(b); + + // Recursively compare objects and arrays. + if (areArrays) { + // Compare array lengths to determine if a deep comparison is necessary. + length = a.length; + if (length !== b.length) return false; + // Deep compare the contents, ignoring non-numeric properties. + while (length--) { + if (!eq(a[length], b[length], aStack, bStack)) return false; + } + } else { + // Deep compare objects. + var keys = _.keys(a), key; + length = keys.length; + // Ensure that both objects contain the same number of properties before comparing deep equality. + if (_.keys(b).length !== length) return false; + while (length--) { + // Deep compare each member + key = keys[length]; + if (!(_.has(b, key) && eq(a[key], b[key], aStack, bStack))) return false; + } + } + // Remove the first object from the stack of traversed objects. + aStack.pop(); + bStack.pop(); + return true; + }; + + // Perform a deep comparison to check if two objects are equal. + _.isEqual = function(a, b) { + return eq(a, b); + }; + + // Is a given array, string, or object empty? + // An "empty" object has no enumerable own-properties. + _.isEmpty = function(obj) { + if (obj == null) return true; + if (isArrayLike(obj) && (_.isArray(obj) || _.isString(obj) || _.isArguments(obj))) return obj.length === 0; + return _.keys(obj).length === 0; + }; + + // Is a given value a DOM element? 
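+  // Usage sketch (editor's illustration, assuming a browser DOM is available):
+  //   _.isElement(document.createElement('div'));  // true
+  //   _.isElement('<div>');                        // false (a string, not a node)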
+ _.isElement = function(obj) { + return !!(obj && obj.nodeType === 1); + }; + + // Is a given value an array? + // Delegates to ECMA5's native Array.isArray + _.isArray = nativeIsArray || function(obj) { + return toString.call(obj) === '[object Array]'; + }; + + // Is a given variable an object? + _.isObject = function(obj) { + var type = typeof obj; + return type === 'function' || type === 'object' && !!obj; + }; + + // Add some isType methods: isArguments, isFunction, isString, isNumber, isDate, isRegExp, isError. + _.each(['Arguments', 'Function', 'String', 'Number', 'Date', 'RegExp', 'Error'], function(name) { + _['is' + name] = function(obj) { + return toString.call(obj) === '[object ' + name + ']'; + }; + }); + + // Define a fallback version of the method in browsers (ahem, IE < 9), where + // there isn't any inspectable "Arguments" type. + if (!_.isArguments(arguments)) { + _.isArguments = function(obj) { + return _.has(obj, 'callee'); + }; + } + + // Optimize `isFunction` if appropriate. Work around some typeof bugs in old v8, + // IE 11 (#1621), and in Safari 8 (#1929). + if (typeof /./ != 'function' && typeof Int8Array != 'object') { + _.isFunction = function(obj) { + return typeof obj == 'function' || false; + }; + } + + // Is a given object a finite number? + _.isFinite = function(obj) { + return isFinite(obj) && !isNaN(parseFloat(obj)); + }; + + // Is the given value `NaN`? (NaN is the only number which does not equal itself). + _.isNaN = function(obj) { + return _.isNumber(obj) && obj !== +obj; + }; + + // Is a given value a boolean? + _.isBoolean = function(obj) { + return obj === true || obj === false || toString.call(obj) === '[object Boolean]'; + }; + + // Is a given value equal to null? + _.isNull = function(obj) { + return obj === null; + }; + + // Is a given variable undefined? + _.isUndefined = function(obj) { + return obj === void 0; + }; + + // Shortcut function for checking if an object has a given property directly + // on itself (in other words, not on a prototype). + _.has = function(obj, key) { + return obj != null && hasOwnProperty.call(obj, key); + }; + + // Utility Functions + // ----------------- + + // Run Underscore.js in *noConflict* mode, returning the `_` variable to its + // previous owner. Returns a reference to the Underscore object. + _.noConflict = function() { + root._ = previousUnderscore; + return this; + }; + + // Keep the identity function around for default iteratees. + _.identity = function(value) { + return value; + }; + + // Predicate-generating functions. Often useful outside of Underscore. + _.constant = function(value) { + return function() { + return value; + }; + }; + + _.noop = function(){}; + + _.property = property; + + // Generates a function for a given object that returns a given property. + _.propertyOf = function(obj) { + return obj == null ? function(){} : function(key) { + return obj[key]; + }; + }; + + // Returns a predicate for checking whether an object has a given set of + // `key:value` pairs. + _.matcher = _.matches = function(attrs) { + attrs = _.extendOwn({}, attrs); + return function(obj) { + return _.isMatch(obj, attrs); + }; + }; + + // Run a function **n** times. + _.times = function(n, iteratee, context) { + var accum = Array(Math.max(0, n)); + iteratee = optimizeCb(iteratee, context, 1); + for (var i = 0; i < n; i++) accum[i] = iteratee(i); + return accum; + }; + + // Return a random integer between min and max (inclusive). 
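+  // Usage sketch (editor's illustration):
+  //   _.random(0, 10);   // an integer between 0 and 10, inclusive
+  //   _.random(100);     // same as _.random(0, 100)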
+ _.random = function(min, max) { + if (max == null) { + max = min; + min = 0; + } + return min + Math.floor(Math.random() * (max - min + 1)); + }; + + // A (possibly faster) way to get the current timestamp as an integer. + _.now = Date.now || function() { + return new Date().getTime(); + }; + + // List of HTML entities for escaping. + var escapeMap = { + '&': '&', + '<': '<', + '>': '>', + '"': '"', + "'": ''', + '`': '`' + }; + var unescapeMap = _.invert(escapeMap); + + // Functions for escaping and unescaping strings to/from HTML interpolation. + var createEscaper = function(map) { + var escaper = function(match) { + return map[match]; + }; + // Regexes for identifying a key that needs to be escaped + var source = '(?:' + _.keys(map).join('|') + ')'; + var testRegexp = RegExp(source); + var replaceRegexp = RegExp(source, 'g'); + return function(string) { + string = string == null ? '' : '' + string; + return testRegexp.test(string) ? string.replace(replaceRegexp, escaper) : string; + }; + }; + _.escape = createEscaper(escapeMap); + _.unescape = createEscaper(unescapeMap); + + // If the value of the named `property` is a function then invoke it with the + // `object` as context; otherwise, return it. + _.result = function(object, property, fallback) { + var value = object == null ? void 0 : object[property]; + if (value === void 0) { + value = fallback; + } + return _.isFunction(value) ? value.call(object) : value; + }; + + // Generate a unique integer id (unique within the entire client session). + // Useful for temporary DOM ids. + var idCounter = 0; + _.uniqueId = function(prefix) { + var id = ++idCounter + ''; + return prefix ? prefix + id : id; + }; + + // By default, Underscore uses ERB-style template delimiters, change the + // following template settings to use alternative delimiters. + _.templateSettings = { + evaluate : /<%([\s\S]+?)%>/g, + interpolate : /<%=([\s\S]+?)%>/g, + escape : /<%-([\s\S]+?)%>/g + }; + + // When customizing `templateSettings`, if you don't want to define an + // interpolation, evaluation or escaping regex, we need one that is + // guaranteed not to match. + var noMatch = /(.)^/; + + // Certain characters need to be escaped so that they can be put into a + // string literal. + var escapes = { + "'": "'", + '\\': '\\', + '\r': 'r', + '\n': 'n', + '\u2028': 'u2028', + '\u2029': 'u2029' + }; + + var escaper = /\\|'|\r|\n|\u2028|\u2029/g; + + var escapeChar = function(match) { + return '\\' + escapes[match]; + }; + + // JavaScript micro-templating, similar to John Resig's implementation. + // Underscore templating handles arbitrary delimiters, preserves whitespace, + // and correctly escapes quotes within interpolated code. + // NB: `oldSettings` only exists for backwards compatibility. + _.template = function(text, settings, oldSettings) { + if (!settings && oldSettings) settings = oldSettings; + settings = _.defaults({}, settings, _.templateSettings); + + // Combine delimiters into one regular expression via alternation. + var matcher = RegExp([ + (settings.escape || noMatch).source, + (settings.interpolate || noMatch).source, + (settings.evaluate || noMatch).source + ].join('|') + '|$', 'g'); + + // Compile the template source, escaping string literals appropriately. 
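+    // Editor's illustration of the end result (not part of the original source):
+    //   var greet = _.template('hello <%= name %>');
+    //   greet({name: 'moe'});   // -> 'hello moe'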
+ var index = 0; + var source = "__p+='"; + text.replace(matcher, function(match, escape, interpolate, evaluate, offset) { + source += text.slice(index, offset).replace(escaper, escapeChar); + index = offset + match.length; + + if (escape) { + source += "'+\n((__t=(" + escape + "))==null?'':_.escape(__t))+\n'"; + } else if (interpolate) { + source += "'+\n((__t=(" + interpolate + "))==null?'':__t)+\n'"; + } else if (evaluate) { + source += "';\n" + evaluate + "\n__p+='"; + } + + // Adobe VMs need the match returned to produce the correct offest. + return match; + }); + source += "';\n"; + + // If a variable is not specified, place data values in local scope. + if (!settings.variable) source = 'with(obj||{}){\n' + source + '}\n'; + + source = "var __t,__p='',__j=Array.prototype.join," + + "print=function(){__p+=__j.call(arguments,'');};\n" + + source + 'return __p;\n'; + + try { + var render = new Function(settings.variable || 'obj', '_', source); + } catch (e) { + e.source = source; + throw e; + } + + var template = function(data) { + return render.call(this, data, _); + }; + + // Provide the compiled source as a convenience for precompilation. + var argument = settings.variable || 'obj'; + template.source = 'function(' + argument + '){\n' + source + '}'; + + return template; + }; + + // Add a "chain" function. Start chaining a wrapped Underscore object. + _.chain = function(obj) { + var instance = _(obj); + instance._chain = true; + return instance; + }; + + // OOP + // --------------- + // If Underscore is called as a function, it returns a wrapped object that + // can be used OO-style. This wrapper holds altered versions of all the + // underscore functions. Wrapped objects may be chained. + + // Helper function to continue chaining intermediate results. + var result = function(instance, obj) { + return instance._chain ? _(obj).chain() : obj; + }; + + // Add your own custom functions to the Underscore object. + _.mixin = function(obj) { + _.each(_.functions(obj), function(name) { + var func = _[name] = obj[name]; + _.prototype[name] = function() { + var args = [this._wrapped]; + push.apply(args, arguments); + return result(this, func.apply(_, args)); + }; + }); + }; + + // Add all of the Underscore functions to the wrapper object. + _.mixin(_); + + // Add all mutator Array functions to the wrapper. + _.each(['pop', 'push', 'reverse', 'shift', 'sort', 'splice', 'unshift'], function(name) { + var method = ArrayProto[name]; + _.prototype[name] = function() { + var obj = this._wrapped; + method.apply(obj, arguments); + if ((name === 'shift' || name === 'splice') && obj.length === 0) delete obj[0]; + return result(this, obj); + }; + }); + + // Add all accessor Array functions to the wrapper. + _.each(['concat', 'join', 'slice'], function(name) { + var method = ArrayProto[name]; + _.prototype[name] = function() { + return result(this, method.apply(this._wrapped, arguments)); + }; + }); + + // Extracts the result from a wrapped and chained object. + _.prototype.value = function() { + return this._wrapped; + }; + + // Provide unwrapping proxy for some methods used in engine operations + // such as arithmetic and JSON stringification. + _.prototype.valueOf = _.prototype.toJSON = _.prototype.value; + + _.prototype.toString = function() { + return '' + this._wrapped; + }; + + // AMD registration happens at the end for compatibility with AMD loaders + // that may not enforce next-turn semantics on modules. 
Even though general + // practice for AMD registration is to be anonymous, underscore registers + // as a named module because, like jQuery, it is a base library that is + // popular enough to be bundled in a third party lib, but not be part of + // an AMD load request. Those cases could generate an error when an + // anonymous define() is called outside of a loader request. + if (typeof define === 'function' && define.amd) { + define('underscore', [], function() { + return _; + }); + } +}.call(this)); + +},{}],26:[function(require,module,exports){ +arguments[4][19][0].apply(exports,arguments) +},{"dup":19}],27:[function(require,module,exports){ +module.exports = function isBuffer(arg) { + return arg && typeof arg === 'object' + && typeof arg.copy === 'function' + && typeof arg.fill === 'function' + && typeof arg.readUInt8 === 'function'; +} +},{}],28:[function(require,module,exports){ +(function (process,global){ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var formatRegExp = /%[sdj%]/g; +exports.format = function(f) { + if (!isString(f)) { + var objects = []; + for (var i = 0; i < arguments.length; i++) { + objects.push(inspect(arguments[i])); + } + return objects.join(' '); + } + + var i = 1; + var args = arguments; + var len = args.length; + var str = String(f).replace(formatRegExp, function(x) { + if (x === '%%') return '%'; + if (i >= len) return x; + switch (x) { + case '%s': return String(args[i++]); + case '%d': return Number(args[i++]); + case '%j': + try { + return JSON.stringify(args[i++]); + } catch (_) { + return '[Circular]'; + } + default: + return x; + } + }); + for (var x = args[i]; i < len; x = args[++i]) { + if (isNull(x) || !isObject(x)) { + str += ' ' + x; + } else { + str += ' ' + inspect(x); + } + } + return str; +}; + + +// Mark that a method should not be used. +// Returns a modified function which warns once by default. +// If --no-deprecation is set, then it is a no-op. +exports.deprecate = function(fn, msg) { + // Allow for deprecating things in the process of starting up. 
+ if (isUndefined(global.process)) { + return function() { + return exports.deprecate(fn, msg).apply(this, arguments); + }; + } + + if (process.noDeprecation === true) { + return fn; + } + + var warned = false; + function deprecated() { + if (!warned) { + if (process.throwDeprecation) { + throw new Error(msg); + } else if (process.traceDeprecation) { + console.trace(msg); + } else { + console.error(msg); + } + warned = true; + } + return fn.apply(this, arguments); + } + + return deprecated; +}; + + +var debugs = {}; +var debugEnviron; +exports.debuglog = function(set) { + if (isUndefined(debugEnviron)) + debugEnviron = process.env.NODE_DEBUG || ''; + set = set.toUpperCase(); + if (!debugs[set]) { + if (new RegExp('\\b' + set + '\\b', 'i').test(debugEnviron)) { + var pid = process.pid; + debugs[set] = function() { + var msg = exports.format.apply(exports, arguments); + console.error('%s %d: %s', set, pid, msg); + }; + } else { + debugs[set] = function() {}; + } + } + return debugs[set]; +}; + + +/** + * Echos the value of a value. Trys to print the value out + * in the best way possible given the different types. + * + * @param {Object} obj The object to print out. + * @param {Object} opts Optional options object that alters the output. + */ +/* legacy: obj, showHidden, depth, colors*/ +function inspect(obj, opts) { + // default options + var ctx = { + seen: [], + stylize: stylizeNoColor + }; + // legacy... + if (arguments.length >= 3) ctx.depth = arguments[2]; + if (arguments.length >= 4) ctx.colors = arguments[3]; + if (isBoolean(opts)) { + // legacy... + ctx.showHidden = opts; + } else if (opts) { + // got an "options" object + exports._extend(ctx, opts); + } + // set default options + if (isUndefined(ctx.showHidden)) ctx.showHidden = false; + if (isUndefined(ctx.depth)) ctx.depth = 2; + if (isUndefined(ctx.colors)) ctx.colors = false; + if (isUndefined(ctx.customInspect)) ctx.customInspect = true; + if (ctx.colors) ctx.stylize = stylizeWithColor; + return formatValue(ctx, obj, ctx.depth); +} +exports.inspect = inspect; + + +// http://en.wikipedia.org/wiki/ANSI_escape_code#graphics +inspect.colors = { + 'bold' : [1, 22], + 'italic' : [3, 23], + 'underline' : [4, 24], + 'inverse' : [7, 27], + 'white' : [37, 39], + 'grey' : [90, 39], + 'black' : [30, 39], + 'blue' : [34, 39], + 'cyan' : [36, 39], + 'green' : [32, 39], + 'magenta' : [35, 39], + 'red' : [31, 39], + 'yellow' : [33, 39] +}; + +// Don't use 'blue' not visible on cmd.exe +inspect.styles = { + 'special': 'cyan', + 'number': 'yellow', + 'boolean': 'yellow', + 'undefined': 'grey', + 'null': 'bold', + 'string': 'green', + 'date': 'magenta', + // "name": intentionally not styling + 'regexp': 'red' +}; + + +function stylizeWithColor(str, styleType) { + var style = inspect.styles[styleType]; + + if (style) { + return '\u001b[' + inspect.colors[style][0] + 'm' + str + + '\u001b[' + inspect.colors[style][1] + 'm'; + } else { + return str; + } +} + + +function stylizeNoColor(str, styleType) { + return str; +} + + +function arrayToHash(array) { + var hash = {}; + + array.forEach(function(val, idx) { + hash[val] = true; + }); + + return hash; +} + + +function formatValue(ctx, value, recurseTimes) { + // Provide a hook for user-specified inspect functions. 
+ // Check that value is an object with an inspect function on it + if (ctx.customInspect && + value && + isFunction(value.inspect) && + // Filter out the util module, it's inspect function is special + value.inspect !== exports.inspect && + // Also filter out any prototype objects using the circular check. + !(value.constructor && value.constructor.prototype === value)) { + var ret = value.inspect(recurseTimes, ctx); + if (!isString(ret)) { + ret = formatValue(ctx, ret, recurseTimes); + } + return ret; + } + + // Primitive types cannot have properties + var primitive = formatPrimitive(ctx, value); + if (primitive) { + return primitive; + } + + // Look up the keys of the object. + var keys = Object.keys(value); + var visibleKeys = arrayToHash(keys); + + if (ctx.showHidden) { + keys = Object.getOwnPropertyNames(value); + } + + // IE doesn't make error fields non-enumerable + // http://msdn.microsoft.com/en-us/library/ie/dww52sbt(v=vs.94).aspx + if (isError(value) + && (keys.indexOf('message') >= 0 || keys.indexOf('description') >= 0)) { + return formatError(value); + } + + // Some type of object without properties can be shortcutted. + if (keys.length === 0) { + if (isFunction(value)) { + var name = value.name ? ': ' + value.name : ''; + return ctx.stylize('[Function' + name + ']', 'special'); + } + if (isRegExp(value)) { + return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp'); + } + if (isDate(value)) { + return ctx.stylize(Date.prototype.toString.call(value), 'date'); + } + if (isError(value)) { + return formatError(value); + } + } + + var base = '', array = false, braces = ['{', '}']; + + // Make Array say that they are Array + if (isArray(value)) { + array = true; + braces = ['[', ']']; + } + + // Make functions say that they are functions + if (isFunction(value)) { + var n = value.name ? ': ' + value.name : ''; + base = ' [Function' + n + ']'; + } + + // Make RegExps say that they are RegExps + if (isRegExp(value)) { + base = ' ' + RegExp.prototype.toString.call(value); + } + + // Make dates with properties first say the date + if (isDate(value)) { + base = ' ' + Date.prototype.toUTCString.call(value); + } + + // Make error with message first say the error + if (isError(value)) { + base = ' ' + formatError(value); + } + + if (keys.length === 0 && (!array || value.length == 0)) { + return braces[0] + base + braces[1]; + } + + if (recurseTimes < 0) { + if (isRegExp(value)) { + return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp'); + } else { + return ctx.stylize('[Object]', 'special'); + } + } + + ctx.seen.push(value); + + var output; + if (array) { + output = formatArray(ctx, value, recurseTimes, visibleKeys, keys); + } else { + output = keys.map(function(key) { + return formatProperty(ctx, value, recurseTimes, visibleKeys, key, array); + }); + } + + ctx.seen.pop(); + + return reduceToSingleString(output, base, braces); +} + + +function formatPrimitive(ctx, value) { + if (isUndefined(value)) + return ctx.stylize('undefined', 'undefined'); + if (isString(value)) { + var simple = '\'' + JSON.stringify(value).replace(/^"|"$/g, '') + .replace(/'/g, "\\'") + .replace(/\\"/g, '"') + '\''; + return ctx.stylize(simple, 'string'); + } + if (isNumber(value)) + return ctx.stylize('' + value, 'number'); + if (isBoolean(value)) + return ctx.stylize('' + value, 'boolean'); + // For some reason typeof null is "object", so special case here. 
+ if (isNull(value)) + return ctx.stylize('null', 'null'); +} + + +function formatError(value) { + return '[' + Error.prototype.toString.call(value) + ']'; +} + + +function formatArray(ctx, value, recurseTimes, visibleKeys, keys) { + var output = []; + for (var i = 0, l = value.length; i < l; ++i) { + if (hasOwnProperty(value, String(i))) { + output.push(formatProperty(ctx, value, recurseTimes, visibleKeys, + String(i), true)); + } else { + output.push(''); + } + } + keys.forEach(function(key) { + if (!key.match(/^\d+$/)) { + output.push(formatProperty(ctx, value, recurseTimes, visibleKeys, + key, true)); + } + }); + return output; +} + + +function formatProperty(ctx, value, recurseTimes, visibleKeys, key, array) { + var name, str, desc; + desc = Object.getOwnPropertyDescriptor(value, key) || { value: value[key] }; + if (desc.get) { + if (desc.set) { + str = ctx.stylize('[Getter/Setter]', 'special'); + } else { + str = ctx.stylize('[Getter]', 'special'); + } + } else { + if (desc.set) { + str = ctx.stylize('[Setter]', 'special'); + } + } + if (!hasOwnProperty(visibleKeys, key)) { + name = '[' + key + ']'; + } + if (!str) { + if (ctx.seen.indexOf(desc.value) < 0) { + if (isNull(recurseTimes)) { + str = formatValue(ctx, desc.value, null); + } else { + str = formatValue(ctx, desc.value, recurseTimes - 1); + } + if (str.indexOf('\n') > -1) { + if (array) { + str = str.split('\n').map(function(line) { + return ' ' + line; + }).join('\n').substr(2); + } else { + str = '\n' + str.split('\n').map(function(line) { + return ' ' + line; + }).join('\n'); + } + } + } else { + str = ctx.stylize('[Circular]', 'special'); + } + } + if (isUndefined(name)) { + if (array && key.match(/^\d+$/)) { + return str; + } + name = JSON.stringify('' + key); + if (name.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)) { + name = name.substr(1, name.length - 2); + name = ctx.stylize(name, 'name'); + } else { + name = name.replace(/'/g, "\\'") + .replace(/\\"/g, '"') + .replace(/(^"|"$)/g, "'"); + name = ctx.stylize(name, 'string'); + } + } + + return name + ': ' + str; +} + + +function reduceToSingleString(output, base, braces) { + var numLinesEst = 0; + var length = output.reduce(function(prev, cur) { + numLinesEst++; + if (cur.indexOf('\n') >= 0) numLinesEst++; + return prev + cur.replace(/\u001b\[\d\d?m/g, '').length + 1; + }, 0); + + if (length > 60) { + return braces[0] + + (base === '' ? '' : base + '\n ') + + ' ' + + output.join(',\n ') + + ' ' + + braces[1]; + } + + return braces[0] + base + ' ' + output.join(', ') + ' ' + braces[1]; +} + + +// NOTE: These type checking functions intentionally don't use `instanceof` +// because it is fragile and can be easily faked with `Object.create()`. 
+function isArray(ar) { + return Array.isArray(ar); +} +exports.isArray = isArray; + +function isBoolean(arg) { + return typeof arg === 'boolean'; +} +exports.isBoolean = isBoolean; + +function isNull(arg) { + return arg === null; +} +exports.isNull = isNull; + +function isNullOrUndefined(arg) { + return arg == null; +} +exports.isNullOrUndefined = isNullOrUndefined; + +function isNumber(arg) { + return typeof arg === 'number'; +} +exports.isNumber = isNumber; + +function isString(arg) { + return typeof arg === 'string'; +} +exports.isString = isString; + +function isSymbol(arg) { + return typeof arg === 'symbol'; +} +exports.isSymbol = isSymbol; + +function isUndefined(arg) { + return arg === void 0; +} +exports.isUndefined = isUndefined; + +function isRegExp(re) { + return isObject(re) && objectToString(re) === '[object RegExp]'; +} +exports.isRegExp = isRegExp; + +function isObject(arg) { + return typeof arg === 'object' && arg !== null; +} +exports.isObject = isObject; + +function isDate(d) { + return isObject(d) && objectToString(d) === '[object Date]'; +} +exports.isDate = isDate; + +function isError(e) { + return isObject(e) && + (objectToString(e) === '[object Error]' || e instanceof Error); +} +exports.isError = isError; + +function isFunction(arg) { + return typeof arg === 'function'; +} +exports.isFunction = isFunction; + +function isPrimitive(arg) { + return arg === null || + typeof arg === 'boolean' || + typeof arg === 'number' || + typeof arg === 'string' || + typeof arg === 'symbol' || // ES6 symbol + typeof arg === 'undefined'; +} +exports.isPrimitive = isPrimitive; + +exports.isBuffer = require('./support/isBuffer'); + +function objectToString(o) { + return Object.prototype.toString.call(o); +} + + +function pad(n) { + return n < 10 ? '0' + n.toString(10) : n.toString(10); +} + + +var months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', + 'Oct', 'Nov', 'Dec']; + +// 26 Feb 16:19:34 +function timestamp() { + var d = new Date(); + var time = [pad(d.getHours()), + pad(d.getMinutes()), + pad(d.getSeconds())].join(':'); + return [d.getDate(), months[d.getMonth()], time].join(' '); +} + + +// log is just a thin wrapper to console.log that prepends a timestamp +exports.log = function() { + console.log('%s - %s', timestamp(), exports.format.apply(exports, arguments)); +}; + + +/** + * Inherit the prototype methods from one constructor into another. + * + * The Function.prototype.inherits from lang.js rewritten as a standalone + * function (not on Function.prototype). NOTE: If this file is to be loaded + * during bootstrapping this function needs to be rewritten using some native + * functions as prototype setup using normal JavaScript does not work as + * expected during bootstrapping (see mirror.js in r114903). + * + * @param {function} ctor Constructor function which needs to inherit the + * prototype. + * @param {function} superCtor Constructor function to inherit prototype from. + */ +exports.inherits = require('inherits'); + +exports._extend = function(origin, add) { + // Don't do anything if add isn't an object + if (!add || !isObject(add)) return origin; + + var keys = Object.keys(add); + var i = keys.length; + while (i--) { + origin[keys[i]] = add[keys[i]]; + } + return origin; +}; + +function hasOwnProperty(obj, prop) { + return Object.prototype.hasOwnProperty.call(obj, prop); +} + +}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? 
window : {}) +},{"./support/isBuffer":27,"_process":24,"inherits":26}],29:[function(require,module,exports){ +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. +module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} + +},{}]},{},[7])(7) +}); \ No newline at end of file diff --git a/assets/javascripts/workers/search.16e2a7d4.min.js b/assets/javascripts/workers/search.16e2a7d4.min.js new file mode 100644 index 0000000..e0dc159 --- /dev/null +++ b/assets/javascripts/workers/search.16e2a7d4.min.js @@ -0,0 +1,48 @@ +"use strict";(()=>{var ge=Object.create;var W=Object.defineProperty,ye=Object.defineProperties,me=Object.getOwnPropertyDescriptor,ve=Object.getOwnPropertyDescriptors,xe=Object.getOwnPropertyNames,G=Object.getOwnPropertySymbols,Se=Object.getPrototypeOf,X=Object.prototype.hasOwnProperty,Qe=Object.prototype.propertyIsEnumerable;var J=(t,e,r)=>e in t?W(t,e,{enumerable:!0,configurable:!0,writable:!0,value:r}):t[e]=r,M=(t,e)=>{for(var r in e||(e={}))X.call(e,r)&&J(t,r,e[r]);if(G)for(var r of G(e))Qe.call(e,r)&&J(t,r,e[r]);return t},Z=(t,e)=>ye(t,ve(e));var K=(t,e)=>()=>(e||t((e={exports:{}}).exports,e),e.exports);var be=(t,e,r,n)=>{if(e&&typeof e=="object"||typeof e=="function")for(let i of xe(e))!X.call(t,i)&&i!==r&&W(t,i,{get:()=>e[i],enumerable:!(n=me(e,i))||n.enumerable});return t};var H=(t,e,r)=>(r=t!=null?ge(Se(t)):{},be(e||!t||!t.__esModule?W(r,"default",{value:t,enumerable:!0}):r,t));var z=(t,e,r)=>new Promise((n,i)=>{var s=u=>{try{a(r.next(u))}catch(c){i(c)}},o=u=>{try{a(r.throw(u))}catch(c){i(c)}},a=u=>u.done?n(u.value):Promise.resolve(u.value).then(s,o);a((r=r.apply(t,e)).next())});var re=K((ee,te)=>{/** + * lunr - http://lunrjs.com - A bit like Solr, but much smaller and not as bright - 2.3.9 + * Copyright (C) 2020 Oliver Nightingale + * @license MIT + */(function(){var t=function(e){var r=new t.Builder;return r.pipeline.add(t.trimmer,t.stopWordFilter,t.stemmer),r.searchPipeline.add(t.stemmer),e.call(r,r),r.build()};t.version="2.3.9";/*! + * lunr.utils + * Copyright (C) 2020 Oliver Nightingale + */t.utils={},t.utils.warn=function(e){return function(r){e.console&&console.warn&&console.warn(r)}}(this),t.utils.asString=function(e){return e==null?"":e.toString()},t.utils.clone=function(e){if(e==null)return e;for(var r=Object.create(null),n=Object.keys(e),i=0;i0){var h=t.utils.clone(r)||{};h.position=[a,c],h.index=s.length,s.push(new t.Token(n.slice(a,o),h))}a=o+1}}return s},t.tokenizer.separator=/[\s\-]+/;/*! 
+ * lunr.Pipeline + * Copyright (C) 2020 Oliver Nightingale + */t.Pipeline=function(){this._stack=[]},t.Pipeline.registeredFunctions=Object.create(null),t.Pipeline.registerFunction=function(e,r){r in this.registeredFunctions&&t.utils.warn("Overwriting existing registered function: "+r),e.label=r,t.Pipeline.registeredFunctions[e.label]=e},t.Pipeline.warnIfFunctionNotRegistered=function(e){var r=e.label&&e.label in this.registeredFunctions;r||t.utils.warn(`Function is not registered with pipeline. This may cause problems when serialising the index. +`,e)},t.Pipeline.load=function(e){var r=new t.Pipeline;return e.forEach(function(n){var i=t.Pipeline.registeredFunctions[n];if(i)r.add(i);else throw new Error("Cannot load unregistered function: "+n)}),r},t.Pipeline.prototype.add=function(){var e=Array.prototype.slice.call(arguments);e.forEach(function(r){t.Pipeline.warnIfFunctionNotRegistered(r),this._stack.push(r)},this)},t.Pipeline.prototype.after=function(e,r){t.Pipeline.warnIfFunctionNotRegistered(r);var n=this._stack.indexOf(e);if(n==-1)throw new Error("Cannot find existingFn");n=n+1,this._stack.splice(n,0,r)},t.Pipeline.prototype.before=function(e,r){t.Pipeline.warnIfFunctionNotRegistered(r);var n=this._stack.indexOf(e);if(n==-1)throw new Error("Cannot find existingFn");this._stack.splice(n,0,r)},t.Pipeline.prototype.remove=function(e){var r=this._stack.indexOf(e);r!=-1&&this._stack.splice(r,1)},t.Pipeline.prototype.run=function(e){for(var r=this._stack.length,n=0;n1&&(oe&&(n=s),o!=e);)i=n-r,s=r+Math.floor(i/2),o=this.elements[s*2];if(o==e||o>e)return s*2;if(ou?h+=2:a==u&&(r+=n[c+1]*i[h+1],c+=2,h+=2);return r},t.Vector.prototype.similarity=function(e){return this.dot(e)/this.magnitude()||0},t.Vector.prototype.toArray=function(){for(var e=new Array(this.elements.length/2),r=1,n=0;r0){var o=s.str.charAt(0),a;o in s.node.edges?a=s.node.edges[o]:(a=new t.TokenSet,s.node.edges[o]=a),s.str.length==1&&(a.final=!0),i.push({node:a,editsRemaining:s.editsRemaining,str:s.str.slice(1)})}if(s.editsRemaining!=0){if("*"in s.node.edges)var u=s.node.edges["*"];else{var u=new t.TokenSet;s.node.edges["*"]=u}if(s.str.length==0&&(u.final=!0),i.push({node:u,editsRemaining:s.editsRemaining-1,str:s.str}),s.str.length>1&&i.push({node:s.node,editsRemaining:s.editsRemaining-1,str:s.str.slice(1)}),s.str.length==1&&(s.node.final=!0),s.str.length>=1){if("*"in s.node.edges)var c=s.node.edges["*"];else{var c=new t.TokenSet;s.node.edges["*"]=c}s.str.length==1&&(c.final=!0),i.push({node:c,editsRemaining:s.editsRemaining-1,str:s.str.slice(1)})}if(s.str.length>1){var h=s.str.charAt(0),y=s.str.charAt(1),g;y in s.node.edges?g=s.node.edges[y]:(g=new t.TokenSet,s.node.edges[y]=g),s.str.length==1&&(g.final=!0),i.push({node:g,editsRemaining:s.editsRemaining-1,str:h+s.str.slice(2)})}}}return n},t.TokenSet.fromString=function(e){for(var r=new t.TokenSet,n=r,i=0,s=e.length;i=e;r--){var n=this.uncheckedNodes[r],i=n.child.toString();i in this.minimizedNodes?n.parent.edges[n.char]=this.minimizedNodes[i]:(n.child._str=i,this.minimizedNodes[i]=n.child),this.uncheckedNodes.pop()}};/*! 
+ * lunr.Index + * Copyright (C) 2020 Oliver Nightingale + */t.Index=function(e){this.invertedIndex=e.invertedIndex,this.fieldVectors=e.fieldVectors,this.tokenSet=e.tokenSet,this.fields=e.fields,this.pipeline=e.pipeline},t.Index.prototype.search=function(e){return this.query(function(r){var n=new t.QueryParser(e,r);n.parse()})},t.Index.prototype.query=function(e){for(var r=new t.Query(this.fields),n=Object.create(null),i=Object.create(null),s=Object.create(null),o=Object.create(null),a=Object.create(null),u=0;u1?this._b=1:this._b=e},t.Builder.prototype.k1=function(e){this._k1=e},t.Builder.prototype.add=function(e,r){var n=e[this._ref],i=Object.keys(this._fields);this._documents[n]=r||{},this.documentCount+=1;for(var s=0;s=this.length)return t.QueryLexer.EOS;var e=this.str.charAt(this.pos);return this.pos+=1,e},t.QueryLexer.prototype.width=function(){return this.pos-this.start},t.QueryLexer.prototype.ignore=function(){this.start==this.pos&&(this.pos+=1),this.start=this.pos},t.QueryLexer.prototype.backup=function(){this.pos-=1},t.QueryLexer.prototype.acceptDigitRun=function(){var e,r;do e=this.next(),r=e.charCodeAt(0);while(r>47&&r<58);e!=t.QueryLexer.EOS&&this.backup()},t.QueryLexer.prototype.more=function(){return this.pos1&&(e.backup(),e.emit(t.QueryLexer.TERM)),e.ignore(),e.more())return t.QueryLexer.lexText},t.QueryLexer.lexEditDistance=function(e){return e.ignore(),e.acceptDigitRun(),e.emit(t.QueryLexer.EDIT_DISTANCE),t.QueryLexer.lexText},t.QueryLexer.lexBoost=function(e){return e.ignore(),e.acceptDigitRun(),e.emit(t.QueryLexer.BOOST),t.QueryLexer.lexText},t.QueryLexer.lexEOS=function(e){e.width()>0&&e.emit(t.QueryLexer.TERM)},t.QueryLexer.termSeparator=t.tokenizer.separator,t.QueryLexer.lexText=function(e){for(;;){var r=e.next();if(r==t.QueryLexer.EOS)return t.QueryLexer.lexEOS;if(r.charCodeAt(0)==92){e.escapeCharacter();continue}if(r==":")return t.QueryLexer.lexField;if(r=="~")return e.backup(),e.width()>0&&e.emit(t.QueryLexer.TERM),t.QueryLexer.lexEditDistance;if(r=="^")return e.backup(),e.width()>0&&e.emit(t.QueryLexer.TERM),t.QueryLexer.lexBoost;if(r=="+"&&e.width()===1||r=="-"&&e.width()===1)return e.emit(t.QueryLexer.PRESENCE),t.QueryLexer.lexText;if(r.match(t.QueryLexer.termSeparator))return t.QueryLexer.lexTerm}},t.QueryParser=function(e,r){this.lexer=new t.QueryLexer(e),this.query=r,this.currentClause={},this.lexemeIdx=0},t.QueryParser.prototype.parse=function(){this.lexer.run(),this.lexemes=this.lexer.lexemes;for(var e=t.QueryParser.parseClause;e;)e=e(this);return this.query},t.QueryParser.prototype.peekLexeme=function(){return this.lexemes[this.lexemeIdx]},t.QueryParser.prototype.consumeLexeme=function(){var e=this.peekLexeme();return this.lexemeIdx+=1,e},t.QueryParser.prototype.nextClause=function(){var e=this.currentClause;this.query.clause(e),this.currentClause={}},t.QueryParser.parseClause=function(e){var r=e.peekLexeme();if(r!=null)switch(r.type){case t.QueryLexer.PRESENCE:return t.QueryParser.parsePresence;case t.QueryLexer.FIELD:return t.QueryParser.parseField;case t.QueryLexer.TERM:return t.QueryParser.parseTerm;default:var n="expected either a field or a term, found "+r.type;throw r.str.length>=1&&(n+=" with value '"+r.str+"'"),new t.QueryParseError(n,r.start,r.end)}},t.QueryParser.parsePresence=function(e){var r=e.consumeLexeme();if(r!=null){switch(r.str){case"-":e.currentClause.presence=t.Query.presence.PROHIBITED;break;case"+":e.currentClause.presence=t.Query.presence.REQUIRED;break;default:var n="unrecognised presence operator'"+r.str+"'";throw new 
t.QueryParseError(n,r.start,r.end)}var i=e.peekLexeme();if(i==null){var n="expecting term or field, found nothing";throw new t.QueryParseError(n,r.start,r.end)}switch(i.type){case t.QueryLexer.FIELD:return t.QueryParser.parseField;case t.QueryLexer.TERM:return t.QueryParser.parseTerm;default:var n="expecting term or field, found '"+i.type+"'";throw new t.QueryParseError(n,i.start,i.end)}}},t.QueryParser.parseField=function(e){var r=e.consumeLexeme();if(r!=null){if(e.query.allFields.indexOf(r.str)==-1){var n=e.query.allFields.map(function(o){return"'"+o+"'"}).join(", "),i="unrecognised field '"+r.str+"', possible fields: "+n;throw new t.QueryParseError(i,r.start,r.end)}e.currentClause.fields=[r.str];var s=e.peekLexeme();if(s==null){var i="expecting term, found nothing";throw new t.QueryParseError(i,r.start,r.end)}switch(s.type){case t.QueryLexer.TERM:return t.QueryParser.parseTerm;default:var i="expecting term, found '"+s.type+"'";throw new t.QueryParseError(i,s.start,s.end)}}},t.QueryParser.parseTerm=function(e){var r=e.consumeLexeme();if(r!=null){e.currentClause.term=r.str.toLowerCase(),r.str.indexOf("*")!=-1&&(e.currentClause.usePipeline=!1);var n=e.peekLexeme();if(n==null){e.nextClause();return}switch(n.type){case t.QueryLexer.TERM:return e.nextClause(),t.QueryParser.parseTerm;case t.QueryLexer.FIELD:return e.nextClause(),t.QueryParser.parseField;case t.QueryLexer.EDIT_DISTANCE:return t.QueryParser.parseEditDistance;case t.QueryLexer.BOOST:return t.QueryParser.parseBoost;case t.QueryLexer.PRESENCE:return e.nextClause(),t.QueryParser.parsePresence;default:var i="Unexpected lexeme type '"+n.type+"'";throw new t.QueryParseError(i,n.start,n.end)}}},t.QueryParser.parseEditDistance=function(e){var r=e.consumeLexeme();if(r!=null){var n=parseInt(r.str,10);if(isNaN(n)){var i="edit distance must be numeric";throw new t.QueryParseError(i,r.start,r.end)}e.currentClause.editDistance=n;var s=e.peekLexeme();if(s==null){e.nextClause();return}switch(s.type){case t.QueryLexer.TERM:return e.nextClause(),t.QueryParser.parseTerm;case t.QueryLexer.FIELD:return e.nextClause(),t.QueryParser.parseField;case t.QueryLexer.EDIT_DISTANCE:return t.QueryParser.parseEditDistance;case t.QueryLexer.BOOST:return t.QueryParser.parseBoost;case t.QueryLexer.PRESENCE:return e.nextClause(),t.QueryParser.parsePresence;default:var i="Unexpected lexeme type '"+s.type+"'";throw new t.QueryParseError(i,s.start,s.end)}}},t.QueryParser.parseBoost=function(e){var r=e.consumeLexeme();if(r!=null){var n=parseInt(r.str,10);if(isNaN(n)){var i="boost must be numeric";throw new t.QueryParseError(i,r.start,r.end)}e.currentClause.boost=n;var s=e.peekLexeme();if(s==null){e.nextClause();return}switch(s.type){case t.QueryLexer.TERM:return e.nextClause(),t.QueryParser.parseTerm;case t.QueryLexer.FIELD:return e.nextClause(),t.QueryParser.parseField;case t.QueryLexer.EDIT_DISTANCE:return t.QueryParser.parseEditDistance;case t.QueryLexer.BOOST:return t.QueryParser.parseBoost;case t.QueryLexer.PRESENCE:return e.nextClause(),t.QueryParser.parsePresence;default:var i="Unexpected lexeme type '"+s.type+"'";throw new t.QueryParseError(i,s.start,s.end)}}},function(e,r){typeof define=="function"&&define.amd?define(r):typeof ee=="object"?te.exports=r():e.lunr=r()}(this,function(){return t})})()});var q=K((Re,ne)=>{"use strict";/*! 
+ * escape-html + * Copyright(c) 2012-2013 TJ Holowaychuk + * Copyright(c) 2015 Andreas Lubbe + * Copyright(c) 2015 Tiancheng "Timothy" Gu + * MIT Licensed + */var Le=/["'&<>]/;ne.exports=we;function we(t){var e=""+t,r=Le.exec(e);if(!r)return e;var n,i="",s=0,o=0;for(s=r.index;s=0;r--){let n=t[r];typeof n=="string"?n=document.createTextNode(n):n.parentNode&&n.parentNode.removeChild(n),r?e.insertBefore(this.previousSibling,n):e.replaceChild(n,this)}}}));var ie=H(q());function se(t){let e=new Map,r=new Set;for(let n of t){let[i,s]=n.location.split("#"),o=n.location,a=n.title,u=n.tags,c=(0,ie.default)(n.text).replace(/\s+(?=[,.:;!?])/g,"").replace(/\s+/g," ");if(s){let h=e.get(i);r.has(h)?e.set(o,{location:o,title:a,text:c,parent:h}):(h.title=n.title,h.text=c,r.add(h))}else e.set(o,M({location:o,title:a,text:c},u&&{tags:u}))}return e}var oe=H(q());function ae(t,e){let r=new RegExp(t.separator,"img"),n=(i,s,o)=>`${s}${o}`;return i=>{i=i.replace(/[\s*+\-:~^]+/g," ").trim();let s=new RegExp(`(^|${t.separator})(${i.replace(/[|\\{}()[\]^$+*?.-]/g,"\\$&").replace(r,"|")})`,"img");return o=>(e?(0,oe.default)(o):o).replace(s,n).replace(/<\/mark>(\s+)]*>/img,"$1")}}function ue(t){let e=new lunr.Query(["title","text"]);return new lunr.QueryParser(t,e).parse(),e.clauses}function ce(t,e){var i;let r=new Set(t),n={};for(let s=0;s!n.has(i)))]}var U=class{constructor({config:e,docs:r,options:n}){this.options=n,this.documents=se(r),this.highlight=ae(e,!1),lunr.tokenizer.separator=new RegExp(e.separator),this.index=lunr(function(){e.lang.length===1&&e.lang[0]!=="en"?this.use(lunr[e.lang[0]]):e.lang.length>1&&this.use(lunr.multiLanguage(...e.lang));let i=Ee(["trimmer","stopWordFilter","stemmer"],n.pipeline);for(let s of e.lang.map(o=>o==="en"?lunr:lunr[o]))for(let o of i)this.pipeline.remove(s[o]),this.searchPipeline.remove(s[o]);this.ref("location"),this.field("title",{boost:1e3}),this.field("text"),this.field("tags",{boost:1e6,extractor:s=>{let{tags:o=[]}=s;return o.reduce((a,u)=>[...a,...lunr.tokenizer(u)],[])}});for(let s of r)this.add(s,{boost:s.boost})})}search(e){if(e)try{let r=this.highlight(e),n=ue(e).filter(o=>o.presence!==lunr.Query.presence.PROHIBITED),i=this.index.search(`${e}*`).reduce((o,{ref:a,score:u,matchData:c})=>{let h=this.documents.get(a);if(typeof h!="undefined"){let{location:y,title:g,text:b,tags:m,parent:Q}=h,p=ce(n,Object.keys(c.metadata)),d=+!Q+ +Object.values(p).every(w=>w);o.push(Z(M({location:y,title:r(g),text:r(b)},m&&{tags:m.map(r)}),{score:u*(1+d),terms:p}))}return o},[]).sort((o,a)=>a.score-o.score).reduce((o,a)=>{let u=this.documents.get(a.location);if(typeof u!="undefined"){let c="parent"in u?u.parent.location:u.location;o.set(c,[...o.get(c)||[],a])}return o},new Map),s;if(this.options.suggestions){let o=this.index.query(a=>{for(let u of n)a.term(u.term,{fields:["title"],presence:lunr.Query.presence.REQUIRED,wildcard:lunr.Query.wildcard.TRAILING})});s=o.length?Object.keys(o[0].matchData.metadata):[]}return M({items:[...i.values()]},typeof s!="undefined"&&{suggestions:s})}catch(r){console.warn(`Invalid query: ${e} \u2013 see https://bit.ly/2s3ChXG`)}return{items:[]}}};var Y;function ke(t){return z(this,null,function*(){let e="../lunr";if(typeof parent!="undefined"&&"IFrameWorker"in parent){let n=document.querySelector("script[src]"),[i]=n.src.split("/worker");e=e.replace("..",i)}let r=[];for(let n of 
t.lang){switch(n){case"ja":r.push(`${e}/tinyseg.js`);break;case"hi":case"th":r.push(`${e}/wordcut.js`);break}n!=="en"&&r.push(`${e}/min/lunr.${n}.min.js`)}t.lang.length>1&&r.push(`${e}/min/lunr.multi.min.js`),r.length&&(yield importScripts(`${e}/min/lunr.stemmer.support.min.js`,...r))})}function Te(t){return z(this,null,function*(){switch(t.type){case 0:return yield ke(t.data.config),Y=new U(t.data),{type:1};case 2:return{type:3,data:Y?Y.search(t.data):{items:[]}};default:throw new TypeError("Invalid message type")}})}self.lunr=le.default;addEventListener("message",t=>z(void 0,null,function*(){postMessage(yield Te(t.data))}));})(); +//# sourceMappingURL=search.16e2a7d4.min.js.map + diff --git a/assets/javascripts/workers/search.16e2a7d4.min.js.map b/assets/javascripts/workers/search.16e2a7d4.min.js.map new file mode 100644 index 0000000..fa01f37 --- /dev/null +++ b/assets/javascripts/workers/search.16e2a7d4.min.js.map @@ -0,0 +1,8 @@ +{ + "version": 3, + "sources": ["node_modules/lunr/lunr.js", "node_modules/escape-html/index.js", "src/assets/javascripts/integrations/search/worker/main/index.ts", "src/assets/javascripts/polyfills/index.ts", "src/assets/javascripts/integrations/search/document/index.ts", "src/assets/javascripts/integrations/search/highlighter/index.ts", "src/assets/javascripts/integrations/search/query/_/index.ts", "src/assets/javascripts/integrations/search/_/index.ts"], + "sourceRoot": "../../../..", + "sourcesContent": ["/**\n * lunr - http://lunrjs.com - A bit like Solr, but much smaller and not as bright - 2.3.9\n * Copyright (C) 2020 Oliver Nightingale\n * @license MIT\n */\n\n;(function(){\n\n/**\n * A convenience function for configuring and constructing\n * a new lunr Index.\n *\n * A lunr.Builder instance is created and the pipeline setup\n * with a trimmer, stop word filter and stemmer.\n *\n * This builder object is yielded to the configuration function\n * that is passed as a parameter, allowing the list of fields\n * and other builder parameters to be customised.\n *\n * All documents _must_ be added within the passed config function.\n *\n * @example\n * var idx = lunr(function () {\n * this.field('title')\n * this.field('body')\n * this.ref('id')\n *\n * documents.forEach(function (doc) {\n * this.add(doc)\n * }, this)\n * })\n *\n * @see {@link lunr.Builder}\n * @see {@link lunr.Pipeline}\n * @see {@link lunr.trimmer}\n * @see {@link lunr.stopWordFilter}\n * @see {@link lunr.stemmer}\n * @namespace {function} lunr\n */\nvar lunr = function (config) {\n var builder = new lunr.Builder\n\n builder.pipeline.add(\n lunr.trimmer,\n lunr.stopWordFilter,\n lunr.stemmer\n )\n\n builder.searchPipeline.add(\n lunr.stemmer\n )\n\n config.call(builder, builder)\n return builder.build()\n}\n\nlunr.version = \"2.3.9\"\n/*!\n * lunr.utils\n * Copyright (C) 2020 Oliver Nightingale\n */\n\n/**\n * A namespace containing utils for the rest of the lunr library\n * @namespace lunr.utils\n */\nlunr.utils = {}\n\n/**\n * Print a warning message to the console.\n *\n * @param {String} message The message to be printed.\n * @memberOf lunr.utils\n * @function\n */\nlunr.utils.warn = (function (global) {\n /* eslint-disable no-console */\n return function (message) {\n if (global.console && console.warn) {\n console.warn(message)\n }\n }\n /* eslint-enable no-console */\n})(this)\n\n/**\n * Convert an object to a string.\n *\n * In the case of `null` and `undefined` the function returns\n * the empty string, in all other cases the result of calling\n * `toString` on the passed object 
is returned.\n *\n * @param {Any} obj The object to convert to a string.\n * @return {String} string representation of the passed object.\n * @memberOf lunr.utils\n */\nlunr.utils.asString = function (obj) {\n if (obj === void 0 || obj === null) {\n return \"\"\n } else {\n return obj.toString()\n }\n}\n\n/**\n * Clones an object.\n *\n * Will create a copy of an existing object such that any mutations\n * on the copy cannot affect the original.\n *\n * Only shallow objects are supported, passing a nested object to this\n * function will cause a TypeError.\n *\n * Objects with primitives, and arrays of primitives are supported.\n *\n * @param {Object} obj The object to clone.\n * @return {Object} a clone of the passed object.\n * @throws {TypeError} when a nested object is passed.\n * @memberOf Utils\n */\nlunr.utils.clone = function (obj) {\n if (obj === null || obj === undefined) {\n return obj\n }\n\n var clone = Object.create(null),\n keys = Object.keys(obj)\n\n for (var i = 0; i < keys.length; i++) {\n var key = keys[i],\n val = obj[key]\n\n if (Array.isArray(val)) {\n clone[key] = val.slice()\n continue\n }\n\n if (typeof val === 'string' ||\n typeof val === 'number' ||\n typeof val === 'boolean') {\n clone[key] = val\n continue\n }\n\n throw new TypeError(\"clone is not deep and does not support nested objects\")\n }\n\n return clone\n}\nlunr.FieldRef = function (docRef, fieldName, stringValue) {\n this.docRef = docRef\n this.fieldName = fieldName\n this._stringValue = stringValue\n}\n\nlunr.FieldRef.joiner = \"/\"\n\nlunr.FieldRef.fromString = function (s) {\n var n = s.indexOf(lunr.FieldRef.joiner)\n\n if (n === -1) {\n throw \"malformed field ref string\"\n }\n\n var fieldRef = s.slice(0, n),\n docRef = s.slice(n + 1)\n\n return new lunr.FieldRef (docRef, fieldRef, s)\n}\n\nlunr.FieldRef.prototype.toString = function () {\n if (this._stringValue == undefined) {\n this._stringValue = this.fieldName + lunr.FieldRef.joiner + this.docRef\n }\n\n return this._stringValue\n}\n/*!\n * lunr.Set\n * Copyright (C) 2020 Oliver Nightingale\n */\n\n/**\n * A lunr set.\n *\n * @constructor\n */\nlunr.Set = function (elements) {\n this.elements = Object.create(null)\n\n if (elements) {\n this.length = elements.length\n\n for (var i = 0; i < this.length; i++) {\n this.elements[elements[i]] = true\n }\n } else {\n this.length = 0\n }\n}\n\n/**\n * A complete set that contains all elements.\n *\n * @static\n * @readonly\n * @type {lunr.Set}\n */\nlunr.Set.complete = {\n intersect: function (other) {\n return other\n },\n\n union: function () {\n return this\n },\n\n contains: function () {\n return true\n }\n}\n\n/**\n * An empty set that contains no elements.\n *\n * @static\n * @readonly\n * @type {lunr.Set}\n */\nlunr.Set.empty = {\n intersect: function () {\n return this\n },\n\n union: function (other) {\n return other\n },\n\n contains: function () {\n return false\n }\n}\n\n/**\n * Returns true if this set contains the specified object.\n *\n * @param {object} object - Object whose presence in this set is to be tested.\n * @returns {boolean} - True if this set contains the specified object.\n */\nlunr.Set.prototype.contains = function (object) {\n return !!this.elements[object]\n}\n\n/**\n * Returns a new set containing only the elements that are present in both\n * this set and the specified set.\n *\n * @param {lunr.Set} other - set to intersect with this set.\n * @returns {lunr.Set} a new set that is the intersection of this and the specified set.\n */\n\nlunr.Set.prototype.intersect = 
function (other) {\n var a, b, elements, intersection = []\n\n if (other === lunr.Set.complete) {\n return this\n }\n\n if (other === lunr.Set.empty) {\n return other\n }\n\n if (this.length < other.length) {\n a = this\n b = other\n } else {\n a = other\n b = this\n }\n\n elements = Object.keys(a.elements)\n\n for (var i = 0; i < elements.length; i++) {\n var element = elements[i]\n if (element in b.elements) {\n intersection.push(element)\n }\n }\n\n return new lunr.Set (intersection)\n}\n\n/**\n * Returns a new set combining the elements of this and the specified set.\n *\n * @param {lunr.Set} other - set to union with this set.\n * @return {lunr.Set} a new set that is the union of this and the specified set.\n */\n\nlunr.Set.prototype.union = function (other) {\n if (other === lunr.Set.complete) {\n return lunr.Set.complete\n }\n\n if (other === lunr.Set.empty) {\n return this\n }\n\n return new lunr.Set(Object.keys(this.elements).concat(Object.keys(other.elements)))\n}\n/**\n * A function to calculate the inverse document frequency for\n * a posting. This is shared between the builder and the index\n *\n * @private\n * @param {object} posting - The posting for a given term\n * @param {number} documentCount - The total number of documents.\n */\nlunr.idf = function (posting, documentCount) {\n var documentsWithTerm = 0\n\n for (var fieldName in posting) {\n if (fieldName == '_index') continue // Ignore the term index, its not a field\n documentsWithTerm += Object.keys(posting[fieldName]).length\n }\n\n var x = (documentCount - documentsWithTerm + 0.5) / (documentsWithTerm + 0.5)\n\n return Math.log(1 + Math.abs(x))\n}\n\n/**\n * A token wraps a string representation of a token\n * as it is passed through the text processing pipeline.\n *\n * @constructor\n * @param {string} [str=''] - The string token being wrapped.\n * @param {object} [metadata={}] - Metadata associated with this token.\n */\nlunr.Token = function (str, metadata) {\n this.str = str || \"\"\n this.metadata = metadata || {}\n}\n\n/**\n * Returns the token string that is being wrapped by this object.\n *\n * @returns {string}\n */\nlunr.Token.prototype.toString = function () {\n return this.str\n}\n\n/**\n * A token update function is used when updating or optionally\n * when cloning a token.\n *\n * @callback lunr.Token~updateFunction\n * @param {string} str - The string representation of the token.\n * @param {Object} metadata - All metadata associated with this token.\n */\n\n/**\n * Applies the given function to the wrapped string token.\n *\n * @example\n * token.update(function (str, metadata) {\n * return str.toUpperCase()\n * })\n *\n * @param {lunr.Token~updateFunction} fn - A function to apply to the token string.\n * @returns {lunr.Token}\n */\nlunr.Token.prototype.update = function (fn) {\n this.str = fn(this.str, this.metadata)\n return this\n}\n\n/**\n * Creates a clone of this token. Optionally a function can be\n * applied to the cloned token.\n *\n * @param {lunr.Token~updateFunction} [fn] - An optional function to apply to the cloned token.\n * @returns {lunr.Token}\n */\nlunr.Token.prototype.clone = function (fn) {\n fn = fn || function (s) { return s }\n return new lunr.Token (fn(this.str, this.metadata), this.metadata)\n}\n/*!\n * lunr.tokenizer\n * Copyright (C) 2020 Oliver Nightingale\n */\n\n/**\n * A function for splitting a string into tokens ready to be inserted into\n * the search index. 
Uses `lunr.tokenizer.separator` to split strings, change\n * the value of this property to change how strings are split into tokens.\n *\n * This tokenizer will convert its parameter to a string by calling `toString` and\n * then will split this string on the character in `lunr.tokenizer.separator`.\n * Arrays will have their elements converted to strings and wrapped in a lunr.Token.\n *\n * Optional metadata can be passed to the tokenizer, this metadata will be cloned and\n * added as metadata to every token that is created from the object to be tokenized.\n *\n * @static\n * @param {?(string|object|object[])} obj - The object to convert into tokens\n * @param {?object} metadata - Optional metadata to associate with every token\n * @returns {lunr.Token[]}\n * @see {@link lunr.Pipeline}\n */\nlunr.tokenizer = function (obj, metadata) {\n if (obj == null || obj == undefined) {\n return []\n }\n\n if (Array.isArray(obj)) {\n return obj.map(function (t) {\n return new lunr.Token(\n lunr.utils.asString(t).toLowerCase(),\n lunr.utils.clone(metadata)\n )\n })\n }\n\n var str = obj.toString().toLowerCase(),\n len = str.length,\n tokens = []\n\n for (var sliceEnd = 0, sliceStart = 0; sliceEnd <= len; sliceEnd++) {\n var char = str.charAt(sliceEnd),\n sliceLength = sliceEnd - sliceStart\n\n if ((char.match(lunr.tokenizer.separator) || sliceEnd == len)) {\n\n if (sliceLength > 0) {\n var tokenMetadata = lunr.utils.clone(metadata) || {}\n tokenMetadata[\"position\"] = [sliceStart, sliceLength]\n tokenMetadata[\"index\"] = tokens.length\n\n tokens.push(\n new lunr.Token (\n str.slice(sliceStart, sliceEnd),\n tokenMetadata\n )\n )\n }\n\n sliceStart = sliceEnd + 1\n }\n\n }\n\n return tokens\n}\n\n/**\n * The separator used to split a string into tokens. Override this property to change the behaviour of\n * `lunr.tokenizer` behaviour when tokenizing strings. By default this splits on whitespace and hyphens.\n *\n * @static\n * @see lunr.tokenizer\n */\nlunr.tokenizer.separator = /[\\s\\-]+/\n/*!\n * lunr.Pipeline\n * Copyright (C) 2020 Oliver Nightingale\n */\n\n/**\n * lunr.Pipelines maintain an ordered list of functions to be applied to all\n * tokens in documents entering the search index and queries being ran against\n * the index.\n *\n * An instance of lunr.Index created with the lunr shortcut will contain a\n * pipeline with a stop word filter and an English language stemmer. Extra\n * functions can be added before or after either of these functions or these\n * default functions can be removed.\n *\n * When run the pipeline will call each function in turn, passing a token, the\n * index of that token in the original list of all tokens and finally a list of\n * all the original tokens.\n *\n * The output of functions in the pipeline will be passed to the next function\n * in the pipeline. To exclude a token from entering the index the function\n * should return undefined, the rest of the pipeline will not be called with\n * this token.\n *\n * For serialisation of pipelines to work, all functions used in an instance of\n * a pipeline should be registered with lunr.Pipeline. Registered functions can\n * then be loaded. 
If trying to load a serialised pipeline that uses functions\n * that are not registered an error will be thrown.\n *\n * If not planning on serialising the pipeline then registering pipeline functions\n * is not necessary.\n *\n * @constructor\n */\nlunr.Pipeline = function () {\n this._stack = []\n}\n\nlunr.Pipeline.registeredFunctions = Object.create(null)\n\n/**\n * A pipeline function maps lunr.Token to lunr.Token. A lunr.Token contains the token\n * string as well as all known metadata. A pipeline function can mutate the token string\n * or mutate (or add) metadata for a given token.\n *\n * A pipeline function can indicate that the passed token should be discarded by returning\n * null, undefined or an empty string. This token will not be passed to any downstream pipeline\n * functions and will not be added to the index.\n *\n * Multiple tokens can be returned by returning an array of tokens. Each token will be passed\n * to any downstream pipeline functions and all will returned tokens will be added to the index.\n *\n * Any number of pipeline functions may be chained together using a lunr.Pipeline.\n *\n * @interface lunr.PipelineFunction\n * @param {lunr.Token} token - A token from the document being processed.\n * @param {number} i - The index of this token in the complete list of tokens for this document/field.\n * @param {lunr.Token[]} tokens - All tokens for this document/field.\n * @returns {(?lunr.Token|lunr.Token[])}\n */\n\n/**\n * Register a function with the pipeline.\n *\n * Functions that are used in the pipeline should be registered if the pipeline\n * needs to be serialised, or a serialised pipeline needs to be loaded.\n *\n * Registering a function does not add it to a pipeline, functions must still be\n * added to instances of the pipeline for them to be used when running a pipeline.\n *\n * @param {lunr.PipelineFunction} fn - The function to check for.\n * @param {String} label - The label to register this function with\n */\nlunr.Pipeline.registerFunction = function (fn, label) {\n if (label in this.registeredFunctions) {\n lunr.utils.warn('Overwriting existing registered function: ' + label)\n }\n\n fn.label = label\n lunr.Pipeline.registeredFunctions[fn.label] = fn\n}\n\n/**\n * Warns if the function is not registered as a Pipeline function.\n *\n * @param {lunr.PipelineFunction} fn - The function to check for.\n * @private\n */\nlunr.Pipeline.warnIfFunctionNotRegistered = function (fn) {\n var isRegistered = fn.label && (fn.label in this.registeredFunctions)\n\n if (!isRegistered) {\n lunr.utils.warn('Function is not registered with pipeline. 
This may cause problems when serialising the index.\\n', fn)\n }\n}\n\n/**\n * Loads a previously serialised pipeline.\n *\n * All functions to be loaded must already be registered with lunr.Pipeline.\n * If any function from the serialised data has not been registered then an\n * error will be thrown.\n *\n * @param {Object} serialised - The serialised pipeline to load.\n * @returns {lunr.Pipeline}\n */\nlunr.Pipeline.load = function (serialised) {\n var pipeline = new lunr.Pipeline\n\n serialised.forEach(function (fnName) {\n var fn = lunr.Pipeline.registeredFunctions[fnName]\n\n if (fn) {\n pipeline.add(fn)\n } else {\n throw new Error('Cannot load unregistered function: ' + fnName)\n }\n })\n\n return pipeline\n}\n\n/**\n * Adds new functions to the end of the pipeline.\n *\n * Logs a warning if the function has not been registered.\n *\n * @param {lunr.PipelineFunction[]} functions - Any number of functions to add to the pipeline.\n */\nlunr.Pipeline.prototype.add = function () {\n var fns = Array.prototype.slice.call(arguments)\n\n fns.forEach(function (fn) {\n lunr.Pipeline.warnIfFunctionNotRegistered(fn)\n this._stack.push(fn)\n }, this)\n}\n\n/**\n * Adds a single function after a function that already exists in the\n * pipeline.\n *\n * Logs a warning if the function has not been registered.\n *\n * @param {lunr.PipelineFunction} existingFn - A function that already exists in the pipeline.\n * @param {lunr.PipelineFunction} newFn - The new function to add to the pipeline.\n */\nlunr.Pipeline.prototype.after = function (existingFn, newFn) {\n lunr.Pipeline.warnIfFunctionNotRegistered(newFn)\n\n var pos = this._stack.indexOf(existingFn)\n if (pos == -1) {\n throw new Error('Cannot find existingFn')\n }\n\n pos = pos + 1\n this._stack.splice(pos, 0, newFn)\n}\n\n/**\n * Adds a single function before a function that already exists in the\n * pipeline.\n *\n * Logs a warning if the function has not been registered.\n *\n * @param {lunr.PipelineFunction} existingFn - A function that already exists in the pipeline.\n * @param {lunr.PipelineFunction} newFn - The new function to add to the pipeline.\n */\nlunr.Pipeline.prototype.before = function (existingFn, newFn) {\n lunr.Pipeline.warnIfFunctionNotRegistered(newFn)\n\n var pos = this._stack.indexOf(existingFn)\n if (pos == -1) {\n throw new Error('Cannot find existingFn')\n }\n\n this._stack.splice(pos, 0, newFn)\n}\n\n/**\n * Removes a function from the pipeline.\n *\n * @param {lunr.PipelineFunction} fn The function to remove from the pipeline.\n */\nlunr.Pipeline.prototype.remove = function (fn) {\n var pos = this._stack.indexOf(fn)\n if (pos == -1) {\n return\n }\n\n this._stack.splice(pos, 1)\n}\n\n/**\n * Runs the current list of functions that make up the pipeline against the\n * passed tokens.\n *\n * @param {Array} tokens The tokens to run through the pipeline.\n * @returns {Array}\n */\nlunr.Pipeline.prototype.run = function (tokens) {\n var stackLength = this._stack.length\n\n for (var i = 0; i < stackLength; i++) {\n var fn = this._stack[i]\n var memo = []\n\n for (var j = 0; j < tokens.length; j++) {\n var result = fn(tokens[j], j, tokens)\n\n if (result === null || result === void 0 || result === '') continue\n\n if (Array.isArray(result)) {\n for (var k = 0; k < result.length; k++) {\n memo.push(result[k])\n }\n } else {\n memo.push(result)\n }\n }\n\n tokens = memo\n }\n\n return tokens\n}\n\n/**\n * Convenience method for passing a string through a pipeline and getting\n * strings out. 
This method takes care of wrapping the passed string in a\n * token and mapping the resulting tokens back to strings.\n *\n * @param {string} str - The string to pass through the pipeline.\n * @param {?object} metadata - Optional metadata to associate with the token\n * passed to the pipeline.\n * @returns {string[]}\n */\nlunr.Pipeline.prototype.runString = function (str, metadata) {\n var token = new lunr.Token (str, metadata)\n\n return this.run([token]).map(function (t) {\n return t.toString()\n })\n}\n\n/**\n * Resets the pipeline by removing any existing processors.\n *\n */\nlunr.Pipeline.prototype.reset = function () {\n this._stack = []\n}\n\n/**\n * Returns a representation of the pipeline ready for serialisation.\n *\n * Logs a warning if the function has not been registered.\n *\n * @returns {Array}\n */\nlunr.Pipeline.prototype.toJSON = function () {\n return this._stack.map(function (fn) {\n lunr.Pipeline.warnIfFunctionNotRegistered(fn)\n\n return fn.label\n })\n}\n/*!\n * lunr.Vector\n * Copyright (C) 2020 Oliver Nightingale\n */\n\n/**\n * A vector is used to construct the vector space of documents and queries. These\n * vectors support operations to determine the similarity between two documents or\n * a document and a query.\n *\n * Normally no parameters are required for initializing a vector, but in the case of\n * loading a previously dumped vector the raw elements can be provided to the constructor.\n *\n * For performance reasons vectors are implemented with a flat array, where an elements\n * index is immediately followed by its value. E.g. [index, value, index, value]. This\n * allows the underlying array to be as sparse as possible and still offer decent\n * performance when being used for vector calculations.\n *\n * @constructor\n * @param {Number[]} [elements] - The flat list of element index and element value pairs.\n */\nlunr.Vector = function (elements) {\n this._magnitude = 0\n this.elements = elements || []\n}\n\n\n/**\n * Calculates the position within the vector to insert a given index.\n *\n * This is used internally by insert and upsert. 
If there are duplicate indexes then\n * the position is returned as if the value for that index were to be updated, but it\n * is the callers responsibility to check whether there is a duplicate at that index\n *\n * @param {Number} insertIdx - The index at which the element should be inserted.\n * @returns {Number}\n */\nlunr.Vector.prototype.positionForIndex = function (index) {\n // For an empty vector the tuple can be inserted at the beginning\n if (this.elements.length == 0) {\n return 0\n }\n\n var start = 0,\n end = this.elements.length / 2,\n sliceLength = end - start,\n pivotPoint = Math.floor(sliceLength / 2),\n pivotIndex = this.elements[pivotPoint * 2]\n\n while (sliceLength > 1) {\n if (pivotIndex < index) {\n start = pivotPoint\n }\n\n if (pivotIndex > index) {\n end = pivotPoint\n }\n\n if (pivotIndex == index) {\n break\n }\n\n sliceLength = end - start\n pivotPoint = start + Math.floor(sliceLength / 2)\n pivotIndex = this.elements[pivotPoint * 2]\n }\n\n if (pivotIndex == index) {\n return pivotPoint * 2\n }\n\n if (pivotIndex > index) {\n return pivotPoint * 2\n }\n\n if (pivotIndex < index) {\n return (pivotPoint + 1) * 2\n }\n}\n\n/**\n * Inserts an element at an index within the vector.\n *\n * Does not allow duplicates, will throw an error if there is already an entry\n * for this index.\n *\n * @param {Number} insertIdx - The index at which the element should be inserted.\n * @param {Number} val - The value to be inserted into the vector.\n */\nlunr.Vector.prototype.insert = function (insertIdx, val) {\n this.upsert(insertIdx, val, function () {\n throw \"duplicate index\"\n })\n}\n\n/**\n * Inserts or updates an existing index within the vector.\n *\n * @param {Number} insertIdx - The index at which the element should be inserted.\n * @param {Number} val - The value to be inserted into the vector.\n * @param {function} fn - A function that is called for updates, the existing value and the\n * requested value are passed as arguments\n */\nlunr.Vector.prototype.upsert = function (insertIdx, val, fn) {\n this._magnitude = 0\n var position = this.positionForIndex(insertIdx)\n\n if (this.elements[position] == insertIdx) {\n this.elements[position + 1] = fn(this.elements[position + 1], val)\n } else {\n this.elements.splice(position, 0, insertIdx, val)\n }\n}\n\n/**\n * Calculates the magnitude of this vector.\n *\n * @returns {Number}\n */\nlunr.Vector.prototype.magnitude = function () {\n if (this._magnitude) return this._magnitude\n\n var sumOfSquares = 0,\n elementsLength = this.elements.length\n\n for (var i = 1; i < elementsLength; i += 2) {\n var val = this.elements[i]\n sumOfSquares += val * val\n }\n\n return this._magnitude = Math.sqrt(sumOfSquares)\n}\n\n/**\n * Calculates the dot product of this vector and another vector.\n *\n * @param {lunr.Vector} otherVector - The vector to compute the dot product with.\n * @returns {Number}\n */\nlunr.Vector.prototype.dot = function (otherVector) {\n var dotProduct = 0,\n a = this.elements, b = otherVector.elements,\n aLen = a.length, bLen = b.length,\n aVal = 0, bVal = 0,\n i = 0, j = 0\n\n while (i < aLen && j < bLen) {\n aVal = a[i], bVal = b[j]\n if (aVal < bVal) {\n i += 2\n } else if (aVal > bVal) {\n j += 2\n } else if (aVal == bVal) {\n dotProduct += a[i + 1] * b[j + 1]\n i += 2\n j += 2\n }\n }\n\n return dotProduct\n}\n\n/**\n * Calculates the similarity between this vector and another vector.\n *\n * @param {lunr.Vector} otherVector - The other vector to calculate the\n * similarity with.\n * @returns 
{Number}\n */\nlunr.Vector.prototype.similarity = function (otherVector) {\n return this.dot(otherVector) / this.magnitude() || 0\n}\n\n/**\n * Converts the vector to an array of the elements within the vector.\n *\n * @returns {Number[]}\n */\nlunr.Vector.prototype.toArray = function () {\n var output = new Array (this.elements.length / 2)\n\n for (var i = 1, j = 0; i < this.elements.length; i += 2, j++) {\n output[j] = this.elements[i]\n }\n\n return output\n}\n\n/**\n * A JSON serializable representation of the vector.\n *\n * @returns {Number[]}\n */\nlunr.Vector.prototype.toJSON = function () {\n return this.elements\n}\n/* eslint-disable */\n/*!\n * lunr.stemmer\n * Copyright (C) 2020 Oliver Nightingale\n * Includes code from - http://tartarus.org/~martin/PorterStemmer/js.txt\n */\n\n/**\n * lunr.stemmer is an english language stemmer, this is a JavaScript\n * implementation of the PorterStemmer taken from http://tartarus.org/~martin\n *\n * @static\n * @implements {lunr.PipelineFunction}\n * @param {lunr.Token} token - The string to stem\n * @returns {lunr.Token}\n * @see {@link lunr.Pipeline}\n * @function\n */\nlunr.stemmer = (function(){\n var step2list = {\n \"ational\" : \"ate\",\n \"tional\" : \"tion\",\n \"enci\" : \"ence\",\n \"anci\" : \"ance\",\n \"izer\" : \"ize\",\n \"bli\" : \"ble\",\n \"alli\" : \"al\",\n \"entli\" : \"ent\",\n \"eli\" : \"e\",\n \"ousli\" : \"ous\",\n \"ization\" : \"ize\",\n \"ation\" : \"ate\",\n \"ator\" : \"ate\",\n \"alism\" : \"al\",\n \"iveness\" : \"ive\",\n \"fulness\" : \"ful\",\n \"ousness\" : \"ous\",\n \"aliti\" : \"al\",\n \"iviti\" : \"ive\",\n \"biliti\" : \"ble\",\n \"logi\" : \"log\"\n },\n\n step3list = {\n \"icate\" : \"ic\",\n \"ative\" : \"\",\n \"alize\" : \"al\",\n \"iciti\" : \"ic\",\n \"ical\" : \"ic\",\n \"ful\" : \"\",\n \"ness\" : \"\"\n },\n\n c = \"[^aeiou]\", // consonant\n v = \"[aeiouy]\", // vowel\n C = c + \"[^aeiouy]*\", // consonant sequence\n V = v + \"[aeiou]*\", // vowel sequence\n\n mgr0 = \"^(\" + C + \")?\" + V + C, // [C]VC... is m>0\n meq1 = \"^(\" + C + \")?\" + V + C + \"(\" + V + \")?$\", // [C]VC[V] is m=1\n mgr1 = \"^(\" + C + \")?\" + V + C + V + C, // [C]VCVC... 
is m>1\n s_v = \"^(\" + C + \")?\" + v; // vowel in stem\n\n var re_mgr0 = new RegExp(mgr0);\n var re_mgr1 = new RegExp(mgr1);\n var re_meq1 = new RegExp(meq1);\n var re_s_v = new RegExp(s_v);\n\n var re_1a = /^(.+?)(ss|i)es$/;\n var re2_1a = /^(.+?)([^s])s$/;\n var re_1b = /^(.+?)eed$/;\n var re2_1b = /^(.+?)(ed|ing)$/;\n var re_1b_2 = /.$/;\n var re2_1b_2 = /(at|bl|iz)$/;\n var re3_1b_2 = new RegExp(\"([^aeiouylsz])\\\\1$\");\n var re4_1b_2 = new RegExp(\"^\" + C + v + \"[^aeiouwxy]$\");\n\n var re_1c = /^(.+?[^aeiou])y$/;\n var re_2 = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;\n\n var re_3 = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;\n\n var re_4 = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;\n var re2_4 = /^(.+?)(s|t)(ion)$/;\n\n var re_5 = /^(.+?)e$/;\n var re_5_1 = /ll$/;\n var re3_5 = new RegExp(\"^\" + C + v + \"[^aeiouwxy]$\");\n\n var porterStemmer = function porterStemmer(w) {\n var stem,\n suffix,\n firstch,\n re,\n re2,\n re3,\n re4;\n\n if (w.length < 3) { return w; }\n\n firstch = w.substr(0,1);\n if (firstch == \"y\") {\n w = firstch.toUpperCase() + w.substr(1);\n }\n\n // Step 1a\n re = re_1a\n re2 = re2_1a;\n\n if (re.test(w)) { w = w.replace(re,\"$1$2\"); }\n else if (re2.test(w)) { w = w.replace(re2,\"$1$2\"); }\n\n // Step 1b\n re = re_1b;\n re2 = re2_1b;\n if (re.test(w)) {\n var fp = re.exec(w);\n re = re_mgr0;\n if (re.test(fp[1])) {\n re = re_1b_2;\n w = w.replace(re,\"\");\n }\n } else if (re2.test(w)) {\n var fp = re2.exec(w);\n stem = fp[1];\n re2 = re_s_v;\n if (re2.test(stem)) {\n w = stem;\n re2 = re2_1b_2;\n re3 = re3_1b_2;\n re4 = re4_1b_2;\n if (re2.test(w)) { w = w + \"e\"; }\n else if (re3.test(w)) { re = re_1b_2; w = w.replace(re,\"\"); }\n else if (re4.test(w)) { w = w + \"e\"; }\n }\n }\n\n // Step 1c - replace suffix y or Y by i if preceded by a non-vowel which is not the first letter of the word (so cry -> cri, by -> by, say -> say)\n re = re_1c;\n if (re.test(w)) {\n var fp = re.exec(w);\n stem = fp[1];\n w = stem + \"i\";\n }\n\n // Step 2\n re = re_2;\n if (re.test(w)) {\n var fp = re.exec(w);\n stem = fp[1];\n suffix = fp[2];\n re = re_mgr0;\n if (re.test(stem)) {\n w = stem + step2list[suffix];\n }\n }\n\n // Step 3\n re = re_3;\n if (re.test(w)) {\n var fp = re.exec(w);\n stem = fp[1];\n suffix = fp[2];\n re = re_mgr0;\n if (re.test(stem)) {\n w = stem + step3list[suffix];\n }\n }\n\n // Step 4\n re = re_4;\n re2 = re2_4;\n if (re.test(w)) {\n var fp = re.exec(w);\n stem = fp[1];\n re = re_mgr1;\n if (re.test(stem)) {\n w = stem;\n }\n } else if (re2.test(w)) {\n var fp = re2.exec(w);\n stem = fp[1] + fp[2];\n re2 = re_mgr1;\n if (re2.test(stem)) {\n w = stem;\n }\n }\n\n // Step 5\n re = re_5;\n if (re.test(w)) {\n var fp = re.exec(w);\n stem = fp[1];\n re = re_mgr1;\n re2 = re_meq1;\n re3 = re3_5;\n if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) {\n w = stem;\n }\n }\n\n re = re_5_1;\n re2 = re_mgr1;\n if (re.test(w) && re2.test(w)) {\n re = re_1b_2;\n w = w.replace(re,\"\");\n }\n\n // and turn initial Y back to y\n\n if (firstch == \"y\") {\n w = firstch.toLowerCase() + w.substr(1);\n }\n\n return w;\n };\n\n return function (token) {\n return token.update(porterStemmer);\n }\n})();\n\nlunr.Pipeline.registerFunction(lunr.stemmer, 'stemmer')\n/*!\n * lunr.stopWordFilter\n * Copyright (C) 2020 Oliver Nightingale\n */\n\n/**\n * lunr.generateStopWordFilter builds a stopWordFilter 
function from the provided\n * list of stop words.\n *\n * The built in lunr.stopWordFilter is built using this generator and can be used\n * to generate custom stopWordFilters for applications or non English languages.\n *\n * @function\n * @param {Array} token The token to pass through the filter\n * @returns {lunr.PipelineFunction}\n * @see lunr.Pipeline\n * @see lunr.stopWordFilter\n */\nlunr.generateStopWordFilter = function (stopWords) {\n var words = stopWords.reduce(function (memo, stopWord) {\n memo[stopWord] = stopWord\n return memo\n }, {})\n\n return function (token) {\n if (token && words[token.toString()] !== token.toString()) return token\n }\n}\n\n/**\n * lunr.stopWordFilter is an English language stop word list filter, any words\n * contained in the list will not be passed through the filter.\n *\n * This is intended to be used in the Pipeline. If the token does not pass the\n * filter then undefined will be returned.\n *\n * @function\n * @implements {lunr.PipelineFunction}\n * @params {lunr.Token} token - A token to check for being a stop word.\n * @returns {lunr.Token}\n * @see {@link lunr.Pipeline}\n */\nlunr.stopWordFilter = lunr.generateStopWordFilter([\n 'a',\n 'able',\n 'about',\n 'across',\n 'after',\n 'all',\n 'almost',\n 'also',\n 'am',\n 'among',\n 'an',\n 'and',\n 'any',\n 'are',\n 'as',\n 'at',\n 'be',\n 'because',\n 'been',\n 'but',\n 'by',\n 'can',\n 'cannot',\n 'could',\n 'dear',\n 'did',\n 'do',\n 'does',\n 'either',\n 'else',\n 'ever',\n 'every',\n 'for',\n 'from',\n 'get',\n 'got',\n 'had',\n 'has',\n 'have',\n 'he',\n 'her',\n 'hers',\n 'him',\n 'his',\n 'how',\n 'however',\n 'i',\n 'if',\n 'in',\n 'into',\n 'is',\n 'it',\n 'its',\n 'just',\n 'least',\n 'let',\n 'like',\n 'likely',\n 'may',\n 'me',\n 'might',\n 'most',\n 'must',\n 'my',\n 'neither',\n 'no',\n 'nor',\n 'not',\n 'of',\n 'off',\n 'often',\n 'on',\n 'only',\n 'or',\n 'other',\n 'our',\n 'own',\n 'rather',\n 'said',\n 'say',\n 'says',\n 'she',\n 'should',\n 'since',\n 'so',\n 'some',\n 'than',\n 'that',\n 'the',\n 'their',\n 'them',\n 'then',\n 'there',\n 'these',\n 'they',\n 'this',\n 'tis',\n 'to',\n 'too',\n 'twas',\n 'us',\n 'wants',\n 'was',\n 'we',\n 'were',\n 'what',\n 'when',\n 'where',\n 'which',\n 'while',\n 'who',\n 'whom',\n 'why',\n 'will',\n 'with',\n 'would',\n 'yet',\n 'you',\n 'your'\n])\n\nlunr.Pipeline.registerFunction(lunr.stopWordFilter, 'stopWordFilter')\n/*!\n * lunr.trimmer\n * Copyright (C) 2020 Oliver Nightingale\n */\n\n/**\n * lunr.trimmer is a pipeline function for trimming non word\n * characters from the beginning and end of tokens before they\n * enter the index.\n *\n * This implementation may not work correctly for non latin\n * characters and should either be removed or adapted for use\n * with languages with non-latin characters.\n *\n * @static\n * @implements {lunr.PipelineFunction}\n * @param {lunr.Token} token The token to pass through the filter\n * @returns {lunr.Token}\n * @see lunr.Pipeline\n */\nlunr.trimmer = function (token) {\n return token.update(function (s) {\n return s.replace(/^\\W+/, '').replace(/\\W+$/, '')\n })\n}\n\nlunr.Pipeline.registerFunction(lunr.trimmer, 'trimmer')\n/*!\n * lunr.TokenSet\n * Copyright (C) 2020 Oliver Nightingale\n */\n\n/**\n * A token set is used to store the unique list of all tokens\n * within an index. 
Token sets are also used to represent an\n * incoming query to the index, this query token set and index\n * token set are then intersected to find which tokens to look\n * up in the inverted index.\n *\n * A token set can hold multiple tokens, as in the case of the\n * index token set, or it can hold a single token as in the\n * case of a simple query token set.\n *\n * Additionally token sets are used to perform wildcard matching.\n * Leading, contained and trailing wildcards are supported, and\n * from this edit distance matching can also be provided.\n *\n * Token sets are implemented as a minimal finite state automata,\n * where both common prefixes and suffixes are shared between tokens.\n * This helps to reduce the space used for storing the token set.\n *\n * @constructor\n */\nlunr.TokenSet = function () {\n this.final = false\n this.edges = {}\n this.id = lunr.TokenSet._nextId\n lunr.TokenSet._nextId += 1\n}\n\n/**\n * Keeps track of the next, auto increment, identifier to assign\n * to a new tokenSet.\n *\n * TokenSets require a unique identifier to be correctly minimised.\n *\n * @private\n */\nlunr.TokenSet._nextId = 1\n\n/**\n * Creates a TokenSet instance from the given sorted array of words.\n *\n * @param {String[]} arr - A sorted array of strings to create the set from.\n * @returns {lunr.TokenSet}\n * @throws Will throw an error if the input array is not sorted.\n */\nlunr.TokenSet.fromArray = function (arr) {\n var builder = new lunr.TokenSet.Builder\n\n for (var i = 0, len = arr.length; i < len; i++) {\n builder.insert(arr[i])\n }\n\n builder.finish()\n return builder.root\n}\n\n/**\n * Creates a token set from a query clause.\n *\n * @private\n * @param {Object} clause - A single clause from lunr.Query.\n * @param {string} clause.term - The query clause term.\n * @param {number} [clause.editDistance] - The optional edit distance for the term.\n * @returns {lunr.TokenSet}\n */\nlunr.TokenSet.fromClause = function (clause) {\n if ('editDistance' in clause) {\n return lunr.TokenSet.fromFuzzyString(clause.term, clause.editDistance)\n } else {\n return lunr.TokenSet.fromString(clause.term)\n }\n}\n\n/**\n * Creates a token set representing a single string with a specified\n * edit distance.\n *\n * Insertions, deletions, substitutions and transpositions are each\n * treated as an edit distance of 1.\n *\n * Increasing the allowed edit distance will have a dramatic impact\n * on the performance of both creating and intersecting these TokenSets.\n * It is advised to keep the edit distance less than 3.\n *\n * @param {string} str - The string to create the token set from.\n * @param {number} editDistance - The allowed edit distance to match.\n * @returns {lunr.Vector}\n */\nlunr.TokenSet.fromFuzzyString = function (str, editDistance) {\n var root = new lunr.TokenSet\n\n var stack = [{\n node: root,\n editsRemaining: editDistance,\n str: str\n }]\n\n while (stack.length) {\n var frame = stack.pop()\n\n // no edit\n if (frame.str.length > 0) {\n var char = frame.str.charAt(0),\n noEditNode\n\n if (char in frame.node.edges) {\n noEditNode = frame.node.edges[char]\n } else {\n noEditNode = new lunr.TokenSet\n frame.node.edges[char] = noEditNode\n }\n\n if (frame.str.length == 1) {\n noEditNode.final = true\n }\n\n stack.push({\n node: noEditNode,\n editsRemaining: frame.editsRemaining,\n str: frame.str.slice(1)\n })\n }\n\n if (frame.editsRemaining == 0) {\n continue\n }\n\n // insertion\n if (\"*\" in frame.node.edges) {\n var insertionNode = frame.node.edges[\"*\"]\n } else {\n 
var insertionNode = new lunr.TokenSet\n frame.node.edges[\"*\"] = insertionNode\n }\n\n if (frame.str.length == 0) {\n insertionNode.final = true\n }\n\n stack.push({\n node: insertionNode,\n editsRemaining: frame.editsRemaining - 1,\n str: frame.str\n })\n\n // deletion\n // can only do a deletion if we have enough edits remaining\n // and if there are characters left to delete in the string\n if (frame.str.length > 1) {\n stack.push({\n node: frame.node,\n editsRemaining: frame.editsRemaining - 1,\n str: frame.str.slice(1)\n })\n }\n\n // deletion\n // just removing the last character from the str\n if (frame.str.length == 1) {\n frame.node.final = true\n }\n\n // substitution\n // can only do a substitution if we have enough edits remaining\n // and if there are characters left to substitute\n if (frame.str.length >= 1) {\n if (\"*\" in frame.node.edges) {\n var substitutionNode = frame.node.edges[\"*\"]\n } else {\n var substitutionNode = new lunr.TokenSet\n frame.node.edges[\"*\"] = substitutionNode\n }\n\n if (frame.str.length == 1) {\n substitutionNode.final = true\n }\n\n stack.push({\n node: substitutionNode,\n editsRemaining: frame.editsRemaining - 1,\n str: frame.str.slice(1)\n })\n }\n\n // transposition\n // can only do a transposition if there are edits remaining\n // and there are enough characters to transpose\n if (frame.str.length > 1) {\n var charA = frame.str.charAt(0),\n charB = frame.str.charAt(1),\n transposeNode\n\n if (charB in frame.node.edges) {\n transposeNode = frame.node.edges[charB]\n } else {\n transposeNode = new lunr.TokenSet\n frame.node.edges[charB] = transposeNode\n }\n\n if (frame.str.length == 1) {\n transposeNode.final = true\n }\n\n stack.push({\n node: transposeNode,\n editsRemaining: frame.editsRemaining - 1,\n str: charA + frame.str.slice(2)\n })\n }\n }\n\n return root\n}\n\n/**\n * Creates a TokenSet from a string.\n *\n * The string may contain one or more wildcard characters (*)\n * that will allow wildcard matching when intersecting with\n * another TokenSet.\n *\n * @param {string} str - The string to create a TokenSet from.\n * @returns {lunr.TokenSet}\n */\nlunr.TokenSet.fromString = function (str) {\n var node = new lunr.TokenSet,\n root = node\n\n /*\n * Iterates through all characters within the passed string\n * appending a node for each character.\n *\n * When a wildcard character is found then a self\n * referencing edge is introduced to continually match\n * any number of any characters.\n */\n for (var i = 0, len = str.length; i < len; i++) {\n var char = str[i],\n final = (i == len - 1)\n\n if (char == \"*\") {\n node.edges[char] = node\n node.final = final\n\n } else {\n var next = new lunr.TokenSet\n next.final = final\n\n node.edges[char] = next\n node = next\n }\n }\n\n return root\n}\n\n/**\n * Converts this TokenSet into an array of strings\n * contained within the TokenSet.\n *\n * This is not intended to be used on a TokenSet that\n * contains wildcards, in these cases the results are\n * undefined and are likely to cause an infinite loop.\n *\n * @returns {string[]}\n */\nlunr.TokenSet.prototype.toArray = function () {\n var words = []\n\n var stack = [{\n prefix: \"\",\n node: this\n }]\n\n while (stack.length) {\n var frame = stack.pop(),\n edges = Object.keys(frame.node.edges),\n len = edges.length\n\n if (frame.node.final) {\n /* In Safari, at this point the prefix is sometimes corrupted, see:\n * https://github.com/olivernn/lunr.js/issues/279 Calling any\n * String.prototype method forces Safari to \"cast\" this 
string to what\n * it's supposed to be, fixing the bug. */\n frame.prefix.charAt(0)\n words.push(frame.prefix)\n }\n\n for (var i = 0; i < len; i++) {\n var edge = edges[i]\n\n stack.push({\n prefix: frame.prefix.concat(edge),\n node: frame.node.edges[edge]\n })\n }\n }\n\n return words\n}\n\n/**\n * Generates a string representation of a TokenSet.\n *\n * This is intended to allow TokenSets to be used as keys\n * in objects, largely to aid the construction and minimisation\n * of a TokenSet. As such it is not designed to be a human\n * friendly representation of the TokenSet.\n *\n * @returns {string}\n */\nlunr.TokenSet.prototype.toString = function () {\n // NOTE: Using Object.keys here as this.edges is very likely\n // to enter 'hash-mode' with many keys being added\n //\n // avoiding a for-in loop here as it leads to the function\n // being de-optimised (at least in V8). From some simple\n // benchmarks the performance is comparable, but allowing\n // V8 to optimize may mean easy performance wins in the future.\n\n if (this._str) {\n return this._str\n }\n\n var str = this.final ? '1' : '0',\n labels = Object.keys(this.edges).sort(),\n len = labels.length\n\n for (var i = 0; i < len; i++) {\n var label = labels[i],\n node = this.edges[label]\n\n str = str + label + node.id\n }\n\n return str\n}\n\n/**\n * Returns a new TokenSet that is the intersection of\n * this TokenSet and the passed TokenSet.\n *\n * This intersection will take into account any wildcards\n * contained within the TokenSet.\n *\n * @param {lunr.TokenSet} b - An other TokenSet to intersect with.\n * @returns {lunr.TokenSet}\n */\nlunr.TokenSet.prototype.intersect = function (b) {\n var output = new lunr.TokenSet,\n frame = undefined\n\n var stack = [{\n qNode: b,\n output: output,\n node: this\n }]\n\n while (stack.length) {\n frame = stack.pop()\n\n // NOTE: As with the #toString method, we are using\n // Object.keys and a for loop instead of a for-in loop\n // as both of these objects enter 'hash' mode, causing\n // the function to be de-optimised in V8\n var qEdges = Object.keys(frame.qNode.edges),\n qLen = qEdges.length,\n nEdges = Object.keys(frame.node.edges),\n nLen = nEdges.length\n\n for (var q = 0; q < qLen; q++) {\n var qEdge = qEdges[q]\n\n for (var n = 0; n < nLen; n++) {\n var nEdge = nEdges[n]\n\n if (nEdge == qEdge || qEdge == '*') {\n var node = frame.node.edges[nEdge],\n qNode = frame.qNode.edges[qEdge],\n final = node.final && qNode.final,\n next = undefined\n\n if (nEdge in frame.output.edges) {\n // an edge already exists for this character\n // no need to create a new node, just set the finality\n // bit unless this node is already final\n next = frame.output.edges[nEdge]\n next.final = next.final || final\n\n } else {\n // no edge exists yet, must create one\n // set the finality bit and insert it\n // into the output\n next = new lunr.TokenSet\n next.final = final\n frame.output.edges[nEdge] = next\n }\n\n stack.push({\n qNode: qNode,\n output: next,\n node: node\n })\n }\n }\n }\n }\n\n return output\n}\nlunr.TokenSet.Builder = function () {\n this.previousWord = \"\"\n this.root = new lunr.TokenSet\n this.uncheckedNodes = []\n this.minimizedNodes = {}\n}\n\nlunr.TokenSet.Builder.prototype.insert = function (word) {\n var node,\n commonPrefix = 0\n\n if (word < this.previousWord) {\n throw new Error (\"Out of order word insertion\")\n }\n\n for (var i = 0; i < word.length && i < this.previousWord.length; i++) {\n if (word[i] != this.previousWord[i]) break\n commonPrefix++\n }\n\n 
this.minimize(commonPrefix)\n\n if (this.uncheckedNodes.length == 0) {\n node = this.root\n } else {\n node = this.uncheckedNodes[this.uncheckedNodes.length - 1].child\n }\n\n for (var i = commonPrefix; i < word.length; i++) {\n var nextNode = new lunr.TokenSet,\n char = word[i]\n\n node.edges[char] = nextNode\n\n this.uncheckedNodes.push({\n parent: node,\n char: char,\n child: nextNode\n })\n\n node = nextNode\n }\n\n node.final = true\n this.previousWord = word\n}\n\nlunr.TokenSet.Builder.prototype.finish = function () {\n this.minimize(0)\n}\n\nlunr.TokenSet.Builder.prototype.minimize = function (downTo) {\n for (var i = this.uncheckedNodes.length - 1; i >= downTo; i--) {\n var node = this.uncheckedNodes[i],\n childKey = node.child.toString()\n\n if (childKey in this.minimizedNodes) {\n node.parent.edges[node.char] = this.minimizedNodes[childKey]\n } else {\n // Cache the key for this node since\n // we know it can't change anymore\n node.child._str = childKey\n\n this.minimizedNodes[childKey] = node.child\n }\n\n this.uncheckedNodes.pop()\n }\n}\n/*!\n * lunr.Index\n * Copyright (C) 2020 Oliver Nightingale\n */\n\n/**\n * An index contains the built index of all documents and provides a query interface\n * to the index.\n *\n * Usually instances of lunr.Index will not be created using this constructor, instead\n * lunr.Builder should be used to construct new indexes, or lunr.Index.load should be\n * used to load previously built and serialized indexes.\n *\n * @constructor\n * @param {Object} attrs - The attributes of the built search index.\n * @param {Object} attrs.invertedIndex - An index of term/field to document reference.\n * @param {Object} attrs.fieldVectors - Field vectors\n * @param {lunr.TokenSet} attrs.tokenSet - An set of all corpus tokens.\n * @param {string[]} attrs.fields - The names of indexed document fields.\n * @param {lunr.Pipeline} attrs.pipeline - The pipeline to use for search terms.\n */\nlunr.Index = function (attrs) {\n this.invertedIndex = attrs.invertedIndex\n this.fieldVectors = attrs.fieldVectors\n this.tokenSet = attrs.tokenSet\n this.fields = attrs.fields\n this.pipeline = attrs.pipeline\n}\n\n/**\n * A result contains details of a document matching a search query.\n * @typedef {Object} lunr.Index~Result\n * @property {string} ref - The reference of the document this result represents.\n * @property {number} score - A number between 0 and 1 representing how similar this document is to the query.\n * @property {lunr.MatchData} matchData - Contains metadata about this match including which term(s) caused the match.\n */\n\n/**\n * Although lunr provides the ability to create queries using lunr.Query, it also provides a simple\n * query language which itself is parsed into an instance of lunr.Query.\n *\n * For programmatically building queries it is advised to directly use lunr.Query, the query language\n * is best used for human entered text rather than program generated text.\n *\n * At its simplest queries can just be a single term, e.g. `hello`, multiple terms are also supported\n * and will be combined with OR, e.g `hello world` will match documents that contain either 'hello'\n * or 'world', though those that contain both will rank higher in the results.\n *\n * Wildcards can be included in terms to match one or more unspecified characters, these wildcards can\n * be inserted anywhere within the term, and more than one wildcard can exist in a single term. 
Adding\n * wildcards will increase the number of documents that will be found but can also have a negative\n * impact on query performance, especially with wildcards at the beginning of a term.\n *\n * Terms can be restricted to specific fields, e.g. `title:hello`, only documents with the term\n * hello in the title field will match this query. Using a field not present in the index will lead\n * to an error being thrown.\n *\n * Modifiers can also be added to terms, lunr supports edit distance and boost modifiers on terms. A term\n * boost will make documents matching that term score higher, e.g. `foo^5`. Edit distance is also supported\n * to provide fuzzy matching, e.g. 'hello~2' will match documents with hello with an edit distance of 2.\n * Avoid large values for edit distance to improve query performance.\n *\n * Each term also supports a presence modifier. By default a term's presence in document is optional, however\n * this can be changed to either required or prohibited. For a term's presence to be required in a document the\n * term should be prefixed with a '+', e.g. `+foo bar` is a search for documents that must contain 'foo' and\n * optionally contain 'bar'. Conversely a leading '-' sets the terms presence to prohibited, i.e. it must not\n * appear in a document, e.g. `-foo bar` is a search for documents that do not contain 'foo' but may contain 'bar'.\n *\n * To escape special characters the backslash character '\\' can be used, this allows searches to include\n * characters that would normally be considered modifiers, e.g. `foo\\~2` will search for a term \"foo~2\" instead\n * of attempting to apply a boost of 2 to the search term \"foo\".\n *\n * @typedef {string} lunr.Index~QueryString\n * @example Simple single term query\n * hello\n * @example Multiple term query\n * hello world\n * @example term scoped to a field\n * title:hello\n * @example term with a boost of 10\n * hello^10\n * @example term with an edit distance of 2\n * hello~2\n * @example terms with presence modifiers\n * -foo +bar baz\n */\n\n/**\n * Performs a search against the index using lunr query syntax.\n *\n * Results will be returned sorted by their score, the most relevant results\n * will be returned first. 
For details on how the score is calculated, please see\n * the {@link https://lunrjs.com/guides/searching.html#scoring|guide}.\n *\n * For more programmatic querying use lunr.Index#query.\n *\n * @param {lunr.Index~QueryString} queryString - A string containing a lunr query.\n * @throws {lunr.QueryParseError} If the passed query string cannot be parsed.\n * @returns {lunr.Index~Result[]}\n */\nlunr.Index.prototype.search = function (queryString) {\n return this.query(function (query) {\n var parser = new lunr.QueryParser(queryString, query)\n parser.parse()\n })\n}\n\n/**\n * A query builder callback provides a query object to be used to express\n * the query to perform on the index.\n *\n * @callback lunr.Index~queryBuilder\n * @param {lunr.Query} query - The query object to build up.\n * @this lunr.Query\n */\n\n/**\n * Performs a query against the index using the yielded lunr.Query object.\n *\n * If performing programmatic queries against the index, this method is preferred\n * over lunr.Index#search so as to avoid the additional query parsing overhead.\n *\n * A query object is yielded to the supplied function which should be used to\n * express the query to be run against the index.\n *\n * Note that although this function takes a callback parameter it is _not_ an\n * asynchronous operation, the callback is just yielded a query object to be\n * customized.\n *\n * @param {lunr.Index~queryBuilder} fn - A function that is used to build the query.\n * @returns {lunr.Index~Result[]}\n */\nlunr.Index.prototype.query = function (fn) {\n // for each query clause\n // * process terms\n // * expand terms from token set\n // * find matching documents and metadata\n // * get document vectors\n // * score documents\n\n var query = new lunr.Query(this.fields),\n matchingFields = Object.create(null),\n queryVectors = Object.create(null),\n termFieldCache = Object.create(null),\n requiredMatches = Object.create(null),\n prohibitedMatches = Object.create(null)\n\n /*\n * To support field level boosts a query vector is created per\n * field. An empty vector is eagerly created to support negated\n * queries.\n */\n for (var i = 0; i < this.fields.length; i++) {\n queryVectors[this.fields[i]] = new lunr.Vector\n }\n\n fn.call(query, query)\n\n for (var i = 0; i < query.clauses.length; i++) {\n /*\n * Unless the pipeline has been disabled for this term, which is\n * the case for terms with wildcards, we need to pass the clause\n * term through the search pipeline. A pipeline returns an array\n * of processed terms. Pipeline functions may expand the passed\n * term, which means we may end up performing multiple index lookups\n * for a single query term.\n */\n var clause = query.clauses[i],\n terms = null,\n clauseMatches = lunr.Set.empty\n\n if (clause.usePipeline) {\n terms = this.pipeline.runString(clause.term, {\n fields: clause.fields\n })\n } else {\n terms = [clause.term]\n }\n\n for (var m = 0; m < terms.length; m++) {\n var term = terms[m]\n\n /*\n * Each term returned from the pipeline needs to use the same query\n * clause object, e.g. the same boost and or edit distance. 
The\n * simplest way to do this is to re-use the clause object but mutate\n * its term property.\n */\n clause.term = term\n\n /*\n * From the term in the clause we create a token set which will then\n * be used to intersect the indexes token set to get a list of terms\n * to lookup in the inverted index\n */\n var termTokenSet = lunr.TokenSet.fromClause(clause),\n expandedTerms = this.tokenSet.intersect(termTokenSet).toArray()\n\n /*\n * If a term marked as required does not exist in the tokenSet it is\n * impossible for the search to return any matches. We set all the field\n * scoped required matches set to empty and stop examining any further\n * clauses.\n */\n if (expandedTerms.length === 0 && clause.presence === lunr.Query.presence.REQUIRED) {\n for (var k = 0; k < clause.fields.length; k++) {\n var field = clause.fields[k]\n requiredMatches[field] = lunr.Set.empty\n }\n\n break\n }\n\n for (var j = 0; j < expandedTerms.length; j++) {\n /*\n * For each term get the posting and termIndex, this is required for\n * building the query vector.\n */\n var expandedTerm = expandedTerms[j],\n posting = this.invertedIndex[expandedTerm],\n termIndex = posting._index\n\n for (var k = 0; k < clause.fields.length; k++) {\n /*\n * For each field that this query term is scoped by (by default\n * all fields are in scope) we need to get all the document refs\n * that have this term in that field.\n *\n * The posting is the entry in the invertedIndex for the matching\n * term from above.\n */\n var field = clause.fields[k],\n fieldPosting = posting[field],\n matchingDocumentRefs = Object.keys(fieldPosting),\n termField = expandedTerm + \"/\" + field,\n matchingDocumentsSet = new lunr.Set(matchingDocumentRefs)\n\n /*\n * if the presence of this term is required ensure that the matching\n * documents are added to the set of required matches for this clause.\n *\n */\n if (clause.presence == lunr.Query.presence.REQUIRED) {\n clauseMatches = clauseMatches.union(matchingDocumentsSet)\n\n if (requiredMatches[field] === undefined) {\n requiredMatches[field] = lunr.Set.complete\n }\n }\n\n /*\n * if the presence of this term is prohibited ensure that the matching\n * documents are added to the set of prohibited matches for this field,\n * creating that set if it does not yet exist.\n */\n if (clause.presence == lunr.Query.presence.PROHIBITED) {\n if (prohibitedMatches[field] === undefined) {\n prohibitedMatches[field] = lunr.Set.empty\n }\n\n prohibitedMatches[field] = prohibitedMatches[field].union(matchingDocumentsSet)\n\n /*\n * Prohibited matches should not be part of the query vector used for\n * similarity scoring and no metadata should be extracted so we continue\n * to the next field\n */\n continue\n }\n\n /*\n * The query field vector is populated using the termIndex found for\n * the term and a unit value with the appropriate boost applied.\n * Using upsert because there could already be an entry in the vector\n * for the term we are working with. 
In that case we just add the scores\n * together.\n */\n queryVectors[field].upsert(termIndex, clause.boost, function (a, b) { return a + b })\n\n /**\n * If we've already seen this term, field combo then we've already collected\n * the matching documents and metadata, no need to go through all that again\n */\n if (termFieldCache[termField]) {\n continue\n }\n\n for (var l = 0; l < matchingDocumentRefs.length; l++) {\n /*\n * All metadata for this term/field/document triple\n * are then extracted and collected into an instance\n * of lunr.MatchData ready to be returned in the query\n * results\n */\n var matchingDocumentRef = matchingDocumentRefs[l],\n matchingFieldRef = new lunr.FieldRef (matchingDocumentRef, field),\n metadata = fieldPosting[matchingDocumentRef],\n fieldMatch\n\n if ((fieldMatch = matchingFields[matchingFieldRef]) === undefined) {\n matchingFields[matchingFieldRef] = new lunr.MatchData (expandedTerm, field, metadata)\n } else {\n fieldMatch.add(expandedTerm, field, metadata)\n }\n\n }\n\n termFieldCache[termField] = true\n }\n }\n }\n\n /**\n * If the presence was required we need to update the requiredMatches field sets.\n * We do this after all fields for the term have collected their matches because\n * the clause terms presence is required in _any_ of the fields not _all_ of the\n * fields.\n */\n if (clause.presence === lunr.Query.presence.REQUIRED) {\n for (var k = 0; k < clause.fields.length; k++) {\n var field = clause.fields[k]\n requiredMatches[field] = requiredMatches[field].intersect(clauseMatches)\n }\n }\n }\n\n /**\n * Need to combine the field scoped required and prohibited\n * matching documents into a global set of required and prohibited\n * matches\n */\n var allRequiredMatches = lunr.Set.complete,\n allProhibitedMatches = lunr.Set.empty\n\n for (var i = 0; i < this.fields.length; i++) {\n var field = this.fields[i]\n\n if (requiredMatches[field]) {\n allRequiredMatches = allRequiredMatches.intersect(requiredMatches[field])\n }\n\n if (prohibitedMatches[field]) {\n allProhibitedMatches = allProhibitedMatches.union(prohibitedMatches[field])\n }\n }\n\n var matchingFieldRefs = Object.keys(matchingFields),\n results = [],\n matches = Object.create(null)\n\n /*\n * If the query is negated (contains only prohibited terms)\n * we need to get _all_ fieldRefs currently existing in the\n * index. This is only done when we know that the query is\n * entirely prohibited terms to avoid any cost of getting all\n * fieldRefs unnecessarily.\n *\n * Additionally, blank MatchData must be created to correctly\n * populate the results.\n */\n if (query.isNegated()) {\n matchingFieldRefs = Object.keys(this.fieldVectors)\n\n for (var i = 0; i < matchingFieldRefs.length; i++) {\n var matchingFieldRef = matchingFieldRefs[i]\n var fieldRef = lunr.FieldRef.fromString(matchingFieldRef)\n matchingFields[matchingFieldRef] = new lunr.MatchData\n }\n }\n\n for (var i = 0; i < matchingFieldRefs.length; i++) {\n /*\n * Currently we have document fields that match the query, but we\n * need to return documents. 
The matchData and scores are combined\n * from multiple fields belonging to the same document.\n *\n * Scores are calculated by field, using the query vectors created\n * above, and combined into a final document score using addition.\n */\n var fieldRef = lunr.FieldRef.fromString(matchingFieldRefs[i]),\n docRef = fieldRef.docRef\n\n if (!allRequiredMatches.contains(docRef)) {\n continue\n }\n\n if (allProhibitedMatches.contains(docRef)) {\n continue\n }\n\n var fieldVector = this.fieldVectors[fieldRef],\n score = queryVectors[fieldRef.fieldName].similarity(fieldVector),\n docMatch\n\n if ((docMatch = matches[docRef]) !== undefined) {\n docMatch.score += score\n docMatch.matchData.combine(matchingFields[fieldRef])\n } else {\n var match = {\n ref: docRef,\n score: score,\n matchData: matchingFields[fieldRef]\n }\n matches[docRef] = match\n results.push(match)\n }\n }\n\n /*\n * Sort the results objects by score, highest first.\n */\n return results.sort(function (a, b) {\n return b.score - a.score\n })\n}\n\n/**\n * Prepares the index for JSON serialization.\n *\n * The schema for this JSON blob will be described in a\n * separate JSON schema file.\n *\n * @returns {Object}\n */\nlunr.Index.prototype.toJSON = function () {\n var invertedIndex = Object.keys(this.invertedIndex)\n .sort()\n .map(function (term) {\n return [term, this.invertedIndex[term]]\n }, this)\n\n var fieldVectors = Object.keys(this.fieldVectors)\n .map(function (ref) {\n return [ref, this.fieldVectors[ref].toJSON()]\n }, this)\n\n return {\n version: lunr.version,\n fields: this.fields,\n fieldVectors: fieldVectors,\n invertedIndex: invertedIndex,\n pipeline: this.pipeline.toJSON()\n }\n}\n\n/**\n * Loads a previously serialized lunr.Index\n *\n * @param {Object} serializedIndex - A previously serialized lunr.Index\n * @returns {lunr.Index}\n */\nlunr.Index.load = function (serializedIndex) {\n var attrs = {},\n fieldVectors = {},\n serializedVectors = serializedIndex.fieldVectors,\n invertedIndex = Object.create(null),\n serializedInvertedIndex = serializedIndex.invertedIndex,\n tokenSetBuilder = new lunr.TokenSet.Builder,\n pipeline = lunr.Pipeline.load(serializedIndex.pipeline)\n\n if (serializedIndex.version != lunr.version) {\n lunr.utils.warn(\"Version mismatch when loading serialised index. 
Current version of lunr '\" + lunr.version + \"' does not match serialized index '\" + serializedIndex.version + \"'\")\n }\n\n for (var i = 0; i < serializedVectors.length; i++) {\n var tuple = serializedVectors[i],\n ref = tuple[0],\n elements = tuple[1]\n\n fieldVectors[ref] = new lunr.Vector(elements)\n }\n\n for (var i = 0; i < serializedInvertedIndex.length; i++) {\n var tuple = serializedInvertedIndex[i],\n term = tuple[0],\n posting = tuple[1]\n\n tokenSetBuilder.insert(term)\n invertedIndex[term] = posting\n }\n\n tokenSetBuilder.finish()\n\n attrs.fields = serializedIndex.fields\n\n attrs.fieldVectors = fieldVectors\n attrs.invertedIndex = invertedIndex\n attrs.tokenSet = tokenSetBuilder.root\n attrs.pipeline = pipeline\n\n return new lunr.Index(attrs)\n}\n/*!\n * lunr.Builder\n * Copyright (C) 2020 Oliver Nightingale\n */\n\n/**\n * lunr.Builder performs indexing on a set of documents and\n * returns instances of lunr.Index ready for querying.\n *\n * All configuration of the index is done via the builder, the\n * fields to index, the document reference, the text processing\n * pipeline and document scoring parameters are all set on the\n * builder before indexing.\n *\n * @constructor\n * @property {string} _ref - Internal reference to the document reference field.\n * @property {string[]} _fields - Internal reference to the document fields to index.\n * @property {object} invertedIndex - The inverted index maps terms to document fields.\n * @property {object} documentTermFrequencies - Keeps track of document term frequencies.\n * @property {object} documentLengths - Keeps track of the length of documents added to the index.\n * @property {lunr.tokenizer} tokenizer - Function for splitting strings into tokens for indexing.\n * @property {lunr.Pipeline} pipeline - The pipeline performs text processing on tokens before indexing.\n * @property {lunr.Pipeline} searchPipeline - A pipeline for processing search terms before querying the index.\n * @property {number} documentCount - Keeps track of the total number of documents indexed.\n * @property {number} _b - A parameter to control field length normalization, setting this to 0 disabled normalization, 1 fully normalizes field lengths, the default value is 0.75.\n * @property {number} _k1 - A parameter to control how quickly an increase in term frequency results in term frequency saturation, the default value is 1.2.\n * @property {number} termIndex - A counter incremented for each unique term, used to identify a terms position in the vector space.\n * @property {array} metadataWhitelist - A list of metadata keys that have been whitelisted for entry in the index.\n */\nlunr.Builder = function () {\n this._ref = \"id\"\n this._fields = Object.create(null)\n this._documents = Object.create(null)\n this.invertedIndex = Object.create(null)\n this.fieldTermFrequencies = {}\n this.fieldLengths = {}\n this.tokenizer = lunr.tokenizer\n this.pipeline = new lunr.Pipeline\n this.searchPipeline = new lunr.Pipeline\n this.documentCount = 0\n this._b = 0.75\n this._k1 = 1.2\n this.termIndex = 0\n this.metadataWhitelist = []\n}\n\n/**\n * Sets the document field used as the document reference. Every document must have this field.\n * The type of this field in the document should be a string, if it is not a string it will be\n * coerced into a string by calling toString.\n *\n * The default ref is 'id'.\n *\n * The ref should _not_ be changed during indexing, it should be set before any documents are\n * added to the index. 
Changing it during indexing can lead to inconsistent results.\n *\n * @param {string} ref - The name of the reference field in the document.\n */\nlunr.Builder.prototype.ref = function (ref) {\n this._ref = ref\n}\n\n/**\n * A function that is used to extract a field from a document.\n *\n * Lunr expects a field to be at the top level of a document, if however the field\n * is deeply nested within a document an extractor function can be used to extract\n * the right field for indexing.\n *\n * @callback fieldExtractor\n * @param {object} doc - The document being added to the index.\n * @returns {?(string|object|object[])} obj - The object that will be indexed for this field.\n * @example Extracting a nested field\n * function (doc) { return doc.nested.field }\n */\n\n/**\n * Adds a field to the list of document fields that will be indexed. Every document being\n * indexed should have this field. Null values for this field in indexed documents will\n * not cause errors but will limit the chance of that document being retrieved by searches.\n *\n * All fields should be added before adding documents to the index. Adding fields after\n * a document has been indexed will have no effect on already indexed documents.\n *\n * Fields can be boosted at build time. This allows terms within that field to have more\n * importance when ranking search results. Use a field boost to specify that matches within\n * one field are more important than other fields.\n *\n * @param {string} fieldName - The name of a field to index in all documents.\n * @param {object} attributes - Optional attributes associated with this field.\n * @param {number} [attributes.boost=1] - Boost applied to all terms within this field.\n * @param {fieldExtractor} [attributes.extractor] - Function to extract a field from a document.\n * @throws {RangeError} fieldName cannot contain unsupported characters '/'\n */\nlunr.Builder.prototype.field = function (fieldName, attributes) {\n if (/\\//.test(fieldName)) {\n throw new RangeError (\"Field '\" + fieldName + \"' contains illegal character '/'\")\n }\n\n this._fields[fieldName] = attributes || {}\n}\n\n/**\n * A parameter to tune the amount of field length normalisation that is applied when\n * calculating relevance scores. A value of 0 will completely disable any normalisation\n * and a value of 1 will fully normalise field lengths. The default is 0.75. Values of b\n * will be clamped to the range 0 - 1.\n *\n * @param {number} number - The value to set for this tuning parameter.\n */\nlunr.Builder.prototype.b = function (number) {\n if (number < 0) {\n this._b = 0\n } else if (number > 1) {\n this._b = 1\n } else {\n this._b = number\n }\n}\n\n/**\n * A parameter that controls the speed at which a rise in term frequency results in term\n * frequency saturation. The default value is 1.2. 
Setting this to a higher value will give\n * slower saturation levels, a lower value will result in quicker saturation.\n *\n * @param {number} number - The value to set for this tuning parameter.\n */\nlunr.Builder.prototype.k1 = function (number) {\n this._k1 = number\n}\n\n/**\n * Adds a document to the index.\n *\n * Before adding fields to the index the index should have been fully setup, with the document\n * ref and all fields to index already having been specified.\n *\n * The document must have a field name as specified by the ref (by default this is 'id') and\n * it should have all fields defined for indexing, though null or undefined values will not\n * cause errors.\n *\n * Entire documents can be boosted at build time. Applying a boost to a document indicates that\n * this document should rank higher in search results than other documents.\n *\n * @param {object} doc - The document to add to the index.\n * @param {object} attributes - Optional attributes associated with this document.\n * @param {number} [attributes.boost=1] - Boost applied to all terms within this document.\n */\nlunr.Builder.prototype.add = function (doc, attributes) {\n var docRef = doc[this._ref],\n fields = Object.keys(this._fields)\n\n this._documents[docRef] = attributes || {}\n this.documentCount += 1\n\n for (var i = 0; i < fields.length; i++) {\n var fieldName = fields[i],\n extractor = this._fields[fieldName].extractor,\n field = extractor ? extractor(doc) : doc[fieldName],\n tokens = this.tokenizer(field, {\n fields: [fieldName]\n }),\n terms = this.pipeline.run(tokens),\n fieldRef = new lunr.FieldRef (docRef, fieldName),\n fieldTerms = Object.create(null)\n\n this.fieldTermFrequencies[fieldRef] = fieldTerms\n this.fieldLengths[fieldRef] = 0\n\n // store the length of this field for this document\n this.fieldLengths[fieldRef] += terms.length\n\n // calculate term frequencies for this field\n for (var j = 0; j < terms.length; j++) {\n var term = terms[j]\n\n if (fieldTerms[term] == undefined) {\n fieldTerms[term] = 0\n }\n\n fieldTerms[term] += 1\n\n // add to inverted index\n // create an initial posting if one doesn't exist\n if (this.invertedIndex[term] == undefined) {\n var posting = Object.create(null)\n posting[\"_index\"] = this.termIndex\n this.termIndex += 1\n\n for (var k = 0; k < fields.length; k++) {\n posting[fields[k]] = Object.create(null)\n }\n\n this.invertedIndex[term] = posting\n }\n\n // add an entry for this term/fieldName/docRef to the invertedIndex\n if (this.invertedIndex[term][fieldName][docRef] == undefined) {\n this.invertedIndex[term][fieldName][docRef] = Object.create(null)\n }\n\n // store all whitelisted metadata about this token in the\n // inverted index\n for (var l = 0; l < this.metadataWhitelist.length; l++) {\n var metadataKey = this.metadataWhitelist[l],\n metadata = term.metadata[metadataKey]\n\n if (this.invertedIndex[term][fieldName][docRef][metadataKey] == undefined) {\n this.invertedIndex[term][fieldName][docRef][metadataKey] = []\n }\n\n this.invertedIndex[term][fieldName][docRef][metadataKey].push(metadata)\n }\n }\n\n }\n}\n\n/**\n * Calculates the average document length for this index\n *\n * @private\n */\nlunr.Builder.prototype.calculateAverageFieldLengths = function () {\n\n var fieldRefs = Object.keys(this.fieldLengths),\n numberOfFields = fieldRefs.length,\n accumulator = {},\n documentsWithField = {}\n\n for (var i = 0; i < numberOfFields; i++) {\n var fieldRef = lunr.FieldRef.fromString(fieldRefs[i]),\n field = fieldRef.fieldName\n\n 
documentsWithField[field] || (documentsWithField[field] = 0)\n documentsWithField[field] += 1\n\n accumulator[field] || (accumulator[field] = 0)\n accumulator[field] += this.fieldLengths[fieldRef]\n }\n\n var fields = Object.keys(this._fields)\n\n for (var i = 0; i < fields.length; i++) {\n var fieldName = fields[i]\n accumulator[fieldName] = accumulator[fieldName] / documentsWithField[fieldName]\n }\n\n this.averageFieldLength = accumulator\n}\n\n/**\n * Builds a vector space model of every document using lunr.Vector\n *\n * @private\n */\nlunr.Builder.prototype.createFieldVectors = function () {\n var fieldVectors = {},\n fieldRefs = Object.keys(this.fieldTermFrequencies),\n fieldRefsLength = fieldRefs.length,\n termIdfCache = Object.create(null)\n\n for (var i = 0; i < fieldRefsLength; i++) {\n var fieldRef = lunr.FieldRef.fromString(fieldRefs[i]),\n fieldName = fieldRef.fieldName,\n fieldLength = this.fieldLengths[fieldRef],\n fieldVector = new lunr.Vector,\n termFrequencies = this.fieldTermFrequencies[fieldRef],\n terms = Object.keys(termFrequencies),\n termsLength = terms.length\n\n\n var fieldBoost = this._fields[fieldName].boost || 1,\n docBoost = this._documents[fieldRef.docRef].boost || 1\n\n for (var j = 0; j < termsLength; j++) {\n var term = terms[j],\n tf = termFrequencies[term],\n termIndex = this.invertedIndex[term]._index,\n idf, score, scoreWithPrecision\n\n if (termIdfCache[term] === undefined) {\n idf = lunr.idf(this.invertedIndex[term], this.documentCount)\n termIdfCache[term] = idf\n } else {\n idf = termIdfCache[term]\n }\n\n score = idf * ((this._k1 + 1) * tf) / (this._k1 * (1 - this._b + this._b * (fieldLength / this.averageFieldLength[fieldName])) + tf)\n score *= fieldBoost\n score *= docBoost\n scoreWithPrecision = Math.round(score * 1000) / 1000\n // Converts 1.23456789 to 1.234.\n // Reducing the precision so that the vectors take up less\n // space when serialised. Doing it now so that they behave\n // the same before and after serialisation. Also, this is\n // the fastest approach to reducing a number's precision in\n // JavaScript.\n\n fieldVector.insert(termIndex, scoreWithPrecision)\n }\n\n fieldVectors[fieldRef] = fieldVector\n }\n\n this.fieldVectors = fieldVectors\n}\n\n/**\n * Creates a token set of all tokens in the index using lunr.TokenSet\n *\n * @private\n */\nlunr.Builder.prototype.createTokenSet = function () {\n this.tokenSet = lunr.TokenSet.fromArray(\n Object.keys(this.invertedIndex).sort()\n )\n}\n\n/**\n * Builds the index, creating an instance of lunr.Index.\n *\n * This completes the indexing process and should only be called\n * once all documents have been added to the index.\n *\n * @returns {lunr.Index}\n */\nlunr.Builder.prototype.build = function () {\n this.calculateAverageFieldLengths()\n this.createFieldVectors()\n this.createTokenSet()\n\n return new lunr.Index({\n invertedIndex: this.invertedIndex,\n fieldVectors: this.fieldVectors,\n tokenSet: this.tokenSet,\n fields: Object.keys(this._fields),\n pipeline: this.searchPipeline\n })\n}\n\n/**\n * Applies a plugin to the index builder.\n *\n * A plugin is a function that is called with the index builder as its context.\n * Plugins can be used to customise or extend the behaviour of the index\n * in some way. A plugin is just a function, that encapsulated the custom\n * behaviour that should be applied when building the index.\n *\n * The plugin function will be called with the index builder as its argument, additional\n * arguments can also be passed when calling use. 
The function will be called\n * with the index builder as its context.\n *\n * @param {Function} plugin The plugin to apply.\n */\nlunr.Builder.prototype.use = function (fn) {\n var args = Array.prototype.slice.call(arguments, 1)\n args.unshift(this)\n fn.apply(this, args)\n}\n/**\n * Contains and collects metadata about a matching document.\n * A single instance of lunr.MatchData is returned as part of every\n * lunr.Index~Result.\n *\n * @constructor\n * @param {string} term - The term this match data is associated with\n * @param {string} field - The field in which the term was found\n * @param {object} metadata - The metadata recorded about this term in this field\n * @property {object} metadata - A cloned collection of metadata associated with this document.\n * @see {@link lunr.Index~Result}\n */\nlunr.MatchData = function (term, field, metadata) {\n var clonedMetadata = Object.create(null),\n metadataKeys = Object.keys(metadata || {})\n\n // Cloning the metadata to prevent the original\n // being mutated during match data combination.\n // Metadata is kept in an array within the inverted\n // index so cloning the data can be done with\n // Array#slice\n for (var i = 0; i < metadataKeys.length; i++) {\n var key = metadataKeys[i]\n clonedMetadata[key] = metadata[key].slice()\n }\n\n this.metadata = Object.create(null)\n\n if (term !== undefined) {\n this.metadata[term] = Object.create(null)\n this.metadata[term][field] = clonedMetadata\n }\n}\n\n/**\n * An instance of lunr.MatchData will be created for every term that matches a\n * document. However only one instance is required in a lunr.Index~Result. This\n * method combines metadata from another instance of lunr.MatchData with this\n * objects metadata.\n *\n * @param {lunr.MatchData} otherMatchData - Another instance of match data to merge with this one.\n * @see {@link lunr.Index~Result}\n */\nlunr.MatchData.prototype.combine = function (otherMatchData) {\n var terms = Object.keys(otherMatchData.metadata)\n\n for (var i = 0; i < terms.length; i++) {\n var term = terms[i],\n fields = Object.keys(otherMatchData.metadata[term])\n\n if (this.metadata[term] == undefined) {\n this.metadata[term] = Object.create(null)\n }\n\n for (var j = 0; j < fields.length; j++) {\n var field = fields[j],\n keys = Object.keys(otherMatchData.metadata[term][field])\n\n if (this.metadata[term][field] == undefined) {\n this.metadata[term][field] = Object.create(null)\n }\n\n for (var k = 0; k < keys.length; k++) {\n var key = keys[k]\n\n if (this.metadata[term][field][key] == undefined) {\n this.metadata[term][field][key] = otherMatchData.metadata[term][field][key]\n } else {\n this.metadata[term][field][key] = this.metadata[term][field][key].concat(otherMatchData.metadata[term][field][key])\n }\n\n }\n }\n }\n}\n\n/**\n * Add metadata for a term/field pair to this instance of match data.\n *\n * @param {string} term - The term this match data is associated with\n * @param {string} field - The field in which the term was found\n * @param {object} metadata - The metadata recorded about this term in this field\n */\nlunr.MatchData.prototype.add = function (term, field, metadata) {\n if (!(term in this.metadata)) {\n this.metadata[term] = Object.create(null)\n this.metadata[term][field] = metadata\n return\n }\n\n if (!(field in this.metadata[term])) {\n this.metadata[term][field] = metadata\n return\n }\n\n var metadataKeys = Object.keys(metadata)\n\n for (var i = 0; i < metadataKeys.length; i++) {\n var key = metadataKeys[i]\n\n if (key in 
this.metadata[term][field]) {\n this.metadata[term][field][key] = this.metadata[term][field][key].concat(metadata[key])\n } else {\n this.metadata[term][field][key] = metadata[key]\n }\n }\n}\n/**\n * A lunr.Query provides a programmatic way of defining queries to be performed\n * against a {@link lunr.Index}.\n *\n * Prefer constructing a lunr.Query using the {@link lunr.Index#query} method\n * so the query object is pre-initialized with the right index fields.\n *\n * @constructor\n * @property {lunr.Query~Clause[]} clauses - An array of query clauses.\n * @property {string[]} allFields - An array of all available fields in a lunr.Index.\n */\nlunr.Query = function (allFields) {\n this.clauses = []\n this.allFields = allFields\n}\n\n/**\n * Constants for indicating what kind of automatic wildcard insertion will be used when constructing a query clause.\n *\n * This allows wildcards to be added to the beginning and end of a term without having to manually do any string\n * concatenation.\n *\n * The wildcard constants can be bitwise combined to select both leading and trailing wildcards.\n *\n * @constant\n * @default\n * @property {number} wildcard.NONE - The term will have no wildcards inserted, this is the default behaviour\n * @property {number} wildcard.LEADING - Prepend the term with a wildcard, unless a leading wildcard already exists\n * @property {number} wildcard.TRAILING - Append a wildcard to the term, unless a trailing wildcard already exists\n * @see lunr.Query~Clause\n * @see lunr.Query#clause\n * @see lunr.Query#term\n * @example query term with trailing wildcard\n * query.term('foo', { wildcard: lunr.Query.wildcard.TRAILING })\n * @example query term with leading and trailing wildcard\n * query.term('foo', {\n * wildcard: lunr.Query.wildcard.LEADING | lunr.Query.wildcard.TRAILING\n * })\n */\n\nlunr.Query.wildcard = new String (\"*\")\nlunr.Query.wildcard.NONE = 0\nlunr.Query.wildcard.LEADING = 1\nlunr.Query.wildcard.TRAILING = 2\n\n/**\n * Constants for indicating what kind of presence a term must have in matching documents.\n *\n * @constant\n * @enum {number}\n * @see lunr.Query~Clause\n * @see lunr.Query#clause\n * @see lunr.Query#term\n * @example query term with required presence\n * query.term('foo', { presence: lunr.Query.presence.REQUIRED })\n */\nlunr.Query.presence = {\n /**\n * Term's presence in a document is optional, this is the default value.\n */\n OPTIONAL: 1,\n\n /**\n * Term's presence in a document is required, documents that do not contain\n * this term will not be returned.\n */\n REQUIRED: 2,\n\n /**\n * Term's presence in a document is prohibited, documents that do contain\n * this term will not be returned.\n */\n PROHIBITED: 3\n}\n\n/**\n * A single clause in a {@link lunr.Query} contains a term and details on how to\n * match that term against a {@link lunr.Index}.\n *\n * @typedef {Object} lunr.Query~Clause\n * @property {string[]} fields - The fields in an index this clause should be matched against.\n * @property {number} [boost=1] - Any boost that should be applied when matching this clause.\n * @property {number} [editDistance] - Whether the term should have fuzzy matching applied, and how fuzzy the match should be.\n * @property {boolean} [usePipeline] - Whether the term should be passed through the search pipeline.\n * @property {number} [wildcard=lunr.Query.wildcard.NONE] - Whether the term should have wildcards appended or prepended.\n * @property {number} [presence=lunr.Query.presence.OPTIONAL] - The terms presence in any matching 
documents.\n */\n\n/**\n * Adds a {@link lunr.Query~Clause} to this query.\n *\n * Unless the clause contains the fields to be matched all fields will be matched. In addition\n * a default boost of 1 is applied to the clause.\n *\n * @param {lunr.Query~Clause} clause - The clause to add to this query.\n * @see lunr.Query~Clause\n * @returns {lunr.Query}\n */\nlunr.Query.prototype.clause = function (clause) {\n if (!('fields' in clause)) {\n clause.fields = this.allFields\n }\n\n if (!('boost' in clause)) {\n clause.boost = 1\n }\n\n if (!('usePipeline' in clause)) {\n clause.usePipeline = true\n }\n\n if (!('wildcard' in clause)) {\n clause.wildcard = lunr.Query.wildcard.NONE\n }\n\n if ((clause.wildcard & lunr.Query.wildcard.LEADING) && (clause.term.charAt(0) != lunr.Query.wildcard)) {\n clause.term = \"*\" + clause.term\n }\n\n if ((clause.wildcard & lunr.Query.wildcard.TRAILING) && (clause.term.slice(-1) != lunr.Query.wildcard)) {\n clause.term = \"\" + clause.term + \"*\"\n }\n\n if (!('presence' in clause)) {\n clause.presence = lunr.Query.presence.OPTIONAL\n }\n\n this.clauses.push(clause)\n\n return this\n}\n\n/**\n * A negated query is one in which every clause has a presence of\n * prohibited. These queries require some special processing to return\n * the expected results.\n *\n * @returns boolean\n */\nlunr.Query.prototype.isNegated = function () {\n for (var i = 0; i < this.clauses.length; i++) {\n if (this.clauses[i].presence != lunr.Query.presence.PROHIBITED) {\n return false\n }\n }\n\n return true\n}\n\n/**\n * Adds a term to the current query, under the covers this will create a {@link lunr.Query~Clause}\n * to the list of clauses that make up this query.\n *\n * The term is used as is, i.e. no tokenization will be performed by this method. Instead conversion\n * to a token or token-like string should be done before calling this method.\n *\n * The term will be converted to a string by calling `toString`. 
Multiple terms can be passed as an\n * array, each term in the array will share the same options.\n *\n * @param {object|object[]} term - The term(s) to add to the query.\n * @param {object} [options] - Any additional properties to add to the query clause.\n * @returns {lunr.Query}\n * @see lunr.Query#clause\n * @see lunr.Query~Clause\n * @example adding a single term to a query\n * query.term(\"foo\")\n * @example adding a single term to a query and specifying search fields, term boost and automatic trailing wildcard\n * query.term(\"foo\", {\n * fields: [\"title\"],\n * boost: 10,\n * wildcard: lunr.Query.wildcard.TRAILING\n * })\n * @example using lunr.tokenizer to convert a string to tokens before using them as terms\n * query.term(lunr.tokenizer(\"foo bar\"))\n */\nlunr.Query.prototype.term = function (term, options) {\n if (Array.isArray(term)) {\n term.forEach(function (t) { this.term(t, lunr.utils.clone(options)) }, this)\n return this\n }\n\n var clause = options || {}\n clause.term = term.toString()\n\n this.clause(clause)\n\n return this\n}\nlunr.QueryParseError = function (message, start, end) {\n this.name = \"QueryParseError\"\n this.message = message\n this.start = start\n this.end = end\n}\n\nlunr.QueryParseError.prototype = new Error\nlunr.QueryLexer = function (str) {\n this.lexemes = []\n this.str = str\n this.length = str.length\n this.pos = 0\n this.start = 0\n this.escapeCharPositions = []\n}\n\nlunr.QueryLexer.prototype.run = function () {\n var state = lunr.QueryLexer.lexText\n\n while (state) {\n state = state(this)\n }\n}\n\nlunr.QueryLexer.prototype.sliceString = function () {\n var subSlices = [],\n sliceStart = this.start,\n sliceEnd = this.pos\n\n for (var i = 0; i < this.escapeCharPositions.length; i++) {\n sliceEnd = this.escapeCharPositions[i]\n subSlices.push(this.str.slice(sliceStart, sliceEnd))\n sliceStart = sliceEnd + 1\n }\n\n subSlices.push(this.str.slice(sliceStart, this.pos))\n this.escapeCharPositions.length = 0\n\n return subSlices.join('')\n}\n\nlunr.QueryLexer.prototype.emit = function (type) {\n this.lexemes.push({\n type: type,\n str: this.sliceString(),\n start: this.start,\n end: this.pos\n })\n\n this.start = this.pos\n}\n\nlunr.QueryLexer.prototype.escapeCharacter = function () {\n this.escapeCharPositions.push(this.pos - 1)\n this.pos += 1\n}\n\nlunr.QueryLexer.prototype.next = function () {\n if (this.pos >= this.length) {\n return lunr.QueryLexer.EOS\n }\n\n var char = this.str.charAt(this.pos)\n this.pos += 1\n return char\n}\n\nlunr.QueryLexer.prototype.width = function () {\n return this.pos - this.start\n}\n\nlunr.QueryLexer.prototype.ignore = function () {\n if (this.start == this.pos) {\n this.pos += 1\n }\n\n this.start = this.pos\n}\n\nlunr.QueryLexer.prototype.backup = function () {\n this.pos -= 1\n}\n\nlunr.QueryLexer.prototype.acceptDigitRun = function () {\n var char, charCode\n\n do {\n char = this.next()\n charCode = char.charCodeAt(0)\n } while (charCode > 47 && charCode < 58)\n\n if (char != lunr.QueryLexer.EOS) {\n this.backup()\n }\n}\n\nlunr.QueryLexer.prototype.more = function () {\n return this.pos < this.length\n}\n\nlunr.QueryLexer.EOS = 'EOS'\nlunr.QueryLexer.FIELD = 'FIELD'\nlunr.QueryLexer.TERM = 'TERM'\nlunr.QueryLexer.EDIT_DISTANCE = 'EDIT_DISTANCE'\nlunr.QueryLexer.BOOST = 'BOOST'\nlunr.QueryLexer.PRESENCE = 'PRESENCE'\n\nlunr.QueryLexer.lexField = function (lexer) {\n lexer.backup()\n lexer.emit(lunr.QueryLexer.FIELD)\n lexer.ignore()\n return lunr.QueryLexer.lexText\n}\n\nlunr.QueryLexer.lexTerm = 
function (lexer) {\n if (lexer.width() > 1) {\n lexer.backup()\n lexer.emit(lunr.QueryLexer.TERM)\n }\n\n lexer.ignore()\n\n if (lexer.more()) {\n return lunr.QueryLexer.lexText\n }\n}\n\nlunr.QueryLexer.lexEditDistance = function (lexer) {\n lexer.ignore()\n lexer.acceptDigitRun()\n lexer.emit(lunr.QueryLexer.EDIT_DISTANCE)\n return lunr.QueryLexer.lexText\n}\n\nlunr.QueryLexer.lexBoost = function (lexer) {\n lexer.ignore()\n lexer.acceptDigitRun()\n lexer.emit(lunr.QueryLexer.BOOST)\n return lunr.QueryLexer.lexText\n}\n\nlunr.QueryLexer.lexEOS = function (lexer) {\n if (lexer.width() > 0) {\n lexer.emit(lunr.QueryLexer.TERM)\n }\n}\n\n// This matches the separator used when tokenising fields\n// within a document. These should match otherwise it is\n// not possible to search for some tokens within a document.\n//\n// It is possible for the user to change the separator on the\n// tokenizer so it _might_ clash with any other of the special\n// characters already used within the search string, e.g. :.\n//\n// This means that it is possible to change the separator in\n// such a way that makes some words unsearchable using a search\n// string.\nlunr.QueryLexer.termSeparator = lunr.tokenizer.separator\n\nlunr.QueryLexer.lexText = function (lexer) {\n while (true) {\n var char = lexer.next()\n\n if (char == lunr.QueryLexer.EOS) {\n return lunr.QueryLexer.lexEOS\n }\n\n // Escape character is '\\'\n if (char.charCodeAt(0) == 92) {\n lexer.escapeCharacter()\n continue\n }\n\n if (char == \":\") {\n return lunr.QueryLexer.lexField\n }\n\n if (char == \"~\") {\n lexer.backup()\n if (lexer.width() > 0) {\n lexer.emit(lunr.QueryLexer.TERM)\n }\n return lunr.QueryLexer.lexEditDistance\n }\n\n if (char == \"^\") {\n lexer.backup()\n if (lexer.width() > 0) {\n lexer.emit(lunr.QueryLexer.TERM)\n }\n return lunr.QueryLexer.lexBoost\n }\n\n // \"+\" indicates term presence is required\n // checking for length to ensure that only\n // leading \"+\" are considered\n if (char == \"+\" && lexer.width() === 1) {\n lexer.emit(lunr.QueryLexer.PRESENCE)\n return lunr.QueryLexer.lexText\n }\n\n // \"-\" indicates term presence is prohibited\n // checking for length to ensure that only\n // leading \"-\" are considered\n if (char == \"-\" && lexer.width() === 1) {\n lexer.emit(lunr.QueryLexer.PRESENCE)\n return lunr.QueryLexer.lexText\n }\n\n if (char.match(lunr.QueryLexer.termSeparator)) {\n return lunr.QueryLexer.lexTerm\n }\n }\n}\n\nlunr.QueryParser = function (str, query) {\n this.lexer = new lunr.QueryLexer (str)\n this.query = query\n this.currentClause = {}\n this.lexemeIdx = 0\n}\n\nlunr.QueryParser.prototype.parse = function () {\n this.lexer.run()\n this.lexemes = this.lexer.lexemes\n\n var state = lunr.QueryParser.parseClause\n\n while (state) {\n state = state(this)\n }\n\n return this.query\n}\n\nlunr.QueryParser.prototype.peekLexeme = function () {\n return this.lexemes[this.lexemeIdx]\n}\n\nlunr.QueryParser.prototype.consumeLexeme = function () {\n var lexeme = this.peekLexeme()\n this.lexemeIdx += 1\n return lexeme\n}\n\nlunr.QueryParser.prototype.nextClause = function () {\n var completedClause = this.currentClause\n this.query.clause(completedClause)\n this.currentClause = {}\n}\n\nlunr.QueryParser.parseClause = function (parser) {\n var lexeme = parser.peekLexeme()\n\n if (lexeme == undefined) {\n return\n }\n\n switch (lexeme.type) {\n case lunr.QueryLexer.PRESENCE:\n return lunr.QueryParser.parsePresence\n case lunr.QueryLexer.FIELD:\n return lunr.QueryParser.parseField\n case 
lunr.QueryLexer.TERM:\n return lunr.QueryParser.parseTerm\n default:\n var errorMessage = \"expected either a field or a term, found \" + lexeme.type\n\n if (lexeme.str.length >= 1) {\n errorMessage += \" with value '\" + lexeme.str + \"'\"\n }\n\n throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end)\n }\n}\n\nlunr.QueryParser.parsePresence = function (parser) {\n var lexeme = parser.consumeLexeme()\n\n if (lexeme == undefined) {\n return\n }\n\n switch (lexeme.str) {\n case \"-\":\n parser.currentClause.presence = lunr.Query.presence.PROHIBITED\n break\n case \"+\":\n parser.currentClause.presence = lunr.Query.presence.REQUIRED\n break\n default:\n var errorMessage = \"unrecognised presence operator'\" + lexeme.str + \"'\"\n throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end)\n }\n\n var nextLexeme = parser.peekLexeme()\n\n if (nextLexeme == undefined) {\n var errorMessage = \"expecting term or field, found nothing\"\n throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end)\n }\n\n switch (nextLexeme.type) {\n case lunr.QueryLexer.FIELD:\n return lunr.QueryParser.parseField\n case lunr.QueryLexer.TERM:\n return lunr.QueryParser.parseTerm\n default:\n var errorMessage = \"expecting term or field, found '\" + nextLexeme.type + \"'\"\n throw new lunr.QueryParseError (errorMessage, nextLexeme.start, nextLexeme.end)\n }\n}\n\nlunr.QueryParser.parseField = function (parser) {\n var lexeme = parser.consumeLexeme()\n\n if (lexeme == undefined) {\n return\n }\n\n if (parser.query.allFields.indexOf(lexeme.str) == -1) {\n var possibleFields = parser.query.allFields.map(function (f) { return \"'\" + f + \"'\" }).join(', '),\n errorMessage = \"unrecognised field '\" + lexeme.str + \"', possible fields: \" + possibleFields\n\n throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end)\n }\n\n parser.currentClause.fields = [lexeme.str]\n\n var nextLexeme = parser.peekLexeme()\n\n if (nextLexeme == undefined) {\n var errorMessage = \"expecting term, found nothing\"\n throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end)\n }\n\n switch (nextLexeme.type) {\n case lunr.QueryLexer.TERM:\n return lunr.QueryParser.parseTerm\n default:\n var errorMessage = \"expecting term, found '\" + nextLexeme.type + \"'\"\n throw new lunr.QueryParseError (errorMessage, nextLexeme.start, nextLexeme.end)\n }\n}\n\nlunr.QueryParser.parseTerm = function (parser) {\n var lexeme = parser.consumeLexeme()\n\n if (lexeme == undefined) {\n return\n }\n\n parser.currentClause.term = lexeme.str.toLowerCase()\n\n if (lexeme.str.indexOf(\"*\") != -1) {\n parser.currentClause.usePipeline = false\n }\n\n var nextLexeme = parser.peekLexeme()\n\n if (nextLexeme == undefined) {\n parser.nextClause()\n return\n }\n\n switch (nextLexeme.type) {\n case lunr.QueryLexer.TERM:\n parser.nextClause()\n return lunr.QueryParser.parseTerm\n case lunr.QueryLexer.FIELD:\n parser.nextClause()\n return lunr.QueryParser.parseField\n case lunr.QueryLexer.EDIT_DISTANCE:\n return lunr.QueryParser.parseEditDistance\n case lunr.QueryLexer.BOOST:\n return lunr.QueryParser.parseBoost\n case lunr.QueryLexer.PRESENCE:\n parser.nextClause()\n return lunr.QueryParser.parsePresence\n default:\n var errorMessage = \"Unexpected lexeme type '\" + nextLexeme.type + \"'\"\n throw new lunr.QueryParseError (errorMessage, nextLexeme.start, nextLexeme.end)\n }\n}\n\nlunr.QueryParser.parseEditDistance = function (parser) {\n var lexeme = parser.consumeLexeme()\n\n if (lexeme == undefined) {\n 
return\n }\n\n var editDistance = parseInt(lexeme.str, 10)\n\n if (isNaN(editDistance)) {\n var errorMessage = \"edit distance must be numeric\"\n throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end)\n }\n\n parser.currentClause.editDistance = editDistance\n\n var nextLexeme = parser.peekLexeme()\n\n if (nextLexeme == undefined) {\n parser.nextClause()\n return\n }\n\n switch (nextLexeme.type) {\n case lunr.QueryLexer.TERM:\n parser.nextClause()\n return lunr.QueryParser.parseTerm\n case lunr.QueryLexer.FIELD:\n parser.nextClause()\n return lunr.QueryParser.parseField\n case lunr.QueryLexer.EDIT_DISTANCE:\n return lunr.QueryParser.parseEditDistance\n case lunr.QueryLexer.BOOST:\n return lunr.QueryParser.parseBoost\n case lunr.QueryLexer.PRESENCE:\n parser.nextClause()\n return lunr.QueryParser.parsePresence\n default:\n var errorMessage = \"Unexpected lexeme type '\" + nextLexeme.type + \"'\"\n throw new lunr.QueryParseError (errorMessage, nextLexeme.start, nextLexeme.end)\n }\n}\n\nlunr.QueryParser.parseBoost = function (parser) {\n var lexeme = parser.consumeLexeme()\n\n if (lexeme == undefined) {\n return\n }\n\n var boost = parseInt(lexeme.str, 10)\n\n if (isNaN(boost)) {\n var errorMessage = \"boost must be numeric\"\n throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end)\n }\n\n parser.currentClause.boost = boost\n\n var nextLexeme = parser.peekLexeme()\n\n if (nextLexeme == undefined) {\n parser.nextClause()\n return\n }\n\n switch (nextLexeme.type) {\n case lunr.QueryLexer.TERM:\n parser.nextClause()\n return lunr.QueryParser.parseTerm\n case lunr.QueryLexer.FIELD:\n parser.nextClause()\n return lunr.QueryParser.parseField\n case lunr.QueryLexer.EDIT_DISTANCE:\n return lunr.QueryParser.parseEditDistance\n case lunr.QueryLexer.BOOST:\n return lunr.QueryParser.parseBoost\n case lunr.QueryLexer.PRESENCE:\n parser.nextClause()\n return lunr.QueryParser.parsePresence\n default:\n var errorMessage = \"Unexpected lexeme type '\" + nextLexeme.type + \"'\"\n throw new lunr.QueryParseError (errorMessage, nextLexeme.start, nextLexeme.end)\n }\n}\n\n /**\n * export the module via AMD, CommonJS or as a browser global\n * Export code from https://github.com/umdjs/umd/blob/master/returnExports.js\n */\n ;(function (root, factory) {\n if (typeof define === 'function' && define.amd) {\n // AMD. Register as an anonymous module.\n define(factory)\n } else if (typeof exports === 'object') {\n /**\n * Node. 
Does not work with strict CommonJS, but\n * only CommonJS-like enviroments that support module.exports,\n * like Node.\n */\n module.exports = factory()\n } else {\n // Browser globals (root is window)\n root.lunr = factory()\n }\n }(this, function () {\n /**\n * Just return a value to define the module export.\n * This example returns an object, but the module\n * can return a function as the exported value.\n */\n return lunr\n }))\n})();\n", "/*!\n * escape-html\n * Copyright(c) 2012-2013 TJ Holowaychuk\n * Copyright(c) 2015 Andreas Lubbe\n * Copyright(c) 2015 Tiancheng \"Timothy\" Gu\n * MIT Licensed\n */\n\n'use strict';\n\n/**\n * Module variables.\n * @private\n */\n\nvar matchHtmlRegExp = /[\"'&<>]/;\n\n/**\n * Module exports.\n * @public\n */\n\nmodule.exports = escapeHtml;\n\n/**\n * Escape special characters in the given string of html.\n *\n * @param {string} string The string to escape for inserting into HTML\n * @return {string}\n * @public\n */\n\nfunction escapeHtml(string) {\n var str = '' + string;\n var match = matchHtmlRegExp.exec(str);\n\n if (!match) {\n return str;\n }\n\n var escape;\n var html = '';\n var index = 0;\n var lastIndex = 0;\n\n for (index = match.index; index < str.length; index++) {\n switch (str.charCodeAt(index)) {\n case 34: // \"\n escape = '"';\n break;\n case 38: // &\n escape = '&';\n break;\n case 39: // '\n escape = ''';\n break;\n case 60: // <\n escape = '<';\n break;\n case 62: // >\n escape = '>';\n break;\n default:\n continue;\n }\n\n if (lastIndex !== index) {\n html += str.substring(lastIndex, index);\n }\n\n lastIndex = index + 1;\n html += escape;\n }\n\n return lastIndex !== index\n ? html + str.substring(lastIndex, index)\n : html;\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A RTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport lunr from \"lunr\"\n\nimport \"~/polyfills\"\n\nimport { Search, SearchIndexConfig } from \"../../_\"\nimport {\n SearchMessage,\n SearchMessageType\n} from \"../message\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Add support for usage with `iframe-worker` polyfill\n *\n * While `importScripts` is synchronous when executed inside of a web worker,\n * it's not possible to provide a synchronous polyfilled implementation. 
The\n * cool thing is that awaiting a non-Promise is a noop, so extending the type\n * definition to return a `Promise` shouldn't break anything.\n *\n * @see https://bit.ly/2PjDnXi - GitHub comment\n */\ndeclare global {\n function importScripts(...urls: string[]): Promise | void\n}\n\n/* ----------------------------------------------------------------------------\n * Data\n * ------------------------------------------------------------------------- */\n\n/**\n * Search index\n */\nlet index: Search\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch (= import) multi-language support through `lunr-languages`\n *\n * This function automatically imports the stemmers necessary to process the\n * languages, which are defined through the search index configuration.\n *\n * If the worker runs inside of an `iframe` (when using `iframe-worker` as\n * a shim), the base URL for the stemmers to be loaded must be determined by\n * searching for the first `script` element with a `src` attribute, which will\n * contain the contents of this script.\n *\n * @param config - Search index configuration\n *\n * @returns Promise resolving with no result\n */\nasync function setupSearchLanguages(\n config: SearchIndexConfig\n): Promise {\n let base = \"../lunr\"\n\n /* Detect `iframe-worker` and fix base URL */\n if (typeof parent !== \"undefined\" && \"IFrameWorker\" in parent) {\n const worker = document.querySelector(\"script[src]\")!\n const [path] = worker.src.split(\"/worker\")\n\n /* Prefix base with path */\n base = base.replace(\"..\", path)\n }\n\n /* Add scripts for languages */\n const scripts = []\n for (const lang of config.lang) {\n switch (lang) {\n\n /* Add segmenter for Japanese */\n case \"ja\":\n scripts.push(`${base}/tinyseg.js`)\n break\n\n /* Add segmenter for Hindi and Thai */\n case \"hi\":\n case \"th\":\n scripts.push(`${base}/wordcut.js`)\n break\n }\n\n /* Add language support */\n if (lang !== \"en\")\n scripts.push(`${base}/min/lunr.${lang}.min.js`)\n }\n\n /* Add multi-language support */\n if (config.lang.length > 1)\n scripts.push(`${base}/min/lunr.multi.min.js`)\n\n /* Load scripts synchronously */\n if (scripts.length)\n await importScripts(\n `${base}/min/lunr.stemmer.support.min.js`,\n ...scripts\n )\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Message handler\n *\n * @param message - Source message\n *\n * @returns Target message\n */\nexport async function handler(\n message: SearchMessage\n): Promise {\n switch (message.type) {\n\n /* Search setup message */\n case SearchMessageType.SETUP:\n await setupSearchLanguages(message.data.config)\n index = new Search(message.data)\n return {\n type: SearchMessageType.READY\n }\n\n /* Search query message */\n case SearchMessageType.QUERY:\n return {\n type: SearchMessageType.RESULT,\n data: index ? 
index.search(message.data) : { items: [] }\n }\n\n /* All other messages */\n default:\n throw new TypeError(\"Invalid message type\")\n }\n}\n\n/* ----------------------------------------------------------------------------\n * Worker\n * ------------------------------------------------------------------------- */\n\n/* @ts-expect-error - expose Lunr.js in global scope, or stemmers won't work */\nself.lunr = lunr\n\n/* Handle messages */\naddEventListener(\"message\", async ev => {\n postMessage(await handler(ev.data))\n})\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n/* ----------------------------------------------------------------------------\n * Polyfills\n * ------------------------------------------------------------------------- */\n\n/* Polyfill `Object.entries` */\nif (!Object.entries)\n Object.entries = function (obj: object) {\n const data: [string, string][] = []\n for (const key of Object.keys(obj))\n // @ts-expect-error - ignore property access warning\n data.push([key, obj[key]])\n\n /* Return entries */\n return data\n }\n\n/* Polyfill `Object.values` */\nif (!Object.values)\n Object.values = function (obj: object) {\n const data: string[] = []\n for (const key of Object.keys(obj))\n // @ts-expect-error - ignore property access warning\n data.push(obj[key])\n\n /* Return values */\n return data\n }\n\n/* ------------------------------------------------------------------------- */\n\n/* Polyfills for `Element` */\nif (typeof Element !== \"undefined\") {\n\n /* Polyfill `Element.scrollTo` */\n if (!Element.prototype.scrollTo)\n Element.prototype.scrollTo = function (\n x?: ScrollToOptions | number, y?: number\n ): void {\n if (typeof x === \"object\") {\n this.scrollLeft = x.left!\n this.scrollTop = x.top!\n } else {\n this.scrollLeft = x!\n this.scrollTop = y!\n }\n }\n\n /* Polyfill `Element.replaceWith` */\n if (!Element.prototype.replaceWith)\n Element.prototype.replaceWith = function (\n ...nodes: Array\n ): void {\n const parent = this.parentNode\n if (parent) {\n if (nodes.length === 0)\n parent.removeChild(this)\n\n /* Replace children and create text nodes */\n for (let i = nodes.length - 1; i >= 0; i--) {\n let node = nodes[i]\n if (typeof node === \"string\")\n node = document.createTextNode(node)\n else if (node.parentNode)\n node.parentNode.removeChild(node)\n\n /* Replace child or insert before previous sibling */\n if (!i)\n parent.replaceChild(node, this)\n else\n 
parent.insertBefore(this.previousSibling!, node)\n }\n }\n }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport escapeHTML from \"escape-html\"\n\nimport { SearchIndexDocument } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search document\n */\nexport interface SearchDocument extends SearchIndexDocument {\n parent?: SearchIndexDocument /* Parent article */\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Search document mapping\n */\nexport type SearchDocumentMap = Map\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Create a search document mapping\n *\n * @param docs - Search index documents\n *\n * @returns Search document map\n */\nexport function setupSearchDocumentMap(\n docs: SearchIndexDocument[]\n): SearchDocumentMap {\n const documents = new Map()\n const parents = new Set()\n for (const doc of docs) {\n const [path, hash] = doc.location.split(\"#\")\n\n /* Extract location, title and tags */\n const location = doc.location\n const title = doc.title\n const tags = doc.tags\n\n /* Escape and cleanup text */\n const text = escapeHTML(doc.text)\n .replace(/\\s+(?=[,.:;!?])/g, \"\")\n .replace(/\\s+/g, \" \")\n\n /* Handle section */\n if (hash) {\n const parent = documents.get(path)!\n\n /* Ignore first section, override article */\n if (!parents.has(parent)) {\n parent.title = doc.title\n parent.text = text\n\n /* Remember that we processed the article */\n parents.add(parent)\n\n /* Add subsequent section */\n } else {\n documents.set(location, {\n location,\n title,\n text,\n parent\n })\n }\n\n /* Add article */\n } else {\n documents.set(location, {\n location,\n title,\n text,\n ...tags && { tags }\n })\n }\n }\n return documents\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit 
persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport escapeHTML from \"escape-html\"\n\nimport { SearchIndexConfig } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search highlight function\n *\n * @param value - Value\n *\n * @returns Highlighted value\n */\nexport type SearchHighlightFn = (value: string) => string\n\n/**\n * Search highlight factory function\n *\n * @param query - Query value\n *\n * @returns Search highlight function\n */\nexport type SearchHighlightFactoryFn = (query: string) => SearchHighlightFn\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Create a search highlighter\n *\n * @param config - Search index configuration\n * @param escape - Whether to escape HTML\n *\n * @returns Search highlight factory function\n */\nexport function setupSearchHighlighter(\n config: SearchIndexConfig, escape: boolean\n): SearchHighlightFactoryFn {\n const separator = new RegExp(config.separator, \"img\")\n const highlight = (_: unknown, data: string, term: string) => {\n return `${data}${term}`\n }\n\n /* Return factory function */\n return (query: string) => {\n query = query\n .replace(/[\\s*+\\-:~^]+/g, \" \")\n .trim()\n\n /* Create search term match expression */\n const match = new RegExp(`(^|${config.separator})(${\n query\n .replace(/[|\\\\{}()[\\]^$+*?.-]/g, \"\\\\$&\")\n .replace(separator, \"|\")\n })`, \"img\")\n\n /* Highlight string value */\n return value => (\n escape\n ? escapeHTML(value)\n : value\n )\n .replace(match, highlight)\n .replace(/<\\/mark>(\\s+)]*>/img, \"$1\")\n }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search query clause\n */\nexport interface SearchQueryClause {\n presence: lunr.Query.presence /* Clause presence */\n term: string /* Clause term */\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Search query terms\n */\nexport type SearchQueryTerms = Record\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Parse a search query for analysis\n *\n * @param value - Query value\n *\n * @returns Search query clauses\n */\nexport function parseSearchQuery(\n value: string\n): SearchQueryClause[] {\n const query = new (lunr as any).Query([\"title\", \"text\"])\n const parser = new (lunr as any).QueryParser(value, query)\n\n /* Parse and return query clauses */\n parser.parse()\n return query.clauses\n}\n\n/**\n * Analyze the search query clauses in regard to the search terms found\n *\n * @param query - Search query clauses\n * @param terms - Search terms\n *\n * @returns Search query terms\n */\nexport function getSearchQueryTerms(\n query: SearchQueryClause[], terms: string[]\n): SearchQueryTerms {\n const clauses = new Set(query)\n\n /* Match query clauses against terms */\n const result: SearchQueryTerms = {}\n for (let t = 0; t < terms.length; t++)\n for (const clause of clauses)\n if (terms[t].startsWith(clause.term)) {\n result[clause.term] = true\n clauses.delete(clause)\n }\n\n /* Annotate unmatched non-stopword query clauses */\n for (const clause of clauses)\n if (lunr.stopWordFilter?.(clause.term as any))\n result[clause.term] = false\n\n /* Return query terms */\n return result\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n SearchDocument,\n SearchDocumentMap,\n setupSearchDocumentMap\n} from \"../document\"\nimport {\n SearchHighlightFactoryFn,\n setupSearchHighlighter\n} from \"../highlighter\"\nimport { SearchOptions } from \"../options\"\nimport {\n SearchQueryTerms,\n getSearchQueryTerms,\n parseSearchQuery\n} from \"../query\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search index configuration\n */\nexport interface SearchIndexConfig {\n lang: string[] /* Search languages */\n separator: string /* Search separator */\n}\n\n/**\n * Search index document\n */\nexport interface SearchIndexDocument {\n location: string /* Document location */\n title: string /* Document title */\n text: string /* Document text */\n tags?: string[] /* Document tags */\n boost?: number /* Document boost */\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Search index\n *\n * This interfaces describes the format of the `search_index.json` file which\n * is automatically built by the MkDocs search plugin.\n */\nexport interface SearchIndex {\n config: SearchIndexConfig /* Search index configuration */\n docs: SearchIndexDocument[] /* Search index documents */\n options: SearchOptions /* Search options */\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Search metadata\n */\nexport interface SearchMetadata {\n score: number /* Score (relevance) */\n terms: SearchQueryTerms /* Search query terms */\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Search result document\n */\nexport type SearchResultDocument = SearchDocument & SearchMetadata\n\n/**\n * Search result item\n */\nexport type SearchResultItem = SearchResultDocument[]\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Search result\n */\nexport interface SearchResult {\n items: SearchResultItem[] /* Search result items */\n suggestions?: string[] /* Search suggestions */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Compute the difference of two lists of strings\n *\n * @param a - 1st list of strings\n * @param b - 2nd list of strings\n *\n * @returns Difference\n */\nfunction difference(a: string[], b: string[]): string[] {\n const [x, y] = [new Set(a), new Set(b)]\n return [\n ...new Set([...x].filter(value => !y.has(value)))\n ]\n}\n\n/* ----------------------------------------------------------------------------\n * Class\n * ------------------------------------------------------------------------- */\n\n/**\n * Search index\n */\nexport class Search {\n\n /**\n * Search document mapping\n *\n * A mapping of URLs (including hash fragments) to the actual articles and\n * sections of the documentation. 
The search document mapping must be created\n * regardless of whether the index was prebuilt or not, as Lunr.js itself\n * only stores the actual index.\n */\n protected documents: SearchDocumentMap\n\n /**\n * Search highlight factory function\n */\n protected highlight: SearchHighlightFactoryFn\n\n /**\n * The underlying Lunr.js search index\n */\n protected index: lunr.Index\n\n /**\n * Search options\n */\n protected options: SearchOptions\n\n /**\n * Create the search integration\n *\n * @param data - Search index\n */\n public constructor({ config, docs, options }: SearchIndex) {\n this.options = options\n\n /* Set up document map and highlighter factory */\n this.documents = setupSearchDocumentMap(docs)\n this.highlight = setupSearchHighlighter(config, false)\n\n /* Set separator for tokenizer */\n lunr.tokenizer.separator = new RegExp(config.separator)\n\n /* Create search index */\n this.index = lunr(function () {\n\n /* Set up multi-language support */\n if (config.lang.length === 1 && config.lang[0] !== \"en\") {\n this.use((lunr as any)[config.lang[0]])\n } else if (config.lang.length > 1) {\n this.use((lunr as any).multiLanguage(...config.lang))\n }\n\n /* Compute functions to be removed from the pipeline */\n const fns = difference([\n \"trimmer\", \"stopWordFilter\", \"stemmer\"\n ], options.pipeline)\n\n /* Remove functions from the pipeline for registered languages */\n for (const lang of config.lang.map(language => (\n language === \"en\" ? lunr : (lunr as any)[language]\n ))) {\n for (const fn of fns) {\n this.pipeline.remove(lang[fn])\n this.searchPipeline.remove(lang[fn])\n }\n }\n\n /* Set up reference */\n this.ref(\"location\")\n\n /* Set up fields */\n this.field(\"title\", { boost: 1e3 })\n this.field(\"text\")\n this.field(\"tags\", { boost: 1e6, extractor: doc => {\n const { tags = [] } = doc as SearchDocument\n return tags.reduce((list, tag) => [\n ...list,\n ...lunr.tokenizer(tag)\n ], [] as lunr.Token[])\n } })\n\n /* Index documents */\n for (const doc of docs)\n this.add(doc, { boost: doc.boost })\n })\n }\n\n /**\n * Search for matching documents\n *\n * The search index which MkDocs provides is divided up into articles, which\n * contain the whole content of the individual pages, and sections, which only\n * contain the contents of the subsections obtained by breaking the individual\n * pages up at `h1` ... `h6`. As there may be many sections on different pages\n * with identical titles (for example within this very project, e.g. \"Usage\"\n * or \"Installation\"), they need to be put into the context of the containing\n * page. 
For this reason, section results are grouped within their respective\n * articles which are the top-level results that are returned.\n *\n * @param query - Query value\n *\n * @returns Search results\n */\n public search(query: string): SearchResult {\n if (query) {\n try {\n const highlight = this.highlight(query)\n\n /* Parse query to extract clauses for analysis */\n const clauses = parseSearchQuery(query)\n .filter(clause => (\n clause.presence !== lunr.Query.presence.PROHIBITED\n ))\n\n /* Perform search and post-process results */\n const groups = this.index.search(`${query}*`)\n\n /* Apply post-query boosts based on title and search query terms */\n .reduce((item, { ref, score, matchData }) => {\n const document = this.documents.get(ref)\n if (typeof document !== \"undefined\") {\n const { location, title, text, tags, parent } = document\n\n /* Compute and analyze search query terms */\n const terms = getSearchQueryTerms(\n clauses,\n Object.keys(matchData.metadata)\n )\n\n /* Highlight title and text and apply post-query boosts */\n const boost = +!parent + +Object.values(terms).every(t => t)\n item.push({\n location,\n title: highlight(title),\n text: highlight(text),\n ...tags && { tags: tags.map(highlight) },\n score: score * (1 + boost),\n terms\n })\n }\n return item\n }, [])\n\n /* Sort search results again after applying boosts */\n .sort((a, b) => b.score - a.score)\n\n /* Group search results by page */\n .reduce((items, result) => {\n const document = this.documents.get(result.location)\n if (typeof document !== \"undefined\") {\n const ref = \"parent\" in document\n ? document.parent!.location\n : document.location\n items.set(ref, [...items.get(ref) || [], result])\n }\n return items\n }, new Map())\n\n /* Generate search suggestions, if desired */\n let suggestions: string[] | undefined\n if (this.options.suggestions) {\n const titles = this.index.query(builder => {\n for (const clause of clauses)\n builder.term(clause.term, {\n fields: [\"title\"],\n presence: lunr.Query.presence.REQUIRED,\n wildcard: lunr.Query.wildcard.TRAILING\n })\n })\n\n /* Retrieve suggestions for best match */\n suggestions = titles.length\n ? 
Object.keys(titles[0].matchData.metadata)\n : []\n }\n\n /* Return items and suggestions */\n return {\n items: [...groups.values()],\n ...typeof suggestions !== \"undefined\" && { suggestions }\n }\n\n /* Log errors to console (for now) */\n } catch {\n console.warn(`Invalid query: ${query} \u2013 see https://bit.ly/2s3ChXG`)\n }\n }\n\n /* Return nothing in case of error or empty query */\n return { items: [] }\n }\n}\n"], + "mappings": "glCAAA,IAAAA,GAAAC,EAAA,CAAAC,GAAAC,KAAA;AAAA;AAAA;AAAA;AAAA,IAME,UAAU,CAiCZ,IAAIC,EAAO,SAAUC,EAAQ,CAC3B,IAAIC,EAAU,IAAIF,EAAK,QAEvB,OAAAE,EAAQ,SAAS,IACfF,EAAK,QACLA,EAAK,eACLA,EAAK,OACP,EAEAE,EAAQ,eAAe,IACrBF,EAAK,OACP,EAEAC,EAAO,KAAKC,EAASA,CAAO,EACrBA,EAAQ,MAAM,CACvB,EAEAF,EAAK,QAAU,QACf;AAAA;AAAA;AAAA,GASAA,EAAK,MAAQ,CAAC,EASdA,EAAK,MAAM,KAAQ,SAAUG,EAAQ,CAEnC,OAAO,SAAUC,EAAS,CACpBD,EAAO,SAAW,QAAQ,MAC5B,QAAQ,KAAKC,CAAO,CAExB,CAEF,EAAG,IAAI,EAaPJ,EAAK,MAAM,SAAW,SAAUK,EAAK,CACnC,OAAsBA,GAAQ,KACrB,GAEAA,EAAI,SAAS,CAExB,EAkBAL,EAAK,MAAM,MAAQ,SAAUK,EAAK,CAChC,GAAIA,GAAQ,KACV,OAAOA,EAMT,QAHIC,EAAQ,OAAO,OAAO,IAAI,EAC1BC,EAAO,OAAO,KAAKF,CAAG,EAEjB,EAAI,EAAG,EAAIE,EAAK,OAAQ,IAAK,CACpC,IAAIC,EAAMD,EAAK,GACXE,EAAMJ,EAAIG,GAEd,GAAI,MAAM,QAAQC,CAAG,EAAG,CACtBH,EAAME,GAAOC,EAAI,MAAM,EACvB,QACF,CAEA,GAAI,OAAOA,GAAQ,UACf,OAAOA,GAAQ,UACf,OAAOA,GAAQ,UAAW,CAC5BH,EAAME,GAAOC,EACb,QACF,CAEA,MAAM,IAAI,UAAU,uDAAuD,CAC7E,CAEA,OAAOH,CACT,EACAN,EAAK,SAAW,SAAUU,EAAQC,EAAWC,EAAa,CACxD,KAAK,OAASF,EACd,KAAK,UAAYC,EACjB,KAAK,aAAeC,CACtB,EAEAZ,EAAK,SAAS,OAAS,IAEvBA,EAAK,SAAS,WAAa,SAAUa,EAAG,CACtC,IAAIC,EAAID,EAAE,QAAQb,EAAK,SAAS,MAAM,EAEtC,GAAIc,IAAM,GACR,KAAM,6BAGR,IAAIC,EAAWF,EAAE,MAAM,EAAGC,CAAC,EACvBJ,EAASG,EAAE,MAAMC,EAAI,CAAC,EAE1B,OAAO,IAAId,EAAK,SAAUU,EAAQK,EAAUF,CAAC,CAC/C,EAEAb,EAAK,SAAS,UAAU,SAAW,UAAY,CAC7C,OAAI,KAAK,cAAgB,OACvB,KAAK,aAAe,KAAK,UAAYA,EAAK,SAAS,OAAS,KAAK,QAG5D,KAAK,YACd,EACA;AAAA;AAAA;AAAA,GAUAA,EAAK,IAAM,SAAUgB,EAAU,CAG7B,GAFA,KAAK,SAAW,OAAO,OAAO,IAAI,EAE9BA,EAAU,CACZ,KAAK,OAASA,EAAS,OAEvB,QAASC,EAAI,EAAGA,EAAI,KAAK,OAAQA,IAC/B,KAAK,SAASD,EAASC,IAAM,EAEjC,MACE,KAAK,OAAS,CAElB,EASAjB,EAAK,IAAI,SAAW,CAClB,UAAW,SAAUkB,EAAO,CAC1B,OAAOA,CACT,EAEA,MAAO,UAAY,CACjB,OAAO,IACT,EAEA,SAAU,UAAY,CACpB,MAAO,EACT,CACF,EASAlB,EAAK,IAAI,MAAQ,CACf,UAAW,UAAY,CACrB,OAAO,IACT,EAEA,MAAO,SAAUkB,EAAO,CACtB,OAAOA,CACT,EAEA,SAAU,UAAY,CACpB,MAAO,EACT,CACF,EAQAlB,EAAK,IAAI,UAAU,SAAW,SAAUmB,EAAQ,CAC9C,MAAO,CAAC,CAAC,KAAK,SAASA,EACzB,EAUAnB,EAAK,IAAI,UAAU,UAAY,SAAUkB,EAAO,CAC9C,IAAIE,EAAGC,EAAGL,EAAUM,EAAe,CAAC,EAEpC,GAAIJ,IAAUlB,EAAK,IAAI,SACrB,OAAO,KAGT,GAAIkB,IAAUlB,EAAK,IAAI,MACrB,OAAOkB,EAGL,KAAK,OAASA,EAAM,QACtBE,EAAI,KACJC,EAAIH,IAEJE,EAAIF,EACJG,EAAI,MAGNL,EAAW,OAAO,KAAKI,EAAE,QAAQ,EAEjC,QAASH,EAAI,EAAGA,EAAID,EAAS,OAAQC,IAAK,CACxC,IAAIM,EAAUP,EAASC,GACnBM,KAAWF,EAAE,UACfC,EAAa,KAAKC,CAAO,CAE7B,CAEA,OAAO,IAAIvB,EAAK,IAAKsB,CAAY,CACnC,EASAtB,EAAK,IAAI,UAAU,MAAQ,SAAUkB,EAAO,CAC1C,OAAIA,IAAUlB,EAAK,IAAI,SACdA,EAAK,IAAI,SAGdkB,IAAUlB,EAAK,IAAI,MACd,KAGF,IAAIA,EAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE,OAAO,OAAO,KAAKkB,EAAM,QAAQ,CAAC,CAAC,CACpF,EASAlB,EAAK,IAAM,SAAUwB,EAASC,EAAe,CAC3C,IAAIC,EAAoB,EAExB,QAASf,KAAaa,EAChBb,GAAa,WACjBe,GAAqB,OAAO,KAAKF,EAAQb,EAAU,EAAE,QAGvD,IAAIgB,GAAKF,EAAgBC,EAAoB,KAAQA,EAAoB,IAEzE,OAAO,KAAK,IAAI,EAAI,KAAK,IAAIC,CAAC,CAAC,CACjC,EAUA3B,EAAK,MAAQ,SAAU4B,EAAKC,EAAU,CACpC,KAAK,IAAMD,GAAO,GAClB,KAAK,SAAWC,GAAY,CAAC,CAC/B,EAOA7B,EAAK,MAAM,UAAU,SAAW,UAAY,CAC1C,OAAO,KAAK,GACd,EAsBAA,EAAK,MAAM,UAAU,OAAS,SAAU8B,EAAI,CAC1C,YAAK,IAAMA,EAAG,KAAK,IAAK,KAAK,QAAQ,EAC9B,IACT,EASA9B,EAAK,MAAM,UAAU,MAAQ,SAAU8B,EAAI,CACzC,OAAAA,EAAKA,GAAM,SAAUjB,EAAG,CAAE,OAAOA,CAAE,EAC5B,IAAIb,E
AAK,MAAO8B,EAAG,KAAK,IAAK,KAAK,QAAQ,EAAG,KAAK,QAAQ,CACnE,EACA;AAAA;AAAA;AAAA,GAuBA9B,EAAK,UAAY,SAAUK,EAAKwB,EAAU,CACxC,GAAIxB,GAAO,MAAQA,GAAO,KACxB,MAAO,CAAC,EAGV,GAAI,MAAM,QAAQA,CAAG,EACnB,OAAOA,EAAI,IAAI,SAAU0B,EAAG,CAC1B,OAAO,IAAI/B,EAAK,MACdA,EAAK,MAAM,SAAS+B,CAAC,EAAE,YAAY,EACnC/B,EAAK,MAAM,MAAM6B,CAAQ,CAC3B,CACF,CAAC,EAOH,QAJID,EAAMvB,EAAI,SAAS,EAAE,YAAY,EACjC2B,EAAMJ,EAAI,OACVK,EAAS,CAAC,EAELC,EAAW,EAAGC,EAAa,EAAGD,GAAYF,EAAKE,IAAY,CAClE,IAAIE,EAAOR,EAAI,OAAOM,CAAQ,EAC1BG,EAAcH,EAAWC,EAE7B,GAAKC,EAAK,MAAMpC,EAAK,UAAU,SAAS,GAAKkC,GAAYF,EAAM,CAE7D,GAAIK,EAAc,EAAG,CACnB,IAAIC,EAAgBtC,EAAK,MAAM,MAAM6B,CAAQ,GAAK,CAAC,EACnDS,EAAc,SAAc,CAACH,EAAYE,CAAW,EACpDC,EAAc,MAAWL,EAAO,OAEhCA,EAAO,KACL,IAAIjC,EAAK,MACP4B,EAAI,MAAMO,EAAYD,CAAQ,EAC9BI,CACF,CACF,CACF,CAEAH,EAAaD,EAAW,CAC1B,CAEF,CAEA,OAAOD,CACT,EASAjC,EAAK,UAAU,UAAY,UAC3B;AAAA;AAAA;AAAA,GAkCAA,EAAK,SAAW,UAAY,CAC1B,KAAK,OAAS,CAAC,CACjB,EAEAA,EAAK,SAAS,oBAAsB,OAAO,OAAO,IAAI,EAmCtDA,EAAK,SAAS,iBAAmB,SAAU8B,EAAIS,EAAO,CAChDA,KAAS,KAAK,qBAChBvC,EAAK,MAAM,KAAK,6CAA+CuC,CAAK,EAGtET,EAAG,MAAQS,EACXvC,EAAK,SAAS,oBAAoB8B,EAAG,OAASA,CAChD,EAQA9B,EAAK,SAAS,4BAA8B,SAAU8B,EAAI,CACxD,IAAIU,EAAeV,EAAG,OAAUA,EAAG,SAAS,KAAK,oBAE5CU,GACHxC,EAAK,MAAM,KAAK;AAAA,EAAmG8B,CAAE,CAEzH,EAYA9B,EAAK,SAAS,KAAO,SAAUyC,EAAY,CACzC,IAAIC,EAAW,IAAI1C,EAAK,SAExB,OAAAyC,EAAW,QAAQ,SAAUE,EAAQ,CACnC,IAAIb,EAAK9B,EAAK,SAAS,oBAAoB2C,GAE3C,GAAIb,EACFY,EAAS,IAAIZ,CAAE,MAEf,OAAM,IAAI,MAAM,sCAAwCa,CAAM,CAElE,CAAC,EAEMD,CACT,EASA1C,EAAK,SAAS,UAAU,IAAM,UAAY,CACxC,IAAI4C,EAAM,MAAM,UAAU,MAAM,KAAK,SAAS,EAE9CA,EAAI,QAAQ,SAAUd,EAAI,CACxB9B,EAAK,SAAS,4BAA4B8B,CAAE,EAC5C,KAAK,OAAO,KAAKA,CAAE,CACrB,EAAG,IAAI,CACT,EAWA9B,EAAK,SAAS,UAAU,MAAQ,SAAU6C,EAAYC,EAAO,CAC3D9C,EAAK,SAAS,4BAA4B8C,CAAK,EAE/C,IAAIC,EAAM,KAAK,OAAO,QAAQF,CAAU,EACxC,GAAIE,GAAO,GACT,MAAM,IAAI,MAAM,wBAAwB,EAG1CA,EAAMA,EAAM,EACZ,KAAK,OAAO,OAAOA,EAAK,EAAGD,CAAK,CAClC,EAWA9C,EAAK,SAAS,UAAU,OAAS,SAAU6C,EAAYC,EAAO,CAC5D9C,EAAK,SAAS,4BAA4B8C,CAAK,EAE/C,IAAIC,EAAM,KAAK,OAAO,QAAQF,CAAU,EACxC,GAAIE,GAAO,GACT,MAAM,IAAI,MAAM,wBAAwB,EAG1C,KAAK,OAAO,OAAOA,EAAK,EAAGD,CAAK,CAClC,EAOA9C,EAAK,SAAS,UAAU,OAAS,SAAU8B,EAAI,CAC7C,IAAIiB,EAAM,KAAK,OAAO,QAAQjB,CAAE,EAC5BiB,GAAO,IAIX,KAAK,OAAO,OAAOA,EAAK,CAAC,CAC3B,EASA/C,EAAK,SAAS,UAAU,IAAM,SAAUiC,EAAQ,CAG9C,QAFIe,EAAc,KAAK,OAAO,OAErB/B,EAAI,EAAGA,EAAI+B,EAAa/B,IAAK,CAIpC,QAHIa,EAAK,KAAK,OAAOb,GACjBgC,EAAO,CAAC,EAEHC,EAAI,EAAGA,EAAIjB,EAAO,OAAQiB,IAAK,CACtC,IAAIC,EAASrB,EAAGG,EAAOiB,GAAIA,EAAGjB,CAAM,EAEpC,GAAI,EAAAkB,GAAW,MAA6BA,IAAW,IAEvD,GAAI,MAAM,QAAQA,CAAM,EACtB,QAASC,EAAI,EAAGA,EAAID,EAAO,OAAQC,IACjCH,EAAK,KAAKE,EAAOC,EAAE,OAGrBH,EAAK,KAAKE,CAAM,CAEpB,CAEAlB,EAASgB,CACX,CAEA,OAAOhB,CACT,EAYAjC,EAAK,SAAS,UAAU,UAAY,SAAU4B,EAAKC,EAAU,CAC3D,IAAIwB,EAAQ,IAAIrD,EAAK,MAAO4B,EAAKC,CAAQ,EAEzC,OAAO,KAAK,IAAI,CAACwB,CAAK,CAAC,EAAE,IAAI,SAAUtB,EAAG,CACxC,OAAOA,EAAE,SAAS,CACpB,CAAC,CACH,EAMA/B,EAAK,SAAS,UAAU,MAAQ,UAAY,CAC1C,KAAK,OAAS,CAAC,CACjB,EASAA,EAAK,SAAS,UAAU,OAAS,UAAY,CAC3C,OAAO,KAAK,OAAO,IAAI,SAAU8B,EAAI,CACnC,OAAA9B,EAAK,SAAS,4BAA4B8B,CAAE,EAErCA,EAAG,KACZ,CAAC,CACH,EACA;AAAA;AAAA;AAAA,GAqBA9B,EAAK,OAAS,SAAUgB,EAAU,CAChC,KAAK,WAAa,EAClB,KAAK,SAAWA,GAAY,CAAC,CAC/B,EAaAhB,EAAK,OAAO,UAAU,iBAAmB,SAAUsD,EAAO,CAExD,GAAI,KAAK,SAAS,QAAU,EAC1B,MAAO,GAST,QANIC,EAAQ,EACRC,EAAM,KAAK,SAAS,OAAS,EAC7BnB,EAAcmB,EAAMD,EACpBE,EAAa,KAAK,MAAMpB,EAAc,CAAC,EACvCqB,EAAa,KAAK,SAASD,EAAa,GAErCpB,EAAc,IACfqB,EAAaJ,IACfC,EAAQE,GAGNC,EAAaJ,IACfE,EAAMC,GAGJC,GAAcJ,IAIlBjB,EAAcmB,EAAMD,EACpBE,EAAaF,EAAQ,KAAK,MAAMlB,EAAc,CAAC,EAC/CqB,EAAa,KAAK,SAASD,EAAa,GAO1C,GAJIC,GAAcJ,GAIdI,EAAaJ,EACf,OAAOG,EAAa,EAGtB,GAAIC,EAAaJ,EACf,OAAQG,EAA
a,GAAK,CAE9B,EAWAzD,EAAK,OAAO,UAAU,OAAS,SAAU2D,EAAWlD,EAAK,CACvD,KAAK,OAAOkD,EAAWlD,EAAK,UAAY,CACtC,KAAM,iBACR,CAAC,CACH,EAUAT,EAAK,OAAO,UAAU,OAAS,SAAU2D,EAAWlD,EAAKqB,EAAI,CAC3D,KAAK,WAAa,EAClB,IAAI8B,EAAW,KAAK,iBAAiBD,CAAS,EAE1C,KAAK,SAASC,IAAaD,EAC7B,KAAK,SAASC,EAAW,GAAK9B,EAAG,KAAK,SAAS8B,EAAW,GAAInD,CAAG,EAEjE,KAAK,SAAS,OAAOmD,EAAU,EAAGD,EAAWlD,CAAG,CAEpD,EAOAT,EAAK,OAAO,UAAU,UAAY,UAAY,CAC5C,GAAI,KAAK,WAAY,OAAO,KAAK,WAKjC,QAHI6D,EAAe,EACfC,EAAiB,KAAK,SAAS,OAE1B7C,EAAI,EAAGA,EAAI6C,EAAgB7C,GAAK,EAAG,CAC1C,IAAIR,EAAM,KAAK,SAASQ,GACxB4C,GAAgBpD,EAAMA,CACxB,CAEA,OAAO,KAAK,WAAa,KAAK,KAAKoD,CAAY,CACjD,EAQA7D,EAAK,OAAO,UAAU,IAAM,SAAU+D,EAAa,CAOjD,QANIC,EAAa,EACb5C,EAAI,KAAK,SAAUC,EAAI0C,EAAY,SACnCE,EAAO7C,EAAE,OAAQ8C,EAAO7C,EAAE,OAC1B8C,EAAO,EAAGC,EAAO,EACjBnD,EAAI,EAAGiC,EAAI,EAERjC,EAAIgD,GAAQf,EAAIgB,GACrBC,EAAO/C,EAAEH,GAAImD,EAAO/C,EAAE6B,GAClBiB,EAAOC,EACTnD,GAAK,EACIkD,EAAOC,EAChBlB,GAAK,EACIiB,GAAQC,IACjBJ,GAAc5C,EAAEH,EAAI,GAAKI,EAAE6B,EAAI,GAC/BjC,GAAK,EACLiC,GAAK,GAIT,OAAOc,CACT,EASAhE,EAAK,OAAO,UAAU,WAAa,SAAU+D,EAAa,CACxD,OAAO,KAAK,IAAIA,CAAW,EAAI,KAAK,UAAU,GAAK,CACrD,EAOA/D,EAAK,OAAO,UAAU,QAAU,UAAY,CAG1C,QAFIqE,EAAS,IAAI,MAAO,KAAK,SAAS,OAAS,CAAC,EAEvCpD,EAAI,EAAGiC,EAAI,EAAGjC,EAAI,KAAK,SAAS,OAAQA,GAAK,EAAGiC,IACvDmB,EAAOnB,GAAK,KAAK,SAASjC,GAG5B,OAAOoD,CACT,EAOArE,EAAK,OAAO,UAAU,OAAS,UAAY,CACzC,OAAO,KAAK,QACd,EAEA;AAAA;AAAA;AAAA;AAAA,GAiBAA,EAAK,QAAW,UAAU,CACxB,IAAIsE,EAAY,CACZ,QAAY,MACZ,OAAW,OACX,KAAS,OACT,KAAS,OACT,KAAS,MACT,IAAQ,MACR,KAAS,KACT,MAAU,MACV,IAAQ,IACR,MAAU,MACV,QAAY,MACZ,MAAU,MACV,KAAS,MACT,MAAU,KACV,QAAY,MACZ,QAAY,MACZ,QAAY,MACZ,MAAU,KACV,MAAU,MACV,OAAW,MACX,KAAS,KACX,EAEAC,EAAY,CACV,MAAU,KACV,MAAU,GACV,MAAU,KACV,MAAU,KACV,KAAS,KACT,IAAQ,GACR,KAAS,EACX,EAEAC,EAAI,WACJC,EAAI,WACJC,EAAIF,EAAI,aACRG,EAAIF,EAAI,WAERG,EAAO,KAAOF,EAAI,KAAOC,EAAID,EAC7BG,EAAO,KAAOH,EAAI,KAAOC,EAAID,EAAI,IAAMC,EAAI,MAC3CG,EAAO,KAAOJ,EAAI,KAAOC,EAAID,EAAIC,EAAID,EACrCK,EAAM,KAAOL,EAAI,KAAOD,EAEtBO,EAAU,IAAI,OAAOJ,CAAI,EACzBK,EAAU,IAAI,OAAOH,CAAI,EACzBI,EAAU,IAAI,OAAOL,CAAI,EACzBM,EAAS,IAAI,OAAOJ,CAAG,EAEvBK,EAAQ,kBACRC,EAAS,iBACTC,EAAQ,aACRC,EAAS,kBACTC,EAAU,KACVC,EAAW,cACXC,EAAW,IAAI,OAAO,oBAAoB,EAC1CC,EAAW,IAAI,OAAO,IAAMjB,EAAID,EAAI,cAAc,EAElDmB,EAAQ,mBACRC,EAAO,2IAEPC,EAAO,iDAEPC,EAAO,sFACPC,EAAQ,oBAERC,EAAO,WACPC,EAAS,MACTC,EAAQ,IAAI,OAAO,IAAMzB,EAAID,EAAI,cAAc,EAE/C2B,EAAgB,SAAuBC,EAAG,CAC5C,IAAIC,EACFC,EACAC,EACAC,EACAC,EACAC,EACAC,EAEF,GAAIP,EAAE,OAAS,EAAK,OAAOA,EAiB3B,GAfAG,EAAUH,EAAE,OAAO,EAAE,CAAC,EAClBG,GAAW,MACbH,EAAIG,EAAQ,YAAY,EAAIH,EAAE,OAAO,CAAC,GAIxCI,EAAKrB,EACLsB,EAAMrB,EAEFoB,EAAG,KAAKJ,CAAC,EAAKA,EAAIA,EAAE,QAAQI,EAAG,MAAM,EAChCC,EAAI,KAAKL,CAAC,IAAKA,EAAIA,EAAE,QAAQK,EAAI,MAAM,GAGhDD,EAAKnB,EACLoB,EAAMnB,EACFkB,EAAG,KAAKJ,CAAC,EAAG,CACd,IAAIQ,EAAKJ,EAAG,KAAKJ,CAAC,EAClBI,EAAKzB,EACDyB,EAAG,KAAKI,EAAG,EAAE,IACfJ,EAAKjB,EACLa,EAAIA,EAAE,QAAQI,EAAG,EAAE,EAEvB,SAAWC,EAAI,KAAKL,CAAC,EAAG,CACtB,IAAIQ,EAAKH,EAAI,KAAKL,CAAC,EACnBC,EAAOO,EAAG,GACVH,EAAMvB,EACFuB,EAAI,KAAKJ,CAAI,IACfD,EAAIC,EACJI,EAAMjB,EACNkB,EAAMjB,EACNkB,EAAMjB,EACFe,EAAI,KAAKL,CAAC,EAAKA,EAAIA,EAAI,IAClBM,EAAI,KAAKN,CAAC,GAAKI,EAAKjB,EAASa,EAAIA,EAAE,QAAQI,EAAG,EAAE,GAChDG,EAAI,KAAKP,CAAC,IAAKA,EAAIA,EAAI,KAEpC,CAIA,GADAI,EAAKb,EACDa,EAAG,KAAKJ,CAAC,EAAG,CACd,IAAIQ,EAAKJ,EAAG,KAAKJ,CAAC,EAClBC,EAAOO,EAAG,GACVR,EAAIC,EAAO,GACb,CAIA,GADAG,EAAKZ,EACDY,EAAG,KAAKJ,CAAC,EAAG,CACd,IAAIQ,EAAKJ,EAAG,KAAKJ,CAAC,EAClBC,EAAOO,EAAG,GACVN,EAASM,EAAG,GACZJ,EAAKzB,EACDyB,EAAG,KAAKH,CAAI,IACdD,EAAIC,EAAOhC,EAAUiC,GAEzB,CAIA,GADAE,EAAKX,EACDW,EAAG,KAAKJ,CAAC,EAAG,CACd,IAAIQ,EAAKJ,EAAG,KAAKJ,CAAC,EAClBC,EAAOO,EAAG,GACVN,EAA
SM,EAAG,GACZJ,EAAKzB,EACDyB,EAAG,KAAKH,CAAI,IACdD,EAAIC,EAAO/B,EAAUgC,GAEzB,CAKA,GAFAE,EAAKV,EACLW,EAAMV,EACFS,EAAG,KAAKJ,CAAC,EAAG,CACd,IAAIQ,EAAKJ,EAAG,KAAKJ,CAAC,EAClBC,EAAOO,EAAG,GACVJ,EAAKxB,EACDwB,EAAG,KAAKH,CAAI,IACdD,EAAIC,EAER,SAAWI,EAAI,KAAKL,CAAC,EAAG,CACtB,IAAIQ,EAAKH,EAAI,KAAKL,CAAC,EACnBC,EAAOO,EAAG,GAAKA,EAAG,GAClBH,EAAMzB,EACFyB,EAAI,KAAKJ,CAAI,IACfD,EAAIC,EAER,CAIA,GADAG,EAAKR,EACDQ,EAAG,KAAKJ,CAAC,EAAG,CACd,IAAIQ,EAAKJ,EAAG,KAAKJ,CAAC,EAClBC,EAAOO,EAAG,GACVJ,EAAKxB,EACLyB,EAAMxB,EACNyB,EAAMR,GACFM,EAAG,KAAKH,CAAI,GAAMI,EAAI,KAAKJ,CAAI,GAAK,CAAEK,EAAI,KAAKL,CAAI,KACrDD,EAAIC,EAER,CAEA,OAAAG,EAAKP,EACLQ,EAAMzB,EACFwB,EAAG,KAAKJ,CAAC,GAAKK,EAAI,KAAKL,CAAC,IAC1BI,EAAKjB,EACLa,EAAIA,EAAE,QAAQI,EAAG,EAAE,GAKjBD,GAAW,MACbH,EAAIG,EAAQ,YAAY,EAAIH,EAAE,OAAO,CAAC,GAGjCA,CACT,EAEA,OAAO,SAAUhD,EAAO,CACtB,OAAOA,EAAM,OAAO+C,CAAa,CACnC,CACF,EAAG,EAEHpG,EAAK,SAAS,iBAAiBA,EAAK,QAAS,SAAS,EACtD;AAAA;AAAA;AAAA,GAkBAA,EAAK,uBAAyB,SAAU8G,EAAW,CACjD,IAAIC,EAAQD,EAAU,OAAO,SAAU7D,EAAM+D,EAAU,CACrD,OAAA/D,EAAK+D,GAAYA,EACV/D,CACT,EAAG,CAAC,CAAC,EAEL,OAAO,SAAUI,EAAO,CACtB,GAAIA,GAAS0D,EAAM1D,EAAM,SAAS,KAAOA,EAAM,SAAS,EAAG,OAAOA,CACpE,CACF,EAeArD,EAAK,eAAiBA,EAAK,uBAAuB,CAChD,IACA,OACA,QACA,SACA,QACA,MACA,SACA,OACA,KACA,QACA,KACA,MACA,MACA,MACA,KACA,KACA,KACA,UACA,OACA,MACA,KACA,MACA,SACA,QACA,OACA,MACA,KACA,OACA,SACA,OACA,OACA,QACA,MACA,OACA,MACA,MACA,MACA,MACA,OACA,KACA,MACA,OACA,MACA,MACA,MACA,UACA,IACA,KACA,KACA,OACA,KACA,KACA,MACA,OACA,QACA,MACA,OACA,SACA,MACA,KACA,QACA,OACA,OACA,KACA,UACA,KACA,MACA,MACA,KACA,MACA,QACA,KACA,OACA,KACA,QACA,MACA,MACA,SACA,OACA,MACA,OACA,MACA,SACA,QACA,KACA,OACA,OACA,OACA,MACA,QACA,OACA,OACA,QACA,QACA,OACA,OACA,MACA,KACA,MACA,OACA,KACA,QACA,MACA,KACA,OACA,OACA,OACA,QACA,QACA,QACA,MACA,OACA,MACA,OACA,OACA,QACA,MACA,MACA,MACF,CAAC,EAEDA,EAAK,SAAS,iBAAiBA,EAAK,eAAgB,gBAAgB,EACpE;AAAA;AAAA;AAAA,GAoBAA,EAAK,QAAU,SAAUqD,EAAO,CAC9B,OAAOA,EAAM,OAAO,SAAUxC,EAAG,CAC/B,OAAOA,EAAE,QAAQ,OAAQ,EAAE,EAAE,QAAQ,OAAQ,EAAE,CACjD,CAAC,CACH,EAEAb,EAAK,SAAS,iBAAiBA,EAAK,QAAS,SAAS,EACtD;AAAA;AAAA;AAAA,GA0BAA,EAAK,SAAW,UAAY,CAC1B,KAAK,MAAQ,GACb,KAAK,MAAQ,CAAC,EACd,KAAK,GAAKA,EAAK,SAAS,QACxBA,EAAK,SAAS,SAAW,CAC3B,EAUAA,EAAK,SAAS,QAAU,EASxBA,EAAK,SAAS,UAAY,SAAUiH,EAAK,CAGvC,QAFI/G,EAAU,IAAIF,EAAK,SAAS,QAEvBiB,EAAI,EAAGe,EAAMiF,EAAI,OAAQhG,EAAIe,EAAKf,IACzCf,EAAQ,OAAO+G,EAAIhG,EAAE,EAGvB,OAAAf,EAAQ,OAAO,EACRA,EAAQ,IACjB,EAWAF,EAAK,SAAS,WAAa,SAAUkH,EAAQ,CAC3C,MAAI,iBAAkBA,EACblH,EAAK,SAAS,gBAAgBkH,EAAO,KAAMA,EAAO,YAAY,EAE9DlH,EAAK,SAAS,WAAWkH,EAAO,IAAI,CAE/C,EAiBAlH,EAAK,SAAS,gBAAkB,SAAU4B,EAAKuF,EAAc,CAS3D,QARIC,EAAO,IAAIpH,EAAK,SAEhBqH,EAAQ,CAAC,CACX,KAAMD,EACN,eAAgBD,EAChB,IAAKvF,CACP,CAAC,EAEMyF,EAAM,QAAQ,CACnB,IAAIC,EAAQD,EAAM,IAAI,EAGtB,GAAIC,EAAM,IAAI,OAAS,EAAG,CACxB,IAAIlF,EAAOkF,EAAM,IAAI,OAAO,CAAC,EACzBC,EAEAnF,KAAQkF,EAAM,KAAK,MACrBC,EAAaD,EAAM,KAAK,MAAMlF,IAE9BmF,EAAa,IAAIvH,EAAK,SACtBsH,EAAM,KAAK,MAAMlF,GAAQmF,GAGvBD,EAAM,IAAI,QAAU,IACtBC,EAAW,MAAQ,IAGrBF,EAAM,KAAK,CACT,KAAME,EACN,eAAgBD,EAAM,eACtB,IAAKA,EAAM,IAAI,MAAM,CAAC,CACxB,CAAC,CACH,CAEA,GAAIA,EAAM,gBAAkB,EAK5B,IAAI,MAAOA,EAAM,KAAK,MACpB,IAAIE,EAAgBF,EAAM,KAAK,MAAM,SAChC,CACL,IAAIE,EAAgB,IAAIxH,EAAK,SAC7BsH,EAAM,KAAK,MAAM,KAAOE,CAC1B,CAgCA,GA9BIF,EAAM,IAAI,QAAU,IACtBE,EAAc,MAAQ,IAGxBH,EAAM,KAAK,CACT,KAAMG,EACN,eAAgBF,EAAM,eAAiB,EACvC,IAAKA,EAAM,GACb,CAAC,EAKGA,EAAM,IAAI,OAAS,GACrBD,EAAM,KAAK,CACT,KAAMC,EAAM,KACZ,eAAgBA,EAAM,eAAiB,EACvC,IAAKA,EAAM,IAAI,MAAM,CAAC,CACxB,CAAC,EAKCA,EAAM,IAAI,QAAU,IACtBA,EAAM,KAAK,MAAQ,IAMjBA,EAAM,IAAI,QAAU,EAAG,CACzB,GAAI,MAAOA,EAAM,KAAK,MACpB,IAAIG,EAAmBH,EAAM,KAAK,MAAM,SACnC,CACL,IAAIG,EAAmB,I
AAIzH,EAAK,SAChCsH,EAAM,KAAK,MAAM,KAAOG,CAC1B,CAEIH,EAAM,IAAI,QAAU,IACtBG,EAAiB,MAAQ,IAG3BJ,EAAM,KAAK,CACT,KAAMI,EACN,eAAgBH,EAAM,eAAiB,EACvC,IAAKA,EAAM,IAAI,MAAM,CAAC,CACxB,CAAC,CACH,CAKA,GAAIA,EAAM,IAAI,OAAS,EAAG,CACxB,IAAII,EAAQJ,EAAM,IAAI,OAAO,CAAC,EAC1BK,EAAQL,EAAM,IAAI,OAAO,CAAC,EAC1BM,EAEAD,KAASL,EAAM,KAAK,MACtBM,EAAgBN,EAAM,KAAK,MAAMK,IAEjCC,EAAgB,IAAI5H,EAAK,SACzBsH,EAAM,KAAK,MAAMK,GAASC,GAGxBN,EAAM,IAAI,QAAU,IACtBM,EAAc,MAAQ,IAGxBP,EAAM,KAAK,CACT,KAAMO,EACN,eAAgBN,EAAM,eAAiB,EACvC,IAAKI,EAAQJ,EAAM,IAAI,MAAM,CAAC,CAChC,CAAC,CACH,EACF,CAEA,OAAOF,CACT,EAYApH,EAAK,SAAS,WAAa,SAAU4B,EAAK,CAYxC,QAXIiG,EAAO,IAAI7H,EAAK,SAChBoH,EAAOS,EAUF,EAAI,EAAG7F,EAAMJ,EAAI,OAAQ,EAAII,EAAK,IAAK,CAC9C,IAAII,EAAOR,EAAI,GACXkG,EAAS,GAAK9F,EAAM,EAExB,GAAII,GAAQ,IACVyF,EAAK,MAAMzF,GAAQyF,EACnBA,EAAK,MAAQC,MAER,CACL,IAAIC,EAAO,IAAI/H,EAAK,SACpB+H,EAAK,MAAQD,EAEbD,EAAK,MAAMzF,GAAQ2F,EACnBF,EAAOE,CACT,CACF,CAEA,OAAOX,CACT,EAYApH,EAAK,SAAS,UAAU,QAAU,UAAY,CAQ5C,QAPI+G,EAAQ,CAAC,EAETM,EAAQ,CAAC,CACX,OAAQ,GACR,KAAM,IACR,CAAC,EAEMA,EAAM,QAAQ,CACnB,IAAIC,EAAQD,EAAM,IAAI,EAClBW,EAAQ,OAAO,KAAKV,EAAM,KAAK,KAAK,EACpCtF,EAAMgG,EAAM,OAEZV,EAAM,KAAK,QAKbA,EAAM,OAAO,OAAO,CAAC,EACrBP,EAAM,KAAKO,EAAM,MAAM,GAGzB,QAASrG,EAAI,EAAGA,EAAIe,EAAKf,IAAK,CAC5B,IAAIgH,EAAOD,EAAM/G,GAEjBoG,EAAM,KAAK,CACT,OAAQC,EAAM,OAAO,OAAOW,CAAI,EAChC,KAAMX,EAAM,KAAK,MAAMW,EACzB,CAAC,CACH,CACF,CAEA,OAAOlB,CACT,EAYA/G,EAAK,SAAS,UAAU,SAAW,UAAY,CAS7C,GAAI,KAAK,KACP,OAAO,KAAK,KAOd,QAJI4B,EAAM,KAAK,MAAQ,IAAM,IACzBsG,EAAS,OAAO,KAAK,KAAK,KAAK,EAAE,KAAK,EACtClG,EAAMkG,EAAO,OAER,EAAI,EAAG,EAAIlG,EAAK,IAAK,CAC5B,IAAIO,EAAQ2F,EAAO,GACfL,EAAO,KAAK,MAAMtF,GAEtBX,EAAMA,EAAMW,EAAQsF,EAAK,EAC3B,CAEA,OAAOjG,CACT,EAYA5B,EAAK,SAAS,UAAU,UAAY,SAAUqB,EAAG,CAU/C,QATIgD,EAAS,IAAIrE,EAAK,SAClBsH,EAAQ,OAERD,EAAQ,CAAC,CACX,MAAOhG,EACP,OAAQgD,EACR,KAAM,IACR,CAAC,EAEMgD,EAAM,QAAQ,CACnBC,EAAQD,EAAM,IAAI,EAWlB,QALIc,EAAS,OAAO,KAAKb,EAAM,MAAM,KAAK,EACtCc,EAAOD,EAAO,OACdE,EAAS,OAAO,KAAKf,EAAM,KAAK,KAAK,EACrCgB,EAAOD,EAAO,OAETE,EAAI,EAAGA,EAAIH,EAAMG,IAGxB,QAFIC,EAAQL,EAAOI,GAEVzH,EAAI,EAAGA,EAAIwH,EAAMxH,IAAK,CAC7B,IAAI2H,EAAQJ,EAAOvH,GAEnB,GAAI2H,GAASD,GAASA,GAAS,IAAK,CAClC,IAAIX,EAAOP,EAAM,KAAK,MAAMmB,GACxBC,EAAQpB,EAAM,MAAM,MAAMkB,GAC1BV,EAAQD,EAAK,OAASa,EAAM,MAC5BX,EAAO,OAEPU,KAASnB,EAAM,OAAO,OAIxBS,EAAOT,EAAM,OAAO,MAAMmB,GAC1BV,EAAK,MAAQA,EAAK,OAASD,IAM3BC,EAAO,IAAI/H,EAAK,SAChB+H,EAAK,MAAQD,EACbR,EAAM,OAAO,MAAMmB,GAASV,GAG9BV,EAAM,KAAK,CACT,MAAOqB,EACP,OAAQX,EACR,KAAMF,CACR,CAAC,CACH,CACF,CAEJ,CAEA,OAAOxD,CACT,EACArE,EAAK,SAAS,QAAU,UAAY,CAClC,KAAK,aAAe,GACpB,KAAK,KAAO,IAAIA,EAAK,SACrB,KAAK,eAAiB,CAAC,EACvB,KAAK,eAAiB,CAAC,CACzB,EAEAA,EAAK,SAAS,QAAQ,UAAU,OAAS,SAAU2I,EAAM,CACvD,IAAId,EACAe,EAAe,EAEnB,GAAID,EAAO,KAAK,aACd,MAAM,IAAI,MAAO,6BAA6B,EAGhD,QAAS,EAAI,EAAG,EAAIA,EAAK,QAAU,EAAI,KAAK,aAAa,QACnDA,EAAK,IAAM,KAAK,aAAa,GAD8B,IAE/DC,IAGF,KAAK,SAASA,CAAY,EAEtB,KAAK,eAAe,QAAU,EAChCf,EAAO,KAAK,KAEZA,EAAO,KAAK,eAAe,KAAK,eAAe,OAAS,GAAG,MAG7D,QAAS,EAAIe,EAAc,EAAID,EAAK,OAAQ,IAAK,CAC/C,IAAIE,EAAW,IAAI7I,EAAK,SACpBoC,EAAOuG,EAAK,GAEhBd,EAAK,MAAMzF,GAAQyG,EAEnB,KAAK,eAAe,KAAK,CACvB,OAAQhB,EACR,KAAMzF,EACN,MAAOyG,CACT,CAAC,EAEDhB,EAAOgB,CACT,CAEAhB,EAAK,MAAQ,GACb,KAAK,aAAec,CACtB,EAEA3I,EAAK,SAAS,QAAQ,UAAU,OAAS,UAAY,CACnD,KAAK,SAAS,CAAC,CACjB,EAEAA,EAAK,SAAS,QAAQ,UAAU,SAAW,SAAU8I,EAAQ,CAC3D,QAAS7H,EAAI,KAAK,eAAe,OAAS,EAAGA,GAAK6H,EAAQ7H,IAAK,CAC7D,IAAI4G,EAAO,KAAK,eAAe5G,GAC3B8H,EAAWlB,EAAK,MAAM,SAAS,EAE/BkB,KAAY,KAAK,eACnBlB,EAAK,OAAO,MAAMA,EAAK,MAAQ,KAAK,eAAekB,IAInDlB,EAAK,MAAM,KAAOkB,EAElB,KAAK,eAAeA,GAAYlB,EAAK,OAGvC,KAAK,eAAe,IAAI,CAC1B,CACF,EACA;AAAA;AAAA;AAAA,GAqBA7H,EAAK,MAAQ,SAAU
gJ,EAAO,CAC5B,KAAK,cAAgBA,EAAM,cAC3B,KAAK,aAAeA,EAAM,aAC1B,KAAK,SAAWA,EAAM,SACtB,KAAK,OAASA,EAAM,OACpB,KAAK,SAAWA,EAAM,QACxB,EAyEAhJ,EAAK,MAAM,UAAU,OAAS,SAAUiJ,EAAa,CACnD,OAAO,KAAK,MAAM,SAAUC,EAAO,CACjC,IAAIC,EAAS,IAAInJ,EAAK,YAAYiJ,EAAaC,CAAK,EACpDC,EAAO,MAAM,CACf,CAAC,CACH,EA2BAnJ,EAAK,MAAM,UAAU,MAAQ,SAAU8B,EAAI,CAoBzC,QAZIoH,EAAQ,IAAIlJ,EAAK,MAAM,KAAK,MAAM,EAClCoJ,EAAiB,OAAO,OAAO,IAAI,EACnCC,EAAe,OAAO,OAAO,IAAI,EACjCC,EAAiB,OAAO,OAAO,IAAI,EACnCC,EAAkB,OAAO,OAAO,IAAI,EACpCC,EAAoB,OAAO,OAAO,IAAI,EAOjCvI,EAAI,EAAGA,EAAI,KAAK,OAAO,OAAQA,IACtCoI,EAAa,KAAK,OAAOpI,IAAM,IAAIjB,EAAK,OAG1C8B,EAAG,KAAKoH,EAAOA,CAAK,EAEpB,QAASjI,EAAI,EAAGA,EAAIiI,EAAM,QAAQ,OAAQjI,IAAK,CAS7C,IAAIiG,EAASgC,EAAM,QAAQjI,GACvBwI,EAAQ,KACRC,EAAgB1J,EAAK,IAAI,MAEzBkH,EAAO,YACTuC,EAAQ,KAAK,SAAS,UAAUvC,EAAO,KAAM,CAC3C,OAAQA,EAAO,MACjB,CAAC,EAEDuC,EAAQ,CAACvC,EAAO,IAAI,EAGtB,QAASyC,EAAI,EAAGA,EAAIF,EAAM,OAAQE,IAAK,CACrC,IAAIC,EAAOH,EAAME,GAQjBzC,EAAO,KAAO0C,EAOd,IAAIC,EAAe7J,EAAK,SAAS,WAAWkH,CAAM,EAC9C4C,EAAgB,KAAK,SAAS,UAAUD,CAAY,EAAE,QAAQ,EAQlE,GAAIC,EAAc,SAAW,GAAK5C,EAAO,WAAalH,EAAK,MAAM,SAAS,SAAU,CAClF,QAASoD,EAAI,EAAGA,EAAI8D,EAAO,OAAO,OAAQ9D,IAAK,CAC7C,IAAI2G,EAAQ7C,EAAO,OAAO9D,GAC1BmG,EAAgBQ,GAAS/J,EAAK,IAAI,KACpC,CAEA,KACF,CAEA,QAASkD,EAAI,EAAGA,EAAI4G,EAAc,OAAQ5G,IASxC,QAJI8G,EAAeF,EAAc5G,GAC7B1B,EAAU,KAAK,cAAcwI,GAC7BC,EAAYzI,EAAQ,OAEf4B,EAAI,EAAGA,EAAI8D,EAAO,OAAO,OAAQ9D,IAAK,CAS7C,IAAI2G,EAAQ7C,EAAO,OAAO9D,GACtB8G,EAAe1I,EAAQuI,GACvBI,EAAuB,OAAO,KAAKD,CAAY,EAC/CE,EAAYJ,EAAe,IAAMD,EACjCM,EAAuB,IAAIrK,EAAK,IAAImK,CAAoB,EAoB5D,GAbIjD,EAAO,UAAYlH,EAAK,MAAM,SAAS,WACzC0J,EAAgBA,EAAc,MAAMW,CAAoB,EAEpDd,EAAgBQ,KAAW,SAC7BR,EAAgBQ,GAAS/J,EAAK,IAAI,WASlCkH,EAAO,UAAYlH,EAAK,MAAM,SAAS,WAAY,CACjDwJ,EAAkBO,KAAW,SAC/BP,EAAkBO,GAAS/J,EAAK,IAAI,OAGtCwJ,EAAkBO,GAASP,EAAkBO,GAAO,MAAMM,CAAoB,EAO9E,QACF,CAeA,GANAhB,EAAaU,GAAO,OAAOE,EAAW/C,EAAO,MAAO,SAAU9F,GAAGC,GAAG,CAAE,OAAOD,GAAIC,EAAE,CAAC,EAMhF,CAAAiI,EAAec,GAInB,SAASE,EAAI,EAAGA,EAAIH,EAAqB,OAAQG,IAAK,CAOpD,IAAIC,EAAsBJ,EAAqBG,GAC3CE,EAAmB,IAAIxK,EAAK,SAAUuK,EAAqBR,CAAK,EAChElI,EAAWqI,EAAaK,GACxBE,GAECA,EAAarB,EAAeoB,MAAuB,OACtDpB,EAAeoB,GAAoB,IAAIxK,EAAK,UAAWgK,EAAcD,EAAOlI,CAAQ,EAEpF4I,EAAW,IAAIT,EAAcD,EAAOlI,CAAQ,CAGhD,CAEAyH,EAAec,GAAa,GAC9B,CAEJ,CAQA,GAAIlD,EAAO,WAAalH,EAAK,MAAM,SAAS,SAC1C,QAASoD,EAAI,EAAGA,EAAI8D,EAAO,OAAO,OAAQ9D,IAAK,CAC7C,IAAI2G,EAAQ7C,EAAO,OAAO9D,GAC1BmG,EAAgBQ,GAASR,EAAgBQ,GAAO,UAAUL,CAAa,CACzE,CAEJ,CAUA,QAHIgB,EAAqB1K,EAAK,IAAI,SAC9B2K,EAAuB3K,EAAK,IAAI,MAE3BiB,EAAI,EAAGA,EAAI,KAAK,OAAO,OAAQA,IAAK,CAC3C,IAAI8I,EAAQ,KAAK,OAAO9I,GAEpBsI,EAAgBQ,KAClBW,EAAqBA,EAAmB,UAAUnB,EAAgBQ,EAAM,GAGtEP,EAAkBO,KACpBY,EAAuBA,EAAqB,MAAMnB,EAAkBO,EAAM,EAE9E,CAEA,IAAIa,EAAoB,OAAO,KAAKxB,CAAc,EAC9CyB,EAAU,CAAC,EACXC,EAAU,OAAO,OAAO,IAAI,EAYhC,GAAI5B,EAAM,UAAU,EAAG,CACrB0B,EAAoB,OAAO,KAAK,KAAK,YAAY,EAEjD,QAAS3J,EAAI,EAAGA,EAAI2J,EAAkB,OAAQ3J,IAAK,CACjD,IAAIuJ,EAAmBI,EAAkB3J,GACrCF,EAAWf,EAAK,SAAS,WAAWwK,CAAgB,EACxDpB,EAAeoB,GAAoB,IAAIxK,EAAK,SAC9C,CACF,CAEA,QAASiB,EAAI,EAAGA,EAAI2J,EAAkB,OAAQ3J,IAAK,CASjD,IAAIF,EAAWf,EAAK,SAAS,WAAW4K,EAAkB3J,EAAE,EACxDP,EAASK,EAAS,OAEtB,GAAI,EAAC2J,EAAmB,SAAShK,CAAM,GAInC,CAAAiK,EAAqB,SAASjK,CAAM,EAIxC,KAAIqK,EAAc,KAAK,aAAahK,GAChCiK,EAAQ3B,EAAatI,EAAS,WAAW,WAAWgK,CAAW,EAC/DE,EAEJ,IAAKA,EAAWH,EAAQpK,MAAa,OACnCuK,EAAS,OAASD,EAClBC,EAAS,UAAU,QAAQ7B,EAAerI,EAAS,MAC9C,CACL,IAAImK,EAAQ,CACV,IAAKxK,EACL,MAAOsK,EACP,UAAW5B,EAAerI,EAC5B,EACA+J,EAAQpK,GAAUwK,EAClBL,EAAQ,KAAKK,CAAK,CACpB,EACF,CAKA,OAAOL,EAAQ,KAAK,SAAUzJ,GAAGC,GAAG,CAClC,OAAOA,GAAE,MAAQD,GAAE,KACrB,CAAC,CACH,EAUApB,EAAK,MAAM,UAAU,OAAS,UAAY,CACxC,IAAImL,EAAgB,OAAO,KAAK,KAAK,aAAa,EAC/C,KAA
K,EACL,IAAI,SAAUvB,EAAM,CACnB,MAAO,CAACA,EAAM,KAAK,cAAcA,EAAK,CACxC,EAAG,IAAI,EAELwB,EAAe,OAAO,KAAK,KAAK,YAAY,EAC7C,IAAI,SAAUC,EAAK,CAClB,MAAO,CAACA,EAAK,KAAK,aAAaA,GAAK,OAAO,CAAC,CAC9C,EAAG,IAAI,EAET,MAAO,CACL,QAASrL,EAAK,QACd,OAAQ,KAAK,OACb,aAAcoL,EACd,cAAeD,EACf,SAAU,KAAK,SAAS,OAAO,CACjC,CACF,EAQAnL,EAAK,MAAM,KAAO,SAAUsL,EAAiB,CAC3C,IAAItC,EAAQ,CAAC,EACToC,EAAe,CAAC,EAChBG,EAAoBD,EAAgB,aACpCH,EAAgB,OAAO,OAAO,IAAI,EAClCK,EAA0BF,EAAgB,cAC1CG,EAAkB,IAAIzL,EAAK,SAAS,QACpC0C,EAAW1C,EAAK,SAAS,KAAKsL,EAAgB,QAAQ,EAEtDA,EAAgB,SAAWtL,EAAK,SAClCA,EAAK,MAAM,KAAK,4EAA8EA,EAAK,QAAU,sCAAwCsL,EAAgB,QAAU,GAAG,EAGpL,QAASrK,EAAI,EAAGA,EAAIsK,EAAkB,OAAQtK,IAAK,CACjD,IAAIyK,EAAQH,EAAkBtK,GAC1BoK,EAAMK,EAAM,GACZ1K,EAAW0K,EAAM,GAErBN,EAAaC,GAAO,IAAIrL,EAAK,OAAOgB,CAAQ,CAC9C,CAEA,QAASC,EAAI,EAAGA,EAAIuK,EAAwB,OAAQvK,IAAK,CACvD,IAAIyK,EAAQF,EAAwBvK,GAChC2I,EAAO8B,EAAM,GACblK,EAAUkK,EAAM,GAEpBD,EAAgB,OAAO7B,CAAI,EAC3BuB,EAAcvB,GAAQpI,CACxB,CAEA,OAAAiK,EAAgB,OAAO,EAEvBzC,EAAM,OAASsC,EAAgB,OAE/BtC,EAAM,aAAeoC,EACrBpC,EAAM,cAAgBmC,EACtBnC,EAAM,SAAWyC,EAAgB,KACjCzC,EAAM,SAAWtG,EAEV,IAAI1C,EAAK,MAAMgJ,CAAK,CAC7B,EACA;AAAA;AAAA;AAAA,GA6BAhJ,EAAK,QAAU,UAAY,CACzB,KAAK,KAAO,KACZ,KAAK,QAAU,OAAO,OAAO,IAAI,EACjC,KAAK,WAAa,OAAO,OAAO,IAAI,EACpC,KAAK,cAAgB,OAAO,OAAO,IAAI,EACvC,KAAK,qBAAuB,CAAC,EAC7B,KAAK,aAAe,CAAC,EACrB,KAAK,UAAYA,EAAK,UACtB,KAAK,SAAW,IAAIA,EAAK,SACzB,KAAK,eAAiB,IAAIA,EAAK,SAC/B,KAAK,cAAgB,EACrB,KAAK,GAAK,IACV,KAAK,IAAM,IACX,KAAK,UAAY,EACjB,KAAK,kBAAoB,CAAC,CAC5B,EAcAA,EAAK,QAAQ,UAAU,IAAM,SAAUqL,EAAK,CAC1C,KAAK,KAAOA,CACd,EAkCArL,EAAK,QAAQ,UAAU,MAAQ,SAAUW,EAAWgL,EAAY,CAC9D,GAAI,KAAK,KAAKhL,CAAS,EACrB,MAAM,IAAI,WAAY,UAAYA,EAAY,kCAAkC,EAGlF,KAAK,QAAQA,GAAagL,GAAc,CAAC,CAC3C,EAUA3L,EAAK,QAAQ,UAAU,EAAI,SAAU4L,EAAQ,CACvCA,EAAS,EACX,KAAK,GAAK,EACDA,EAAS,EAClB,KAAK,GAAK,EAEV,KAAK,GAAKA,CAEd,EASA5L,EAAK,QAAQ,UAAU,GAAK,SAAU4L,EAAQ,CAC5C,KAAK,IAAMA,CACb,EAmBA5L,EAAK,QAAQ,UAAU,IAAM,SAAU6L,EAAKF,EAAY,CACtD,IAAIjL,EAASmL,EAAI,KAAK,MAClBC,EAAS,OAAO,KAAK,KAAK,OAAO,EAErC,KAAK,WAAWpL,GAAUiL,GAAc,CAAC,EACzC,KAAK,eAAiB,EAEtB,QAAS1K,EAAI,EAAGA,EAAI6K,EAAO,OAAQ7K,IAAK,CACtC,IAAIN,EAAYmL,EAAO7K,GACnB8K,EAAY,KAAK,QAAQpL,GAAW,UACpCoJ,EAAQgC,EAAYA,EAAUF,CAAG,EAAIA,EAAIlL,GACzCsB,EAAS,KAAK,UAAU8H,EAAO,CAC7B,OAAQ,CAACpJ,CAAS,CACpB,CAAC,EACD8I,EAAQ,KAAK,SAAS,IAAIxH,CAAM,EAChClB,EAAW,IAAIf,EAAK,SAAUU,EAAQC,CAAS,EAC/CqL,EAAa,OAAO,OAAO,IAAI,EAEnC,KAAK,qBAAqBjL,GAAYiL,EACtC,KAAK,aAAajL,GAAY,EAG9B,KAAK,aAAaA,IAAa0I,EAAM,OAGrC,QAASvG,EAAI,EAAGA,EAAIuG,EAAM,OAAQvG,IAAK,CACrC,IAAI0G,EAAOH,EAAMvG,GAUjB,GARI8I,EAAWpC,IAAS,OACtBoC,EAAWpC,GAAQ,GAGrBoC,EAAWpC,IAAS,EAIhB,KAAK,cAAcA,IAAS,KAAW,CACzC,IAAIpI,EAAU,OAAO,OAAO,IAAI,EAChCA,EAAQ,OAAY,KAAK,UACzB,KAAK,WAAa,EAElB,QAAS4B,EAAI,EAAGA,EAAI0I,EAAO,OAAQ1I,IACjC5B,EAAQsK,EAAO1I,IAAM,OAAO,OAAO,IAAI,EAGzC,KAAK,cAAcwG,GAAQpI,CAC7B,CAGI,KAAK,cAAcoI,GAAMjJ,GAAWD,IAAW,OACjD,KAAK,cAAckJ,GAAMjJ,GAAWD,GAAU,OAAO,OAAO,IAAI,GAKlE,QAAS4J,EAAI,EAAGA,EAAI,KAAK,kBAAkB,OAAQA,IAAK,CACtD,IAAI2B,EAAc,KAAK,kBAAkB3B,GACrCzI,EAAW+H,EAAK,SAASqC,GAEzB,KAAK,cAAcrC,GAAMjJ,GAAWD,GAAQuL,IAAgB,OAC9D,KAAK,cAAcrC,GAAMjJ,GAAWD,GAAQuL,GAAe,CAAC,GAG9D,KAAK,cAAcrC,GAAMjJ,GAAWD,GAAQuL,GAAa,KAAKpK,CAAQ,CACxE,CACF,CAEF,CACF,EAOA7B,EAAK,QAAQ,UAAU,6BAA+B,UAAY,CAOhE,QALIkM,EAAY,OAAO,KAAK,KAAK,YAAY,EACzCC,EAAiBD,EAAU,OAC3BE,EAAc,CAAC,EACfC,EAAqB,CAAC,EAEjBpL,EAAI,EAAGA,EAAIkL,EAAgBlL,IAAK,CACvC,IAAIF,EAAWf,EAAK,SAAS,WAAWkM,EAAUjL,EAAE,EAChD8I,EAAQhJ,EAAS,UAErBsL,EAAmBtC,KAAWsC,EAAmBtC,GAAS,GAC1DsC,EAAmBtC,IAAU,EAE7BqC,EAAYrC,KAAWqC,EAAYrC,GAAS,GAC5CqC,EAAYrC,IAAU,KAAK,aAAahJ,EAC1C,CAIA,QAFI+K,EAAS,OAAO,KAAK,KAAK,OAAO,EAE5B7K,EAAI,EAAGA,EAAI6K,EAAO
,OAAQ7K,IAAK,CACtC,IAAIN,EAAYmL,EAAO7K,GACvBmL,EAAYzL,GAAayL,EAAYzL,GAAa0L,EAAmB1L,EACvE,CAEA,KAAK,mBAAqByL,CAC5B,EAOApM,EAAK,QAAQ,UAAU,mBAAqB,UAAY,CAMtD,QALIoL,EAAe,CAAC,EAChBc,EAAY,OAAO,KAAK,KAAK,oBAAoB,EACjDI,EAAkBJ,EAAU,OAC5BK,EAAe,OAAO,OAAO,IAAI,EAE5BtL,EAAI,EAAGA,EAAIqL,EAAiBrL,IAAK,CAaxC,QAZIF,EAAWf,EAAK,SAAS,WAAWkM,EAAUjL,EAAE,EAChDN,EAAYI,EAAS,UACrByL,EAAc,KAAK,aAAazL,GAChCgK,EAAc,IAAI/K,EAAK,OACvByM,EAAkB,KAAK,qBAAqB1L,GAC5C0I,EAAQ,OAAO,KAAKgD,CAAe,EACnCC,EAAcjD,EAAM,OAGpBkD,EAAa,KAAK,QAAQhM,GAAW,OAAS,EAC9CiM,EAAW,KAAK,WAAW7L,EAAS,QAAQ,OAAS,EAEhDmC,EAAI,EAAGA,EAAIwJ,EAAaxJ,IAAK,CACpC,IAAI0G,EAAOH,EAAMvG,GACb2J,EAAKJ,EAAgB7C,GACrBK,EAAY,KAAK,cAAcL,GAAM,OACrCkD,EAAK9B,EAAO+B,EAEZR,EAAa3C,KAAU,QACzBkD,EAAM9M,EAAK,IAAI,KAAK,cAAc4J,GAAO,KAAK,aAAa,EAC3D2C,EAAa3C,GAAQkD,GAErBA,EAAMP,EAAa3C,GAGrBoB,EAAQ8B,IAAQ,KAAK,IAAM,GAAKD,IAAO,KAAK,KAAO,EAAI,KAAK,GAAK,KAAK,IAAML,EAAc,KAAK,mBAAmB7L,KAAekM,GACjI7B,GAAS2B,EACT3B,GAAS4B,EACTG,EAAqB,KAAK,MAAM/B,EAAQ,GAAI,EAAI,IAQhDD,EAAY,OAAOd,EAAW8C,CAAkB,CAClD,CAEA3B,EAAarK,GAAYgK,CAC3B,CAEA,KAAK,aAAeK,CACtB,EAOApL,EAAK,QAAQ,UAAU,eAAiB,UAAY,CAClD,KAAK,SAAWA,EAAK,SAAS,UAC5B,OAAO,KAAK,KAAK,aAAa,EAAE,KAAK,CACvC,CACF,EAUAA,EAAK,QAAQ,UAAU,MAAQ,UAAY,CACzC,YAAK,6BAA6B,EAClC,KAAK,mBAAmB,EACxB,KAAK,eAAe,EAEb,IAAIA,EAAK,MAAM,CACpB,cAAe,KAAK,cACpB,aAAc,KAAK,aACnB,SAAU,KAAK,SACf,OAAQ,OAAO,KAAK,KAAK,OAAO,EAChC,SAAU,KAAK,cACjB,CAAC,CACH,EAgBAA,EAAK,QAAQ,UAAU,IAAM,SAAU8B,EAAI,CACzC,IAAIkL,EAAO,MAAM,UAAU,MAAM,KAAK,UAAW,CAAC,EAClDA,EAAK,QAAQ,IAAI,EACjBlL,EAAG,MAAM,KAAMkL,CAAI,CACrB,EAaAhN,EAAK,UAAY,SAAU4J,EAAMG,EAAOlI,EAAU,CAShD,QARIoL,EAAiB,OAAO,OAAO,IAAI,EACnCC,EAAe,OAAO,KAAKrL,GAAY,CAAC,CAAC,EAOpCZ,EAAI,EAAGA,EAAIiM,EAAa,OAAQjM,IAAK,CAC5C,IAAIT,EAAM0M,EAAajM,GACvBgM,EAAezM,GAAOqB,EAASrB,GAAK,MAAM,CAC5C,CAEA,KAAK,SAAW,OAAO,OAAO,IAAI,EAE9BoJ,IAAS,SACX,KAAK,SAASA,GAAQ,OAAO,OAAO,IAAI,EACxC,KAAK,SAASA,GAAMG,GAASkD,EAEjC,EAWAjN,EAAK,UAAU,UAAU,QAAU,SAAUmN,EAAgB,CAG3D,QAFI1D,EAAQ,OAAO,KAAK0D,EAAe,QAAQ,EAEtClM,EAAI,EAAGA,EAAIwI,EAAM,OAAQxI,IAAK,CACrC,IAAI2I,EAAOH,EAAMxI,GACb6K,EAAS,OAAO,KAAKqB,EAAe,SAASvD,EAAK,EAElD,KAAK,SAASA,IAAS,OACzB,KAAK,SAASA,GAAQ,OAAO,OAAO,IAAI,GAG1C,QAAS1G,EAAI,EAAGA,EAAI4I,EAAO,OAAQ5I,IAAK,CACtC,IAAI6G,EAAQ+B,EAAO5I,GACf3C,EAAO,OAAO,KAAK4M,EAAe,SAASvD,GAAMG,EAAM,EAEvD,KAAK,SAASH,GAAMG,IAAU,OAChC,KAAK,SAASH,GAAMG,GAAS,OAAO,OAAO,IAAI,GAGjD,QAAS3G,EAAI,EAAGA,EAAI7C,EAAK,OAAQ6C,IAAK,CACpC,IAAI5C,EAAMD,EAAK6C,GAEX,KAAK,SAASwG,GAAMG,GAAOvJ,IAAQ,KACrC,KAAK,SAASoJ,GAAMG,GAAOvJ,GAAO2M,EAAe,SAASvD,GAAMG,GAAOvJ,GAEvE,KAAK,SAASoJ,GAAMG,GAAOvJ,GAAO,KAAK,SAASoJ,GAAMG,GAAOvJ,GAAK,OAAO2M,EAAe,SAASvD,GAAMG,GAAOvJ,EAAI,CAGtH,CACF,CACF,CACF,EASAR,EAAK,UAAU,UAAU,IAAM,SAAU4J,EAAMG,EAAOlI,EAAU,CAC9D,GAAI,EAAE+H,KAAQ,KAAK,UAAW,CAC5B,KAAK,SAASA,GAAQ,OAAO,OAAO,IAAI,EACxC,KAAK,SAASA,GAAMG,GAASlI,EAC7B,MACF,CAEA,GAAI,EAAEkI,KAAS,KAAK,SAASH,IAAQ,CACnC,KAAK,SAASA,GAAMG,GAASlI,EAC7B,MACF,CAIA,QAFIqL,EAAe,OAAO,KAAKrL,CAAQ,EAE9BZ,EAAI,EAAGA,EAAIiM,EAAa,OAAQjM,IAAK,CAC5C,IAAIT,EAAM0M,EAAajM,GAEnBT,KAAO,KAAK,SAASoJ,GAAMG,GAC7B,KAAK,SAASH,GAAMG,GAAOvJ,GAAO,KAAK,SAASoJ,GAAMG,GAAOvJ,GAAK,OAAOqB,EAASrB,EAAI,EAEtF,KAAK,SAASoJ,GAAMG,GAAOvJ,GAAOqB,EAASrB,EAE/C,CACF,EAYAR,EAAK,MAAQ,SAAUoN,EAAW,CAChC,KAAK,QAAU,CAAC,EAChB,KAAK,UAAYA,CACnB,EA0BApN,EAAK,MAAM,SAAW,IAAI,OAAQ,GAAG,EACrCA,EAAK,MAAM,SAAS,KAAO,EAC3BA,EAAK,MAAM,SAAS,QAAU,EAC9BA,EAAK,MAAM,SAAS,SAAW,EAa/BA,EAAK,MAAM,SAAW,CAIpB,SAAU,EAMV,SAAU,EAMV,WAAY,CACd,EAyBAA,EAAK,MAAM,UAAU,OAAS,SAAUkH,EAAQ,CAC9C,MAAM,WAAYA,IAChBA,EAAO,OAAS,KAAK,WAGjB,UAAWA,IACfA,EAAO,MAAQ,GAGX,gBAAiBA,IACrBA,EAAO,YAAc,IAGjB,aAAcA,IAClBA,EAAO,SAAWlH,EAAK,MAAM,SAAS,MAG
nCkH,EAAO,SAAWlH,EAAK,MAAM,SAAS,SAAakH,EAAO,KAAK,OAAO,CAAC,GAAKlH,EAAK,MAAM,WAC1FkH,EAAO,KAAO,IAAMA,EAAO,MAGxBA,EAAO,SAAWlH,EAAK,MAAM,SAAS,UAAckH,EAAO,KAAK,MAAM,EAAE,GAAKlH,EAAK,MAAM,WAC3FkH,EAAO,KAAO,GAAKA,EAAO,KAAO,KAG7B,aAAcA,IAClBA,EAAO,SAAWlH,EAAK,MAAM,SAAS,UAGxC,KAAK,QAAQ,KAAKkH,CAAM,EAEjB,IACT,EASAlH,EAAK,MAAM,UAAU,UAAY,UAAY,CAC3C,QAASiB,EAAI,EAAGA,EAAI,KAAK,QAAQ,OAAQA,IACvC,GAAI,KAAK,QAAQA,GAAG,UAAYjB,EAAK,MAAM,SAAS,WAClD,MAAO,GAIX,MAAO,EACT,EA4BAA,EAAK,MAAM,UAAU,KAAO,SAAU4J,EAAMyD,EAAS,CACnD,GAAI,MAAM,QAAQzD,CAAI,EACpB,OAAAA,EAAK,QAAQ,SAAU7H,EAAG,CAAE,KAAK,KAAKA,EAAG/B,EAAK,MAAM,MAAMqN,CAAO,CAAC,CAAE,EAAG,IAAI,EACpE,KAGT,IAAInG,EAASmG,GAAW,CAAC,EACzB,OAAAnG,EAAO,KAAO0C,EAAK,SAAS,EAE5B,KAAK,OAAO1C,CAAM,EAEX,IACT,EACAlH,EAAK,gBAAkB,SAAUI,EAASmD,EAAOC,EAAK,CACpD,KAAK,KAAO,kBACZ,KAAK,QAAUpD,EACf,KAAK,MAAQmD,EACb,KAAK,IAAMC,CACb,EAEAxD,EAAK,gBAAgB,UAAY,IAAI,MACrCA,EAAK,WAAa,SAAU4B,EAAK,CAC/B,KAAK,QAAU,CAAC,EAChB,KAAK,IAAMA,EACX,KAAK,OAASA,EAAI,OAClB,KAAK,IAAM,EACX,KAAK,MAAQ,EACb,KAAK,oBAAsB,CAAC,CAC9B,EAEA5B,EAAK,WAAW,UAAU,IAAM,UAAY,CAG1C,QAFIsN,EAAQtN,EAAK,WAAW,QAErBsN,GACLA,EAAQA,EAAM,IAAI,CAEtB,EAEAtN,EAAK,WAAW,UAAU,YAAc,UAAY,CAKlD,QAJIuN,EAAY,CAAC,EACbpL,EAAa,KAAK,MAClBD,EAAW,KAAK,IAEX,EAAI,EAAG,EAAI,KAAK,oBAAoB,OAAQ,IACnDA,EAAW,KAAK,oBAAoB,GACpCqL,EAAU,KAAK,KAAK,IAAI,MAAMpL,EAAYD,CAAQ,CAAC,EACnDC,EAAaD,EAAW,EAG1B,OAAAqL,EAAU,KAAK,KAAK,IAAI,MAAMpL,EAAY,KAAK,GAAG,CAAC,EACnD,KAAK,oBAAoB,OAAS,EAE3BoL,EAAU,KAAK,EAAE,CAC1B,EAEAvN,EAAK,WAAW,UAAU,KAAO,SAAUwN,EAAM,CAC/C,KAAK,QAAQ,KAAK,CAChB,KAAMA,EACN,IAAK,KAAK,YAAY,EACtB,MAAO,KAAK,MACZ,IAAK,KAAK,GACZ,CAAC,EAED,KAAK,MAAQ,KAAK,GACpB,EAEAxN,EAAK,WAAW,UAAU,gBAAkB,UAAY,CACtD,KAAK,oBAAoB,KAAK,KAAK,IAAM,CAAC,EAC1C,KAAK,KAAO,CACd,EAEAA,EAAK,WAAW,UAAU,KAAO,UAAY,CAC3C,GAAI,KAAK,KAAO,KAAK,OACnB,OAAOA,EAAK,WAAW,IAGzB,IAAIoC,EAAO,KAAK,IAAI,OAAO,KAAK,GAAG,EACnC,YAAK,KAAO,EACLA,CACT,EAEApC,EAAK,WAAW,UAAU,MAAQ,UAAY,CAC5C,OAAO,KAAK,IAAM,KAAK,KACzB,EAEAA,EAAK,WAAW,UAAU,OAAS,UAAY,CACzC,KAAK,OAAS,KAAK,MACrB,KAAK,KAAO,GAGd,KAAK,MAAQ,KAAK,GACpB,EAEAA,EAAK,WAAW,UAAU,OAAS,UAAY,CAC7C,KAAK,KAAO,CACd,EAEAA,EAAK,WAAW,UAAU,eAAiB,UAAY,CACrD,IAAIoC,EAAMqL,EAEV,GACErL,EAAO,KAAK,KAAK,EACjBqL,EAAWrL,EAAK,WAAW,CAAC,QACrBqL,EAAW,IAAMA,EAAW,IAEjCrL,GAAQpC,EAAK,WAAW,KAC1B,KAAK,OAAO,CAEhB,EAEAA,EAAK,WAAW,UAAU,KAAO,UAAY,CAC3C,OAAO,KAAK,IAAM,KAAK,MACzB,EAEAA,EAAK,WAAW,IAAM,MACtBA,EAAK,WAAW,MAAQ,QACxBA,EAAK,WAAW,KAAO,OACvBA,EAAK,WAAW,cAAgB,gBAChCA,EAAK,WAAW,MAAQ,QACxBA,EAAK,WAAW,SAAW,WAE3BA,EAAK,WAAW,SAAW,SAAU0N,EAAO,CAC1C,OAAAA,EAAM,OAAO,EACbA,EAAM,KAAK1N,EAAK,WAAW,KAAK,EAChC0N,EAAM,OAAO,EACN1N,EAAK,WAAW,OACzB,EAEAA,EAAK,WAAW,QAAU,SAAU0N,EAAO,CAQzC,GAPIA,EAAM,MAAM,EAAI,IAClBA,EAAM,OAAO,EACbA,EAAM,KAAK1N,EAAK,WAAW,IAAI,GAGjC0N,EAAM,OAAO,EAETA,EAAM,KAAK,EACb,OAAO1N,EAAK,WAAW,OAE3B,EAEAA,EAAK,WAAW,gBAAkB,SAAU0N,EAAO,CACjD,OAAAA,EAAM,OAAO,EACbA,EAAM,eAAe,EACrBA,EAAM,KAAK1N,EAAK,WAAW,aAAa,EACjCA,EAAK,WAAW,OACzB,EAEAA,EAAK,WAAW,SAAW,SAAU0N,EAAO,CAC1C,OAAAA,EAAM,OAAO,EACbA,EAAM,eAAe,EACrBA,EAAM,KAAK1N,EAAK,WAAW,KAAK,EACzBA,EAAK,WAAW,OACzB,EAEAA,EAAK,WAAW,OAAS,SAAU0N,EAAO,CACpCA,EAAM,MAAM,EAAI,GAClBA,EAAM,KAAK1N,EAAK,WAAW,IAAI,CAEnC,EAaAA,EAAK,WAAW,cAAgBA,EAAK,UAAU,UAE/CA,EAAK,WAAW,QAAU,SAAU0N,EAAO,CACzC,OAAa,CACX,IAAItL,EAAOsL,EAAM,KAAK,EAEtB,GAAItL,GAAQpC,EAAK,WAAW,IAC1B,OAAOA,EAAK,WAAW,OAIzB,GAAIoC,EAAK,WAAW,CAAC,GAAK,GAAI,CAC5BsL,EAAM,gBAAgB,EACtB,QACF,CAEA,GAAItL,GAAQ,IACV,OAAOpC,EAAK,WAAW,SAGzB,GAAIoC,GAAQ,IACV,OAAAsL,EAAM,OAAO,EACTA,EAAM,MAAM,EAAI,GAClBA,EAAM,KAAK1N,EAAK,WAAW,IAAI,EAE1BA,EAAK,WAAW,gBAGzB,GAAIoC,GAAQ,IACV,OAAAsL,EAAM,OAAO,EACTA,EAAM,MAAM,EAAI,GAClBA,EAAM,KAAK1N,EAA
K,WAAW,IAAI,EAE1BA,EAAK,WAAW,SAczB,GARIoC,GAAQ,KAAOsL,EAAM,MAAM,IAAM,GAQjCtL,GAAQ,KAAOsL,EAAM,MAAM,IAAM,EACnC,OAAAA,EAAM,KAAK1N,EAAK,WAAW,QAAQ,EAC5BA,EAAK,WAAW,QAGzB,GAAIoC,EAAK,MAAMpC,EAAK,WAAW,aAAa,EAC1C,OAAOA,EAAK,WAAW,OAE3B,CACF,EAEAA,EAAK,YAAc,SAAU4B,EAAKsH,EAAO,CACvC,KAAK,MAAQ,IAAIlJ,EAAK,WAAY4B,CAAG,EACrC,KAAK,MAAQsH,EACb,KAAK,cAAgB,CAAC,EACtB,KAAK,UAAY,CACnB,EAEAlJ,EAAK,YAAY,UAAU,MAAQ,UAAY,CAC7C,KAAK,MAAM,IAAI,EACf,KAAK,QAAU,KAAK,MAAM,QAI1B,QAFIsN,EAAQtN,EAAK,YAAY,YAEtBsN,GACLA,EAAQA,EAAM,IAAI,EAGpB,OAAO,KAAK,KACd,EAEAtN,EAAK,YAAY,UAAU,WAAa,UAAY,CAClD,OAAO,KAAK,QAAQ,KAAK,UAC3B,EAEAA,EAAK,YAAY,UAAU,cAAgB,UAAY,CACrD,IAAI2N,EAAS,KAAK,WAAW,EAC7B,YAAK,WAAa,EACXA,CACT,EAEA3N,EAAK,YAAY,UAAU,WAAa,UAAY,CAClD,IAAI4N,EAAkB,KAAK,cAC3B,KAAK,MAAM,OAAOA,CAAe,EACjC,KAAK,cAAgB,CAAC,CACxB,EAEA5N,EAAK,YAAY,YAAc,SAAUmJ,EAAQ,CAC/C,IAAIwE,EAASxE,EAAO,WAAW,EAE/B,GAAIwE,GAAU,KAId,OAAQA,EAAO,KAAM,CACnB,KAAK3N,EAAK,WAAW,SACnB,OAAOA,EAAK,YAAY,cAC1B,KAAKA,EAAK,WAAW,MACnB,OAAOA,EAAK,YAAY,WAC1B,KAAKA,EAAK,WAAW,KACnB,OAAOA,EAAK,YAAY,UAC1B,QACE,IAAI6N,EAAe,4CAA8CF,EAAO,KAExE,MAAIA,EAAO,IAAI,QAAU,IACvBE,GAAgB,gBAAkBF,EAAO,IAAM,KAG3C,IAAI3N,EAAK,gBAAiB6N,EAAcF,EAAO,MAAOA,EAAO,GAAG,CAC1E,CACF,EAEA3N,EAAK,YAAY,cAAgB,SAAUmJ,EAAQ,CACjD,IAAIwE,EAASxE,EAAO,cAAc,EAElC,GAAIwE,GAAU,KAId,QAAQA,EAAO,IAAK,CAClB,IAAK,IACHxE,EAAO,cAAc,SAAWnJ,EAAK,MAAM,SAAS,WACpD,MACF,IAAK,IACHmJ,EAAO,cAAc,SAAWnJ,EAAK,MAAM,SAAS,SACpD,MACF,QACE,IAAI6N,EAAe,kCAAoCF,EAAO,IAAM,IACpE,MAAM,IAAI3N,EAAK,gBAAiB6N,EAAcF,EAAO,MAAOA,EAAO,GAAG,CAC1E,CAEA,IAAIG,EAAa3E,EAAO,WAAW,EAEnC,GAAI2E,GAAc,KAAW,CAC3B,IAAID,EAAe,yCACnB,MAAM,IAAI7N,EAAK,gBAAiB6N,EAAcF,EAAO,MAAOA,EAAO,GAAG,CACxE,CAEA,OAAQG,EAAW,KAAM,CACvB,KAAK9N,EAAK,WAAW,MACnB,OAAOA,EAAK,YAAY,WAC1B,KAAKA,EAAK,WAAW,KACnB,OAAOA,EAAK,YAAY,UAC1B,QACE,IAAI6N,EAAe,mCAAqCC,EAAW,KAAO,IAC1E,MAAM,IAAI9N,EAAK,gBAAiB6N,EAAcC,EAAW,MAAOA,EAAW,GAAG,CAClF,EACF,EAEA9N,EAAK,YAAY,WAAa,SAAUmJ,EAAQ,CAC9C,IAAIwE,EAASxE,EAAO,cAAc,EAElC,GAAIwE,GAAU,KAId,IAAIxE,EAAO,MAAM,UAAU,QAAQwE,EAAO,GAAG,GAAK,GAAI,CACpD,IAAII,EAAiB5E,EAAO,MAAM,UAAU,IAAI,SAAU6E,EAAG,CAAE,MAAO,IAAMA,EAAI,GAAI,CAAC,EAAE,KAAK,IAAI,EAC5FH,EAAe,uBAAyBF,EAAO,IAAM,uBAAyBI,EAElF,MAAM,IAAI/N,EAAK,gBAAiB6N,EAAcF,EAAO,MAAOA,EAAO,GAAG,CACxE,CAEAxE,EAAO,cAAc,OAAS,CAACwE,EAAO,GAAG,EAEzC,IAAIG,EAAa3E,EAAO,WAAW,EAEnC,GAAI2E,GAAc,KAAW,CAC3B,IAAID,EAAe,gCACnB,MAAM,IAAI7N,EAAK,gBAAiB6N,EAAcF,EAAO,MAAOA,EAAO,GAAG,CACxE,CAEA,OAAQG,EAAW,KAAM,CACvB,KAAK9N,EAAK,WAAW,KACnB,OAAOA,EAAK,YAAY,UAC1B,QACE,IAAI6N,EAAe,0BAA4BC,EAAW,KAAO,IACjE,MAAM,IAAI9N,EAAK,gBAAiB6N,EAAcC,EAAW,MAAOA,EAAW,GAAG,CAClF,EACF,EAEA9N,EAAK,YAAY,UAAY,SAAUmJ,EAAQ,CAC7C,IAAIwE,EAASxE,EAAO,cAAc,EAElC,GAAIwE,GAAU,KAId,CAAAxE,EAAO,cAAc,KAAOwE,EAAO,IAAI,YAAY,EAE/CA,EAAO,IAAI,QAAQ,GAAG,GAAK,KAC7BxE,EAAO,cAAc,YAAc,IAGrC,IAAI2E,EAAa3E,EAAO,WAAW,EAEnC,GAAI2E,GAAc,KAAW,CAC3B3E,EAAO,WAAW,EAClB,MACF,CAEA,OAAQ2E,EAAW,KAAM,CACvB,KAAK9N,EAAK,WAAW,KACnB,OAAAmJ,EAAO,WAAW,EACXnJ,EAAK,YAAY,UAC1B,KAAKA,EAAK,WAAW,MACnB,OAAAmJ,EAAO,WAAW,EACXnJ,EAAK,YAAY,WAC1B,KAAKA,EAAK,WAAW,cACnB,OAAOA,EAAK,YAAY,kBAC1B,KAAKA,EAAK,WAAW,MACnB,OAAOA,EAAK,YAAY,WAC1B,KAAKA,EAAK,WAAW,SACnB,OAAAmJ,EAAO,WAAW,EACXnJ,EAAK,YAAY,cAC1B,QACE,IAAI6N,EAAe,2BAA6BC,EAAW,KAAO,IAClE,MAAM,IAAI9N,EAAK,gBAAiB6N,EAAcC,EAAW,MAAOA,EAAW,GAAG,CAClF,EACF,EAEA9N,EAAK,YAAY,kBAAoB,SAAUmJ,EAAQ,CACrD,IAAIwE,EAASxE,EAAO,cAAc,EAElC,GAAIwE,GAAU,KAId,KAAIxG,EAAe,SAASwG,EAAO,IAAK,EAAE,EAE1C,GAAI,MAAMxG,CAAY,EAAG,CACvB,IAAI0G,EAAe,gCACnB,MAAM,IAAI7N,EAAK,gBAAiB6N,EAAcF,EAAO,MAAOA,EAAO,GAAG,CACxE,CAEAxE,EAAO,cAAc,aAAehC,EAEpC,IAAI2G,EAAa3E,EAAO,WAAW,EAEnC,GAAI2E,GAAc,KAAW,CAC3B3E,EAAO,WAAW,EAClB
,MACF,CAEA,OAAQ2E,EAAW,KAAM,CACvB,KAAK9N,EAAK,WAAW,KACnB,OAAAmJ,EAAO,WAAW,EACXnJ,EAAK,YAAY,UAC1B,KAAKA,EAAK,WAAW,MACnB,OAAAmJ,EAAO,WAAW,EACXnJ,EAAK,YAAY,WAC1B,KAAKA,EAAK,WAAW,cACnB,OAAOA,EAAK,YAAY,kBAC1B,KAAKA,EAAK,WAAW,MACnB,OAAOA,EAAK,YAAY,WAC1B,KAAKA,EAAK,WAAW,SACnB,OAAAmJ,EAAO,WAAW,EACXnJ,EAAK,YAAY,cAC1B,QACE,IAAI6N,EAAe,2BAA6BC,EAAW,KAAO,IAClE,MAAM,IAAI9N,EAAK,gBAAiB6N,EAAcC,EAAW,MAAOA,EAAW,GAAG,CAClF,EACF,EAEA9N,EAAK,YAAY,WAAa,SAAUmJ,EAAQ,CAC9C,IAAIwE,EAASxE,EAAO,cAAc,EAElC,GAAIwE,GAAU,KAId,KAAIM,EAAQ,SAASN,EAAO,IAAK,EAAE,EAEnC,GAAI,MAAMM,CAAK,EAAG,CAChB,IAAIJ,EAAe,wBACnB,MAAM,IAAI7N,EAAK,gBAAiB6N,EAAcF,EAAO,MAAOA,EAAO,GAAG,CACxE,CAEAxE,EAAO,cAAc,MAAQ8E,EAE7B,IAAIH,EAAa3E,EAAO,WAAW,EAEnC,GAAI2E,GAAc,KAAW,CAC3B3E,EAAO,WAAW,EAClB,MACF,CAEA,OAAQ2E,EAAW,KAAM,CACvB,KAAK9N,EAAK,WAAW,KACnB,OAAAmJ,EAAO,WAAW,EACXnJ,EAAK,YAAY,UAC1B,KAAKA,EAAK,WAAW,MACnB,OAAAmJ,EAAO,WAAW,EACXnJ,EAAK,YAAY,WAC1B,KAAKA,EAAK,WAAW,cACnB,OAAOA,EAAK,YAAY,kBAC1B,KAAKA,EAAK,WAAW,MACnB,OAAOA,EAAK,YAAY,WAC1B,KAAKA,EAAK,WAAW,SACnB,OAAAmJ,EAAO,WAAW,EACXnJ,EAAK,YAAY,cAC1B,QACE,IAAI6N,EAAe,2BAA6BC,EAAW,KAAO,IAClE,MAAM,IAAI9N,EAAK,gBAAiB6N,EAAcC,EAAW,MAAOA,EAAW,GAAG,CAClF,EACF,EAMI,SAAU1G,EAAM8G,EAAS,CACrB,OAAO,QAAW,YAAc,OAAO,IAEzC,OAAOA,CAAO,EACL,OAAOpO,IAAY,SAM5BC,GAAO,QAAUmO,EAAQ,EAGzB9G,EAAK,KAAO8G,EAAQ,CAExB,EAAE,KAAM,UAAY,CAMlB,OAAOlO,CACT,CAAC,CACH,GAAG,ICl5GH,IAAAmO,EAAAC,EAAA,CAAAC,GAAAC,KAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAeA,IAAIC,GAAkB,UAOtBD,GAAO,QAAUE,GAUjB,SAASA,GAAWC,EAAQ,CAC1B,IAAIC,EAAM,GAAKD,EACXE,EAAQJ,GAAgB,KAAKG,CAAG,EAEpC,GAAI,CAACC,EACH,OAAOD,EAGT,IAAIE,EACAC,EAAO,GACPC,EAAQ,EACRC,EAAY,EAEhB,IAAKD,EAAQH,EAAM,MAAOG,EAAQJ,EAAI,OAAQI,IAAS,CACrD,OAAQJ,EAAI,WAAWI,CAAK,EAAG,CAC7B,IAAK,IACHF,EAAS,SACT,MACF,IAAK,IACHA,EAAS,QACT,MACF,IAAK,IACHA,EAAS,QACT,MACF,IAAK,IACHA,EAAS,OACT,MACF,IAAK,IACHA,EAAS,OACT,MACF,QACE,QACJ,CAEIG,IAAcD,IAChBD,GAAQH,EAAI,UAAUK,EAAWD,CAAK,GAGxCC,EAAYD,EAAQ,EACpBD,GAAQD,CACV,CAEA,OAAOG,IAAcD,EACjBD,EAAOH,EAAI,UAAUK,EAAWD,CAAK,EACrCD,CACN,ICvDA,IAAAG,GAAiB,QCKZ,OAAO,UACV,OAAO,QAAU,SAAUC,EAAa,CACtC,IAAMC,EAA2B,CAAC,EAClC,QAAWC,KAAO,OAAO,KAAKF,CAAG,EAE/BC,EAAK,KAAK,CAACC,EAAKF,EAAIE,EAAI,CAAC,EAG3B,OAAOD,CACT,GAGG,OAAO,SACV,OAAO,OAAS,SAAUD,EAAa,CACrC,IAAMC,EAAiB,CAAC,EACxB,QAAWC,KAAO,OAAO,KAAKF,CAAG,EAE/BC,EAAK,KAAKD,EAAIE,EAAI,EAGpB,OAAOD,CACT,GAKE,OAAO,SAAY,cAGhB,QAAQ,UAAU,WACrB,QAAQ,UAAU,SAAW,SAC3BE,EAA8BC,EACxB,CACF,OAAOD,GAAM,UACf,KAAK,WAAaA,EAAE,KACpB,KAAK,UAAYA,EAAE,MAEnB,KAAK,WAAaA,EAClB,KAAK,UAAYC,EAErB,GAGG,QAAQ,UAAU,cACrB,QAAQ,UAAU,YAAc,YAC3BC,EACG,CACN,IAAMC,EAAS,KAAK,WACpB,GAAIA,EAAQ,CACND,EAAM,SAAW,GACnBC,EAAO,YAAY,IAAI,EAGzB,QAASC,EAAIF,EAAM,OAAS,EAAGE,GAAK,EAAGA,IAAK,CAC1C,IAAIC,EAAOH,EAAME,GACb,OAAOC,GAAS,SAClBA,EAAO,SAAS,eAAeA,CAAI,EAC5BA,EAAK,YACZA,EAAK,WAAW,YAAYA,CAAI,EAG7BD,EAGHD,EAAO,aAAa,KAAK,gBAAkBE,CAAI,EAF/CF,EAAO,aAAaE,EAAM,IAAI,CAGlC,CACF,CACF,ICxEJ,IAAAC,GAAuB,OAiChB,SAASC,GACdC,EACmB,CACnB,IAAMC,EAAY,IAAI,IAChBC,EAAY,IAAI,IACtB,QAAWC,KAAOH,EAAM,CACtB,GAAM,CAACI,EAAMC,CAAI,EAAIF,EAAI,SAAS,MAAM,GAAG,EAGrCG,EAAWH,EAAI,SACfI,EAAWJ,EAAI,MACfK,EAAWL,EAAI,KAGfM,KAAO,GAAAC,SAAWP,EAAI,IAAI,EAC7B,QAAQ,mBAAoB,EAAE,EAC9B,QAAQ,OAAQ,GAAG,EAGtB,GAAIE,EAAM,CACR,IAAMM,EAASV,EAAU,IAAIG,CAAI,EAG5BF,EAAQ,IAAIS,CAAM,EASrBV,EAAU,IAAIK,EAAU,CACtB,SAAAA,EACA,MAAAC,EACA,KAAAE,EACA,OAAAE,CACF,CAAC,GAbDA,EAAO,MAAQR,EAAI,MACnBQ,EAAO,KAAQF,EAGfP,EAAQ,IAAIS,CAAM,EAatB,MACEV,EAAU,IAAIK,EAAUM,EAAA,CACtB,SAAAN,EACA,MAAAC,EACA,KAAAE,GACGD,GAAQ,CAAE,KAAAA,CAAK,EACnB,CAEL,CACA,OAAOP,CACT,CCpFA,IAAAY,GAAuB,OAsChB,SAASC,GACdC,EAA2BC,EACD,CAC1B,IAAMC,EAAY,IAAI,OAAOF,EAAO,UAAW,KAAK,EAC9CG,EAAY,CAACC
,EAAYC,EAAcC,IACpC,GAAGD,4BAA+BC,WAI3C,OAAQC,GAAkB,CACxBA,EAAQA,EACL,QAAQ,gBAAiB,GAAG,EAC5B,KAAK,EAGR,IAAMC,EAAQ,IAAI,OAAO,MAAMR,EAAO,cACpCO,EACG,QAAQ,uBAAwB,MAAM,EACtC,QAAQL,EAAW,GAAG,KACtB,KAAK,EAGV,OAAOO,IACLR,KACI,GAAAS,SAAWD,CAAK,EAChBA,GAED,QAAQD,EAAOL,CAAS,EACxB,QAAQ,8BAA+B,IAAI,CAClD,CACF,CCtCO,SAASQ,GACdC,EACqB,CACrB,IAAMC,EAAS,IAAK,KAAa,MAAM,CAAC,QAAS,MAAM,CAAC,EAIxD,OAHe,IAAK,KAAa,YAAYD,EAAOC,CAAK,EAGlD,MAAM,EACNA,EAAM,OACf,CAUO,SAASC,GACdD,EAA4BE,EACV,CAzEpB,IAAAC,EA0EE,IAAMC,EAAU,IAAI,IAAuBJ,CAAK,EAG1CK,EAA2B,CAAC,EAClC,QAASC,EAAI,EAAGA,EAAIJ,EAAM,OAAQI,IAChC,QAAWC,KAAUH,EACfF,EAAMI,GAAG,WAAWC,EAAO,IAAI,IACjCF,EAAOE,EAAO,MAAQ,GACtBH,EAAQ,OAAOG,CAAM,GAI3B,QAAWA,KAAUH,GACfD,EAAA,KAAK,iBAAL,MAAAA,EAAA,UAAsBI,EAAO,QAC/BF,EAAOE,EAAO,MAAQ,IAG1B,OAAOF,CACT,CC2BA,SAASG,GAAWC,EAAaC,EAAuB,CACtD,GAAM,CAACC,EAAGC,CAAC,EAAI,CAAC,IAAI,IAAIH,CAAC,EAAG,IAAI,IAAIC,CAAC,CAAC,EACtC,MAAO,CACL,GAAG,IAAI,IAAI,CAAC,GAAGC,CAAC,EAAE,OAAOE,GAAS,CAACD,EAAE,IAAIC,CAAK,CAAC,CAAC,CAClD,CACF,CASO,IAAMC,EAAN,KAAa,CAgCX,YAAY,CAAE,OAAAC,EAAQ,KAAAC,EAAM,QAAAC,CAAQ,EAAgB,CACzD,KAAK,QAAUA,EAGf,KAAK,UAAYC,GAAuBF,CAAI,EAC5C,KAAK,UAAYG,GAAuBJ,EAAQ,EAAK,EAGrD,KAAK,UAAU,UAAY,IAAI,OAAOA,EAAO,SAAS,EAGtD,KAAK,MAAQ,KAAK,UAAY,CAGxBA,EAAO,KAAK,SAAW,GAAKA,EAAO,KAAK,KAAO,KACjD,KAAK,IAAK,KAAaA,EAAO,KAAK,GAAG,EAC7BA,EAAO,KAAK,OAAS,GAC9B,KAAK,IAAK,KAAa,cAAc,GAAGA,EAAO,IAAI,CAAC,EAItD,IAAMK,EAAMZ,GAAW,CACrB,UAAW,iBAAkB,SAC/B,EAAGS,EAAQ,QAAQ,EAGnB,QAAWI,KAAQN,EAAO,KAAK,IAAIO,GACjCA,IAAa,KAAO,KAAQ,KAAaA,EAC1C,EACC,QAAWC,KAAMH,EACf,KAAK,SAAS,OAAOC,EAAKE,EAAG,EAC7B,KAAK,eAAe,OAAOF,EAAKE,EAAG,EAKvC,KAAK,IAAI,UAAU,EAGnB,KAAK,MAAM,QAAS,CAAE,MAAO,GAAI,CAAC,EAClC,KAAK,MAAM,MAAM,EACjB,KAAK,MAAM,OAAQ,CAAE,MAAO,IAAK,UAAWC,GAAO,CACjD,GAAM,CAAE,KAAAC,EAAO,CAAC,CAAE,EAAID,EACtB,OAAOC,EAAK,OAAO,CAACC,EAAMC,IAAQ,CAChC,GAAGD,EACH,GAAG,KAAK,UAAUC,CAAG,CACvB,EAAG,CAAC,CAAiB,CACvB,CAAE,CAAC,EAGH,QAAWH,KAAOR,EAChB,KAAK,IAAIQ,EAAK,CAAE,MAAOA,EAAI,KAAM,CAAC,CACtC,CAAC,CACH,CAkBO,OAAOI,EAA6B,CACzC,GAAIA,EACF,GAAI,CACF,IAAMC,EAAY,KAAK,UAAUD,CAAK,EAGhCE,EAAUC,GAAiBH,CAAK,EACnC,OAAOI,GACNA,EAAO,WAAa,KAAK,MAAM,SAAS,UACzC,EAGGC,EAAS,KAAK,MAAM,OAAO,GAAGL,IAAQ,EAGzC,OAAyB,CAACM,EAAM,CAAE,IAAAC,EAAK,MAAAC,EAAO,UAAAC,CAAU,IAAM,CAC7D,IAAMC,EAAW,KAAK,UAAU,IAAIH,CAAG,EACvC,GAAI,OAAOG,GAAa,YAAa,CACnC,GAAM,CAAE,SAAAC,EAAU,MAAAC,EAAO,KAAAC,EAAM,KAAAhB,EAAM,OAAAiB,CAAO,EAAIJ,EAG1CK,EAAQC,GACZd,EACA,OAAO,KAAKO,EAAU,QAAQ,CAChC,EAGMQ,EAAQ,CAAC,CAACH,GAAS,CAAC,OAAO,OAAOC,CAAK,EAAE,MAAMG,GAAKA,CAAC,EAC3DZ,EAAK,KAAKa,EAAAC,EAAA,CACR,SAAAT,EACA,MAAOV,EAAUW,CAAK,EACtB,KAAOX,EAAUY,CAAI,GAClBhB,GAAQ,CAAE,KAAMA,EAAK,IAAII,CAAS,CAAE,GAJ/B,CAKR,MAAOO,GAAS,EAAIS,GACpB,MAAAF,CACF,EAAC,CACH,CACA,OAAOT,CACT,EAAG,CAAC,CAAC,EAGJ,KAAK,CAACzB,EAAGC,IAAMA,EAAE,MAAQD,EAAE,KAAK,EAGhC,OAAO,CAACwC,EAAOC,IAAW,CACzB,IAAMZ,EAAW,KAAK,UAAU,IAAIY,EAAO,QAAQ,EACnD,GAAI,OAAOZ,GAAa,YAAa,CACnC,IAAMH,EAAM,WAAYG,EACpBA,EAAS,OAAQ,SACjBA,EAAS,SACbW,EAAM,IAAId,EAAK,CAAC,GAAGc,EAAM,IAAId,CAAG,GAAK,CAAC,EAAGe,CAAM,CAAC,CAClD,CACA,OAAOD,CACT,EAAG,IAAI,GAA+B,EAGpCE,EACJ,GAAI,KAAK,QAAQ,YAAa,CAC5B,IAAMC,EAAS,KAAK,MAAM,MAAMC,GAAW,CACzC,QAAWrB,KAAUF,EACnBuB,EAAQ,KAAKrB,EAAO,KAAM,CACxB,OAAQ,CAAC,OAAO,EAChB,SAAU,KAAK,MAAM,SAAS,SAC9B,SAAU,KAAK,MAAM,SAAS,QAChC,CAAC,CACL,CAAC,EAGDmB,EAAcC,EAAO,OACjB,OAAO,KAAKA,EAAO,GAAG,UAAU,QAAQ,EACxC,CAAC,CACP,CAGA,OAAOJ,EAAA,CACL,MAAO,CAAC,GAAGf,EAAO,OAAO,CAAC,GACvB,OAAOkB,GAAgB,aAAe,CAAE,YAAAA,CAAY,EAI3D,OAAQG,EAAN,CACA,QAAQ,KAAK,kBAAkB1B,qCAAoC,CACrE,CAIF,MAAO,CAAE,MAAO,CAAC,CAAE,CACrB,CACF,EL3QA,IAAI2B,EAqBJ,SAAeC,GACbC,EACe,QAAAC,EAAA,sBACf,IAAIC,EAAO,UAGX,GAAI,OAAO,QAAW,aAAe,iBAAkB,O
AAQ,CAC7D,IAAMC,EAAS,SAAS,cAAiC,aAAa,EAChE,CAACC,CAAI,EAAID,EAAO,IAAI,MAAM,SAAS,EAGzCD,EAAOA,EAAK,QAAQ,KAAME,CAAI,CAChC,CAGA,IAAMC,EAAU,CAAC,EACjB,QAAWC,KAAQN,EAAO,KAAM,CAC9B,OAAQM,EAAM,CAGZ,IAAK,KACHD,EAAQ,KAAK,GAAGH,cAAiB,EACjC,MAGF,IAAK,KACL,IAAK,KACHG,EAAQ,KAAK,GAAGH,cAAiB,EACjC,KACJ,CAGII,IAAS,MACXD,EAAQ,KAAK,GAAGH,cAAiBI,UAAa,CAClD,CAGIN,EAAO,KAAK,OAAS,GACvBK,EAAQ,KAAK,GAAGH,yBAA4B,EAG1CG,EAAQ,SACV,MAAM,cACJ,GAAGH,oCACH,GAAGG,CACL,EACJ,GAaA,SAAsBE,GACpBC,EACwB,QAAAP,EAAA,sBACxB,OAAQO,EAAQ,KAAM,CAGpB,OACE,aAAMT,GAAqBS,EAAQ,KAAK,MAAM,EAC9CV,EAAQ,IAAIW,EAAOD,EAAQ,IAAI,EACxB,CACL,MACF,EAGF,OACE,MAAO,CACL,OACA,KAAMV,EAAQA,EAAM,OAAOU,EAAQ,IAAI,EAAI,CAAE,MAAO,CAAC,CAAE,CACzD,EAGF,QACE,MAAM,IAAI,UAAU,sBAAsB,CAC9C,CACF,GAOA,KAAK,KAAO,GAAAE,QAGZ,iBAAiB,UAAiBC,GAAMV,EAAA,wBACtC,YAAY,MAAMM,GAAQI,EAAG,IAAI,CAAC,CACpC,EAAC", + "names": ["require_lunr", "__commonJSMin", "exports", "module", "lunr", "config", "builder", "global", "message", "obj", "clone", "keys", "key", "val", "docRef", "fieldName", "stringValue", "s", "n", "fieldRef", "elements", "i", "other", "object", "a", "b", "intersection", "element", "posting", "documentCount", "documentsWithTerm", "x", "str", "metadata", "fn", "t", "len", "tokens", "sliceEnd", "sliceStart", "char", "sliceLength", "tokenMetadata", "label", "isRegistered", "serialised", "pipeline", "fnName", "fns", "existingFn", "newFn", "pos", "stackLength", "memo", "j", "result", "k", "token", "index", "start", "end", "pivotPoint", "pivotIndex", "insertIdx", "position", "sumOfSquares", "elementsLength", "otherVector", "dotProduct", "aLen", "bLen", "aVal", "bVal", "output", "step2list", "step3list", "c", "v", "C", "V", "mgr0", "meq1", "mgr1", "s_v", "re_mgr0", "re_mgr1", "re_meq1", "re_s_v", "re_1a", "re2_1a", "re_1b", "re2_1b", "re_1b_2", "re2_1b_2", "re3_1b_2", "re4_1b_2", "re_1c", "re_2", "re_3", "re_4", "re2_4", "re_5", "re_5_1", "re3_5", "porterStemmer", "w", "stem", "suffix", "firstch", "re", "re2", "re3", "re4", "fp", "stopWords", "words", "stopWord", "arr", "clause", "editDistance", "root", "stack", "frame", "noEditNode", "insertionNode", "substitutionNode", "charA", "charB", "transposeNode", "node", "final", "next", "edges", "edge", "labels", "qEdges", "qLen", "nEdges", "nLen", "q", "qEdge", "nEdge", "qNode", "word", "commonPrefix", "nextNode", "downTo", "childKey", "attrs", "queryString", "query", "parser", "matchingFields", "queryVectors", "termFieldCache", "requiredMatches", "prohibitedMatches", "terms", "clauseMatches", "m", "term", "termTokenSet", "expandedTerms", "field", "expandedTerm", "termIndex", "fieldPosting", "matchingDocumentRefs", "termField", "matchingDocumentsSet", "l", "matchingDocumentRef", "matchingFieldRef", "fieldMatch", "allRequiredMatches", "allProhibitedMatches", "matchingFieldRefs", "results", "matches", "fieldVector", "score", "docMatch", "match", "invertedIndex", "fieldVectors", "ref", "serializedIndex", "serializedVectors", "serializedInvertedIndex", "tokenSetBuilder", "tuple", "attributes", "number", "doc", "fields", "extractor", "fieldTerms", "metadataKey", "fieldRefs", "numberOfFields", "accumulator", "documentsWithField", "fieldRefsLength", "termIdfCache", "fieldLength", "termFrequencies", "termsLength", "fieldBoost", "docBoost", "tf", "idf", "scoreWithPrecision", "args", "clonedMetadata", "metadataKeys", "otherMatchData", "allFields", "options", "state", "subSlices", "type", "charCode", "lexer", "lexeme", "completedClause", "errorMessage", "nextLexeme", "possibleFields", "f", "boost", "factory", "require_escape_html", 
"__commonJSMin", "exports", "module", "matchHtmlRegExp", "escapeHtml", "string", "str", "match", "escape", "html", "index", "lastIndex", "import_lunr", "obj", "data", "key", "x", "y", "nodes", "parent", "i", "node", "import_escape_html", "setupSearchDocumentMap", "docs", "documents", "parents", "doc", "path", "hash", "location", "title", "tags", "text", "escapeHTML", "parent", "__spreadValues", "import_escape_html", "setupSearchHighlighter", "config", "escape", "separator", "highlight", "_", "data", "term", "query", "match", "value", "escapeHTML", "parseSearchQuery", "value", "query", "getSearchQueryTerms", "terms", "_a", "clauses", "result", "t", "clause", "difference", "a", "b", "x", "y", "value", "Search", "config", "docs", "options", "setupSearchDocumentMap", "setupSearchHighlighter", "fns", "lang", "language", "fn", "doc", "tags", "list", "tag", "query", "highlight", "clauses", "parseSearchQuery", "clause", "groups", "item", "ref", "score", "matchData", "document", "location", "title", "text", "parent", "terms", "getSearchQueryTerms", "boost", "t", "__spreadProps", "__spreadValues", "items", "result", "suggestions", "titles", "builder", "e", "index", "setupSearchLanguages", "config", "__async", "base", "worker", "path", "scripts", "lang", "handler", "message", "Search", "lunr", "ev"] +} diff --git a/assets/launch-terminal.png b/assets/launch-terminal.png new file mode 100644 index 0000000..85f0644 Binary files /dev/null and b/assets/launch-terminal.png differ diff --git a/assets/redlining/georectified-thumbnail.png b/assets/redlining/georectified-thumbnail.png new file mode 100644 index 0000000..474768e Binary files /dev/null and b/assets/redlining/georectified-thumbnail.png differ diff --git a/assets/redlining/holc-scan-thumbnail.jpg b/assets/redlining/holc-scan-thumbnail.jpg new file mode 100644 index 0000000..b621d5f Binary files /dev/null and b/assets/redlining/holc-scan-thumbnail.jpg differ diff --git a/assets/redlining/redlining.png b/assets/redlining/redlining.png new file mode 100644 index 0000000..d229eef Binary files /dev/null and b/assets/redlining/redlining.png differ diff --git a/assets/select-j-esiil.png b/assets/select-j-esiil.png new file mode 100644 index 0000000..1665f2d Binary files /dev/null and b/assets/select-j-esiil.png differ diff --git a/assets/stylesheets/extra.0d2c79a8.min.css b/assets/stylesheets/extra.0d2c79a8.min.css new file mode 100644 index 0000000..6e23ef1 --- /dev/null +++ b/assets/stylesheets/extra.0d2c79a8.min.css @@ -0,0 +1 @@ +@charset "UTF-8";@keyframes ᴴₒᴴₒᴴₒ{0%{transform:translate3d(var(--left-start),0,0)}to{transform:translate3d(var(--left-end),110vh,0)}}.ᴴₒᴴₒᴴₒ{--size:1vw;background:#fff;border:1px solid #ddd;border-radius:50%;cursor:pointer;height:var(--size);opacity:1;position:fixed;top:-5vh;transition:opacity 1s;width:var(--size);z-index:10}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):first-child{--size:0.4vw;--left-start:7vw;--left-end:-8vw;animation:ᴴₒᴴₒᴴₒ 12s linear infinite both;animation-delay:-4s;left:24vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(2){--size:0.4vw;--left-start:9vw;--left-end:0vw;animation:ᴴₒᴴₒᴴₒ 18s linear infinite both;animation-delay:-2s;left:68vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(3){--size:0.4vw;--left-start:1vw;--left-end:7vw;animation:ᴴₒᴴₒᴴₒ 11s linear infinite both;animation-delay:-6s;left:10vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(4){--size:0.5vw;--left-start:8vw;--left-end:10vw;animation:ᴴₒᴴₒᴴₒ 18s linear infinite 
both;animation-delay:-8s;left:63vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(5){--size:0.5vw;--left-start:5vw;--left-end:9vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-4s;left:58vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(6){--size:0.1vw;--left-start:3vw;--left-end:10vw;animation:ᴴₒᴴₒᴴₒ 14s linear infinite both;animation-delay:-1s;left:55vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(7){--size:0.2vw;--left-start:-2vw;--left-end:6vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-7s;left:50vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(8){--size:0.3vw;--left-start:7vw;--left-end:7vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-3s;left:65vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(9){--size:0.2vw;--left-start:4vw;--left-end:5vw;animation:ᴴₒᴴₒᴴₒ 13s linear infinite both;animation-delay:-2s;left:1vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(10){--size:0.3vw;--left-start:2vw;--left-end:-3vw;animation:ᴴₒᴴₒᴴₒ 12s linear infinite both;animation-delay:-10s;left:92vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(11){--size:0.2vw;--left-start:1vw;--left-end:8vw;animation:ᴴₒᴴₒᴴₒ 13s linear infinite both;animation-delay:-6s;left:5vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(12){--size:0.4vw;--left-start:9vw;--left-end:1vw;animation:ᴴₒᴴₒᴴₒ 18s linear infinite both;animation-delay:-3s;left:77vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(13){--size:0.1vw;--left-start:-3vw;--left-end:3vw;animation:ᴴₒᴴₒᴴₒ 18s linear infinite both;animation-delay:-7s;left:93vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(14){--size:0.5vw;--left-start:0vw;--left-end:-5vw;animation:ᴴₒᴴₒᴴₒ 12s linear infinite both;animation-delay:-4s;left:35vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(15){--size:0.1vw;--left-start:-9vw;--left-end:4vw;animation:ᴴₒᴴₒᴴₒ 20s linear infinite both;animation-delay:-6s;left:15vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(16){--size:0.1vw;--left-start:1vw;--left-end:9vw;animation:ᴴₒᴴₒᴴₒ 17s linear infinite both;animation-delay:-6s;left:100vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(17){--size:0.1vw;--left-start:1vw;--left-end:0vw;animation:ᴴₒᴴₒᴴₒ 17s linear infinite both;animation-delay:-1s;left:44vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(18){--size:0.4vw;--left-start:-9vw;--left-end:-9vw;animation:ᴴₒᴴₒᴴₒ 16s linear infinite both;animation-delay:-6s;left:69vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(19){--size:0.2vw;--left-start:3vw;--left-end:-8vw;animation:ᴴₒᴴₒᴴₒ 14s linear infinite both;animation-delay:-1s;left:32vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(20){--size:0.1vw;--left-start:-7vw;--left-end:8vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-8s;left:59vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(21){--size:0.2vw;--left-start:-1vw;--left-end:-8vw;animation:ᴴₒᴴₒᴴₒ 13s linear infinite both;animation-delay:-6s;left:96vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(22){--size:0.2vw;--left-start:9vw;--left-end:1vw;animation:ᴴₒᴴₒᴴₒ 11s linear infinite both;animation-delay:-7s;left:78vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(23){--size:0.4vw;--left-start:5vw;--left-end:-2vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-10s;left:29vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(24){--size:0.1vw;--left-start:-4vw;--left-end:1vw;animation:ᴴₒᴴₒᴴₒ 20s linear infinite both;animation-delay:-7s;left:83vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(25){--size:0.3vw;--left-start:-1vw;--left-end:2vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-8s;left:95vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(26){--size:0.5vw;--left-start:-3vw;--left-end:-6vw;animation:ᴴₒᴴₒᴴₒ 18s linear infinite 
both;animation-delay:-8s;left:74vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(27){--size:0.5vw;--left-start:9vw;--left-end:-9vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-2s;left:94vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(28){--size:0.1vw;--left-start:0vw;--left-end:-4vw;animation:ᴴₒᴴₒᴴₒ 15s linear infinite both;animation-delay:-4s;left:95vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(29){--size:0.5vw;--left-start:8vw;--left-end:4vw;animation:ᴴₒᴴₒᴴₒ 11s linear infinite both;animation-delay:-3s;left:42vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(30){--size:0.4vw;--left-start:-5vw;--left-end:0vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-10s;left:8vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(31){--size:0.4vw;--left-start:-7vw;--left-end:3vw;animation:ᴴₒᴴₒᴴₒ 11s linear infinite both;animation-delay:-4s;left:77vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(32){--size:0.4vw;--left-start:8vw;--left-end:-5vw;animation:ᴴₒᴴₒᴴₒ 15s linear infinite both;animation-delay:-3s;left:80vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(33){--size:0.2vw;--left-start:-3vw;--left-end:8vw;animation:ᴴₒᴴₒᴴₒ 20s linear infinite both;animation-delay:-6s;left:15vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(34){--size:0.5vw;--left-start:5vw;--left-end:1vw;animation:ᴴₒᴴₒᴴₒ 13s linear infinite both;animation-delay:-1s;left:91vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(35){--size:0.3vw;--left-start:-6vw;--left-end:-5vw;animation:ᴴₒᴴₒᴴₒ 11s linear infinite both;animation-delay:-5s;left:93vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(36){--size:0.1vw;--left-start:10vw;--left-end:10vw;animation:ᴴₒᴴₒᴴₒ 13s linear infinite both;animation-delay:-10s;left:59vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(37){--size:0.3vw;--left-start:4vw;--left-end:6vw;animation:ᴴₒᴴₒᴴₒ 14s linear infinite both;animation-delay:-8s;left:35vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(38){--size:0.5vw;--left-start:8vw;--left-end:-3vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-6s;left:6vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(39){--size:0.2vw;--left-start:-6vw;--left-end:-2vw;animation:ᴴₒᴴₒᴴₒ 14s linear infinite both;animation-delay:-7s;left:58vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(40){--size:0.4vw;--left-start:3vw;--left-end:-5vw;animation:ᴴₒᴴₒᴴₒ 13s linear infinite both;animation-delay:-4s;left:15vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(41){--size:0.1vw;--left-start:2vw;--left-end:-7vw;animation:ᴴₒᴴₒᴴₒ 17s linear infinite both;animation-delay:-7s;left:24vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(42){--size:0.3vw;--left-start:8vw;--left-end:3vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-9s;left:36vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(43){--size:0.2vw;--left-start:-9vw;--left-end:-3vw;animation:ᴴₒᴴₒᴴₒ 13s linear infinite both;animation-delay:-10s;left:23vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(44){--size:0.1vw;--left-start:4vw;--left-end:-6vw;animation:ᴴₒᴴₒᴴₒ 16s linear infinite both;animation-delay:-6s;left:9vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(45){--size:0.1vw;--left-start:-3vw;--left-end:-5vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-5s;left:62vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(46){--size:0.3vw;--left-start:0vw;--left-end:2vw;animation:ᴴₒᴴₒᴴₒ 20s linear infinite both;animation-delay:-4s;left:1vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(47){--size:0.4vw;--left-start:8vw;--left-end:-4vw;animation:ᴴₒᴴₒᴴₒ 14s linear infinite both;animation-delay:-1s;left:76vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(48){--size:0.2vw;--left-start:5vw;--left-end:-3vw;animation:ᴴₒᴴₒᴴₒ 15s linear infinite 
both;animation-delay:-5s;left:19vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(49){--size:0.4vw;--left-start:1vw;--left-end:-1vw;animation:ᴴₒᴴₒᴴₒ 18s linear infinite both;animation-delay:-4s;left:72vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(50){--size:0.4vw;--left-start:8vw;--left-end:-6vw;animation:ᴴₒᴴₒᴴₒ 16s linear infinite both;animation-delay:-10s;left:25vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(51){--size:0.1vw;--left-start:-5vw;--left-end:-8vw;animation:ᴴₒᴴₒᴴₒ 17s linear infinite both;animation-delay:-9s;left:71vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(52){--size:0.4vw;--left-start:-4vw;--left-end:9vw;animation:ᴴₒᴴₒᴴₒ 15s linear infinite both;animation-delay:-7s;left:30vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(53){--size:0.5vw;--left-start:-1vw;--left-end:-8vw;animation:ᴴₒᴴₒᴴₒ 15s linear infinite both;animation-delay:-4s;left:37vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(54){--size:0.4vw;--left-start:-1vw;--left-end:-1vw;animation:ᴴₒᴴₒᴴₒ 12s linear infinite both;animation-delay:-9s;left:48vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(55){--size:0.5vw;--left-start:8vw;--left-end:6vw;animation:ᴴₒᴴₒᴴₒ 20s linear infinite both;animation-delay:-6s;left:65vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(56){--size:0.4vw;--left-start:9vw;--left-end:5vw;animation:ᴴₒᴴₒᴴₒ 18s linear infinite both;animation-delay:-6s;left:53vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(57){--size:0.4vw;--left-start:3vw;--left-end:-9vw;animation:ᴴₒᴴₒᴴₒ 12s linear infinite both;animation-delay:-1s;left:76vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(58){--size:0.2vw;--left-start:-7vw;--left-end:0vw;animation:ᴴₒᴴₒᴴₒ 16s linear infinite both;animation-delay:-9s;left:54vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(59){--size:0.1vw;--left-start:-9vw;--left-end:-2vw;animation:ᴴₒᴴₒᴴₒ 20s linear infinite both;animation-delay:-1s;left:66vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(60){--size:0.3vw;--left-start:-6vw;--left-end:2vw;animation:ᴴₒᴴₒᴴₒ 11s linear infinite both;animation-delay:-7s;left:91vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(61){--size:0.4vw;--left-start:6vw;--left-end:-8vw;animation:ᴴₒᴴₒᴴₒ 14s linear infinite both;animation-delay:-7s;left:35vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(62){--size:0.4vw;--left-start:-6vw;--left-end:2vw;animation:ᴴₒᴴₒᴴₒ 16s linear infinite both;animation-delay:-3s;left:86vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(63){--size:0.5vw;--left-start:-7vw;--left-end:7vw;animation:ᴴₒᴴₒᴴₒ 20s linear infinite both;animation-delay:-5s;left:86vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(64){--size:0.2vw;--left-start:-9vw;--left-end:1vw;animation:ᴴₒᴴₒᴴₒ 13s linear infinite both;animation-delay:-5s;left:53vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(65){--size:0.2vw;--left-start:-2vw;--left-end:3vw;animation:ᴴₒᴴₒᴴₒ 11s linear infinite both;animation-delay:-6s;left:56vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(66){--size:0.5vw;--left-start:1vw;--left-end:8vw;animation:ᴴₒᴴₒᴴₒ 17s linear infinite both;animation-delay:-5s;left:58vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(67){--size:0.5vw;--left-start:2vw;--left-end:9vw;animation:ᴴₒᴴₒᴴₒ 15s linear infinite both;animation-delay:-5s;left:14vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(68){--size:0.3vw;--left-start:-1vw;--left-end:6vw;animation:ᴴₒᴴₒᴴₒ 14s linear infinite both;animation-delay:-1s;left:100vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(69){--size:0.2vw;--left-start:9vw;--left-end:-2vw;animation:ᴴₒᴴₒᴴₒ 15s linear infinite both;animation-delay:-7s;left:8vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(70){--size:0.4vw;--left-start:-5vw;--left-end:8vw;animation:ᴴₒᴴₒᴴₒ 11s linear infinite 
both;animation-delay:-4s;left:82vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(71){--size:0.4vw;--left-start:3vw;--left-end:-7vw;animation:ᴴₒᴴₒᴴₒ 13s linear infinite both;animation-delay:-6s;left:26vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(72){--size:0.2vw;--left-start:-2vw;--left-end:-3vw;animation:ᴴₒᴴₒᴴₒ 15s linear infinite both;animation-delay:-3s;left:24vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(73){--size:0.3vw;--left-start:-7vw;--left-end:-8vw;animation:ᴴₒᴴₒᴴₒ 16s linear infinite both;animation-delay:-2s;left:2vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(74){--size:0.4vw;--left-start:-9vw;--left-end:-3vw;animation:ᴴₒᴴₒᴴₒ 14s linear infinite both;animation-delay:-10s;left:94vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(75){--size:0.3vw;--left-start:7vw;--left-end:2vw;animation:ᴴₒᴴₒᴴₒ 17s linear infinite both;animation-delay:-2s;left:26vw}.ᴴₒᴴₒᴴₒ:nth-child(5n){filter:blur(2px)}.ᴴₒᴴₒᴴₒ--ᵍₒᵗ꜀ᴴₐ{opacity:0}.ᴴₒᴴₒᴴₒ__button{display:block}.ᴴₒᴴₒᴴₒ__button:after{background-color:currentcolor;content:"";display:block;height:24px;margin:0 auto;-webkit-mask-image:url('data:image/svg+xml;charset=utf-8,');mask-image:url('data:image/svg+xml;charset=utf-8,');-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;width:24px}.ᴴₒᴴₒᴴₒ__button[hidden]:after{-webkit-mask-image:url('data:image/svg+xml;charset=utf-8,');mask-image:url('data:image/svg+xml;charset=utf-8,')} \ No newline at end of file diff --git a/assets/stylesheets/extra.0d2c79a8.min.css.map b/assets/stylesheets/extra.0d2c79a8.min.css.map new file mode 100644 index 0000000..cd262c0 --- /dev/null +++ b/assets/stylesheets/extra.0d2c79a8.min.css.map @@ -0,0 +1 @@ +{"version":3,"sources":["src/assets/stylesheets/extra.scss","../../../src/assets/stylesheets/extra.scss"],"names":[],"mappings":"AA6BA,gBCpBA,CDoBA,kBACE,GACE,4CC1BF,CD4BA,GACE,8CC1BF,CACF,CDkCA,QACE,UAAA,CAOA,eAAA,CACA,qBAAA,CACA,iBAAA,CACA,cAAA,CAJA,kBAAA,CAMA,SAAA,CAVA,cAAA,CACA,QAAA,CAQA,qBAAA,CANA,iBAAA,CADA,UCzBF,CDqCI,yCACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SClCN,CD6BI,0CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC1BN,CDqBI,0CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SClBN,CDaI,0CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCVN,CDKI,0CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCFN,CDHI,0CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCMN,CDXI,0CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCcN,CDnBI,0CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsBN,CD3BI,0CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,QC8BN,CDnCI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,oBAAA,CAFA,SCsCN,CD3CI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,QC8CN,CDnDI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsDN,CD3DI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8DN,CDnEI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsEN,CD3EI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8EN,CDnFI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,UCsFN,CD3FI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8FN,CDnGI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsGN,CD3GI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8GN,CDnHI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsHN,CD3HI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8HN,CDnII,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,C
AGA,yCAAA,CACA,mBAAA,CAFA,SCsIN,CD3II,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,oBAAA,CAFA,SC8IN,CDnJI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsJN,CD3JI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8JN,CDnKI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsKN,CD3KI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8KN,CDnLI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsLN,CD3LI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8LN,CDnMI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,oBAAA,CAFA,QCsMN,CD3MI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8MN,CDnNI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsNN,CD3NI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8NN,CDnOI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsON,CD3OI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8ON,CDnPI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,oBAAA,CAFA,SCsPN,CD3PI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8PN,CDnQI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,QCsQN,CD3QI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8QN,CDnRI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsRN,CD3RI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8RN,CDnSI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsSN,CD3SI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,oBAAA,CAFA,SC8SN,CDnTI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,QCsTN,CD3TI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8TN,CDnUI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,QCsUN,CD3UI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8UN,CDnVI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsVN,CD3VI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8VN,CDnWI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,oBAAA,CAFA,SCsWN,CD3WI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8WN,CDnXI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsXN,CD3XI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8XN,CDnYI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsYN,CD3YI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8YN,CDnZI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsZN,CD3ZI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8ZN,CDnaI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsaN,CD3aI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8aN,CDnbI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsbN,CD3bI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8bN,CDncI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCscN,CD3cI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8cN,CDndI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsdN,CD3dI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8dN,CDneI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCseN,CD3eI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8eN,CDnfI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,UCsfN,CD3fI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,QC8fN,CDngBI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsgBN,CD3gBI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8gBN,CDnhBI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,C
AGA,yCAAA,CACA,mBAAA,CAFA,SCshBN,CD3hBI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,QC8hBN,CDniBI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,oBAAA,CAFA,SCsiBN,CD3iBI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8iBN,CDviBE,sBACE,gBCyiBJ,CDriBE,gBACE,SCuiBJ,CDniBE,gBACE,aCqiBJ,CDjiBE,sBAKE,6BAAA,CAKA,UAAA,CATA,aAAA,CAEA,WAAA,CACA,aAAA,CAEA,ooBAAA,CAAA,4nBAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CAPA,UC2iBJ,CD/hBE,8BACE,qqBAAA,CAAA,6pBCiiBJ","file":"extra.css"} \ No newline at end of file diff --git a/assets/stylesheets/main.975780f9.min.css b/assets/stylesheets/main.975780f9.min.css new file mode 100644 index 0000000..dac48ba --- /dev/null +++ b/assets/stylesheets/main.975780f9.min.css @@ -0,0 +1 @@ +@charset "UTF-8";html{-webkit-text-size-adjust:none;-moz-text-size-adjust:none;text-size-adjust:none;box-sizing:border-box}*,:after,:before{box-sizing:inherit}@media (prefers-reduced-motion){*,:after,:before{transition:none!important}}body{margin:0}a,button,input,label{-webkit-tap-highlight-color:transparent}a{color:inherit;text-decoration:none}hr{border:0;box-sizing:initial;display:block;height:.05rem;overflow:visible;padding:0}small{font-size:80%}sub,sup{line-height:1em}img{border-style:none}table{border-collapse:initial;border-spacing:0}td,th{font-weight:400;vertical-align:top}button{background:#0000;border:0;font-family:inherit;font-size:inherit;margin:0;padding:0}input{border:0;outline:none}:root{--md-primary-fg-color:#4051b5;--md-primary-fg-color--light:#5d6cc0;--md-primary-fg-color--dark:#303fa1;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3;--md-accent-fg-color:#526cfe;--md-accent-fg-color--transparent:#526cfe1a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}:root,[data-md-color-scheme=default]{--md-default-fg-color:#000000de;--md-default-fg-color--light:#0000008a;--md-default-fg-color--lighter:#00000052;--md-default-fg-color--lightest:#00000012;--md-default-bg-color:#fff;--md-default-bg-color--light:#ffffffb3;--md-default-bg-color--lighter:#ffffff4d;--md-default-bg-color--lightest:#ffffff1f;--md-code-fg-color:#36464e;--md-code-bg-color:#f5f5f5;--md-code-hl-color:#ffff0080;--md-code-hl-number-color:#d52a2a;--md-code-hl-special-color:#db1457;--md-code-hl-function-color:#a846b9;--md-code-hl-constant-color:#6e59d9;--md-code-hl-keyword-color:#3f6ec6;--md-code-hl-string-color:#1c7d4d;--md-code-hl-name-color:var(--md-code-fg-color);--md-code-hl-operator-color:var(--md-default-fg-color--light);--md-code-hl-punctuation-color:var(--md-default-fg-color--light);--md-code-hl-comment-color:var(--md-default-fg-color--light);--md-code-hl-generic-color:var(--md-default-fg-color--light);--md-code-hl-variable-color:var(--md-default-fg-color--light);--md-typeset-color:var(--md-default-fg-color);--md-typeset-a-color:var(--md-primary-fg-color);--md-typeset-mark-color:#ffff0080;--md-typeset-del-color:#f5503d26;--md-typeset-ins-color:#0bd57026;--md-typeset-kbd-color:#fafafa;--md-typeset-kbd-accent-color:#fff;--md-typeset-kbd-border-color:#b8b8b8;--md-typeset-table-color:#0000001f;--md-admonition-fg-color:var(--md-default-fg-color);--md-admonition-bg-color:var(--md-default-bg-color);--md-footer-fg-color:#fff;--md-footer-fg-color--light:#ffffffb3;--md-footer-fg-color--lighter:#ffffff4d;--md-footer-bg-color:#000000de;--md-footer-bg-color--dark:#00000052;--md-shadow-z1:0 0.2rem 0.5rem #0000000d,0 0 0.05rem #0000001a;--md-shadow-z2:0 0.2rem 0.5rem #0000001a,0 0 0.05rem #00000040;--md-shadow-z3:0 0.2rem 0.5rem 
#0003,0 0 0.05rem #00000059}.md-icon svg{fill:currentcolor;display:block;height:1.2rem;width:1.2rem}body{-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale;--md-text-font-family:var(--md-text-font,_),-apple-system,BlinkMacSystemFont,Helvetica,Arial,sans-serif;--md-code-font-family:var(--md-code-font,_),SFMono-Regular,Consolas,Menlo,monospace}body,input{font-feature-settings:"kern","liga";font-family:var(--md-text-font-family)}body,code,input,kbd,pre{color:var(--md-typeset-color)}code,kbd,pre{font-feature-settings:"kern";font-family:var(--md-code-font-family)}:root{--md-typeset-table-sort-icon:url('data:image/svg+xml;charset=utf-8,');--md-typeset-table-sort-icon--asc:url('data:image/svg+xml;charset=utf-8,');--md-typeset-table-sort-icon--desc:url('data:image/svg+xml;charset=utf-8,')}.md-typeset{-webkit-print-color-adjust:exact;color-adjust:exact;font-size:.8rem;line-height:1.6}@media print{.md-typeset{font-size:.68rem}}.md-typeset blockquote,.md-typeset dl,.md-typeset figure,.md-typeset ol,.md-typeset pre,.md-typeset ul{margin-bottom:1em;margin-top:1em}.md-typeset h1{color:var(--md-default-fg-color--light);font-size:2em;line-height:1.3;margin:0 0 1.25em}.md-typeset h1,.md-typeset h2{font-weight:300;letter-spacing:-.01em}.md-typeset h2{font-size:1.5625em;line-height:1.4;margin:1.6em 0 .64em}.md-typeset h3{font-size:1.25em;font-weight:400;letter-spacing:-.01em;line-height:1.5;margin:1.6em 0 .8em}.md-typeset h2+h3{margin-top:.8em}.md-typeset h4{font-weight:700;letter-spacing:-.01em;margin:1em 0}.md-typeset h5,.md-typeset h6{color:var(--md-default-fg-color--light);font-size:.8em;font-weight:700;letter-spacing:-.01em;margin:1.25em 0}.md-typeset h5{text-transform:uppercase}.md-typeset hr{border-bottom:.05rem solid var(--md-default-fg-color--lightest);display:flow-root;margin:1.5em 0}.md-typeset a{color:var(--md-typeset-a-color);word-break:break-word}.md-typeset a,.md-typeset a:before{transition:color 125ms}.md-typeset a:focus,.md-typeset a:hover{color:var(--md-accent-fg-color)}.md-typeset a:focus code,.md-typeset a:hover code{background-color:var(--md-accent-fg-color--transparent)}.md-typeset a code{color:currentcolor;transition:background-color 125ms}.md-typeset a.focus-visible{outline-color:var(--md-accent-fg-color);outline-offset:.2rem}.md-typeset code,.md-typeset kbd,.md-typeset pre{color:var(--md-code-fg-color);direction:ltr;font-variant-ligatures:none}@media print{.md-typeset code,.md-typeset kbd,.md-typeset pre{white-space:pre-wrap}}.md-typeset code{background-color:var(--md-code-bg-color);border-radius:.1rem;-webkit-box-decoration-break:clone;box-decoration-break:clone;font-size:.85em;padding:0 .2941176471em;word-break:break-word}.md-typeset code:not(.focus-visible){-webkit-tap-highlight-color:transparent;outline:none}.md-typeset pre{display:flow-root;line-height:1.4;position:relative}.md-typeset pre>code{-webkit-box-decoration-break:slice;box-decoration-break:slice;box-shadow:none;display:block;margin:0;outline-color:var(--md-accent-fg-color);overflow:auto;padding:.7720588235em 1.1764705882em;scrollbar-color:var(--md-default-fg-color--lighter) #0000;scrollbar-width:thin;touch-action:auto;word-break:normal}.md-typeset pre>code:hover{scrollbar-color:var(--md-accent-fg-color) #0000}.md-typeset pre>code::-webkit-scrollbar{height:.2rem;width:.2rem}.md-typeset pre>code::-webkit-scrollbar-thumb{background-color:var(--md-default-fg-color--lighter)}.md-typeset pre>code::-webkit-scrollbar-thumb:hover{background-color:var(--md-accent-fg-color)}.md-typeset 
kbd{background-color:var(--md-typeset-kbd-color);border-radius:.1rem;box-shadow:0 .1rem 0 .05rem var(--md-typeset-kbd-border-color),0 .1rem 0 var(--md-typeset-kbd-border-color),0 -.1rem .2rem var(--md-typeset-kbd-accent-color) inset;color:var(--md-default-fg-color);display:inline-block;font-size:.75em;padding:0 .6666666667em;vertical-align:text-top;word-break:break-word}.md-typeset mark{background-color:var(--md-typeset-mark-color);-webkit-box-decoration-break:clone;box-decoration-break:clone;color:inherit;word-break:break-word}.md-typeset abbr{border-bottom:.05rem dotted var(--md-default-fg-color--light);cursor:help;text-decoration:none}@media (hover:none){.md-typeset abbr{position:relative}.md-typeset abbr[title]:-webkit-any(:focus,:hover):after{background-color:var(--md-default-fg-color);border-radius:.1rem;box-shadow:var(--md-shadow-z3);color:var(--md-default-bg-color);content:attr(title);display:inline-block;font-size:.7rem;margin-top:2em;max-width:80%;min-width:-webkit-max-content;min-width:max-content;padding:.2rem .3rem;position:absolute;width:auto}.md-typeset abbr[title]:-moz-any(:focus,:hover):after{background-color:var(--md-default-fg-color);border-radius:.1rem;box-shadow:var(--md-shadow-z3);color:var(--md-default-bg-color);content:attr(title);display:inline-block;font-size:.7rem;margin-top:2em;max-width:80%;min-width:-moz-max-content;min-width:max-content;padding:.2rem .3rem;position:absolute;width:auto}[dir=ltr] .md-typeset abbr[title]:-webkit-any(:focus,:hover):after{left:0}[dir=ltr] .md-typeset abbr[title]:-moz-any(:focus,:hover):after{left:0}[dir=ltr] .md-typeset abbr[title]:is(:focus,:hover):after{left:0}[dir=rtl] .md-typeset abbr[title]:-webkit-any(:focus,:hover):after{right:0}[dir=rtl] .md-typeset abbr[title]:-moz-any(:focus,:hover):after{right:0}[dir=rtl] .md-typeset abbr[title]:is(:focus,:hover):after{right:0}.md-typeset abbr[title]:is(:focus,:hover):after{background-color:var(--md-default-fg-color);border-radius:.1rem;box-shadow:var(--md-shadow-z3);color:var(--md-default-bg-color);content:attr(title);display:inline-block;font-size:.7rem;margin-top:2em;max-width:80%;min-width:-webkit-max-content;min-width:-moz-max-content;min-width:max-content;padding:.2rem .3rem;position:absolute;width:auto}}.md-typeset small{opacity:.75}[dir=ltr] .md-typeset sub,[dir=ltr] .md-typeset sup{margin-left:.078125em}[dir=rtl] .md-typeset sub,[dir=rtl] .md-typeset sup{margin-right:.078125em}[dir=ltr] .md-typeset blockquote{padding-left:.6rem}[dir=rtl] .md-typeset blockquote{padding-right:.6rem}[dir=ltr] .md-typeset blockquote{border-left:.2rem solid var(--md-default-fg-color--lighter)}[dir=rtl] .md-typeset blockquote{border-right:.2rem solid var(--md-default-fg-color--lighter)}.md-typeset blockquote{color:var(--md-default-fg-color--light);margin-left:0;margin-right:0}.md-typeset ul{list-style-type:disc}[dir=ltr] .md-typeset ol,[dir=ltr] .md-typeset ul{margin-left:.625em}[dir=rtl] .md-typeset ol,[dir=rtl] .md-typeset ul{margin-right:.625em}.md-typeset ol,.md-typeset ul{padding:0}.md-typeset ol:not([hidden]),.md-typeset ul:not([hidden]){display:flow-root}.md-typeset ol ol,.md-typeset ul ol{list-style-type:lower-alpha}.md-typeset ol ol ol,.md-typeset ul ol ol{list-style-type:lower-roman}[dir=ltr] .md-typeset ol li,[dir=ltr] .md-typeset ul li{margin-left:1.25em}[dir=rtl] .md-typeset ol li,[dir=rtl] .md-typeset ul li{margin-right:1.25em}.md-typeset ol li,.md-typeset ul li{margin-bottom:.5em}.md-typeset ol li blockquote,.md-typeset ol li p,.md-typeset ul li blockquote,.md-typeset ul li 
p{margin:.5em 0}.md-typeset ol li:last-child,.md-typeset ul li:last-child{margin-bottom:0}.md-typeset ol li :-webkit-any(ul,ol),.md-typeset ul li :-webkit-any(ul,ol){margin-bottom:.5em;margin-top:.5em}.md-typeset ol li :-moz-any(ul,ol),.md-typeset ul li :-moz-any(ul,ol){margin-bottom:.5em;margin-top:.5em}[dir=ltr] .md-typeset ol li :-webkit-any(ul,ol),[dir=ltr] .md-typeset ul li :-webkit-any(ul,ol){margin-left:.625em}[dir=ltr] .md-typeset ol li :-moz-any(ul,ol),[dir=ltr] .md-typeset ul li :-moz-any(ul,ol){margin-left:.625em}[dir=ltr] .md-typeset ol li :is(ul,ol),[dir=ltr] .md-typeset ul li :is(ul,ol){margin-left:.625em}[dir=rtl] .md-typeset ol li :-webkit-any(ul,ol),[dir=rtl] .md-typeset ul li :-webkit-any(ul,ol){margin-right:.625em}[dir=rtl] .md-typeset ol li :-moz-any(ul,ol),[dir=rtl] .md-typeset ul li :-moz-any(ul,ol){margin-right:.625em}[dir=rtl] .md-typeset ol li :is(ul,ol),[dir=rtl] .md-typeset ul li :is(ul,ol){margin-right:.625em}.md-typeset ol li :is(ul,ol),.md-typeset ul li :is(ul,ol){margin-bottom:.5em;margin-top:.5em}[dir=ltr] .md-typeset dd{margin-left:1.875em}[dir=rtl] .md-typeset dd{margin-right:1.875em}.md-typeset dd{margin-bottom:1.5em;margin-top:1em}.md-typeset img,.md-typeset svg,.md-typeset video{height:auto;max-width:100%}.md-typeset img[align=left]{margin:1em 1em 1em 0}.md-typeset img[align=right]{margin:1em 0 1em 1em}.md-typeset img[align]:only-child{margin-top:0}.md-typeset img[src$="#gh-dark-mode-only"],.md-typeset img[src$="#only-dark"]{display:none}.md-typeset figure{display:flow-root;margin:1em auto;max-width:100%;text-align:center;width:-webkit-fit-content;width:-moz-fit-content;width:fit-content}.md-typeset figure img{display:block}.md-typeset figcaption{font-style:italic;margin:1em auto;max-width:24rem}.md-typeset iframe{max-width:100%}.md-typeset table:not([class]){background-color:var(--md-default-bg-color);border:.05rem solid var(--md-typeset-table-color);border-radius:.1rem;display:inline-block;font-size:.64rem;max-width:100%;overflow:auto;touch-action:auto}@media print{.md-typeset table:not([class]){display:table}}.md-typeset table:not([class])+*{margin-top:1.5em}.md-typeset table:not([class]) :-webkit-any(th,td)>:first-child{margin-top:0}.md-typeset table:not([class]) :-moz-any(th,td)>:first-child{margin-top:0}.md-typeset table:not([class]) :is(th,td)>:first-child{margin-top:0}.md-typeset table:not([class]) :-webkit-any(th,td)>:last-child{margin-bottom:0}.md-typeset table:not([class]) :-moz-any(th,td)>:last-child{margin-bottom:0}.md-typeset table:not([class]) :is(th,td)>:last-child{margin-bottom:0}.md-typeset table:not([class]) :-webkit-any(th,td):not([align]){text-align:left}.md-typeset table:not([class]) :-moz-any(th,td):not([align]){text-align:left}.md-typeset table:not([class]) :is(th,td):not([align]){text-align:left}[dir=rtl] .md-typeset table:not([class]) :-webkit-any(th,td):not([align]){text-align:right}[dir=rtl] .md-typeset table:not([class]) :-moz-any(th,td):not([align]){text-align:right}[dir=rtl] .md-typeset table:not([class]) :is(th,td):not([align]){text-align:right}.md-typeset table:not([class]) th{font-weight:700;min-width:5rem;padding:.9375em 1.25em;vertical-align:top}.md-typeset table:not([class]) td{border-top:.05rem solid var(--md-typeset-table-color);padding:.9375em 1.25em;vertical-align:top}.md-typeset table:not([class]) tbody tr{transition:background-color 125ms}.md-typeset table:not([class]) tbody tr:hover{background-color:rgba(0,0,0,.035);box-shadow:0 .05rem 0 var(--md-default-bg-color) inset}.md-typeset table:not([class]) 
a{word-break:normal}.md-typeset table th[role=columnheader]{cursor:pointer}[dir=ltr] .md-typeset table th[role=columnheader]:after{margin-left:.5em}[dir=rtl] .md-typeset table th[role=columnheader]:after{margin-right:.5em}.md-typeset table th[role=columnheader]:after{content:"";display:inline-block;height:1.2em;-webkit-mask-image:var(--md-typeset-table-sort-icon);mask-image:var(--md-typeset-table-sort-icon);-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;transition:background-color 125ms;vertical-align:text-bottom;width:1.2em}.md-typeset table th[role=columnheader]:hover:after{background-color:var(--md-default-fg-color--lighter)}.md-typeset table th[role=columnheader][aria-sort=ascending]:after{background-color:var(--md-default-fg-color--light);-webkit-mask-image:var(--md-typeset-table-sort-icon--asc);mask-image:var(--md-typeset-table-sort-icon--asc)}.md-typeset table th[role=columnheader][aria-sort=descending]:after{background-color:var(--md-default-fg-color--light);-webkit-mask-image:var(--md-typeset-table-sort-icon--desc);mask-image:var(--md-typeset-table-sort-icon--desc)}.md-typeset__scrollwrap{margin:1em -.8rem;overflow-x:auto;touch-action:auto}.md-typeset__table{display:inline-block;margin-bottom:.5em;padding:0 .8rem}@media print{.md-typeset__table{display:block}}html .md-typeset__table table{display:table;margin:0;overflow:hidden;width:100%}@media screen and (max-width:44.9375em){.md-content__inner>pre{margin:1em -.8rem}.md-content__inner>pre code{border-radius:0}}.md-banner{background-color:var(--md-footer-bg-color);color:var(--md-footer-fg-color);overflow:auto}@media print{.md-banner{display:none}}.md-banner--warning{background:var(--md-typeset-mark-color);color:var(--md-default-fg-color)}.md-banner__inner{font-size:.7rem;margin:.6rem auto;padding:0 .8rem}[dir=ltr] .md-banner__button{float:right}[dir=rtl] .md-banner__button{float:left}.md-banner__button{color:inherit;cursor:pointer;transition:opacity .25s}.md-banner__button:hover{opacity:.7}html{font-size:125%;height:100%;overflow-x:hidden}@media screen and (min-width:100em){html{font-size:137.5%}}@media screen and (min-width:125em){html{font-size:150%}}body{background-color:var(--md-default-bg-color);display:flex;flex-direction:column;font-size:.5rem;min-height:100%;position:relative;width:100%}@media print{body{display:block}}@media screen and (max-width:59.9375em){body[data-md-scrolllock]{position:fixed}}.md-grid{margin-left:auto;margin-right:auto;max-width:61rem}.md-container{display:flex;flex-direction:column;flex-grow:1}@media print{.md-container{display:block}}.md-main{flex-grow:1}.md-main__inner{display:flex;height:100%;margin-top:1.5rem}.md-ellipsis{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.md-toggle{display:none}.md-option{height:0;opacity:0;position:absolute;width:0}.md-option:checked+label:not([hidden]){display:block}.md-option.focus-visible+label{outline-color:var(--md-accent-fg-color);outline-style:auto}.md-skip{background-color:var(--md-default-fg-color);border-radius:.1rem;color:var(--md-default-bg-color);font-size:.64rem;margin:.5rem;opacity:0;outline-color:var(--md-accent-fg-color);padding:.3rem .5rem;position:fixed;transform:translateY(.4rem);z-index:-1}.md-skip:focus{opacity:1;transform:translateY(0);transition:transform .25s cubic-bezier(.4,0,.2,1),opacity 175ms 
75ms;z-index:10}@page{margin:25mm}:root{--md-clipboard-icon:url('data:image/svg+xml;charset=utf-8,')}.md-clipboard{border-radius:.1rem;color:var(--md-default-fg-color--lightest);cursor:pointer;height:1.5em;outline-color:var(--md-accent-fg-color);outline-offset:.1rem;position:absolute;right:.5em;top:.5em;transition:color .25s;width:1.5em;z-index:1}@media print{.md-clipboard{display:none}}.md-clipboard:not(.focus-visible){-webkit-tap-highlight-color:transparent;outline:none}:hover>.md-clipboard{color:var(--md-default-fg-color--light)}.md-clipboard:-webkit-any(:focus,:hover){color:var(--md-accent-fg-color)}.md-clipboard:-moz-any(:focus,:hover){color:var(--md-accent-fg-color)}.md-clipboard:is(:focus,:hover){color:var(--md-accent-fg-color)}.md-clipboard:after{background-color:currentcolor;content:"";display:block;height:1.125em;margin:0 auto;-webkit-mask-image:var(--md-clipboard-icon);mask-image:var(--md-clipboard-icon);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;width:1.125em}.md-clipboard--inline{cursor:pointer}.md-clipboard--inline code{transition:color .25s,background-color .25s}.md-clipboard--inline:-webkit-any(:focus,:hover) code{background-color:var(--md-accent-fg-color--transparent);color:var(--md-accent-fg-color)}.md-clipboard--inline:-moz-any(:focus,:hover) code{background-color:var(--md-accent-fg-color--transparent);color:var(--md-accent-fg-color)}.md-clipboard--inline:is(:focus,:hover) code{background-color:var(--md-accent-fg-color--transparent);color:var(--md-accent-fg-color)}@keyframes consent{0%{opacity:0;transform:translateY(100%)}to{opacity:1;transform:translateY(0)}}@keyframes overlay{0%{opacity:0}to{opacity:1}}.md-consent__overlay{animation:overlay .25s both;-webkit-backdrop-filter:blur(.1rem);backdrop-filter:blur(.1rem);background-color:#0000008a;height:100%;opacity:1;position:fixed;top:0;width:100%;z-index:5}.md-consent__inner{animation:consent .5s cubic-bezier(.1,.7,.1,1) both;background-color:var(--md-default-bg-color);border:0;border-radius:.1rem;bottom:0;box-shadow:0 0 .2rem #0000001a,0 .2rem .4rem #0003;max-height:100%;overflow:auto;padding:0;position:fixed;width:100%;z-index:5}.md-consent__form{padding:.8rem}.md-consent__settings{display:none;margin:1em 0}input:checked+.md-consent__settings{display:block}.md-consent__controls{margin-bottom:.8rem}.md-typeset .md-consent__controls .md-button{display:inline}@media screen and (max-width:44.9375em){.md-typeset .md-consent__controls .md-button{display:block;margin-top:.4rem;text-align:center;width:100%}}.md-consent label{cursor:pointer}.md-content{flex-grow:1;min-width:0}.md-content__inner{margin:0 .8rem 1.2rem;padding-top:.6rem}@media screen and (min-width:76.25em){[dir=ltr] .md-sidebar--primary:not([hidden])~.md-content>.md-content__inner{margin-left:1.2rem}[dir=ltr] .md-sidebar--secondary:not([hidden])~.md-content>.md-content__inner,[dir=rtl] .md-sidebar--primary:not([hidden])~.md-content>.md-content__inner{margin-right:1.2rem}[dir=rtl] .md-sidebar--secondary:not([hidden])~.md-content>.md-content__inner{margin-left:1.2rem}}.md-content__inner:before{content:"";display:block;height:.4rem}.md-content__inner>:last-child{margin-bottom:0}[dir=ltr] .md-content__button{float:right}[dir=rtl] .md-content__button{float:left}[dir=ltr] .md-content__button{margin-left:.4rem}[dir=rtl] .md-content__button{margin-right:.4rem}.md-content__button{margin:.4rem 0;padding:0}@media print{.md-content__button{display:none}}.md-typeset 
.md-content__button{color:var(--md-default-fg-color--lighter)}.md-content__button svg{display:inline;vertical-align:top}[dir=rtl] .md-content__button svg{transform:scaleX(-1)}[dir=ltr] .md-dialog{right:.8rem}[dir=rtl] .md-dialog{left:.8rem}.md-dialog{background-color:var(--md-default-fg-color);border-radius:.1rem;bottom:.8rem;box-shadow:var(--md-shadow-z3);min-width:11.1rem;opacity:0;padding:.4rem .6rem;pointer-events:none;position:fixed;transform:translateY(100%);transition:transform 0ms .4s,opacity .4s;z-index:4}@media print{.md-dialog{display:none}}.md-dialog--active{opacity:1;pointer-events:auto;transform:translateY(0);transition:transform .4s cubic-bezier(.075,.85,.175,1),opacity .4s}.md-dialog__inner{color:var(--md-default-bg-color);font-size:.7rem}.md-feedback{margin:2em 0 1em;text-align:center}.md-feedback fieldset{border:none;margin:0;padding:0}.md-feedback__title{font-weight:700;margin:1em auto}.md-feedback__inner{position:relative}.md-feedback__list{align-content:baseline;display:flex;flex-wrap:wrap;justify-content:center;position:relative}.md-feedback__list:hover .md-icon:not(:disabled){color:var(--md-default-fg-color--lighter)}:disabled .md-feedback__list{min-height:1.8rem}.md-feedback__icon{color:var(--md-default-fg-color--light);cursor:pointer;flex-shrink:0;margin:0 .1rem;transition:color 125ms}.md-feedback__icon:not(:disabled).md-icon:hover{color:var(--md-accent-fg-color)}.md-feedback__icon:disabled{color:var(--md-default-fg-color--lightest);pointer-events:none}.md-feedback__note{opacity:0;position:relative;transform:translateY(.4rem);transition:transform .4s cubic-bezier(.1,.7,.1,1),opacity .15s}.md-feedback__note>*{margin:0 auto;max-width:16rem}:disabled .md-feedback__note{opacity:1;transform:translateY(0)}.md-footer{background-color:var(--md-footer-bg-color);color:var(--md-footer-fg-color)}@media print{.md-footer{display:none}}.md-footer__inner{justify-content:space-between;overflow:auto;padding:.2rem}.md-footer__inner:not([hidden]){display:flex}.md-footer__link{display:flex;flex-grow:0.01;outline-color:var(--md-accent-fg-color);overflow:hidden;padding-bottom:.4rem;padding-top:1.4rem;transition:opacity .25s}.md-footer__link:-webkit-any(:focus,:hover){opacity:.7}.md-footer__link:-moz-any(:focus,:hover){opacity:.7}.md-footer__link:is(:focus,:hover){opacity:.7}[dir=rtl] .md-footer__link svg{transform:scaleX(-1)}@media screen and (max-width:44.9375em){.md-footer__link--prev .md-footer__title{display:none}}[dir=ltr] .md-footer__link--next{margin-left:auto}[dir=rtl] .md-footer__link--next{margin-right:auto}.md-footer__link--next{text-align:right}[dir=rtl] .md-footer__link--next{text-align:left}.md-footer__title{flex-grow:1;font-size:.9rem;line-height:2.4rem;max-width:calc(100% - 2.4rem);padding:0 1rem;position:relative;white-space:nowrap}.md-footer__button{margin:.2rem;padding:.4rem}.md-footer__direction{font-size:.64rem;left:0;margin-top:-1rem;opacity:.7;padding:0 1rem;position:absolute;right:0}.md-footer-meta{background-color:var(--md-footer-bg-color--dark)}.md-footer-meta__inner{display:flex;flex-wrap:wrap;justify-content:space-between;padding:.2rem}html .md-footer-meta.md-typeset a{color:var(--md-footer-fg-color--light)}html .md-footer-meta.md-typeset a:-webkit-any(:focus,:hover){color:var(--md-footer-fg-color)}html .md-footer-meta.md-typeset a:-moz-any(:focus,:hover){color:var(--md-footer-fg-color)}html .md-footer-meta.md-typeset a:is(:focus,:hover){color:var(--md-footer-fg-color)}.md-copyright{color:var(--md-footer-fg-color--lighter);font-size:.64rem;margin:auto 
.6rem;padding:.4rem 0;width:100%}@media screen and (min-width:45em){.md-copyright{width:auto}}.md-copyright__highlight{color:var(--md-footer-fg-color--light)}.md-social{margin:0 .4rem;padding:.2rem 0 .6rem}@media screen and (min-width:45em){.md-social{padding:.6rem 0}}.md-social__link{display:inline-block;height:1.6rem;text-align:center;width:1.6rem}.md-social__link:before{line-height:1.9}.md-social__link svg{fill:currentcolor;max-height:.8rem;vertical-align:-25%}.md-typeset .md-button{border:.1rem solid;border-radius:.1rem;color:var(--md-primary-fg-color);cursor:pointer;display:inline-block;font-weight:700;padding:.625em 2em;transition:color 125ms,background-color 125ms,border-color 125ms}.md-typeset .md-button--primary{background-color:var(--md-primary-fg-color);border-color:var(--md-primary-fg-color);color:var(--md-primary-bg-color)}.md-typeset .md-button:-webkit-any(:focus,:hover){background-color:var(--md-accent-fg-color);border-color:var(--md-accent-fg-color);color:var(--md-accent-bg-color)}.md-typeset .md-button:-moz-any(:focus,:hover){background-color:var(--md-accent-fg-color);border-color:var(--md-accent-fg-color);color:var(--md-accent-bg-color)}.md-typeset .md-button:is(:focus,:hover){background-color:var(--md-accent-fg-color);border-color:var(--md-accent-fg-color);color:var(--md-accent-bg-color)}[dir=ltr] .md-typeset .md-input{border-top-left-radius:.1rem}[dir=ltr] .md-typeset .md-input,[dir=rtl] .md-typeset .md-input{border-top-right-radius:.1rem}[dir=rtl] .md-typeset .md-input{border-top-left-radius:.1rem}.md-typeset .md-input{border-bottom:.1rem solid var(--md-default-fg-color--lighter);box-shadow:var(--md-shadow-z1);font-size:.8rem;height:1.8rem;padding:0 .6rem;transition:border .25s,box-shadow .25s}.md-typeset .md-input:-webkit-any(:focus,:hover){border-bottom-color:var(--md-accent-fg-color);box-shadow:var(--md-shadow-z2)}.md-typeset .md-input:-moz-any(:focus,:hover){border-bottom-color:var(--md-accent-fg-color);box-shadow:var(--md-shadow-z2)}.md-typeset .md-input:is(:focus,:hover){border-bottom-color:var(--md-accent-fg-color);box-shadow:var(--md-shadow-z2)}.md-typeset .md-input--stretch{width:100%}.md-header{background-color:var(--md-primary-fg-color);box-shadow:0 0 .2rem #0000,0 .2rem .4rem #0000;color:var(--md-primary-bg-color);display:block;left:0;position:-webkit-sticky;position:sticky;right:0;top:0;z-index:4}@media print{.md-header{display:none}}.md-header[hidden]{transform:translateY(-100%);transition:transform .25s cubic-bezier(.8,0,.6,1),box-shadow .25s}.md-header--shadow{box-shadow:0 0 .2rem #0000001a,0 .2rem .4rem #0003;transition:transform .25s cubic-bezier(.1,.7,.1,1),box-shadow .25s}.md-header__inner{align-items:center;display:flex;padding:0 .2rem}.md-header__button{color:currentcolor;cursor:pointer;margin:.2rem;outline-color:var(--md-accent-fg-color);padding:.4rem;position:relative;transition:opacity .25s;vertical-align:middle;z-index:1}.md-header__button:hover{opacity:.7}.md-header__button:not([hidden]){display:inline-block}.md-header__button:not(.focus-visible){-webkit-tap-highlight-color:transparent;outline:none}.md-header__button.md-logo{margin:.2rem;padding:.4rem}@media screen and (max-width:76.1875em){.md-header__button.md-logo{display:none}}.md-header__button.md-logo :-webkit-any(img,svg){fill:currentcolor;display:block;height:1.2rem;width:auto}.md-header__button.md-logo :-moz-any(img,svg){fill:currentcolor;display:block;height:1.2rem;width:auto}.md-header__button.md-logo :is(img,svg){fill:currentcolor;display:block;height:1.2rem;width:auto}@media 
screen and (min-width:60em){.md-header__button[for=__search]{display:none}}.no-js .md-header__button[for=__search]{display:none}[dir=rtl] .md-header__button[for=__search] svg{transform:scaleX(-1)}@media screen and (min-width:76.25em){.md-header__button[for=__drawer]{display:none}}.md-header__topic{display:flex;max-width:100%;position:absolute;transition:transform .4s cubic-bezier(.1,.7,.1,1),opacity .15s;white-space:nowrap}.md-header__topic+.md-header__topic{opacity:0;pointer-events:none;transform:translateX(1.25rem);transition:transform .4s cubic-bezier(1,.7,.1,.1),opacity .15s;z-index:-1}[dir=rtl] .md-header__topic+.md-header__topic{transform:translateX(-1.25rem)}.md-header__topic:first-child{font-weight:700}[dir=ltr] .md-header__title{margin-right:.4rem}[dir=rtl] .md-header__title{margin-left:.4rem}[dir=ltr] .md-header__title{margin-left:1rem}[dir=rtl] .md-header__title{margin-right:1rem}.md-header__title{flex-grow:1;font-size:.9rem;height:2.4rem;line-height:2.4rem}.md-header__title--active .md-header__topic{opacity:0;pointer-events:none;transform:translateX(-1.25rem);transition:transform .4s cubic-bezier(1,.7,.1,.1),opacity .15s;z-index:-1}[dir=rtl] .md-header__title--active .md-header__topic{transform:translateX(1.25rem)}.md-header__title--active .md-header__topic+.md-header__topic{opacity:1;pointer-events:auto;transform:translateX(0);transition:transform .4s cubic-bezier(.1,.7,.1,1),opacity .15s;z-index:0}.md-header__title>.md-header__ellipsis{height:100%;position:relative;width:100%}.md-header__option{display:flex;flex-shrink:0;max-width:100%;transition:max-width 0ms .25s,opacity .25s .25s;white-space:nowrap}[data-md-toggle=search]:checked~.md-header .md-header__option{max-width:0;opacity:0;transition:max-width 0ms,opacity 0ms}.md-header__source{display:none}@media screen and (min-width:60em){[dir=ltr] .md-header__source{margin-left:1rem}[dir=rtl] .md-header__source{margin-right:1rem}.md-header__source{display:block;max-width:11.7rem;width:11.7rem}}@media screen and (min-width:76.25em){[dir=ltr] .md-header__source{margin-left:1.4rem}[dir=rtl] .md-header__source{margin-right:1.4rem}}:root{--md-nav-icon--prev:url('data:image/svg+xml;charset=utf-8,');--md-nav-icon--next:url('data:image/svg+xml;charset=utf-8,');--md-toc-icon:url('data:image/svg+xml;charset=utf-8,')}.md-nav{font-size:.7rem;line-height:1.3}.md-nav__title{display:block;font-weight:700;overflow:hidden;padding:0 .6rem;text-overflow:ellipsis}.md-nav__title .md-nav__button{display:none}.md-nav__title .md-nav__button img{height:100%;width:auto}.md-nav__title .md-nav__button.md-logo :-webkit-any(img,svg){fill:currentcolor;display:block;height:2.4rem;max-width:100%;object-fit:contain;width:auto}.md-nav__title .md-nav__button.md-logo :-moz-any(img,svg){fill:currentcolor;display:block;height:2.4rem;max-width:100%;object-fit:contain;width:auto}.md-nav__title .md-nav__button.md-logo :is(img,svg){fill:currentcolor;display:block;height:2.4rem;max-width:100%;object-fit:contain;width:auto}.md-nav__list{list-style:none;margin:0;padding:0}.md-nav__item{padding:0 .6rem}[dir=ltr] .md-nav__item .md-nav__item{padding-right:0}[dir=rtl] .md-nav__item .md-nav__item{padding-left:0}.md-nav__link{align-items:center;cursor:pointer;display:flex;justify-content:space-between;margin-top:.625em;overflow:hidden;scroll-snap-align:start;text-overflow:ellipsis;transition:color 125ms}.md-nav__link--passed{color:var(--md-default-fg-color--light)}.md-nav__item .md-nav__link--active{color:var(--md-typeset-a-color)}.md-nav__item .md-nav__link--index 
[href]{width:100%}.md-nav__link:-webkit-any(:focus,:hover){color:var(--md-accent-fg-color)}.md-nav__link:-moz-any(:focus,:hover){color:var(--md-accent-fg-color)}.md-nav__link:is(:focus,:hover){color:var(--md-accent-fg-color)}.md-nav__link.focus-visible{outline-color:var(--md-accent-fg-color);outline-offset:.2rem}.md-nav--primary .md-nav__link[for=__toc]{display:none}.md-nav--primary .md-nav__link[for=__toc] .md-icon:after{background-color:currentcolor;display:block;height:100%;-webkit-mask-image:var(--md-toc-icon);mask-image:var(--md-toc-icon);width:100%}.md-nav--primary .md-nav__link[for=__toc]~.md-nav{display:none}.md-nav__link>*{cursor:pointer;display:flex}.md-nav__icon{flex-shrink:0}.md-nav__source{display:none}@media screen and (max-width:76.1875em){.md-nav--primary,.md-nav--primary .md-nav{background-color:var(--md-default-bg-color);display:flex;flex-direction:column;height:100%;left:0;position:absolute;right:0;top:0;z-index:1}.md-nav--primary :-webkit-any(.md-nav__title,.md-nav__item){font-size:.8rem;line-height:1.5}.md-nav--primary :-moz-any(.md-nav__title,.md-nav__item){font-size:.8rem;line-height:1.5}.md-nav--primary :is(.md-nav__title,.md-nav__item){font-size:.8rem;line-height:1.5}.md-nav--primary .md-nav__title{background-color:var(--md-default-fg-color--lightest);color:var(--md-default-fg-color--light);cursor:pointer;height:5.6rem;line-height:2.4rem;padding:3rem .8rem .2rem;position:relative;white-space:nowrap}[dir=ltr] .md-nav--primary .md-nav__title .md-nav__icon{left:.4rem}[dir=rtl] .md-nav--primary .md-nav__title .md-nav__icon{right:.4rem}.md-nav--primary .md-nav__title .md-nav__icon{display:block;height:1.2rem;margin:.2rem;position:absolute;top:.4rem;width:1.2rem}.md-nav--primary .md-nav__title .md-nav__icon:after{background-color:currentcolor;content:"";display:block;height:100%;-webkit-mask-image:var(--md-nav-icon--prev);mask-image:var(--md-nav-icon--prev);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;width:100%}.md-nav--primary .md-nav__title~.md-nav__list{background-color:var(--md-default-bg-color);box-shadow:0 .05rem 0 var(--md-default-fg-color--lightest) inset;overflow-y:auto;scroll-snap-type:y mandatory;touch-action:pan-y}.md-nav--primary .md-nav__title~.md-nav__list>:first-child{border-top:0}.md-nav--primary .md-nav__title[for=__drawer]{background-color:var(--md-primary-fg-color);color:var(--md-primary-bg-color);font-weight:700}.md-nav--primary .md-nav__title .md-logo{display:block;left:.2rem;margin:.2rem;padding:.4rem;position:absolute;right:.2rem;top:.2rem}.md-nav--primary .md-nav__list{flex:1}.md-nav--primary .md-nav__item{border-top:.05rem solid var(--md-default-fg-color--lightest);padding:0}.md-nav--primary .md-nav__item--active>.md-nav__link{color:var(--md-typeset-a-color)}.md-nav--primary .md-nav__item--active>.md-nav__link:-webkit-any(:focus,:hover){color:var(--md-accent-fg-color)}.md-nav--primary .md-nav__item--active>.md-nav__link:-moz-any(:focus,:hover){color:var(--md-accent-fg-color)}.md-nav--primary .md-nav__item--active>.md-nav__link:is(:focus,:hover){color:var(--md-accent-fg-color)}.md-nav--primary .md-nav__link{margin-top:0;padding:.6rem .8rem}[dir=ltr] .md-nav--primary .md-nav__link .md-nav__icon{margin-right:-.2rem}[dir=rtl] .md-nav--primary .md-nav__link .md-nav__icon{margin-left:-.2rem}.md-nav--primary .md-nav__link .md-nav__icon{font-size:1.2rem;height:1.2rem;width:1.2rem}.md-nav--primary .md-nav__link 
.md-nav__icon:after{background-color:currentcolor;content:"";display:block;height:100%;-webkit-mask-image:var(--md-nav-icon--next);mask-image:var(--md-nav-icon--next);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;width:100%}[dir=rtl] .md-nav--primary .md-nav__icon:after{transform:scale(-1)}.md-nav--primary .md-nav--secondary .md-nav{background-color:initial;position:static}[dir=ltr] .md-nav--primary .md-nav--secondary .md-nav .md-nav__link{padding-left:1.4rem}[dir=rtl] .md-nav--primary .md-nav--secondary .md-nav .md-nav__link{padding-right:1.4rem}[dir=ltr] .md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav__link{padding-left:2rem}[dir=rtl] .md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav__link{padding-right:2rem}[dir=ltr] .md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav .md-nav__link{padding-left:2.6rem}[dir=rtl] .md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav .md-nav__link{padding-right:2.6rem}[dir=ltr] .md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav .md-nav .md-nav__link{padding-left:3.2rem}[dir=rtl] .md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav .md-nav .md-nav__link{padding-right:3.2rem}.md-nav--secondary{background-color:initial}.md-nav__toggle~.md-nav{display:flex;opacity:0;transform:translateX(100%);transition:transform .25s cubic-bezier(.8,0,.6,1),opacity 125ms 50ms}[dir=rtl] .md-nav__toggle~.md-nav{transform:translateX(-100%)}.md-nav__toggle:checked~.md-nav{opacity:1;transform:translateX(0);transition:transform .25s cubic-bezier(.4,0,.2,1),opacity 125ms 125ms}.md-nav__toggle:checked~.md-nav>.md-nav__list{-webkit-backface-visibility:hidden;backface-visibility:hidden}}@media screen and (max-width:59.9375em){.md-nav--primary .md-nav__link[for=__toc]{display:flex}.md-nav--primary .md-nav__link[for=__toc] .md-icon:after{content:""}.md-nav--primary .md-nav__link[for=__toc]+.md-nav__link{display:none}.md-nav--primary .md-nav__link[for=__toc]~.md-nav{display:flex}.md-nav__source{background-color:var(--md-primary-fg-color--dark);color:var(--md-primary-bg-color);display:block;padding:0 .2rem}}@media screen and (min-width:60em) and (max-width:76.1875em){.md-nav--integrated .md-nav__link[for=__toc]{display:flex}.md-nav--integrated .md-nav__link[for=__toc] .md-icon:after{content:""}.md-nav--integrated .md-nav__link[for=__toc]+.md-nav__link{display:none}.md-nav--integrated .md-nav__link[for=__toc]~.md-nav{display:flex}}@media screen and (min-width:60em){.md-nav--secondary .md-nav__title{background:var(--md-default-bg-color);box-shadow:0 0 .4rem .4rem var(--md-default-bg-color);position:-webkit-sticky;position:sticky;top:0;z-index:1}.md-nav--secondary .md-nav__title[for=__toc]{scroll-snap-align:start}.md-nav--secondary .md-nav__title .md-nav__icon{display:none}}@media screen and (min-width:76.25em){.md-nav{transition:max-height .25s cubic-bezier(.86,0,.07,1)}.md-nav--primary .md-nav__title{background:var(--md-default-bg-color);box-shadow:0 0 .4rem .4rem var(--md-default-bg-color);position:-webkit-sticky;position:sticky;top:0;z-index:1}.md-nav--primary .md-nav__title[for=__drawer]{scroll-snap-align:start}.md-nav--primary .md-nav__title 
.md-nav__icon,.md-nav__toggle~.md-nav{display:none}.md-nav__toggle:-webkit-any(:checked,:indeterminate)~.md-nav{display:block}.md-nav__toggle:-moz-any(:checked,:indeterminate)~.md-nav{display:block}.md-nav__toggle:is(:checked,:indeterminate)~.md-nav{display:block}.md-nav__item--nested>.md-nav>.md-nav__title{display:none}.md-nav__item--section{display:block;margin:1.25em 0}.md-nav__item--section:last-child{margin-bottom:0}.md-nav__item--section>.md-nav__link{font-weight:700;pointer-events:none}.md-nav__item--section>.md-nav__link--index [href]{pointer-events:auto}.md-nav__item--section>.md-nav__link .md-nav__icon{display:none}.md-nav__item--section>.md-nav{display:block}.md-nav__item--section>.md-nav>.md-nav__list>.md-nav__item{padding:0}.md-nav__icon{border-radius:100%;height:.9rem;transition:background-color .25s,transform .25s;width:.9rem}[dir=rtl] .md-nav__icon{transform:rotate(180deg)}.md-nav__icon:hover{background-color:var(--md-accent-fg-color--transparent)}.md-nav__icon:after{background-color:currentcolor;content:"";display:inline-block;height:100%;-webkit-mask-image:var(--md-nav-icon--next);mask-image:var(--md-nav-icon--next);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;vertical-align:-.1rem;width:100%}.md-nav__item--nested .md-nav__toggle:checked~.md-nav__link .md-nav__icon,.md-nav__item--nested .md-nav__toggle:indeterminate~.md-nav__link .md-nav__icon{transform:rotate(90deg)}.md-nav--lifted>.md-nav__list>.md-nav__item,.md-nav--lifted>.md-nav__list>.md-nav__item--nested,.md-nav--lifted>.md-nav__title{display:none}.md-nav--lifted>.md-nav__list>.md-nav__item--active{display:block;padding:0}.md-nav--lifted>.md-nav__list>.md-nav__item--active>.md-nav__link{background:var(--md-default-bg-color);box-shadow:0 0 .4rem .4rem var(--md-default-bg-color);font-weight:700;margin-top:0;padding:0 .6rem;position:-webkit-sticky;position:sticky;top:0;z-index:1}.md-nav--lifted>.md-nav__list>.md-nav__item--active>.md-nav__link:not(.md-nav__link--index){pointer-events:none}.md-nav--lifted>.md-nav__list>.md-nav__item--active>.md-nav__link .md-nav__icon{display:none}.md-nav--lifted .md-nav[data-md-level="1"]{display:block}[dir=ltr] .md-nav--lifted .md-nav[data-md-level="1"]>.md-nav__list>.md-nav__item{padding-right:.6rem}[dir=rtl] .md-nav--lifted .md-nav[data-md-level="1"]>.md-nav__list>.md-nav__item{padding-left:.6rem}.md-nav--integrated>.md-nav__list>.md-nav__item--active:not(.md-nav__item--nested){padding:0 .6rem}.md-nav--integrated>.md-nav__list>.md-nav__item--active:not(.md-nav__item--nested)>.md-nav__link{padding:0}[dir=ltr] .md-nav--integrated>.md-nav__list>.md-nav__item--active .md-nav--secondary{border-left:.05rem solid var(--md-primary-fg-color)}[dir=rtl] .md-nav--integrated>.md-nav__list>.md-nav__item--active .md-nav--secondary{border-right:.05rem solid var(--md-primary-fg-color)}.md-nav--integrated>.md-nav__list>.md-nav__item--active .md-nav--secondary{display:block;margin-bottom:1.25em}.md-nav--integrated>.md-nav__list>.md-nav__item--active .md-nav--secondary>.md-nav__title{display:none}}:root{--md-search-result-icon:url('data:image/svg+xml;charset=utf-8,')}.md-search{position:relative}@media screen and (min-width:60em){.md-search{padding:.2rem 0}}.no-js .md-search{display:none}.md-search__overlay{opacity:0;z-index:1}@media screen and (max-width:59.9375em){[dir=ltr] .md-search__overlay{left:-2.2rem}[dir=rtl] 
.md-search__overlay{right:-2.2rem}.md-search__overlay{background-color:var(--md-default-bg-color);border-radius:1rem;height:2rem;overflow:hidden;pointer-events:none;position:absolute;top:-1rem;transform-origin:center;transition:transform .3s .1s,opacity .2s .2s;width:2rem}[data-md-toggle=search]:checked~.md-header .md-search__overlay{opacity:1;transition:transform .4s,opacity .1s}}@media screen and (min-width:60em){[dir=ltr] .md-search__overlay{left:0}[dir=rtl] .md-search__overlay{right:0}.md-search__overlay{background-color:#0000008a;cursor:pointer;height:0;position:fixed;top:0;transition:width 0ms .25s,height 0ms .25s,opacity .25s;width:0}[data-md-toggle=search]:checked~.md-header .md-search__overlay{height:200vh;opacity:1;transition:width 0ms,height 0ms,opacity .25s;width:100%}}@media screen and (max-width:29.9375em){[data-md-toggle=search]:checked~.md-header .md-search__overlay{transform:scale(45)}}@media screen and (min-width:30em) and (max-width:44.9375em){[data-md-toggle=search]:checked~.md-header .md-search__overlay{transform:scale(60)}}@media screen and (min-width:45em) and (max-width:59.9375em){[data-md-toggle=search]:checked~.md-header .md-search__overlay{transform:scale(75)}}.md-search__inner{-webkit-backface-visibility:hidden;backface-visibility:hidden}@media screen and (max-width:59.9375em){[dir=ltr] .md-search__inner{left:0}[dir=rtl] .md-search__inner{right:0}.md-search__inner{height:0;opacity:0;overflow:hidden;position:fixed;top:0;transform:translateX(5%);transition:width 0ms .3s,height 0ms .3s,transform .15s cubic-bezier(.4,0,.2,1) .15s,opacity .15s .15s;width:0;z-index:2}[dir=rtl] .md-search__inner{transform:translateX(-5%)}[data-md-toggle=search]:checked~.md-header .md-search__inner{height:100%;opacity:1;transform:translateX(0);transition:width 0ms 0ms,height 0ms 0ms,transform .15s cubic-bezier(.1,.7,.1,1) .15s,opacity .15s .15s;width:100%}}@media screen and (min-width:60em){[dir=ltr] .md-search__inner{float:right}[dir=rtl] .md-search__inner{float:left}.md-search__inner{padding:.1rem 0;position:relative;transition:width .25s cubic-bezier(.1,.7,.1,1);width:11.7rem}}@media screen and (min-width:60em) and (max-width:76.1875em){[data-md-toggle=search]:checked~.md-header .md-search__inner{width:23.4rem}}@media screen and (min-width:76.25em){[data-md-toggle=search]:checked~.md-header .md-search__inner{width:34.4rem}}.md-search__form{background-color:var(--md-default-bg-color);box-shadow:0 0 .6rem #0000;height:2.4rem;position:relative;transition:color .25s,background-color .25s;z-index:2}@media screen and (min-width:60em){.md-search__form{background-color:#00000042;border-radius:.1rem;height:1.8rem}.md-search__form:hover{background-color:#ffffff1f}}[data-md-toggle=search]:checked~.md-header .md-search__form{background-color:var(--md-default-bg-color);border-radius:.1rem .1rem 0 0;box-shadow:0 0 .6rem #00000012;color:var(--md-default-fg-color)}[dir=ltr] .md-search__input{padding-left:3.6rem;padding-right:2.2rem}[dir=rtl] .md-search__input{padding-left:2.2rem;padding-right:3.6rem}.md-search__input{background:#0000;font-size:.9rem;height:100%;position:relative;text-overflow:ellipsis;width:100%;z-index:2}.md-search__input::placeholder{transition:color .25s}.md-search__input::placeholder,.md-search__input~.md-search__icon{color:var(--md-default-fg-color--light)}.md-search__input::-ms-clear{display:none}@media screen and (max-width:59.9375em){.md-search__input{font-size:.9rem;height:2.4rem;width:100%}}@media screen and (min-width:60em){[dir=ltr] 
.md-search__input{padding-left:2.2rem}[dir=rtl] .md-search__input{padding-right:2.2rem}.md-search__input{color:inherit;font-size:.8rem}.md-search__input::placeholder{color:var(--md-primary-bg-color--light)}.md-search__input+.md-search__icon{color:var(--md-primary-bg-color)}[data-md-toggle=search]:checked~.md-header .md-search__input{text-overflow:clip}[data-md-toggle=search]:checked~.md-header .md-search__input+.md-search__icon,[data-md-toggle=search]:checked~.md-header .md-search__input::placeholder{color:var(--md-default-fg-color--light)}}.md-search__icon{cursor:pointer;display:inline-block;height:1.2rem;transition:color .25s,opacity .25s;width:1.2rem}.md-search__icon:hover{opacity:.7}[dir=ltr] .md-search__icon[for=__search]{left:.5rem}[dir=rtl] .md-search__icon[for=__search]{right:.5rem}.md-search__icon[for=__search]{position:absolute;top:.3rem;z-index:2}[dir=rtl] .md-search__icon[for=__search] svg{transform:scaleX(-1)}@media screen and (max-width:59.9375em){[dir=ltr] .md-search__icon[for=__search]{left:.8rem}[dir=rtl] .md-search__icon[for=__search]{right:.8rem}.md-search__icon[for=__search]{top:.6rem}.md-search__icon[for=__search] svg:first-child{display:none}}@media screen and (min-width:60em){.md-search__icon[for=__search]{pointer-events:none}.md-search__icon[for=__search] svg:last-child{display:none}}[dir=ltr] .md-search__options{right:.5rem}[dir=rtl] .md-search__options{left:.5rem}.md-search__options{pointer-events:none;position:absolute;top:.3rem;z-index:2}@media screen and (max-width:59.9375em){[dir=ltr] .md-search__options{right:.8rem}[dir=rtl] .md-search__options{left:.8rem}.md-search__options{top:.6rem}}[dir=ltr] .md-search__options>*{margin-left:.2rem}[dir=rtl] .md-search__options>*{margin-right:.2rem}.md-search__options>*{color:var(--md-default-fg-color--light);opacity:0;transform:scale(.75);transition:transform .15s cubic-bezier(.1,.7,.1,1),opacity .15s}.md-search__options>:not(.focus-visible){-webkit-tap-highlight-color:transparent;outline:none}[data-md-toggle=search]:checked~.md-header .md-search__input:valid~.md-search__options>*{opacity:1;pointer-events:auto;transform:scale(1)}[data-md-toggle=search]:checked~.md-header .md-search__input:valid~.md-search__options>:hover{opacity:.7}[dir=ltr] .md-search__suggest{padding-left:3.6rem;padding-right:2.2rem}[dir=rtl] .md-search__suggest{padding-left:2.2rem;padding-right:3.6rem}.md-search__suggest{align-items:center;color:var(--md-default-fg-color--lighter);display:flex;font-size:.9rem;height:100%;opacity:0;position:absolute;top:0;transition:opacity 50ms;white-space:nowrap;width:100%}@media screen and (min-width:60em){[dir=ltr] .md-search__suggest{padding-left:2.2rem}[dir=rtl] .md-search__suggest{padding-right:2.2rem}.md-search__suggest{font-size:.8rem}}[data-md-toggle=search]:checked~.md-header .md-search__suggest{opacity:1;transition:opacity .3s .1s}[dir=ltr] .md-search__output{border-bottom-left-radius:.1rem}[dir=ltr] .md-search__output,[dir=rtl] .md-search__output{border-bottom-right-radius:.1rem}[dir=rtl] .md-search__output{border-bottom-left-radius:.1rem}.md-search__output{overflow:hidden;position:absolute;width:100%;z-index:1}@media screen and (max-width:59.9375em){.md-search__output{bottom:0;top:2.4rem}}@media screen and (min-width:60em){.md-search__output{opacity:0;top:1.9rem;transition:opacity .4s}[data-md-toggle=search]:checked~.md-header 
.md-search__output{box-shadow:var(--md-shadow-z3);opacity:1}}.md-search__scrollwrap{-webkit-backface-visibility:hidden;backface-visibility:hidden;background-color:var(--md-default-bg-color);height:100%;overflow-y:auto;touch-action:pan-y}@media (-webkit-max-device-pixel-ratio:1),(max-resolution:1dppx){.md-search__scrollwrap{transform:translateZ(0)}}@media screen and (min-width:60em) and (max-width:76.1875em){.md-search__scrollwrap{width:23.4rem}}@media screen and (min-width:76.25em){.md-search__scrollwrap{width:34.4rem}}@media screen and (min-width:60em){.md-search__scrollwrap{max-height:0;scrollbar-color:var(--md-default-fg-color--lighter) #0000;scrollbar-width:thin}[data-md-toggle=search]:checked~.md-header .md-search__scrollwrap{max-height:75vh}.md-search__scrollwrap:hover{scrollbar-color:var(--md-accent-fg-color) #0000}.md-search__scrollwrap::-webkit-scrollbar{height:.2rem;width:.2rem}.md-search__scrollwrap::-webkit-scrollbar-thumb{background-color:var(--md-default-fg-color--lighter)}.md-search__scrollwrap::-webkit-scrollbar-thumb:hover{background-color:var(--md-accent-fg-color)}}.md-search-result{color:var(--md-default-fg-color);word-break:break-word}.md-search-result__meta{background-color:var(--md-default-fg-color--lightest);color:var(--md-default-fg-color--light);font-size:.64rem;line-height:1.8rem;padding:0 .8rem;scroll-snap-align:start}@media screen and (min-width:60em){[dir=ltr] .md-search-result__meta{padding-left:2.2rem}[dir=rtl] .md-search-result__meta{padding-right:2.2rem}}.md-search-result__list{list-style:none;margin:0;padding:0;-webkit-user-select:none;-moz-user-select:none;user-select:none}.md-search-result__item{box-shadow:0 -.05rem var(--md-default-fg-color--lightest)}.md-search-result__item:first-child{box-shadow:none}.md-search-result__link{display:block;outline:none;scroll-snap-align:start;transition:background-color .25s}.md-search-result__link:-webkit-any(:focus,:hover){background-color:var(--md-accent-fg-color--transparent)}.md-search-result__link:-moz-any(:focus,:hover){background-color:var(--md-accent-fg-color--transparent)}.md-search-result__link:is(:focus,:hover){background-color:var(--md-accent-fg-color--transparent)}.md-search-result__link:last-child p:last-child{margin-bottom:.6rem}.md-search-result__more summary{color:var(--md-typeset-a-color);cursor:pointer;display:block;font-size:.64rem;outline:none;padding:.75em .8rem;scroll-snap-align:start;transition:color .25s,background-color .25s}@media screen and (min-width:60em){[dir=ltr] .md-search-result__more summary{padding-left:2.2rem}[dir=rtl] .md-search-result__more summary{padding-right:2.2rem}}.md-search-result__more summary:-webkit-any(:focus,:hover){background-color:var(--md-accent-fg-color--transparent);color:var(--md-accent-fg-color)}.md-search-result__more summary:-moz-any(:focus,:hover){background-color:var(--md-accent-fg-color--transparent);color:var(--md-accent-fg-color)}.md-search-result__more summary:is(:focus,:hover){background-color:var(--md-accent-fg-color--transparent);color:var(--md-accent-fg-color)}.md-search-result__more summary::marker{display:none}.md-search-result__more summary::-webkit-details-marker{display:none}.md-search-result__more summary~*>*{opacity:.65}.md-search-result__article{overflow:hidden;padding:0 .8rem;position:relative}@media screen and (min-width:60em){[dir=ltr] .md-search-result__article{padding-left:2.2rem}[dir=rtl] .md-search-result__article{padding-right:2.2rem}}.md-search-result__article--document 
.md-search-result__title{font-size:.8rem;font-weight:400;line-height:1.4;margin:.55rem 0}[dir=ltr] .md-search-result__icon{left:0}[dir=rtl] .md-search-result__icon{right:0}.md-search-result__icon{color:var(--md-default-fg-color--light);height:1.2rem;margin:.5rem;position:absolute;width:1.2rem}@media screen and (max-width:59.9375em){.md-search-result__icon{display:none}}.md-search-result__icon:after{background-color:currentcolor;content:"";display:inline-block;height:100%;-webkit-mask-image:var(--md-search-result-icon);mask-image:var(--md-search-result-icon);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;width:100%}[dir=rtl] .md-search-result__icon:after{transform:scaleX(-1)}.md-search-result__title{font-size:.64rem;font-weight:700;line-height:1.6;margin:.5em 0}.md-search-result__teaser{-webkit-box-orient:vertical;-webkit-line-clamp:2;color:var(--md-default-fg-color--light);display:-webkit-box;font-size:.64rem;line-height:1.6;margin:.5em 0;max-height:2rem;overflow:hidden;text-overflow:ellipsis}@media screen and (max-width:44.9375em){.md-search-result__teaser{-webkit-line-clamp:3;max-height:3rem}}@media screen and (min-width:60em) and (max-width:76.1875em){.md-search-result__teaser{-webkit-line-clamp:3;max-height:3rem}}.md-search-result__teaser mark{background-color:initial;text-decoration:underline}.md-search-result__terms{font-size:.64rem;font-style:italic;margin:.5em 0}.md-search-result mark{background-color:initial;color:var(--md-accent-fg-color)}.md-select{position:relative;z-index:1}.md-select__inner{background-color:var(--md-default-bg-color);border-radius:.1rem;box-shadow:var(--md-shadow-z2);color:var(--md-default-fg-color);left:50%;margin-top:.2rem;max-height:0;opacity:0;position:absolute;top:calc(100% - .2rem);transform:translate3d(-50%,.3rem,0);transition:transform .25s 375ms,opacity .25s .25s,max-height 0ms .5s}.md-select:-webkit-any(:focus-within,:hover) .md-select__inner{max-height:10rem;opacity:1;transform:translate3d(-50%,0,0);-webkit-transition:transform .25s cubic-bezier(.1,.7,.1,1),opacity .25s,max-height 0ms;transition:transform .25s cubic-bezier(.1,.7,.1,1),opacity .25s,max-height 0ms}.md-select:-moz-any(:focus-within,:hover) .md-select__inner{max-height:10rem;opacity:1;transform:translate3d(-50%,0,0);-moz-transition:transform .25s cubic-bezier(.1,.7,.1,1),opacity .25s,max-height 0ms;transition:transform .25s cubic-bezier(.1,.7,.1,1),opacity .25s,max-height 0ms}.md-select:is(:focus-within,:hover) .md-select__inner{max-height:10rem;opacity:1;transform:translate3d(-50%,0,0);transition:transform .25s cubic-bezier(.1,.7,.1,1),opacity .25s,max-height 0ms}.md-select__inner:after{border-bottom:.2rem solid #0000;border-bottom-color:var(--md-default-bg-color);border-left:.2rem solid #0000;border-right:.2rem solid #0000;border-top:0;content:"";height:0;left:50%;margin-left:-.2rem;margin-top:-.2rem;position:absolute;top:0;width:0}.md-select__list{border-radius:.1rem;font-size:.8rem;list-style-type:none;margin:0;max-height:inherit;overflow:auto;padding:0}.md-select__item{line-height:1.8rem}[dir=ltr] .md-select__link{padding-left:.6rem;padding-right:1.2rem}[dir=rtl] .md-select__link{padding-left:1.2rem;padding-right:.6rem}.md-select__link{cursor:pointer;display:block;outline:none;scroll-snap-align:start;transition:background-color .25s,color 
.25s;width:100%}.md-select__link:-webkit-any(:focus,:hover){color:var(--md-accent-fg-color)}.md-select__link:-moz-any(:focus,:hover){color:var(--md-accent-fg-color)}.md-select__link:is(:focus,:hover){color:var(--md-accent-fg-color)}.md-select__link:focus{background-color:var(--md-default-fg-color--lightest)}.md-sidebar{align-self:flex-start;flex-shrink:0;padding:1.2rem 0;position:-webkit-sticky;position:sticky;top:2.4rem;width:12.1rem}@media print{.md-sidebar{display:none}}@media screen and (max-width:76.1875em){[dir=ltr] .md-sidebar--primary{left:-12.1rem}[dir=rtl] .md-sidebar--primary{right:-12.1rem}.md-sidebar--primary{background-color:var(--md-default-bg-color);display:block;height:100%;position:fixed;top:0;transform:translateX(0);transition:transform .25s cubic-bezier(.4,0,.2,1),box-shadow .25s;width:12.1rem;z-index:5}[data-md-toggle=drawer]:checked~.md-container .md-sidebar--primary{box-shadow:var(--md-shadow-z3);transform:translateX(12.1rem)}[dir=rtl] [data-md-toggle=drawer]:checked~.md-container .md-sidebar--primary{transform:translateX(-12.1rem)}.md-sidebar--primary .md-sidebar__scrollwrap{bottom:0;left:0;margin:0;overflow:hidden;position:absolute;right:0;scroll-snap-type:none;top:0}}@media screen and (min-width:76.25em){.md-sidebar{height:0}.no-js .md-sidebar{height:auto}.md-header--lifted~.md-container .md-sidebar{top:4.8rem}}.md-sidebar--secondary{display:none;order:2}@media screen and (min-width:60em){.md-sidebar--secondary{height:0}.no-js .md-sidebar--secondary{height:auto}.md-sidebar--secondary:not([hidden]){display:block}.md-sidebar--secondary .md-sidebar__scrollwrap{touch-action:pan-y}}.md-sidebar__scrollwrap{scrollbar-gutter:stable;-webkit-backface-visibility:hidden;backface-visibility:hidden;margin:0 .2rem;overflow-y:auto;scrollbar-color:var(--md-default-fg-color--lighter) #0000;scrollbar-width:thin}.md-sidebar__scrollwrap:hover{scrollbar-color:var(--md-accent-fg-color) #0000}.md-sidebar__scrollwrap::-webkit-scrollbar{height:.2rem;width:.2rem}.md-sidebar__scrollwrap::-webkit-scrollbar-thumb{background-color:var(--md-default-fg-color--lighter)}.md-sidebar__scrollwrap::-webkit-scrollbar-thumb:hover{background-color:var(--md-accent-fg-color)}@supports selector(::-webkit-scrollbar){.md-sidebar__scrollwrap{scrollbar-gutter:auto}[dir=ltr] .md-sidebar__inner{padding-right:calc(100% - 11.5rem)}[dir=rtl] .md-sidebar__inner{padding-left:calc(100% - 11.5rem)}}@media screen and (max-width:76.1875em){.md-overlay{background-color:#0000008a;height:0;opacity:0;position:fixed;top:0;transition:width 0ms .25s,height 0ms .25s,opacity .25s;width:0;z-index:5}[data-md-toggle=drawer]:checked~.md-overlay{height:100%;opacity:1;transition:width 0ms,height 0ms,opacity .25s;width:100%}}@keyframes facts{0%{height:0}to{height:.65rem}}@keyframes fact{0%{opacity:0;transform:translateY(100%)}50%{opacity:0}to{opacity:1;transform:translateY(0)}}:root{--md-source-forks-icon:url('data:image/svg+xml;charset=utf-8,');--md-source-repositories-icon:url('data:image/svg+xml;charset=utf-8,');--md-source-stars-icon:url('data:image/svg+xml;charset=utf-8,');--md-source-version-icon:url('data:image/svg+xml;charset=utf-8,')}.md-source{-webkit-backface-visibility:hidden;backface-visibility:hidden;display:block;font-size:.65rem;line-height:1.2;outline-color:var(--md-accent-fg-color);transition:opacity .25s;white-space:nowrap}.md-source:hover{opacity:.7}.md-source__icon{display:inline-block;height:2.4rem;vertical-align:middle;width:2rem}[dir=ltr] .md-source__icon svg{margin-left:.6rem}[dir=rtl] .md-source__icon 
svg{margin-right:.6rem}.md-source__icon svg{margin-top:.6rem}[dir=ltr] .md-source__icon+.md-source__repository{margin-left:-2rem}[dir=rtl] .md-source__icon+.md-source__repository{margin-right:-2rem}[dir=ltr] .md-source__icon+.md-source__repository{padding-left:2rem}[dir=rtl] .md-source__icon+.md-source__repository{padding-right:2rem}[dir=ltr] .md-source__repository{margin-left:.6rem}[dir=rtl] .md-source__repository{margin-right:.6rem}.md-source__repository{display:inline-block;max-width:calc(100% - 1.2rem);overflow:hidden;text-overflow:ellipsis;vertical-align:middle}.md-source__facts{display:flex;font-size:.55rem;gap:.4rem;list-style-type:none;margin:.1rem 0 0;opacity:.75;overflow:hidden;padding:0;width:100%}.md-source__repository--active .md-source__facts{animation:facts .25s ease-in}.md-source__fact{overflow:hidden;text-overflow:ellipsis}.md-source__repository--active .md-source__fact{animation:fact .4s ease-out}[dir=ltr] .md-source__fact:before{margin-right:.1rem}[dir=rtl] .md-source__fact:before{margin-left:.1rem}.md-source__fact:before{background-color:currentcolor;content:"";display:inline-block;height:.6rem;-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;vertical-align:text-top;width:.6rem}.md-source__fact:nth-child(1n+2){flex-shrink:0}.md-source__fact--version:before{-webkit-mask-image:var(--md-source-version-icon);mask-image:var(--md-source-version-icon)}.md-source__fact--stars:before{-webkit-mask-image:var(--md-source-stars-icon);mask-image:var(--md-source-stars-icon)}.md-source__fact--forks:before{-webkit-mask-image:var(--md-source-forks-icon);mask-image:var(--md-source-forks-icon)}.md-source__fact--repositories:before{-webkit-mask-image:var(--md-source-repositories-icon);mask-image:var(--md-source-repositories-icon)}.md-tabs{background-color:var(--md-primary-fg-color);color:var(--md-primary-bg-color);display:block;line-height:1.3;overflow:auto;width:100%;z-index:3}@media print{.md-tabs{display:none}}@media screen and (max-width:76.1875em){.md-tabs{display:none}}.md-tabs[hidden]{pointer-events:none}[dir=ltr] .md-tabs__list{margin-left:.2rem}[dir=rtl] .md-tabs__list{margin-right:.2rem}.md-tabs__list{contain:content;list-style:none;margin:0;padding:0;white-space:nowrap}.md-tabs__item{display:inline-block;height:2.4rem;padding-left:.6rem;padding-right:.6rem}.md-tabs__link{-webkit-backface-visibility:hidden;backface-visibility:hidden;display:block;font-size:.7rem;margin-top:.8rem;opacity:.7;outline-color:var(--md-accent-fg-color);outline-offset:.2rem;transition:transform .4s cubic-bezier(.1,.7,.1,1),opacity .25s}.md-tabs__link--active,.md-tabs__link:-webkit-any(:focus,:hover){color:inherit;opacity:1}.md-tabs__link--active,.md-tabs__link:-moz-any(:focus,:hover){color:inherit;opacity:1}.md-tabs__link--active,.md-tabs__link:is(:focus,:hover){color:inherit;opacity:1}.md-tabs__item:nth-child(2) .md-tabs__link{transition-delay:20ms}.md-tabs__item:nth-child(3) .md-tabs__link{transition-delay:40ms}.md-tabs__item:nth-child(4) .md-tabs__link{transition-delay:60ms}.md-tabs__item:nth-child(5) .md-tabs__link{transition-delay:80ms}.md-tabs__item:nth-child(6) .md-tabs__link{transition-delay:.1s}.md-tabs__item:nth-child(7) .md-tabs__link{transition-delay:.12s}.md-tabs__item:nth-child(8) .md-tabs__link{transition-delay:.14s}.md-tabs__item:nth-child(9) .md-tabs__link{transition-delay:.16s}.md-tabs__item:nth-child(10) .md-tabs__link{transition-delay:.18s}.md-tabs__item:nth-child(11) 
.md-tabs__link{transition-delay:.2s}.md-tabs__item:nth-child(12) .md-tabs__link{transition-delay:.22s}.md-tabs__item:nth-child(13) .md-tabs__link{transition-delay:.24s}.md-tabs__item:nth-child(14) .md-tabs__link{transition-delay:.26s}.md-tabs__item:nth-child(15) .md-tabs__link{transition-delay:.28s}.md-tabs__item:nth-child(16) .md-tabs__link{transition-delay:.3s}.md-tabs[hidden] .md-tabs__link{opacity:0;transform:translateY(50%);transition:transform 0ms .1s,opacity .1s}:root{--md-tag-icon:url('data:image/svg+xml;charset=utf-8,')}.md-typeset .md-tags{margin-bottom:.75em;margin-top:-.125em}[dir=ltr] .md-typeset .md-tag{margin-right:.5em}[dir=rtl] .md-typeset .md-tag{margin-left:.5em}.md-typeset .md-tag{background:var(--md-default-fg-color--lightest);border-radius:2.4rem;display:inline-block;font-size:.64rem;font-weight:700;letter-spacing:normal;line-height:1.6;margin-bottom:.5em;padding:.3125em .9375em;vertical-align:middle}.md-typeset .md-tag[href]{-webkit-tap-highlight-color:transparent;color:inherit;outline:none;transition:color 125ms,background-color 125ms}.md-typeset .md-tag[href]:focus,.md-typeset .md-tag[href]:hover{background-color:var(--md-accent-fg-color);color:var(--md-accent-bg-color)}[id]>.md-typeset .md-tag{vertical-align:text-top}.md-typeset .md-tag-icon:before{background-color:var(--md-default-fg-color--lighter);content:"";display:inline-block;height:1.2em;margin-right:.4em;-webkit-mask-image:var(--md-tag-icon);mask-image:var(--md-tag-icon);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;transition:background-color 125ms;vertical-align:text-bottom;width:1.2em}.md-typeset .md-tag-icon:-webkit-any(a:focus,a:hover):before{background-color:var(--md-accent-bg-color)}.md-typeset .md-tag-icon:-moz-any(a:focus,a:hover):before{background-color:var(--md-accent-bg-color)}.md-typeset .md-tag-icon:is(a:focus,a:hover):before{background-color:var(--md-accent-bg-color)}@keyframes pulse{0%{box-shadow:0 0 0 0 var(--md-default-fg-color--lightest);transform:scale(.95)}75%{box-shadow:0 0 0 .625em #0000;transform:scale(1)}to{box-shadow:0 0 0 0 #0000;transform:scale(.95)}}:root{--md-tooltip-width:20rem}.md-tooltip{-webkit-backface-visibility:hidden;backface-visibility:hidden;background-color:var(--md-default-bg-color);border-radius:.1rem;box-shadow:var(--md-shadow-z2);color:var(--md-default-fg-color);font-family:var(--md-text-font-family);left:clamp(var(--md-tooltip-0,0rem) + .8rem,var(--md-tooltip-x),100vw + var(--md-tooltip-0,0rem) + .8rem - var(--md-tooltip-width) - 2 * .8rem);max-width:calc(100vw - 1.6rem);opacity:0;position:absolute;top:var(--md-tooltip-y);transform:translateY(-.4rem);transition:transform 0ms .25s,opacity .25s,z-index .25s;width:var(--md-tooltip-width);z-index:0}.md-tooltip--active{opacity:1;transform:translateY(0);transition:transform .25s cubic-bezier(.1,.7,.1,1),opacity .25s,z-index 0ms;z-index:2}:-webkit-any(.focus-visible>.md-tooltip,.md-tooltip:target){outline:var(--md-accent-fg-color) auto}:-moz-any(.focus-visible>.md-tooltip,.md-tooltip:target){outline:var(--md-accent-fg-color) auto}:is(.focus-visible>.md-tooltip,.md-tooltip:target){outline:var(--md-accent-fg-color) auto}.md-tooltip__inner{font-size:.64rem;padding:.8rem}.md-tooltip__inner.md-typeset>:first-child{margin-top:0}.md-tooltip__inner.md-typeset>:last-child{margin-bottom:0}.md-annotation{font-weight:400;outline:none;white-space:normal}[dir=rtl] 
.md-annotation{direction:rtl}.md-annotation:not([hidden]){display:inline-block;line-height:1.325}.md-annotation__index{cursor:pointer;font-family:var(--md-code-font-family);font-size:.85em;margin:0 1ch;outline:none;position:relative;-webkit-user-select:none;-moz-user-select:none;user-select:none;z-index:0}.md-annotation .md-annotation__index{color:#fff;transition:z-index .25s}.md-annotation .md-annotation__index:-webkit-any(:focus,:hover){color:#fff}.md-annotation .md-annotation__index:-moz-any(:focus,:hover){color:#fff}.md-annotation .md-annotation__index:is(:focus,:hover){color:#fff}.md-annotation__index:after{background-color:var(--md-default-fg-color--lighter);border-radius:2ch;content:"";height:2.2ch;left:-.125em;margin:0 -.4ch;padding:0 .4ch;position:absolute;top:0;transition:color .25s,background-color .25s;width:calc(100% + 1.2ch);width:max(2.2ch,100% + 1.2ch);z-index:-1}@media not all and (prefers-reduced-motion){[data-md-visible]>.md-annotation__index:after{animation:pulse 2s infinite}}.md-tooltip--active+.md-annotation__index:after{animation:none;transition:color .25s,background-color .25s}code .md-annotation__index{font-family:var(--md-code-font-family);font-size:inherit}:-webkit-any(.md-tooltip--active+.md-annotation__index,:hover>.md-annotation__index){color:var(--md-accent-bg-color)}:-moz-any(.md-tooltip--active+.md-annotation__index,:hover>.md-annotation__index){color:var(--md-accent-bg-color)}:is(.md-tooltip--active+.md-annotation__index,:hover>.md-annotation__index){color:var(--md-accent-bg-color)}:-webkit-any(.md-tooltip--active+.md-annotation__index,:hover>.md-annotation__index):after{background-color:var(--md-accent-fg-color)}:-moz-any(.md-tooltip--active+.md-annotation__index,:hover>.md-annotation__index):after{background-color:var(--md-accent-fg-color)}:is(.md-tooltip--active+.md-annotation__index,:hover>.md-annotation__index):after{background-color:var(--md-accent-fg-color)}.md-tooltip--active+.md-annotation__index{animation:none;transition:none;z-index:2}.md-annotation__index [data-md-annotation-id]{display:inline-block;line-height:90%}.md-annotation__index [data-md-annotation-id]:before{content:attr(data-md-annotation-id);display:inline-block;padding-bottom:.1em;transform:scale(1.15);transition:transform .4s cubic-bezier(.1,.7,.1,1);vertical-align:.065em}@media not print{.md-annotation__index [data-md-annotation-id]:before{content:"+"}:focus-within>.md-annotation__index [data-md-annotation-id]:before{transform:scale(1.25) rotate(45deg)}}[dir=ltr] .md-top{margin-left:50%}[dir=rtl] .md-top{margin-right:50%}.md-top{background-color:var(--md-default-bg-color);border-radius:1.6rem;box-shadow:var(--md-shadow-z2);color:var(--md-default-fg-color--light);display:block;font-size:.7rem;outline:none;padding:.4rem .8rem;position:fixed;top:3.2rem;transform:translate(-50%);transition:color 125ms,background-color 125ms,transform 125ms cubic-bezier(.4,0,.2,1),opacity 125ms;z-index:2}@media print{.md-top{display:none}}[dir=rtl] .md-top{transform:translate(50%)}.md-top[hidden]{opacity:0;pointer-events:none;transform:translate(-50%,.2rem);transition-duration:0ms}[dir=rtl] .md-top[hidden]{transform:translate(50%,.2rem)}.md-top:-webkit-any(:focus,:hover){background-color:var(--md-accent-fg-color);color:var(--md-accent-bg-color)}.md-top:-moz-any(:focus,:hover){background-color:var(--md-accent-fg-color);color:var(--md-accent-bg-color)}.md-top:is(:focus,:hover){background-color:var(--md-accent-fg-color);color:var(--md-accent-bg-color)}.md-top 
svg{display:inline-block;vertical-align:-.5em}@keyframes hoverfix{0%{pointer-events:none}}:root{--md-version-icon:url('data:image/svg+xml;charset=utf-8,')}.md-version{flex-shrink:0;font-size:.8rem;height:2.4rem}[dir=ltr] .md-version__current{margin-left:1.4rem;margin-right:.4rem}[dir=rtl] .md-version__current{margin-left:.4rem;margin-right:1.4rem}.md-version__current{color:inherit;cursor:pointer;outline:none;position:relative;top:.05rem}[dir=ltr] .md-version__current:after{margin-left:.4rem}[dir=rtl] .md-version__current:after{margin-right:.4rem}.md-version__current:after{background-color:currentcolor;content:"";display:inline-block;height:.6rem;-webkit-mask-image:var(--md-version-icon);mask-image:var(--md-version-icon);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;width:.4rem}.md-version__list{background-color:var(--md-default-bg-color);border-radius:.1rem;box-shadow:var(--md-shadow-z2);color:var(--md-default-fg-color);list-style-type:none;margin:.2rem .8rem;max-height:0;opacity:0;overflow:auto;padding:0;position:absolute;scroll-snap-type:y mandatory;top:.15rem;transition:max-height 0ms .5s,opacity .25s .25s;z-index:3}.md-version:-webkit-any(:focus-within,:hover) .md-version__list{max-height:10rem;opacity:1;-webkit-transition:max-height 0ms,opacity .25s;transition:max-height 0ms,opacity .25s}.md-version:-moz-any(:focus-within,:hover) .md-version__list{max-height:10rem;opacity:1;-moz-transition:max-height 0ms,opacity .25s;transition:max-height 0ms,opacity .25s}.md-version:is(:focus-within,:hover) .md-version__list{max-height:10rem;opacity:1;transition:max-height 0ms,opacity .25s}@media (pointer:coarse){.md-version:hover .md-version__list{animation:hoverfix .25s forwards}.md-version:focus-within .md-version__list{animation:none}}.md-version__item{line-height:1.8rem}[dir=ltr] .md-version__link{padding-left:.6rem;padding-right:1.2rem}[dir=rtl] .md-version__link{padding-left:1.2rem;padding-right:.6rem}.md-version__link{cursor:pointer;display:block;outline:none;scroll-snap-align:start;transition:color .25s,background-color .25s;white-space:nowrap;width:100%}.md-version__link:-webkit-any(:focus,:hover){color:var(--md-accent-fg-color)}.md-version__link:-moz-any(:focus,:hover){color:var(--md-accent-fg-color)}.md-version__link:is(:focus,:hover){color:var(--md-accent-fg-color)}.md-version__link:focus{background-color:var(--md-default-fg-color--lightest)}:root{--md-admonition-icon--note:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--abstract:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--info:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--tip:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--success:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--question:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--warning:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--failure:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--danger:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--bug:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--example:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--quote:url('data:image/svg+xml;charset=utf-8,')}.md-typeset .admonition,.md-typeset details{background-color:var(--md-admonition-bg-color);border:.05rem solid 
#448aff;border-radius:.2rem;box-shadow:var(--md-shadow-z1);color:var(--md-admonition-fg-color);display:flow-root;font-size:.64rem;margin:1.5625em 0;padding:0 .6rem;page-break-inside:avoid}@media print{.md-typeset .admonition,.md-typeset details{box-shadow:none}}.md-typeset .admonition>*,.md-typeset details>*{box-sizing:border-box}.md-typeset .admonition :-webkit-any(.admonition,details),.md-typeset details :-webkit-any(.admonition,details){margin-bottom:1em;margin-top:1em}.md-typeset .admonition :-moz-any(.admonition,details),.md-typeset details :-moz-any(.admonition,details){margin-bottom:1em;margin-top:1em}.md-typeset .admonition :is(.admonition,details),.md-typeset details :is(.admonition,details){margin-bottom:1em;margin-top:1em}.md-typeset .admonition .md-typeset__scrollwrap,.md-typeset details .md-typeset__scrollwrap{margin:1em -.6rem}.md-typeset .admonition .md-typeset__table,.md-typeset details .md-typeset__table{padding:0 .6rem}.md-typeset .admonition>.tabbed-set:only-child,.md-typeset details>.tabbed-set:only-child{margin-top:0}html .md-typeset .admonition>:last-child,html .md-typeset details>:last-child{margin-bottom:.6rem}[dir=ltr] .md-typeset .admonition-title,[dir=ltr] .md-typeset summary{padding-left:2rem;padding-right:.6rem}[dir=rtl] .md-typeset .admonition-title,[dir=rtl] .md-typeset summary{padding-left:.6rem;padding-right:2rem}[dir=ltr] .md-typeset .admonition-title,[dir=ltr] .md-typeset summary{border-left-width:.2rem}[dir=rtl] .md-typeset .admonition-title,[dir=rtl] .md-typeset summary{border-right-width:.2rem}[dir=ltr] .md-typeset .admonition-title,[dir=ltr] .md-typeset summary{border-top-left-radius:.1rem}[dir=ltr] .md-typeset .admonition-title,[dir=ltr] .md-typeset summary,[dir=rtl] .md-typeset .admonition-title,[dir=rtl] .md-typeset summary{border-top-right-radius:.1rem}[dir=rtl] .md-typeset .admonition-title,[dir=rtl] .md-typeset summary{border-top-left-radius:.1rem}.md-typeset .admonition-title,.md-typeset summary{background-color:#448aff1a;border:none;font-weight:700;margin:0 -.6rem;padding-bottom:.4rem;padding-top:.4rem;position:relative}html .md-typeset .admonition-title:last-child,html .md-typeset summary:last-child{margin-bottom:0}[dir=ltr] .md-typeset .admonition-title:before,[dir=ltr] .md-typeset summary:before{left:.6rem}[dir=rtl] .md-typeset .admonition-title:before,[dir=rtl] .md-typeset summary:before{right:.6rem}.md-typeset .admonition-title:before,.md-typeset summary:before{background-color:#448aff;content:"";height:1rem;-webkit-mask-image:var(--md-admonition-icon--note);mask-image:var(--md-admonition-icon--note);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;position:absolute;top:.625em;width:1rem}.md-typeset .admonition-title code,.md-typeset summary code{box-shadow:0 0 0 .05rem var(--md-default-fg-color--lightest)}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.note){border-color:#448aff}.md-typeset :-moz-any(.admonition,details):-moz-any(.note){border-color:#448aff}.md-typeset :is(.admonition,details):is(.note){border-color:#448aff}.md-typeset :-webkit-any(.note)>:-webkit-any(.admonition-title,summary){background-color:#448aff1a}.md-typeset :-moz-any(.note)>:-moz-any(.admonition-title,summary){background-color:#448aff1a}.md-typeset :is(.note)>:is(.admonition-title,summary){background-color:#448aff1a}.md-typeset 
:-webkit-any(.note)>:-webkit-any(.admonition-title,summary):before{background-color:#448aff;-webkit-mask-image:var(--md-admonition-icon--note);mask-image:var(--md-admonition-icon--note)}.md-typeset :-moz-any(.note)>:-moz-any(.admonition-title,summary):before{background-color:#448aff;mask-image:var(--md-admonition-icon--note)}.md-typeset :is(.note)>:is(.admonition-title,summary):before{background-color:#448aff;-webkit-mask-image:var(--md-admonition-icon--note);mask-image:var(--md-admonition-icon--note)}.md-typeset :-webkit-any(.note)>:-webkit-any(.admonition-title,summary):after{color:#448aff}.md-typeset :-moz-any(.note)>:-moz-any(.admonition-title,summary):after{color:#448aff}.md-typeset :is(.note)>:is(.admonition-title,summary):after{color:#448aff}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.abstract,.summary,.tldr){border-color:#00b0ff}.md-typeset :-moz-any(.admonition,details):-moz-any(.abstract,.summary,.tldr){border-color:#00b0ff}.md-typeset :is(.admonition,details):is(.abstract,.summary,.tldr){border-color:#00b0ff}.md-typeset :-webkit-any(.abstract,.summary,.tldr)>:-webkit-any(.admonition-title,summary){background-color:#00b0ff1a}.md-typeset :-moz-any(.abstract,.summary,.tldr)>:-moz-any(.admonition-title,summary){background-color:#00b0ff1a}.md-typeset :is(.abstract,.summary,.tldr)>:is(.admonition-title,summary){background-color:#00b0ff1a}.md-typeset :-webkit-any(.abstract,.summary,.tldr)>:-webkit-any(.admonition-title,summary):before{background-color:#00b0ff;-webkit-mask-image:var(--md-admonition-icon--abstract);mask-image:var(--md-admonition-icon--abstract)}.md-typeset :-moz-any(.abstract,.summary,.tldr)>:-moz-any(.admonition-title,summary):before{background-color:#00b0ff;mask-image:var(--md-admonition-icon--abstract)}.md-typeset :is(.abstract,.summary,.tldr)>:is(.admonition-title,summary):before{background-color:#00b0ff;-webkit-mask-image:var(--md-admonition-icon--abstract);mask-image:var(--md-admonition-icon--abstract)}.md-typeset :-webkit-any(.abstract,.summary,.tldr)>:-webkit-any(.admonition-title,summary):after{color:#00b0ff}.md-typeset :-moz-any(.abstract,.summary,.tldr)>:-moz-any(.admonition-title,summary):after{color:#00b0ff}.md-typeset :is(.abstract,.summary,.tldr)>:is(.admonition-title,summary):after{color:#00b0ff}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.info,.todo){border-color:#00b8d4}.md-typeset :-moz-any(.admonition,details):-moz-any(.info,.todo){border-color:#00b8d4}.md-typeset :is(.admonition,details):is(.info,.todo){border-color:#00b8d4}.md-typeset :-webkit-any(.info,.todo)>:-webkit-any(.admonition-title,summary){background-color:#00b8d41a}.md-typeset :-moz-any(.info,.todo)>:-moz-any(.admonition-title,summary){background-color:#00b8d41a}.md-typeset :is(.info,.todo)>:is(.admonition-title,summary){background-color:#00b8d41a}.md-typeset :-webkit-any(.info,.todo)>:-webkit-any(.admonition-title,summary):before{background-color:#00b8d4;-webkit-mask-image:var(--md-admonition-icon--info);mask-image:var(--md-admonition-icon--info)}.md-typeset :-moz-any(.info,.todo)>:-moz-any(.admonition-title,summary):before{background-color:#00b8d4;mask-image:var(--md-admonition-icon--info)}.md-typeset :is(.info,.todo)>:is(.admonition-title,summary):before{background-color:#00b8d4;-webkit-mask-image:var(--md-admonition-icon--info);mask-image:var(--md-admonition-icon--info)}.md-typeset :-webkit-any(.info,.todo)>:-webkit-any(.admonition-title,summary):after{color:#00b8d4}.md-typeset 
:-moz-any(.info,.todo)>:-moz-any(.admonition-title,summary):after{color:#00b8d4}.md-typeset :is(.info,.todo)>:is(.admonition-title,summary):after{color:#00b8d4}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.tip,.hint,.important){border-color:#00bfa5}.md-typeset :-moz-any(.admonition,details):-moz-any(.tip,.hint,.important){border-color:#00bfa5}.md-typeset :is(.admonition,details):is(.tip,.hint,.important){border-color:#00bfa5}.md-typeset :-webkit-any(.tip,.hint,.important)>:-webkit-any(.admonition-title,summary){background-color:#00bfa51a}.md-typeset :-moz-any(.tip,.hint,.important)>:-moz-any(.admonition-title,summary){background-color:#00bfa51a}.md-typeset :is(.tip,.hint,.important)>:is(.admonition-title,summary){background-color:#00bfa51a}.md-typeset :-webkit-any(.tip,.hint,.important)>:-webkit-any(.admonition-title,summary):before{background-color:#00bfa5;-webkit-mask-image:var(--md-admonition-icon--tip);mask-image:var(--md-admonition-icon--tip)}.md-typeset :-moz-any(.tip,.hint,.important)>:-moz-any(.admonition-title,summary):before{background-color:#00bfa5;mask-image:var(--md-admonition-icon--tip)}.md-typeset :is(.tip,.hint,.important)>:is(.admonition-title,summary):before{background-color:#00bfa5;-webkit-mask-image:var(--md-admonition-icon--tip);mask-image:var(--md-admonition-icon--tip)}.md-typeset :-webkit-any(.tip,.hint,.important)>:-webkit-any(.admonition-title,summary):after{color:#00bfa5}.md-typeset :-moz-any(.tip,.hint,.important)>:-moz-any(.admonition-title,summary):after{color:#00bfa5}.md-typeset :is(.tip,.hint,.important)>:is(.admonition-title,summary):after{color:#00bfa5}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.success,.check,.done){border-color:#00c853}.md-typeset :-moz-any(.admonition,details):-moz-any(.success,.check,.done){border-color:#00c853}.md-typeset :is(.admonition,details):is(.success,.check,.done){border-color:#00c853}.md-typeset :-webkit-any(.success,.check,.done)>:-webkit-any(.admonition-title,summary){background-color:#00c8531a}.md-typeset :-moz-any(.success,.check,.done)>:-moz-any(.admonition-title,summary){background-color:#00c8531a}.md-typeset :is(.success,.check,.done)>:is(.admonition-title,summary){background-color:#00c8531a}.md-typeset :-webkit-any(.success,.check,.done)>:-webkit-any(.admonition-title,summary):before{background-color:#00c853;-webkit-mask-image:var(--md-admonition-icon--success);mask-image:var(--md-admonition-icon--success)}.md-typeset :-moz-any(.success,.check,.done)>:-moz-any(.admonition-title,summary):before{background-color:#00c853;mask-image:var(--md-admonition-icon--success)}.md-typeset :is(.success,.check,.done)>:is(.admonition-title,summary):before{background-color:#00c853;-webkit-mask-image:var(--md-admonition-icon--success);mask-image:var(--md-admonition-icon--success)}.md-typeset :-webkit-any(.success,.check,.done)>:-webkit-any(.admonition-title,summary):after{color:#00c853}.md-typeset :-moz-any(.success,.check,.done)>:-moz-any(.admonition-title,summary):after{color:#00c853}.md-typeset :is(.success,.check,.done)>:is(.admonition-title,summary):after{color:#00c853}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.question,.help,.faq){border-color:#64dd17}.md-typeset :-moz-any(.admonition,details):-moz-any(.question,.help,.faq){border-color:#64dd17}.md-typeset :is(.admonition,details):is(.question,.help,.faq){border-color:#64dd17}.md-typeset :-webkit-any(.question,.help,.faq)>:-webkit-any(.admonition-title,summary){background-color:#64dd171a}.md-typeset 
:-moz-any(.question,.help,.faq)>:-moz-any(.admonition-title,summary){background-color:#64dd171a}.md-typeset :is(.question,.help,.faq)>:is(.admonition-title,summary){background-color:#64dd171a}.md-typeset :-webkit-any(.question,.help,.faq)>:-webkit-any(.admonition-title,summary):before{background-color:#64dd17;-webkit-mask-image:var(--md-admonition-icon--question);mask-image:var(--md-admonition-icon--question)}.md-typeset :-moz-any(.question,.help,.faq)>:-moz-any(.admonition-title,summary):before{background-color:#64dd17;mask-image:var(--md-admonition-icon--question)}.md-typeset :is(.question,.help,.faq)>:is(.admonition-title,summary):before{background-color:#64dd17;-webkit-mask-image:var(--md-admonition-icon--question);mask-image:var(--md-admonition-icon--question)}.md-typeset :-webkit-any(.question,.help,.faq)>:-webkit-any(.admonition-title,summary):after{color:#64dd17}.md-typeset :-moz-any(.question,.help,.faq)>:-moz-any(.admonition-title,summary):after{color:#64dd17}.md-typeset :is(.question,.help,.faq)>:is(.admonition-title,summary):after{color:#64dd17}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.warning,.caution,.attention){border-color:#ff9100}.md-typeset :-moz-any(.admonition,details):-moz-any(.warning,.caution,.attention){border-color:#ff9100}.md-typeset :is(.admonition,details):is(.warning,.caution,.attention){border-color:#ff9100}.md-typeset :-webkit-any(.warning,.caution,.attention)>:-webkit-any(.admonition-title,summary){background-color:#ff91001a}.md-typeset :-moz-any(.warning,.caution,.attention)>:-moz-any(.admonition-title,summary){background-color:#ff91001a}.md-typeset :is(.warning,.caution,.attention)>:is(.admonition-title,summary){background-color:#ff91001a}.md-typeset :-webkit-any(.warning,.caution,.attention)>:-webkit-any(.admonition-title,summary):before{background-color:#ff9100;-webkit-mask-image:var(--md-admonition-icon--warning);mask-image:var(--md-admonition-icon--warning)}.md-typeset :-moz-any(.warning,.caution,.attention)>:-moz-any(.admonition-title,summary):before{background-color:#ff9100;mask-image:var(--md-admonition-icon--warning)}.md-typeset :is(.warning,.caution,.attention)>:is(.admonition-title,summary):before{background-color:#ff9100;-webkit-mask-image:var(--md-admonition-icon--warning);mask-image:var(--md-admonition-icon--warning)}.md-typeset :-webkit-any(.warning,.caution,.attention)>:-webkit-any(.admonition-title,summary):after{color:#ff9100}.md-typeset :-moz-any(.warning,.caution,.attention)>:-moz-any(.admonition-title,summary):after{color:#ff9100}.md-typeset :is(.warning,.caution,.attention)>:is(.admonition-title,summary):after{color:#ff9100}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.failure,.fail,.missing){border-color:#ff5252}.md-typeset :-moz-any(.admonition,details):-moz-any(.failure,.fail,.missing){border-color:#ff5252}.md-typeset :is(.admonition,details):is(.failure,.fail,.missing){border-color:#ff5252}.md-typeset :-webkit-any(.failure,.fail,.missing)>:-webkit-any(.admonition-title,summary){background-color:#ff52521a}.md-typeset :-moz-any(.failure,.fail,.missing)>:-moz-any(.admonition-title,summary){background-color:#ff52521a}.md-typeset :is(.failure,.fail,.missing)>:is(.admonition-title,summary){background-color:#ff52521a}.md-typeset :-webkit-any(.failure,.fail,.missing)>:-webkit-any(.admonition-title,summary):before{background-color:#ff5252;-webkit-mask-image:var(--md-admonition-icon--failure);mask-image:var(--md-admonition-icon--failure)}.md-typeset 
:-moz-any(.failure,.fail,.missing)>:-moz-any(.admonition-title,summary):before{background-color:#ff5252;mask-image:var(--md-admonition-icon--failure)}.md-typeset :is(.failure,.fail,.missing)>:is(.admonition-title,summary):before{background-color:#ff5252;-webkit-mask-image:var(--md-admonition-icon--failure);mask-image:var(--md-admonition-icon--failure)}.md-typeset :-webkit-any(.failure,.fail,.missing)>:-webkit-any(.admonition-title,summary):after{color:#ff5252}.md-typeset :-moz-any(.failure,.fail,.missing)>:-moz-any(.admonition-title,summary):after{color:#ff5252}.md-typeset :is(.failure,.fail,.missing)>:is(.admonition-title,summary):after{color:#ff5252}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.danger,.error){border-color:#ff1744}.md-typeset :-moz-any(.admonition,details):-moz-any(.danger,.error){border-color:#ff1744}.md-typeset :is(.admonition,details):is(.danger,.error){border-color:#ff1744}.md-typeset :-webkit-any(.danger,.error)>:-webkit-any(.admonition-title,summary){background-color:#ff17441a}.md-typeset :-moz-any(.danger,.error)>:-moz-any(.admonition-title,summary){background-color:#ff17441a}.md-typeset :is(.danger,.error)>:is(.admonition-title,summary){background-color:#ff17441a}.md-typeset :-webkit-any(.danger,.error)>:-webkit-any(.admonition-title,summary):before{background-color:#ff1744;-webkit-mask-image:var(--md-admonition-icon--danger);mask-image:var(--md-admonition-icon--danger)}.md-typeset :-moz-any(.danger,.error)>:-moz-any(.admonition-title,summary):before{background-color:#ff1744;mask-image:var(--md-admonition-icon--danger)}.md-typeset :is(.danger,.error)>:is(.admonition-title,summary):before{background-color:#ff1744;-webkit-mask-image:var(--md-admonition-icon--danger);mask-image:var(--md-admonition-icon--danger)}.md-typeset :-webkit-any(.danger,.error)>:-webkit-any(.admonition-title,summary):after{color:#ff1744}.md-typeset :-moz-any(.danger,.error)>:-moz-any(.admonition-title,summary):after{color:#ff1744}.md-typeset :is(.danger,.error)>:is(.admonition-title,summary):after{color:#ff1744}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.bug){border-color:#f50057}.md-typeset :-moz-any(.admonition,details):-moz-any(.bug){border-color:#f50057}.md-typeset :is(.admonition,details):is(.bug){border-color:#f50057}.md-typeset :-webkit-any(.bug)>:-webkit-any(.admonition-title,summary){background-color:#f500571a}.md-typeset :-moz-any(.bug)>:-moz-any(.admonition-title,summary){background-color:#f500571a}.md-typeset :is(.bug)>:is(.admonition-title,summary){background-color:#f500571a}.md-typeset :-webkit-any(.bug)>:-webkit-any(.admonition-title,summary):before{background-color:#f50057;-webkit-mask-image:var(--md-admonition-icon--bug);mask-image:var(--md-admonition-icon--bug)}.md-typeset :-moz-any(.bug)>:-moz-any(.admonition-title,summary):before{background-color:#f50057;mask-image:var(--md-admonition-icon--bug)}.md-typeset :is(.bug)>:is(.admonition-title,summary):before{background-color:#f50057;-webkit-mask-image:var(--md-admonition-icon--bug);mask-image:var(--md-admonition-icon--bug)}.md-typeset :-webkit-any(.bug)>:-webkit-any(.admonition-title,summary):after{color:#f50057}.md-typeset :-moz-any(.bug)>:-moz-any(.admonition-title,summary):after{color:#f50057}.md-typeset :is(.bug)>:is(.admonition-title,summary):after{color:#f50057}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.example){border-color:#7c4dff}.md-typeset :-moz-any(.admonition,details):-moz-any(.example){border-color:#7c4dff}.md-typeset 
:is(.admonition,details):is(.example){border-color:#7c4dff}.md-typeset :-webkit-any(.example)>:-webkit-any(.admonition-title,summary){background-color:#7c4dff1a}.md-typeset :-moz-any(.example)>:-moz-any(.admonition-title,summary){background-color:#7c4dff1a}.md-typeset :is(.example)>:is(.admonition-title,summary){background-color:#7c4dff1a}.md-typeset :-webkit-any(.example)>:-webkit-any(.admonition-title,summary):before{background-color:#7c4dff;-webkit-mask-image:var(--md-admonition-icon--example);mask-image:var(--md-admonition-icon--example)}.md-typeset :-moz-any(.example)>:-moz-any(.admonition-title,summary):before{background-color:#7c4dff;mask-image:var(--md-admonition-icon--example)}.md-typeset :is(.example)>:is(.admonition-title,summary):before{background-color:#7c4dff;-webkit-mask-image:var(--md-admonition-icon--example);mask-image:var(--md-admonition-icon--example)}.md-typeset :-webkit-any(.example)>:-webkit-any(.admonition-title,summary):after{color:#7c4dff}.md-typeset :-moz-any(.example)>:-moz-any(.admonition-title,summary):after{color:#7c4dff}.md-typeset :is(.example)>:is(.admonition-title,summary):after{color:#7c4dff}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.quote,.cite){border-color:#9e9e9e}.md-typeset :-moz-any(.admonition,details):-moz-any(.quote,.cite){border-color:#9e9e9e}.md-typeset :is(.admonition,details):is(.quote,.cite){border-color:#9e9e9e}.md-typeset :-webkit-any(.quote,.cite)>:-webkit-any(.admonition-title,summary){background-color:#9e9e9e1a}.md-typeset :-moz-any(.quote,.cite)>:-moz-any(.admonition-title,summary){background-color:#9e9e9e1a}.md-typeset :is(.quote,.cite)>:is(.admonition-title,summary){background-color:#9e9e9e1a}.md-typeset :-webkit-any(.quote,.cite)>:-webkit-any(.admonition-title,summary):before{background-color:#9e9e9e;-webkit-mask-image:var(--md-admonition-icon--quote);mask-image:var(--md-admonition-icon--quote)}.md-typeset :-moz-any(.quote,.cite)>:-moz-any(.admonition-title,summary):before{background-color:#9e9e9e;mask-image:var(--md-admonition-icon--quote)}.md-typeset :is(.quote,.cite)>:is(.admonition-title,summary):before{background-color:#9e9e9e;-webkit-mask-image:var(--md-admonition-icon--quote);mask-image:var(--md-admonition-icon--quote)}.md-typeset :-webkit-any(.quote,.cite)>:-webkit-any(.admonition-title,summary):after{color:#9e9e9e}.md-typeset :-moz-any(.quote,.cite)>:-moz-any(.admonition-title,summary):after{color:#9e9e9e}.md-typeset :is(.quote,.cite)>:is(.admonition-title,summary):after{color:#9e9e9e}:root{--md-footnotes-icon:url('data:image/svg+xml;charset=utf-8,')}.md-typeset .footnote{color:var(--md-default-fg-color--light);font-size:.64rem}[dir=ltr] .md-typeset .footnote>ol{margin-left:0}[dir=rtl] .md-typeset .footnote>ol{margin-right:0}.md-typeset .footnote>ol>li{transition:color 125ms}.md-typeset .footnote>ol>li:target{color:var(--md-default-fg-color)}.md-typeset .footnote>ol>li:focus-within .footnote-backref{opacity:1;transform:translateX(0);transition:none}.md-typeset .footnote>ol>li:-webkit-any(:hover,:target) .footnote-backref{opacity:1;transform:translateX(0)}.md-typeset .footnote>ol>li:-moz-any(:hover,:target) .footnote-backref{opacity:1;transform:translateX(0)}.md-typeset .footnote>ol>li:is(:hover,:target) .footnote-backref{opacity:1;transform:translateX(0)}.md-typeset .footnote>ol>li>:first-child{margin-top:0}.md-typeset .footnote-ref{font-size:.75em;font-weight:700}html .md-typeset .footnote-ref{outline-offset:.1rem}.md-typeset [id^="fnref:"]:target>.footnote-ref{outline:auto}.md-typeset 
.footnote-backref{color:var(--md-typeset-a-color);display:inline-block;font-size:0;opacity:0;transform:translateX(.25rem);transition:color .25s,transform .25s .25s,opacity 125ms .25s;vertical-align:text-bottom}@media print{.md-typeset .footnote-backref{color:var(--md-typeset-a-color);opacity:1;transform:translateX(0)}}[dir=rtl] .md-typeset .footnote-backref{transform:translateX(-.25rem)}.md-typeset .footnote-backref:hover{color:var(--md-accent-fg-color)}.md-typeset .footnote-backref:before{background-color:currentcolor;content:"";display:inline-block;height:.8rem;-webkit-mask-image:var(--md-footnotes-icon);mask-image:var(--md-footnotes-icon);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;width:.8rem}[dir=rtl] .md-typeset .footnote-backref:before svg{transform:scaleX(-1)}[dir=ltr] .md-typeset .headerlink{margin-left:.5rem}[dir=rtl] .md-typeset .headerlink{margin-right:.5rem}.md-typeset .headerlink{color:var(--md-default-fg-color--lighter);display:inline-block;opacity:0;transition:color .25s,opacity 125ms}@media print{.md-typeset .headerlink{display:none}}.md-typeset .headerlink:focus,.md-typeset :-webkit-any(:hover,:target)>.headerlink{opacity:1;-webkit-transition:color .25s,opacity 125ms;transition:color .25s,opacity 125ms}.md-typeset .headerlink:focus,.md-typeset :-moz-any(:hover,:target)>.headerlink{opacity:1;-moz-transition:color .25s,opacity 125ms;transition:color .25s,opacity 125ms}.md-typeset .headerlink:focus,.md-typeset :is(:hover,:target)>.headerlink{opacity:1;transition:color .25s,opacity 125ms}.md-typeset .headerlink:-webkit-any(:focus,:hover),.md-typeset :target>.headerlink{color:var(--md-accent-fg-color)}.md-typeset .headerlink:-moz-any(:focus,:hover),.md-typeset :target>.headerlink{color:var(--md-accent-fg-color)}.md-typeset .headerlink:is(:focus,:hover),.md-typeset :target>.headerlink{color:var(--md-accent-fg-color)}.md-typeset :target{--md-scroll-margin:3.6rem;--md-scroll-offset:0rem;scroll-margin-top:calc(var(--md-scroll-margin) - var(--md-scroll-offset))}@media screen and (min-width:76.25em){.md-header--lifted~.md-container .md-typeset :target{--md-scroll-margin:6rem}}.md-typeset :-webkit-any(h1,h2,h3):target{--md-scroll-offset:0.2rem}.md-typeset :-moz-any(h1,h2,h3):target{--md-scroll-offset:0.2rem}.md-typeset :is(h1,h2,h3):target{--md-scroll-offset:0.2rem}.md-typeset h4:target{--md-scroll-offset:0.15rem}.md-typeset div.arithmatex{overflow:auto}@media screen and (max-width:44.9375em){.md-typeset div.arithmatex{margin:0 -.8rem}}.md-typeset div.arithmatex>*{margin-left:auto!important;margin-right:auto!important;padding:0 .8rem;touch-action:auto;width:-webkit-min-content;width:-moz-min-content;width:min-content}.md-typeset div.arithmatex>* mjx-container{margin:0!important}.md-typeset :-webkit-any(del,ins,.comment).critic{-webkit-box-decoration-break:clone;box-decoration-break:clone}.md-typeset :-moz-any(del,ins,.comment).critic{box-decoration-break:clone}.md-typeset :is(del,ins,.comment).critic{-webkit-box-decoration-break:clone;box-decoration-break:clone}.md-typeset del.critic{background-color:var(--md-typeset-del-color)}.md-typeset ins.critic{background-color:var(--md-typeset-ins-color)}.md-typeset .critic.comment{color:var(--md-code-hl-comment-color)}.md-typeset .critic.comment:before{content:"/* "}.md-typeset .critic.comment:after{content:" */"}.md-typeset .critic.block{box-shadow:none;display:block;margin:1em 
0;overflow:auto;padding-left:.8rem;padding-right:.8rem}.md-typeset .critic.block>:first-child{margin-top:.5em}.md-typeset .critic.block>:last-child{margin-bottom:.5em}:root{--md-details-icon:url('data:image/svg+xml;charset=utf-8,')}.md-typeset details{display:flow-root;overflow:visible;padding-top:0}.md-typeset details[open]>summary:after{transform:rotate(90deg)}.md-typeset details:not([open]){box-shadow:none;padding-bottom:0}.md-typeset details:not([open])>summary{border-radius:.1rem}[dir=ltr] .md-typeset summary{padding-right:1.8rem}[dir=rtl] .md-typeset summary{padding-left:1.8rem}[dir=ltr] .md-typeset summary{border-top-left-radius:.1rem}[dir=ltr] .md-typeset summary,[dir=rtl] .md-typeset summary{border-top-right-radius:.1rem}[dir=rtl] .md-typeset summary{border-top-left-radius:.1rem}.md-typeset summary{cursor:pointer;display:block;min-height:1rem}.md-typeset summary.focus-visible{outline-color:var(--md-accent-fg-color);outline-offset:.2rem}.md-typeset summary:not(.focus-visible){-webkit-tap-highlight-color:transparent;outline:none}[dir=ltr] .md-typeset summary:after{right:.4rem}[dir=rtl] .md-typeset summary:after{left:.4rem}.md-typeset summary:after{background-color:currentcolor;content:"";height:1rem;-webkit-mask-image:var(--md-details-icon);mask-image:var(--md-details-icon);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;position:absolute;top:.625em;transform:rotate(0deg);transition:transform .25s;width:1rem}[dir=rtl] .md-typeset summary:after{transform:rotate(180deg)}.md-typeset summary::marker{display:none}.md-typeset summary::-webkit-details-marker{display:none}.md-typeset :-webkit-any(.emojione,.twemoji,.gemoji){display:inline-flex;height:1.125em;vertical-align:text-top}.md-typeset :-moz-any(.emojione,.twemoji,.gemoji){display:inline-flex;height:1.125em;vertical-align:text-top}.md-typeset :is(.emojione,.twemoji,.gemoji){display:inline-flex;height:1.125em;vertical-align:text-top}.md-typeset :-webkit-any(.emojione,.twemoji,.gemoji) svg{fill:currentcolor;max-height:100%;width:1.125em}.md-typeset :-moz-any(.emojione,.twemoji,.gemoji) svg{fill:currentcolor;max-height:100%;width:1.125em}.md-typeset :is(.emojione,.twemoji,.gemoji) svg{fill:currentcolor;max-height:100%;width:1.125em}.highlight :-webkit-any(.o,.ow){color:var(--md-code-hl-operator-color)}.highlight :-moz-any(.o,.ow){color:var(--md-code-hl-operator-color)}.highlight :is(.o,.ow){color:var(--md-code-hl-operator-color)}.highlight .p{color:var(--md-code-hl-punctuation-color)}.highlight :-webkit-any(.cpf,.l,.s,.sb,.sc,.s2,.si,.s1,.ss){color:var(--md-code-hl-string-color)}.highlight :-moz-any(.cpf,.l,.s,.sb,.sc,.s2,.si,.s1,.ss){color:var(--md-code-hl-string-color)}.highlight :is(.cpf,.l,.s,.sb,.sc,.s2,.si,.s1,.ss){color:var(--md-code-hl-string-color)}.highlight :-webkit-any(.cp,.se,.sh,.sr,.sx){color:var(--md-code-hl-special-color)}.highlight :-moz-any(.cp,.se,.sh,.sr,.sx){color:var(--md-code-hl-special-color)}.highlight :is(.cp,.se,.sh,.sr,.sx){color:var(--md-code-hl-special-color)}.highlight :-webkit-any(.m,.mb,.mf,.mh,.mi,.il,.mo){color:var(--md-code-hl-number-color)}.highlight :-moz-any(.m,.mb,.mf,.mh,.mi,.il,.mo){color:var(--md-code-hl-number-color)}.highlight :is(.m,.mb,.mf,.mh,.mi,.il,.mo){color:var(--md-code-hl-number-color)}.highlight :-webkit-any(.k,.kd,.kn,.kp,.kr,.kt){color:var(--md-code-hl-keyword-color)}.highlight :-moz-any(.k,.kd,.kn,.kp,.kr,.kt){color:var(--md-code-hl-keyword-color)}.highlight 
:is(.k,.kd,.kn,.kp,.kr,.kt){color:var(--md-code-hl-keyword-color)}.highlight :-webkit-any(.kc,.n){color:var(--md-code-hl-name-color)}.highlight :-moz-any(.kc,.n){color:var(--md-code-hl-name-color)}.highlight :is(.kc,.n){color:var(--md-code-hl-name-color)}.highlight :-webkit-any(.no,.nb,.bp){color:var(--md-code-hl-constant-color)}.highlight :-moz-any(.no,.nb,.bp){color:var(--md-code-hl-constant-color)}.highlight :is(.no,.nb,.bp){color:var(--md-code-hl-constant-color)}.highlight :-webkit-any(.nc,.ne,.nf,.nn){color:var(--md-code-hl-function-color)}.highlight :-moz-any(.nc,.ne,.nf,.nn){color:var(--md-code-hl-function-color)}.highlight :is(.nc,.ne,.nf,.nn){color:var(--md-code-hl-function-color)}.highlight :-webkit-any(.nd,.ni,.nl,.nt){color:var(--md-code-hl-keyword-color)}.highlight :-moz-any(.nd,.ni,.nl,.nt){color:var(--md-code-hl-keyword-color)}.highlight :is(.nd,.ni,.nl,.nt){color:var(--md-code-hl-keyword-color)}.highlight :-webkit-any(.c,.cm,.c1,.ch,.cs,.sd){color:var(--md-code-hl-comment-color)}.highlight :-moz-any(.c,.cm,.c1,.ch,.cs,.sd){color:var(--md-code-hl-comment-color)}.highlight :is(.c,.cm,.c1,.ch,.cs,.sd){color:var(--md-code-hl-comment-color)}.highlight :-webkit-any(.na,.nv,.vc,.vg,.vi){color:var(--md-code-hl-variable-color)}.highlight :-moz-any(.na,.nv,.vc,.vg,.vi){color:var(--md-code-hl-variable-color)}.highlight :is(.na,.nv,.vc,.vg,.vi){color:var(--md-code-hl-variable-color)}.highlight :-webkit-any(.ge,.gr,.gh,.go,.gp,.gs,.gu,.gt){color:var(--md-code-hl-generic-color)}.highlight :-moz-any(.ge,.gr,.gh,.go,.gp,.gs,.gu,.gt){color:var(--md-code-hl-generic-color)}.highlight :is(.ge,.gr,.gh,.go,.gp,.gs,.gu,.gt){color:var(--md-code-hl-generic-color)}.highlight :-webkit-any(.gd,.gi){border-radius:.1rem;margin:0 -.125em;padding:0 .125em}.highlight :-moz-any(.gd,.gi){border-radius:.1rem;margin:0 -.125em;padding:0 .125em}.highlight :is(.gd,.gi){border-radius:.1rem;margin:0 -.125em;padding:0 .125em}.highlight .gd{background-color:var(--md-typeset-del-color)}.highlight .gi{background-color:var(--md-typeset-ins-color)}.highlight .hll{background-color:var(--md-code-hl-color);display:block;margin:0 -1.1764705882em;padding:0 1.1764705882em}.highlight span.filename{background-color:var(--md-code-bg-color);border-bottom:.05rem solid var(--md-default-fg-color--lightest);border-top-left-radius:.1rem;border-top-right-radius:.1rem;display:flow-root;font-size:.85em;font-weight:700;margin-top:1em;padding:.6617647059em 1.1764705882em;position:relative}.highlight span.filename+pre{margin-top:0}.highlight span.filename+pre>code{border-top-left-radius:0;border-top-right-radius:0}.highlight [data-linenos]:before{background-color:var(--md-code-bg-color);box-shadow:-.05rem 0 var(--md-default-fg-color--lightest) inset;color:var(--md-default-fg-color--light);content:attr(data-linenos);float:left;left:-1.1764705882em;margin-left:-1.1764705882em;margin-right:1.1764705882em;padding-left:1.1764705882em;position:-webkit-sticky;position:sticky;-webkit-user-select:none;-moz-user-select:none;user-select:none;z-index:3}.highlight code a[id]{position:absolute;visibility:hidden}.highlight code[data-md-copying] .hll{display:contents}.highlight code[data-md-copying] .md-annotation{display:none}.highlighttable{display:flow-root}.highlighttable :-webkit-any(tbody,td){display:block;padding:0}.highlighttable :-moz-any(tbody,td){display:block;padding:0}.highlighttable :is(tbody,td){display:block;padding:0}.highlighttable tr{display:flex}.highlighttable pre{margin:0}.highlighttable 
th.filename{flex-grow:1;padding:0;text-align:left}.highlighttable th.filename span.filename{margin-top:0}.highlighttable .linenos{background-color:var(--md-code-bg-color);border-bottom-left-radius:.1rem;border-top-left-radius:.1rem;font-size:.85em;padding:.7720588235em 0 .7720588235em 1.1764705882em;-webkit-user-select:none;-moz-user-select:none;user-select:none}.highlighttable .linenodiv{box-shadow:-.05rem 0 var(--md-default-fg-color--lightest) inset;padding-right:.5882352941em}.highlighttable .linenodiv pre{color:var(--md-default-fg-color--light);text-align:right}.highlighttable .code{flex:1;min-width:0}.linenodiv a{color:inherit}.md-typeset .highlighttable{direction:ltr;margin:1em 0}.md-typeset .highlighttable>tbody>tr>.code>div>pre>code{border-bottom-left-radius:0;border-top-left-radius:0}.md-typeset .highlight+.result{border:.05rem solid var(--md-code-bg-color);border-bottom-left-radius:.1rem;border-bottom-right-radius:.1rem;border-top-width:.1rem;margin-top:-1.125em;overflow:visible;padding:0 1em}.md-typeset .highlight+.result:after{clear:both;content:"";display:block}@media screen and (max-width:44.9375em){.md-content__inner>.highlight{margin:1em -.8rem}.md-content__inner>.highlight>.filename,.md-content__inner>.highlight>.highlighttable>tbody>tr>.code>div>pre>code,.md-content__inner>.highlight>.highlighttable>tbody>tr>.filename span.filename,.md-content__inner>.highlight>.highlighttable>tbody>tr>.linenos,.md-content__inner>.highlight>pre>code{border-radius:0}.md-content__inner>.highlight+.result{border-left-width:0;border-radius:0;border-right-width:0;margin-left:-.8rem;margin-right:-.8rem}}.md-typeset .keys kbd:-webkit-any(:before,:after){-moz-osx-font-smoothing:initial;-webkit-font-smoothing:initial;color:inherit;margin:0;position:relative}.md-typeset .keys kbd:-moz-any(:before,:after){-moz-osx-font-smoothing:initial;-webkit-font-smoothing:initial;color:inherit;margin:0;position:relative}.md-typeset .keys kbd:is(:before,:after){-moz-osx-font-smoothing:initial;-webkit-font-smoothing:initial;color:inherit;margin:0;position:relative}.md-typeset .keys span{color:var(--md-default-fg-color--light);padding:0 .2em}.md-typeset .keys .key-alt:before,.md-typeset .keys .key-left-alt:before,.md-typeset .keys .key-right-alt:before{content:"⎇";padding-right:.4em}.md-typeset .keys .key-command:before,.md-typeset .keys .key-left-command:before,.md-typeset .keys .key-right-command:before{content:"⌘";padding-right:.4em}.md-typeset .keys .key-control:before,.md-typeset .keys .key-left-control:before,.md-typeset .keys .key-right-control:before{content:"⌃";padding-right:.4em}.md-typeset .keys .key-left-meta:before,.md-typeset .keys .key-meta:before,.md-typeset .keys .key-right-meta:before{content:"◆";padding-right:.4em}.md-typeset .keys .key-left-option:before,.md-typeset .keys .key-option:before,.md-typeset .keys .key-right-option:before{content:"⌥";padding-right:.4em}.md-typeset .keys .key-left-shift:before,.md-typeset .keys .key-right-shift:before,.md-typeset .keys .key-shift:before{content:"⇧";padding-right:.4em}.md-typeset .keys .key-left-super:before,.md-typeset .keys .key-right-super:before,.md-typeset .keys .key-super:before{content:"❖";padding-right:.4em}.md-typeset .keys .key-left-windows:before,.md-typeset .keys .key-right-windows:before,.md-typeset .keys .key-windows:before{content:"⊞";padding-right:.4em}.md-typeset .keys .key-arrow-down:before{content:"↓";padding-right:.4em}.md-typeset .keys .key-arrow-left:before{content:"←";padding-right:.4em}.md-typeset .keys 
.key-arrow-right:before{content:"→";padding-right:.4em}.md-typeset .keys .key-arrow-up:before{content:"↑";padding-right:.4em}.md-typeset .keys .key-backspace:before{content:"⌫";padding-right:.4em}.md-typeset .keys .key-backtab:before{content:"⇤";padding-right:.4em}.md-typeset .keys .key-caps-lock:before{content:"⇪";padding-right:.4em}.md-typeset .keys .key-clear:before{content:"⌧";padding-right:.4em}.md-typeset .keys .key-context-menu:before{content:"☰";padding-right:.4em}.md-typeset .keys .key-delete:before{content:"⌦";padding-right:.4em}.md-typeset .keys .key-eject:before{content:"⏏";padding-right:.4em}.md-typeset .keys .key-end:before{content:"⤓";padding-right:.4em}.md-typeset .keys .key-escape:before{content:"⎋";padding-right:.4em}.md-typeset .keys .key-home:before{content:"⤒";padding-right:.4em}.md-typeset .keys .key-insert:before{content:"⎀";padding-right:.4em}.md-typeset .keys .key-page-down:before{content:"⇟";padding-right:.4em}.md-typeset .keys .key-page-up:before{content:"⇞";padding-right:.4em}.md-typeset .keys .key-print-screen:before{content:"⎙";padding-right:.4em}.md-typeset .keys .key-tab:after{content:"⇥";padding-left:.4em}.md-typeset .keys .key-num-enter:after{content:"⌤";padding-left:.4em}.md-typeset .keys .key-enter:after{content:"⏎";padding-left:.4em}:root{--md-tabbed-icon--prev:url('data:image/svg+xml;charset=utf-8,');--md-tabbed-icon--next:url('data:image/svg+xml;charset=utf-8,')}.md-typeset .tabbed-set{border-radius:.1rem;display:flex;flex-flow:column wrap;margin:1em 0;position:relative}.md-typeset .tabbed-set>input{height:0;opacity:0;position:absolute;width:0}.md-typeset .tabbed-set>input:target{--md-scroll-offset:0.625em}.md-typeset .tabbed-labels{-ms-overflow-style:none;box-shadow:0 -.05rem var(--md-default-fg-color--lightest) inset;display:flex;max-width:100%;overflow:auto;scrollbar-width:none}@media print{.md-typeset .tabbed-labels{display:contents}}@media screen{.js .md-typeset .tabbed-labels{position:relative}.js .md-typeset .tabbed-labels:before{background:var(--md-accent-fg-color);bottom:0;content:"";display:block;height:2px;left:0;position:absolute;transform:translateX(var(--md-indicator-x));transition:width 225ms,transform .25s;transition-timing-function:cubic-bezier(.4,0,.2,1);width:var(--md-indicator-width)}}.md-typeset .tabbed-labels::-webkit-scrollbar{display:none}.md-typeset .tabbed-labels>label{border-bottom:.1rem solid #0000;border-radius:.1rem .1rem 0 0;color:var(--md-default-fg-color--light);cursor:pointer;flex-shrink:0;font-size:.64rem;font-weight:700;padding:.78125em 1.25em .625em;scroll-margin-inline-start:1rem;transition:background-color .25s,color .25s;white-space:nowrap;width:auto}@media print{.md-typeset .tabbed-labels>label:first-child{order:1}.md-typeset .tabbed-labels>label:nth-child(2){order:2}.md-typeset .tabbed-labels>label:nth-child(3){order:3}.md-typeset .tabbed-labels>label:nth-child(4){order:4}.md-typeset .tabbed-labels>label:nth-child(5){order:5}.md-typeset .tabbed-labels>label:nth-child(6){order:6}.md-typeset .tabbed-labels>label:nth-child(7){order:7}.md-typeset .tabbed-labels>label:nth-child(8){order:8}.md-typeset .tabbed-labels>label:nth-child(9){order:9}.md-typeset .tabbed-labels>label:nth-child(10){order:10}.md-typeset .tabbed-labels>label:nth-child(11){order:11}.md-typeset .tabbed-labels>label:nth-child(12){order:12}.md-typeset .tabbed-labels>label:nth-child(13){order:13}.md-typeset .tabbed-labels>label:nth-child(14){order:14}.md-typeset .tabbed-labels>label:nth-child(15){order:15}.md-typeset 
.tabbed-labels>label:nth-child(16){order:16}.md-typeset .tabbed-labels>label:nth-child(17){order:17}.md-typeset .tabbed-labels>label:nth-child(18){order:18}.md-typeset .tabbed-labels>label:nth-child(19){order:19}.md-typeset .tabbed-labels>label:nth-child(20){order:20}}.md-typeset .tabbed-labels>label:hover{color:var(--md-accent-fg-color)}.md-typeset .tabbed-content{width:100%}@media print{.md-typeset .tabbed-content{display:contents}}.md-typeset .tabbed-block{display:none}@media print{.md-typeset .tabbed-block{display:block}.md-typeset .tabbed-block:first-child{order:1}.md-typeset .tabbed-block:nth-child(2){order:2}.md-typeset .tabbed-block:nth-child(3){order:3}.md-typeset .tabbed-block:nth-child(4){order:4}.md-typeset .tabbed-block:nth-child(5){order:5}.md-typeset .tabbed-block:nth-child(6){order:6}.md-typeset .tabbed-block:nth-child(7){order:7}.md-typeset .tabbed-block:nth-child(8){order:8}.md-typeset .tabbed-block:nth-child(9){order:9}.md-typeset .tabbed-block:nth-child(10){order:10}.md-typeset .tabbed-block:nth-child(11){order:11}.md-typeset .tabbed-block:nth-child(12){order:12}.md-typeset .tabbed-block:nth-child(13){order:13}.md-typeset .tabbed-block:nth-child(14){order:14}.md-typeset .tabbed-block:nth-child(15){order:15}.md-typeset .tabbed-block:nth-child(16){order:16}.md-typeset .tabbed-block:nth-child(17){order:17}.md-typeset .tabbed-block:nth-child(18){order:18}.md-typeset .tabbed-block:nth-child(19){order:19}.md-typeset .tabbed-block:nth-child(20){order:20}}.md-typeset .tabbed-block>.highlight:first-child>pre,.md-typeset .tabbed-block>pre:first-child{margin:0}.md-typeset .tabbed-block>.highlight:first-child>pre>code,.md-typeset .tabbed-block>pre:first-child>code{border-top-left-radius:0;border-top-right-radius:0}.md-typeset .tabbed-block>.highlight:first-child>.filename{border-top-left-radius:0;border-top-right-radius:0;margin:0}.md-typeset .tabbed-block>.highlight:first-child>.highlighttable{margin:0}.md-typeset .tabbed-block>.highlight:first-child>.highlighttable>tbody>tr>.filename span.filename,.md-typeset .tabbed-block>.highlight:first-child>.highlighttable>tbody>tr>.linenos{border-top-left-radius:0;border-top-right-radius:0;margin:0}.md-typeset .tabbed-block>.highlight:first-child>.highlighttable>tbody>tr>.code>div>pre>code{border-top-left-radius:0;border-top-right-radius:0}.md-typeset .tabbed-block>.highlight:first-child+.result{margin-top:-.125em}.md-typeset .tabbed-block>.tabbed-set{margin:0}.md-typeset .tabbed-button{align-self:center;border-radius:100%;color:var(--md-default-fg-color--light);cursor:pointer;display:block;height:.9rem;margin-top:.1rem;pointer-events:auto;transition:background-color .25s;width:.9rem}.md-typeset .tabbed-button:hover{background-color:var(--md-accent-fg-color--transparent);color:var(--md-accent-fg-color)}.md-typeset .tabbed-button:after{background-color:currentcolor;content:"";display:block;height:100%;-webkit-mask-image:var(--md-tabbed-icon--prev);mask-image:var(--md-tabbed-icon--prev);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;transition:background-color .25s,transform .25s;width:100%}.md-typeset .tabbed-control{background:linear-gradient(to right,var(--md-default-bg-color) 60%,#0000);display:flex;height:1.9rem;justify-content:start;pointer-events:none;position:absolute;transition:opacity 125ms;width:1.2rem}[dir=rtl] .md-typeset .tabbed-control{transform:rotate(180deg)}.md-typeset .tabbed-control[hidden]{opacity:0}.md-typeset 
.tabbed-control--next{background:linear-gradient(to left,var(--md-default-bg-color) 60%,#0000);justify-content:end;right:0}.md-typeset .tabbed-control--next .tabbed-button:after{-webkit-mask-image:var(--md-tabbed-icon--next);mask-image:var(--md-tabbed-icon--next)}@media screen and (max-width:44.9375em){[dir=ltr] .md-content__inner>.tabbed-set .tabbed-labels{padding-left:.8rem}[dir=rtl] .md-content__inner>.tabbed-set .tabbed-labels{padding-right:.8rem}.md-content__inner>.tabbed-set .tabbed-labels{margin:0 -.8rem;max-width:100vw;scroll-padding-inline-start:.8rem}[dir=ltr] .md-content__inner>.tabbed-set .tabbed-labels:after{padding-right:.8rem}[dir=rtl] .md-content__inner>.tabbed-set .tabbed-labels:after{padding-left:.8rem}.md-content__inner>.tabbed-set .tabbed-labels:after{content:""}[dir=ltr] .md-content__inner>.tabbed-set .tabbed-labels~.tabbed-control--prev{margin-left:-.8rem}[dir=rtl] .md-content__inner>.tabbed-set .tabbed-labels~.tabbed-control--prev{margin-right:-.8rem}[dir=ltr] .md-content__inner>.tabbed-set .tabbed-labels~.tabbed-control--prev{padding-left:.8rem}[dir=rtl] .md-content__inner>.tabbed-set .tabbed-labels~.tabbed-control--prev{padding-right:.8rem}.md-content__inner>.tabbed-set .tabbed-labels~.tabbed-control--prev{width:2rem}[dir=ltr] .md-content__inner>.tabbed-set .tabbed-labels~.tabbed-control--next{margin-right:-.8rem}[dir=rtl] .md-content__inner>.tabbed-set .tabbed-labels~.tabbed-control--next{margin-left:-.8rem}[dir=ltr] .md-content__inner>.tabbed-set .tabbed-labels~.tabbed-control--next{padding-right:.8rem}[dir=rtl] .md-content__inner>.tabbed-set .tabbed-labels~.tabbed-control--next{padding-left:.8rem}.md-content__inner>.tabbed-set .tabbed-labels~.tabbed-control--next{width:2rem}}@media screen{.md-typeset .tabbed-set>input:first-child:checked~.tabbed-labels>:first-child,.md-typeset .tabbed-set>input:nth-child(10):checked~.tabbed-labels>:nth-child(10),.md-typeset .tabbed-set>input:nth-child(11):checked~.tabbed-labels>:nth-child(11),.md-typeset .tabbed-set>input:nth-child(12):checked~.tabbed-labels>:nth-child(12),.md-typeset .tabbed-set>input:nth-child(13):checked~.tabbed-labels>:nth-child(13),.md-typeset .tabbed-set>input:nth-child(14):checked~.tabbed-labels>:nth-child(14),.md-typeset .tabbed-set>input:nth-child(15):checked~.tabbed-labels>:nth-child(15),.md-typeset .tabbed-set>input:nth-child(16):checked~.tabbed-labels>:nth-child(16),.md-typeset .tabbed-set>input:nth-child(17):checked~.tabbed-labels>:nth-child(17),.md-typeset .tabbed-set>input:nth-child(18):checked~.tabbed-labels>:nth-child(18),.md-typeset .tabbed-set>input:nth-child(19):checked~.tabbed-labels>:nth-child(19),.md-typeset .tabbed-set>input:nth-child(2):checked~.tabbed-labels>:nth-child(2),.md-typeset .tabbed-set>input:nth-child(20):checked~.tabbed-labels>:nth-child(20),.md-typeset .tabbed-set>input:nth-child(3):checked~.tabbed-labels>:nth-child(3),.md-typeset .tabbed-set>input:nth-child(4):checked~.tabbed-labels>:nth-child(4),.md-typeset .tabbed-set>input:nth-child(5):checked~.tabbed-labels>:nth-child(5),.md-typeset .tabbed-set>input:nth-child(6):checked~.tabbed-labels>:nth-child(6),.md-typeset .tabbed-set>input:nth-child(7):checked~.tabbed-labels>:nth-child(7),.md-typeset .tabbed-set>input:nth-child(8):checked~.tabbed-labels>:nth-child(8),.md-typeset .tabbed-set>input:nth-child(9):checked~.tabbed-labels>:nth-child(9){color:var(--md-accent-fg-color)}.md-typeset .no-js .tabbed-set>input:first-child:checked~.tabbed-labels>:first-child,.md-typeset .no-js 
.tabbed-set>input:nth-child(10):checked~.tabbed-labels>:nth-child(10),.md-typeset .no-js .tabbed-set>input:nth-child(11):checked~.tabbed-labels>:nth-child(11),.md-typeset .no-js .tabbed-set>input:nth-child(12):checked~.tabbed-labels>:nth-child(12),.md-typeset .no-js .tabbed-set>input:nth-child(13):checked~.tabbed-labels>:nth-child(13),.md-typeset .no-js .tabbed-set>input:nth-child(14):checked~.tabbed-labels>:nth-child(14),.md-typeset .no-js .tabbed-set>input:nth-child(15):checked~.tabbed-labels>:nth-child(15),.md-typeset .no-js .tabbed-set>input:nth-child(16):checked~.tabbed-labels>:nth-child(16),.md-typeset .no-js .tabbed-set>input:nth-child(17):checked~.tabbed-labels>:nth-child(17),.md-typeset .no-js .tabbed-set>input:nth-child(18):checked~.tabbed-labels>:nth-child(18),.md-typeset .no-js .tabbed-set>input:nth-child(19):checked~.tabbed-labels>:nth-child(19),.md-typeset .no-js .tabbed-set>input:nth-child(2):checked~.tabbed-labels>:nth-child(2),.md-typeset .no-js .tabbed-set>input:nth-child(20):checked~.tabbed-labels>:nth-child(20),.md-typeset .no-js .tabbed-set>input:nth-child(3):checked~.tabbed-labels>:nth-child(3),.md-typeset .no-js .tabbed-set>input:nth-child(4):checked~.tabbed-labels>:nth-child(4),.md-typeset .no-js .tabbed-set>input:nth-child(5):checked~.tabbed-labels>:nth-child(5),.md-typeset .no-js .tabbed-set>input:nth-child(6):checked~.tabbed-labels>:nth-child(6),.md-typeset .no-js .tabbed-set>input:nth-child(7):checked~.tabbed-labels>:nth-child(7),.md-typeset .no-js .tabbed-set>input:nth-child(8):checked~.tabbed-labels>:nth-child(8),.md-typeset .no-js .tabbed-set>input:nth-child(9):checked~.tabbed-labels>:nth-child(9),.no-js .md-typeset .tabbed-set>input:first-child:checked~.tabbed-labels>:first-child,.no-js .md-typeset .tabbed-set>input:nth-child(10):checked~.tabbed-labels>:nth-child(10),.no-js .md-typeset .tabbed-set>input:nth-child(11):checked~.tabbed-labels>:nth-child(11),.no-js .md-typeset .tabbed-set>input:nth-child(12):checked~.tabbed-labels>:nth-child(12),.no-js .md-typeset .tabbed-set>input:nth-child(13):checked~.tabbed-labels>:nth-child(13),.no-js .md-typeset .tabbed-set>input:nth-child(14):checked~.tabbed-labels>:nth-child(14),.no-js .md-typeset .tabbed-set>input:nth-child(15):checked~.tabbed-labels>:nth-child(15),.no-js .md-typeset .tabbed-set>input:nth-child(16):checked~.tabbed-labels>:nth-child(16),.no-js .md-typeset .tabbed-set>input:nth-child(17):checked~.tabbed-labels>:nth-child(17),.no-js .md-typeset .tabbed-set>input:nth-child(18):checked~.tabbed-labels>:nth-child(18),.no-js .md-typeset .tabbed-set>input:nth-child(19):checked~.tabbed-labels>:nth-child(19),.no-js .md-typeset .tabbed-set>input:nth-child(2):checked~.tabbed-labels>:nth-child(2),.no-js .md-typeset .tabbed-set>input:nth-child(20):checked~.tabbed-labels>:nth-child(20),.no-js .md-typeset .tabbed-set>input:nth-child(3):checked~.tabbed-labels>:nth-child(3),.no-js .md-typeset .tabbed-set>input:nth-child(4):checked~.tabbed-labels>:nth-child(4),.no-js .md-typeset .tabbed-set>input:nth-child(5):checked~.tabbed-labels>:nth-child(5),.no-js .md-typeset .tabbed-set>input:nth-child(6):checked~.tabbed-labels>:nth-child(6),.no-js .md-typeset .tabbed-set>input:nth-child(7):checked~.tabbed-labels>:nth-child(7),.no-js .md-typeset .tabbed-set>input:nth-child(8):checked~.tabbed-labels>:nth-child(8),.no-js .md-typeset .tabbed-set>input:nth-child(9):checked~.tabbed-labels>:nth-child(9){border-color:var(--md-accent-fg-color)}}.md-typeset .tabbed-set>input:first-child.focus-visible~.tabbed-labels>:first-child,.md-typeset 
.tabbed-set>input:nth-child(10).focus-visible~.tabbed-labels>:nth-child(10),.md-typeset .tabbed-set>input:nth-child(11).focus-visible~.tabbed-labels>:nth-child(11),.md-typeset .tabbed-set>input:nth-child(12).focus-visible~.tabbed-labels>:nth-child(12),.md-typeset .tabbed-set>input:nth-child(13).focus-visible~.tabbed-labels>:nth-child(13),.md-typeset .tabbed-set>input:nth-child(14).focus-visible~.tabbed-labels>:nth-child(14),.md-typeset .tabbed-set>input:nth-child(15).focus-visible~.tabbed-labels>:nth-child(15),.md-typeset .tabbed-set>input:nth-child(16).focus-visible~.tabbed-labels>:nth-child(16),.md-typeset .tabbed-set>input:nth-child(17).focus-visible~.tabbed-labels>:nth-child(17),.md-typeset .tabbed-set>input:nth-child(18).focus-visible~.tabbed-labels>:nth-child(18),.md-typeset .tabbed-set>input:nth-child(19).focus-visible~.tabbed-labels>:nth-child(19),.md-typeset .tabbed-set>input:nth-child(2).focus-visible~.tabbed-labels>:nth-child(2),.md-typeset .tabbed-set>input:nth-child(20).focus-visible~.tabbed-labels>:nth-child(20),.md-typeset .tabbed-set>input:nth-child(3).focus-visible~.tabbed-labels>:nth-child(3),.md-typeset .tabbed-set>input:nth-child(4).focus-visible~.tabbed-labels>:nth-child(4),.md-typeset .tabbed-set>input:nth-child(5).focus-visible~.tabbed-labels>:nth-child(5),.md-typeset .tabbed-set>input:nth-child(6).focus-visible~.tabbed-labels>:nth-child(6),.md-typeset .tabbed-set>input:nth-child(7).focus-visible~.tabbed-labels>:nth-child(7),.md-typeset .tabbed-set>input:nth-child(8).focus-visible~.tabbed-labels>:nth-child(8),.md-typeset .tabbed-set>input:nth-child(9).focus-visible~.tabbed-labels>:nth-child(9){background-color:var(--md-accent-fg-color--transparent)}.md-typeset .tabbed-set>input:first-child:checked~.tabbed-content>:first-child,.md-typeset .tabbed-set>input:nth-child(10):checked~.tabbed-content>:nth-child(10),.md-typeset .tabbed-set>input:nth-child(11):checked~.tabbed-content>:nth-child(11),.md-typeset .tabbed-set>input:nth-child(12):checked~.tabbed-content>:nth-child(12),.md-typeset .tabbed-set>input:nth-child(13):checked~.tabbed-content>:nth-child(13),.md-typeset .tabbed-set>input:nth-child(14):checked~.tabbed-content>:nth-child(14),.md-typeset .tabbed-set>input:nth-child(15):checked~.tabbed-content>:nth-child(15),.md-typeset .tabbed-set>input:nth-child(16):checked~.tabbed-content>:nth-child(16),.md-typeset .tabbed-set>input:nth-child(17):checked~.tabbed-content>:nth-child(17),.md-typeset .tabbed-set>input:nth-child(18):checked~.tabbed-content>:nth-child(18),.md-typeset .tabbed-set>input:nth-child(19):checked~.tabbed-content>:nth-child(19),.md-typeset .tabbed-set>input:nth-child(2):checked~.tabbed-content>:nth-child(2),.md-typeset .tabbed-set>input:nth-child(20):checked~.tabbed-content>:nth-child(20),.md-typeset .tabbed-set>input:nth-child(3):checked~.tabbed-content>:nth-child(3),.md-typeset .tabbed-set>input:nth-child(4):checked~.tabbed-content>:nth-child(4),.md-typeset .tabbed-set>input:nth-child(5):checked~.tabbed-content>:nth-child(5),.md-typeset .tabbed-set>input:nth-child(6):checked~.tabbed-content>:nth-child(6),.md-typeset .tabbed-set>input:nth-child(7):checked~.tabbed-content>:nth-child(7),.md-typeset .tabbed-set>input:nth-child(8):checked~.tabbed-content>:nth-child(8),.md-typeset .tabbed-set>input:nth-child(9):checked~.tabbed-content>:nth-child(9){display:block}:root{--md-tasklist-icon:url('data:image/svg+xml;charset=utf-8,');--md-tasklist-icon--checked:url('data:image/svg+xml;charset=utf-8,')}.md-typeset 
.task-list-item{list-style-type:none;position:relative}[dir=ltr] .md-typeset .task-list-item [type=checkbox]{left:-2em}[dir=rtl] .md-typeset .task-list-item [type=checkbox]{right:-2em}.md-typeset .task-list-item [type=checkbox]{position:absolute;top:.45em}.md-typeset .task-list-control [type=checkbox]{opacity:0;z-index:-1}[dir=ltr] .md-typeset .task-list-indicator:before{left:-1.5em}[dir=rtl] .md-typeset .task-list-indicator:before{right:-1.5em}.md-typeset .task-list-indicator:before{background-color:var(--md-default-fg-color--lightest);content:"";height:1.25em;-webkit-mask-image:var(--md-tasklist-icon);mask-image:var(--md-tasklist-icon);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;position:absolute;top:.15em;width:1.25em}.md-typeset [type=checkbox]:checked+.task-list-indicator:before{background-color:#00e676;-webkit-mask-image:var(--md-tasklist-icon--checked);mask-image:var(--md-tasklist-icon--checked)}:root>*{--md-mermaid-font-family:var(--md-text-font-family),sans-serif;--md-mermaid-edge-color:var(--md-code-fg-color);--md-mermaid-node-bg-color:var(--md-accent-fg-color--transparent);--md-mermaid-node-fg-color:var(--md-accent-fg-color);--md-mermaid-label-bg-color:var(--md-default-bg-color);--md-mermaid-label-fg-color:var(--md-code-fg-color)}.mermaid{line-height:normal;margin:1em 0}@media screen and (min-width:45em){[dir=ltr] .md-typeset .inline{float:left}[dir=rtl] .md-typeset .inline{float:right}[dir=ltr] .md-typeset .inline{margin-right:.8rem}[dir=rtl] .md-typeset .inline{margin-left:.8rem}.md-typeset .inline{margin-bottom:.8rem;margin-top:0;width:11.7rem}[dir=ltr] .md-typeset .inline.end{float:right}[dir=rtl] .md-typeset .inline.end{float:left}[dir=ltr] .md-typeset .inline.end{margin-left:.8rem;margin-right:0}[dir=rtl] .md-typeset .inline.end{margin-left:0;margin-right:.8rem}} \ No newline at end of file diff --git a/assets/stylesheets/main.975780f9.min.css.map b/assets/stylesheets/main.975780f9.min.css.map new file mode 100644 index 0000000..5e13ffb --- /dev/null +++ b/assets/stylesheets/main.975780f9.min.css.map @@ -0,0 +1 @@ 
+{"version":3,"sources":["src/assets/stylesheets/main/extensions/pymdownx/_keys.scss","../../../src/assets/stylesheets/main.scss","src/assets/stylesheets/main/_resets.scss","src/assets/stylesheets/main/_colors.scss","src/assets/stylesheets/main/_icons.scss","src/assets/stylesheets/main/_typeset.scss","src/assets/stylesheets/utilities/_break.scss","src/assets/stylesheets/main/layout/_banner.scss","src/assets/stylesheets/main/layout/_base.scss","src/assets/stylesheets/main/layout/_clipboard.scss","src/assets/stylesheets/main/layout/_consent.scss","src/assets/stylesheets/main/layout/_content.scss","src/assets/stylesheets/main/layout/_dialog.scss","src/assets/stylesheets/main/layout/_feedback.scss","src/assets/stylesheets/main/layout/_footer.scss","src/assets/stylesheets/main/layout/_form.scss","src/assets/stylesheets/main/layout/_header.scss","src/assets/stylesheets/main/layout/_nav.scss","src/assets/stylesheets/main/layout/_search.scss","src/assets/stylesheets/main/layout/_select.scss","src/assets/stylesheets/main/layout/_sidebar.scss","src/assets/stylesheets/main/layout/_source.scss","src/assets/stylesheets/main/layout/_tabs.scss","src/assets/stylesheets/main/layout/_tag.scss","src/assets/stylesheets/main/layout/_tooltip.scss","src/assets/stylesheets/main/layout/_top.scss","src/assets/stylesheets/main/layout/_version.scss","src/assets/stylesheets/main/extensions/markdown/_admonition.scss","node_modules/material-design-color/material-color.scss","src/assets/stylesheets/main/extensions/markdown/_footnotes.scss","src/assets/stylesheets/main/extensions/markdown/_toc.scss","src/assets/stylesheets/main/extensions/pymdownx/_arithmatex.scss","src/assets/stylesheets/main/extensions/pymdownx/_critic.scss","src/assets/stylesheets/main/extensions/pymdownx/_details.scss","src/assets/stylesheets/main/extensions/pymdownx/_emoji.scss","src/assets/stylesheets/main/extensions/pymdownx/_highlight.scss","src/assets/stylesheets/main/extensions/pymdownx/_tabbed.scss","src/assets/stylesheets/main/extensions/pymdownx/_tasklist.scss","src/assets/stylesheets/main/integrations/_mermaid.scss","src/assets/stylesheets/main/_modifiers.scss"],"names":[],"mappings":"AAgGM,gBCo+GN,CCxiHA,KAEE,6BAAA,CAAA,0BAAA,CAAA,qBAAA,CADA,qBDzBF,CC8BA,iBAGE,kBD3BF,CC8BE,gCANF,iBAOI,yBDzBF,CACF,CC6BA,KACE,QD1BF,CC8BA,qBAIE,uCD3BF,CC+BA,EACE,aAAA,CACA,oBD5BF,CCgCA,GAME,QAAA,CAJA,kBAAA,CADA,aAAA,CAEA,aAAA,CAEA,gBAAA,CADA,SD3BF,CCiCA,MACE,aD9BF,CCkCA,QAEE,eD/BF,CCmCA,IACE,iBDhCF,CCoCA,MACE,uBAAA,CACA,gBDjCF,CCqCA,MAEE,eAAA,CACA,kBDlCF,CCsCA,OAKE,gBAAA,CACA,QAAA,CAFA,mBAAA,CADA,iBAAA,CAFA,QAAA,CACA,SD/BF,CCuCA,MACE,QAAA,CACA,YDpCF,CErDA,MAIE,6BAAA,CACA,oCAAA,CACA,mCAAA,CACA,0BAAA,CACA,sCAAA,CAGA,4BAAA,CACA,2CAAA,CACA,yBAAA,CACA,qCFmDF,CEpCA,qCAGE,+BAAA,CACA,sCAAA,CACA,wCAAA,CACA,yCAAA,CACA,0BAAA,CACA,sCAAA,CACA,wCAAA,CACA,yCAAA,CAGA,0BAAA,CACA,0BAAA,CAGA,4BAAA,CACA,iCAAA,CACA,kCAAA,CACA,mCAAA,CACA,mCAAA,CACA,kCAAA,CACA,iCAAA,CACA,+CAAA,CACA,6DAAA,CACA,gEAAA,CACA,4DAAA,CACA,4DAAA,CACA,6DAAA,CAGA,6CAAA,CAGA,+CAAA,CAGA,iCAAA,CAGA,gCAAA,CACA,gCAAA,CAGA,8BAAA,CACA,kCAAA,CACA,qCAAA,CAGA,kCAAA,CAGA,mDAAA,CACA,mDAAA,CAGA,yBAAA,CACA,qCAAA,CACA,uCAAA,CACA,8BAAA,CACA,oCAAA,CAGA,8DAAA,CAKA,8DAAA,CAKA,0DFaF,CGjHE,aAIE,iBAAA,CAHA,aAAA,CAEA,aAAA,CADA,YHsHJ,CI3HA,KACE,kCAAA,CACA,iCAAA,CAGA,uGAAA,CAKA,mFJ4HF,CItHA,WAGE,mCAAA,CACA,sCJyHF,CIrHA,wBANE,6BJmIF,CI7HA,aAIE,4BAAA,CACA,sCJwHF,CIhHA,MACE,0NAAA,CACA,mNAAA,CACA,oNJmHF,CI5GA,YAGE,gCAAA,CAAA,kBAAA,CAFA,eAAA,CACA,eJgHF,CI3GE,aAPF,YAQI,gBJ8GF,CACF,CI3GE,uGAME,iBAAA,CAAA,cJ6GJ,CIzGE,eAEE,uCAAA,CAEA,aAAA,CACA,e
AAA,CAJA,iBJgHJ,CIvGE,8BAPE,eAAA,CAGA,qBJkHJ,CI9GE,eAGE,kBAAA,CACA,eAAA,CAHA,oBJ6GJ,CIrGE,eAGE,gBAAA,CADA,eAAA,CAGA,qBAAA,CADA,eAAA,CAHA,mBJ2GJ,CInGE,kBACE,eJqGJ,CIjGE,eAEE,eAAA,CACA,qBAAA,CAFA,YJqGJ,CI/FE,8BAGE,uCAAA,CAEA,cAAA,CADA,eAAA,CAEA,qBAAA,CAJA,eJqGJ,CI7FE,eACE,wBJ+FJ,CI3FE,eAGE,+DAAA,CAFA,iBAAA,CACA,cJ8FJ,CIzFE,cACE,+BAAA,CACA,qBJ2FJ,CIxFI,mCAEE,sBJyFN,CIrFI,wCAEE,+BJsFN,CInFM,kDACE,uDJqFR,CIhFI,mBACE,kBAAA,CACA,iCJkFN,CI9EI,4BACE,uCAAA,CACA,oBJgFN,CI3EE,iDAGE,6BAAA,CACA,aAAA,CACA,2BJ6EJ,CI1EI,aARF,iDASI,oBJ+EJ,CACF,CI3EE,iBAIE,wCAAA,CACA,mBAAA,CACA,kCAAA,CAAA,0BAAA,CAJA,eAAA,CADA,uBAAA,CAEA,qBJgFJ,CI1EI,qCAEE,uCAAA,CADA,YJ6EN,CIvEE,gBAEE,iBAAA,CACA,eAAA,CAFA,iBJ2EJ,CItEI,qBAQE,kCAAA,CAAA,0BAAA,CADA,eAAA,CANA,aAAA,CACA,QAAA,CAIA,uCAAA,CAFA,aAAA,CADA,oCAAA,CAQA,yDAAA,CADA,oBAAA,CADA,iBAAA,CAJA,iBJ8EN,CIrEM,2BACE,+CJuER,CInEM,wCAEE,YAAA,CADA,WJsER,CIjEM,8CACE,oDJmER,CIhEQ,oDACE,0CJkEV,CI3DE,gBAOE,4CAAA,CACA,mBAAA,CACA,mKACE,CAPF,gCAAA,CAFA,oBAAA,CAGA,eAAA,CAFA,uBAAA,CAGA,uBAAA,CACA,qBJgEJ,CItDE,iBAGE,6CAAA,CACA,kCAAA,CAAA,0BAAA,CAHA,aAAA,CACA,qBJ0DJ,CIpDE,iBAEE,6DAAA,CACA,WAAA,CAFA,oBJwDJ,CInDI,oBANF,iBAOI,iBJsDJ,CInDI,yDAWE,2CAAA,CACA,mBAAA,CACA,8BAAA,CAJA,gCAAA,CAKA,mBAAA,CAXA,oBAAA,CAOA,eAAA,CAHA,cAAA,CADA,aAAA,CADA,6BAAA,CAAA,qBAAA,CAGA,mBAAA,CAPA,iBAAA,CAGA,UJ+DN,CInEI,sDAWE,2CAAA,CACA,mBAAA,CACA,8BAAA,CAJA,gCAAA,CAKA,mBAAA,CAXA,oBAAA,CAOA,eAAA,CAHA,cAAA,CADA,aAAA,CADA,0BAAA,CAAA,qBAAA,CAGA,mBAAA,CAPA,iBAAA,CAGA,UJ+DN,CInEI,mEAEE,MJiEN,CInEI,gEAEE,MJiEN,CInEI,0DAEE,MJiEN,CInEI,mEAEE,OJiEN,CInEI,gEAEE,OJiEN,CInEI,0DAEE,OJiEN,CInEI,gDAWE,2CAAA,CACA,mBAAA,CACA,8BAAA,CAJA,gCAAA,CAKA,mBAAA,CAXA,oBAAA,CAOA,eAAA,CAHA,cAAA,CADA,aAAA,CADA,6BAAA,CAAA,0BAAA,CAAA,qBAAA,CAGA,mBAAA,CAPA,iBAAA,CAGA,UJ+DN,CACF,CIhDE,kBACE,WJkDJ,CI9CE,oDAEE,qBJgDJ,CIlDE,oDAEE,sBJgDJ,CI5CE,iCACE,kBJiDJ,CIlDE,iCACE,mBJiDJ,CIlDE,iCAIE,2DJ8CJ,CIlDE,iCAIE,4DJ8CJ,CIlDE,uBAGE,uCAAA,CADA,aAAA,CAAA,cJgDJ,CI1CE,eACE,oBJ4CJ,CIxCE,kDAEE,kBJ2CJ,CI7CE,kDAEE,mBJ2CJ,CI7CE,8BAGE,SJ0CJ,CIvCI,0DACE,iBJ0CN,CItCI,oCACE,2BJyCN,CItCM,0CACE,2BJyCR,CIpCI,wDAEE,kBJuCN,CIzCI,wDAEE,mBJuCN,CIzCI,oCACE,kBJwCN,CIpCM,kGAEE,aJwCR,CIpCM,0DACE,eJuCR,CInCM,4EACE,kBAAA,CAAA,eJuCR,CIxCM,sEACE,kBAAA,CAAA,eJuCR,CIxCM,gGAEE,kBJsCR,CIxCM,0FAEE,kBJsCR,CIxCM,8EAEE,kBJsCR,CIxCM,gGAEE,mBJsCR,CIxCM,0FAEE,mBJsCR,CIxCM,8EAEE,mBJsCR,CIxCM,0DACE,kBAAA,CAAA,eJuCR,CIhCE,yBAEE,mBJkCJ,CIpCE,yBAEE,oBJkCJ,CIpCE,eACE,mBAAA,CAAA,cJmCJ,CI9BE,kDAIE,WAAA,CADA,cJiCJ,CIzBI,4BAEE,oBJ2BN,CIvBI,6BAEE,oBJyBN,CIrBI,kCACE,YJuBN,CInBI,8EAEE,YJoBN,CIfE,mBACE,iBAAA,CAGA,eAAA,CADA,cAAA,CAEA,iBAAA,CAHA,yBAAA,CAAA,sBAAA,CAAA,iBJoBJ,CIdI,uBACE,aJgBN,CIXE,uBAGE,iBAAA,CADA,eAAA,CADA,eJeJ,CITE,mBACE,cJWJ,CIPE,+BAKE,2CAAA,CACA,iDAAA,CACA,mBAAA,CANA,oBAAA,CAGA,gBAAA,CAFA,cAAA,CACA,aAAA,CAKA,iBJSJ,CINI,aAXF,+BAYI,aJSJ,CACF,CIJI,iCACE,gBJMN,CICM,gEACE,YJCR,CIFM,6DACE,YJCR,CIFM,uDACE,YJCR,CIGM,+DACE,eJDR,CIAM,4DACE,eJDR,CIAM,sDACE,eJDR,CIMI,gEACE,eJJN,CIGI,6DACE,eJJN,CIGI,uDACE,eJJN,CIOM,0EACE,gBJLR,CIIM,uEACE,gBJLR,CIIM,iEACE,gBJLR,CIUI,kCAGE,eAAA,CAFA,cAAA,CACA,sBAAA,CAEA,kBJRN,CIYI,kCAGE,qDAAA,CAFA,sBAAA,CACA,kBJTN,CIcI,wCACE,iCJZN,CIeM,8CACE,iCAAA,CACA,sDJbR,CIkBI,iCACE,iBJhBN,CIqBE,wCACE,cJnBJ,CIsBI,wDAIE,gBJdN,CIUI,wDAIE,iBJdN,CIUI,8CAUE,UAAA,CATA,oBAAA,CAEA,YAAA,CAGA,oDAAA,CAAA,4CAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CACA,iCAAA,CAJA,0BAAA,CAHA,WJZN,CIwBI,oDACE,oDJtBN,CI0BI,mEACE,kDAAA,CACA,yDAAA,CAAA,iDJxBN,CI4BI,oEACE,kDAAA,CACA,0DAAA,CAAA,kDJ1BN,CI+BE,wBACE,iBAAA,CACA,eAAA,CACA,iBJ7BJ,CIiCE,mBACE,oBAAA,CACA,kBAAA,CACA,eJ/BJ,CIkCI,aANF,mBAOI,aJ/BJ,CACF,CIkCI,8BACE,aAAA,CAEA,QA
AA,CACA,eAAA,CAFA,UJ9BN,CK7VI,wCD0YF,uBACE,iBJzCF,CI4CE,4BACE,eJ1CJ,CACF,CM/hBA,WAGE,0CAAA,CADA,+BAAA,CADA,aNmiBF,CM9hBE,aANF,WAOI,YNiiBF,CACF,CM9hBE,oBAEE,uCAAA,CADA,gCNiiBJ,CM5hBE,kBAGE,eAAA,CAFA,iBAAA,CACA,eN+hBJ,CM1hBE,6BACE,WN+hBJ,CMhiBE,6BACE,UN+hBJ,CMhiBE,mBAEE,aAAA,CACA,cAAA,CACA,uBN4hBJ,CMzhBI,yBACE,UN2hBN,CO3jBA,KASE,cAAA,CARA,WAAA,CACA,iBP+jBF,CK3ZI,oCEtKJ,KAaI,gBPwjBF,CACF,CKhaI,oCEtKJ,KAkBI,cPwjBF,CACF,COnjBA,KASE,2CAAA,CAPA,YAAA,CACA,qBAAA,CAKA,eAAA,CAHA,eAAA,CAJA,iBAAA,CAGA,UPyjBF,COjjBE,aAZF,KAaI,aPojBF,CACF,CKjaI,wCEhJF,yBAII,cPijBJ,CACF,COxiBA,SAEE,gBAAA,CAAA,iBAAA,CADA,eP4iBF,COviBA,cACE,YAAA,CACA,qBAAA,CACA,WP0iBF,COviBE,aANF,cAOI,aP0iBF,CACF,COtiBA,SACE,WPyiBF,COtiBE,gBACE,YAAA,CACA,WAAA,CACA,iBPwiBJ,COniBA,aACE,eAAA,CAEA,sBAAA,CADA,kBPuiBF,CO7hBA,WACE,YPgiBF,CO3hBA,WAGE,QAAA,CACA,SAAA,CAHA,iBAAA,CACA,OPgiBF,CO3hBE,uCACE,aP6hBJ,COzhBE,+BAEE,uCAAA,CADA,kBP4hBJ,COthBA,SASE,2CAAA,CACA,mBAAA,CAHA,gCAAA,CACA,gBAAA,CAHA,YAAA,CAQA,SAAA,CAFA,uCAAA,CALA,mBAAA,CALA,cAAA,CAWA,2BAAA,CARA,UPgiBF,COphBE,eAGE,SAAA,CADA,uBAAA,CAEA,oEACE,CAJF,UPyhBJ,CO3gBA,MACE,WP8gBF,CQxqBA,MACE,+PR0qBF,CQpqBA,cAQE,mBAAA,CADA,0CAAA,CAIA,cAAA,CALA,YAAA,CAGA,uCAAA,CACA,oBAAA,CATA,iBAAA,CAEA,UAAA,CADA,QAAA,CAUA,qBAAA,CAPA,WAAA,CADA,SR+qBF,CQpqBE,aAfF,cAgBI,YRuqBF,CACF,CQpqBE,kCAEE,uCAAA,CADA,YRuqBJ,CQlqBE,qBACE,uCRoqBJ,CQhqBE,yCACE,+BRkqBJ,CQnqBE,sCACE,+BRkqBJ,CQnqBE,gCACE,+BRkqBJ,CQ7pBE,oBAKE,6BAAA,CAKA,UAAA,CATA,aAAA,CAEA,cAAA,CACA,aAAA,CAEA,2CAAA,CAAA,mCAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CAPA,aRuqBJ,CQ3pBE,sBACE,cR6pBJ,CQ1pBI,2BACE,2CR4pBN,CQtpBI,sDAEE,uDAAA,CADA,+BRypBN,CQ1pBI,mDAEE,uDAAA,CADA,+BRypBN,CQ1pBI,6CAEE,uDAAA,CADA,+BRypBN,CS/tBA,mBACE,GAEE,SAAA,CADA,0BTmuBF,CS/tBA,GAEE,SAAA,CADA,uBTkuBF,CACF,CS7tBA,mBACE,GACE,ST+tBF,CS5tBA,GACE,ST8tBF,CACF,CSntBE,qBASE,2BAAA,CADA,mCAAA,CAAA,2BAAA,CAFA,0BAAA,CADA,WAAA,CAEA,SAAA,CANA,cAAA,CACA,KAAA,CAEA,UAAA,CADA,ST2tBJ,CSjtBE,mBAcE,mDAAA,CANA,2CAAA,CACA,QAAA,CACA,mBAAA,CARA,QAAA,CASA,kDACE,CAPF,eAAA,CAEA,aAAA,CADA,SAAA,CALA,cAAA,CAGA,UAAA,CADA,ST4tBJ,CS7sBE,kBACE,aT+sBJ,CS3sBE,sBACE,YAAA,CACA,YT6sBJ,CS1sBI,oCACE,aT4sBN,CSvsBE,sBACE,mBTysBJ,CStsBI,6CACE,cTwsBN,CKlmBI,wCIvGA,6CAKI,aAAA,CAEA,gBAAA,CACA,iBAAA,CAFA,UT0sBN,CACF,CSnsBE,kBACE,cTqsBJ,CUtyBA,YACE,WAAA,CAIA,WVsyBF,CUnyBE,mBACE,qBAAA,CACA,iBVqyBJ,CKzoBI,sCKtJE,4EACE,kBVkyBN,CU9xBI,0JACE,mBVgyBN,CUjyBI,8EACE,kBVgyBN,CACF,CU3xBI,0BAGE,UAAA,CAFA,aAAA,CACA,YV8xBN,CUzxBI,+BACE,eV2xBN,CUrxBE,8BACE,WV0xBJ,CU3xBE,8BACE,UV0xBJ,CU3xBE,8BAGE,iBVwxBJ,CU3xBE,8BAGE,kBVwxBJ,CU3xBE,oBAEE,cAAA,CAEA,SVuxBJ,CUpxBI,aAPF,oBAQI,YVuxBJ,CACF,CUpxBI,gCACE,yCVsxBN,CUlxBI,wBACE,cAAA,CACA,kBVoxBN,CUjxBM,kCACE,oBVmxBR,CWp1BA,qBAEE,WXk2BF,CWp2BA,qBAEE,UXk2BF,CWp2BA,WAOE,2CAAA,CACA,mBAAA,CALA,YAAA,CAMA,8BAAA,CAJA,iBAAA,CAMA,SAAA,CALA,mBAAA,CASA,mBAAA,CAdA,cAAA,CASA,0BAAA,CAEA,wCACE,CATF,SXg2BF,CWl1BE,aAlBF,WAmBI,YXq1BF,CACF,CWl1BE,mBAEE,SAAA,CAIA,mBAAA,CALA,uBAAA,CAEA,kEXq1BJ,CW90BE,kBACE,gCAAA,CACA,eXg1BJ,CYn3BA,aACE,gBAAA,CACA,iBZs3BF,CYn3BE,sBAGE,WAAA,CAFA,QAAA,CACA,SZs3BJ,CYj3BE,oBAEE,eAAA,CADA,eZo3BJ,CY/2BE,oBACE,iBZi3BJ,CY72BE,mBAIE,sBAAA,CAFA,YAAA,CACA,cAAA,CAEA,sBAAA,CAJA,iBZm3BJ,CY52BI,iDACE,yCZ82BN,CY12BI,6BACE,iBZ42BN,CYv2BE,mBAGE,uCAAA,CACA,cAAA,CAHA,aAAA,CACA,cAAA,CAGA,sBZy2BJ,CYt2BI,gDACE,+BZw2BN,CYp2BI,4BACE,0CAAA,CACA,mBZs2BN,CYj2BE,mBAGE,SAAA,CAFA,iBAAA,CACA,2BAAA,CAEA,8DZm2BJ,CY91BI,qBAEE,aAAA,CADA,eZi2BN,CY51BI,6BAEE,SAAA,CADA,uBZ+1BN,Ca76BA,WAEE,0CAAA,CADA,+Bbi7BF,Ca76BE,aALF,WAMI,Ybg7BF,CACF,Ca76BE,kBACE,6BAAA,CAEA,aAAA,CADA,abg7BJ,Ca56BI,gCACE,Yb86BN,Caz6BE,iBACE,YAAA,CAKA,cAAA
,CAIA,uCAAA,CADA,eAAA,CADA,oBAAA,CADA,kBAAA,CAIA,uBbu6BJ,Cap6BI,4CACE,Ubs6BN,Cav6BI,yCACE,Ubs6BN,Cav6BI,mCACE,Ubs6BN,Cal6BI,+BACE,oBbo6BN,CKrxBI,wCQrII,yCACE,Yb65BR,CACF,Cax5BI,iCACE,gBb25BN,Ca55BI,iCACE,iBb25BN,Ca55BI,uBAEE,gBb05BN,Cav5BM,iCACE,eby5BR,Can5BE,kBAEE,WAAA,CAGA,eAAA,CACA,kBAAA,CAHA,6BAAA,CACA,cAAA,CAHA,iBAAA,CAMA,kBbq5BJ,Caj5BE,mBACE,YAAA,CACA,abm5BJ,Ca/4BE,sBAKE,gBAAA,CAHA,MAAA,CACA,gBAAA,CAGA,UAAA,CAFA,cAAA,CAHA,iBAAA,CACA,Obq5BJ,Ca54BA,gBACE,gDb+4BF,Ca54BE,uBACE,YAAA,CACA,cAAA,CACA,6BAAA,CACA,ab84BJ,Ca14BE,kCACE,sCb44BJ,Caz4BI,6DACE,+Bb24BN,Ca54BI,0DACE,+Bb24BN,Ca54BI,oDACE,+Bb24BN,Can4BA,cAIE,wCAAA,CACA,gBAAA,CAHA,iBAAA,CACA,eAAA,CAFA,Ub04BF,CKj2BI,mCQ1CJ,cASI,Ubs4BF,CACF,Cal4BE,yBACE,sCbo4BJ,Ca73BA,WACE,cAAA,CACA,qBbg4BF,CK92BI,mCQpBJ,WAMI,ebg4BF,CACF,Ca73BE,iBACE,oBAAA,CAEA,aAAA,CACA,iBAAA,CAFA,Ybi4BJ,Ca53BI,wBACE,eb83BN,Ca13BI,qBAGE,iBAAA,CAFA,gBAAA,CACA,mBb63BN,CcpiCE,uBAKE,kBAAA,CACA,mBAAA,CAHA,gCAAA,CAIA,cAAA,CANA,oBAAA,CAGA,eAAA,CAFA,kBAAA,CAMA,gEduiCJ,CcjiCI,gCAEE,2CAAA,CACA,uCAAA,CAFA,gCdqiCN,Cc/hCI,kDAEE,0CAAA,CACA,sCAAA,CAFA,+BdmiCN,CcpiCI,+CAEE,0CAAA,CACA,sCAAA,CAFA,+BdmiCN,CcpiCI,yCAEE,0CAAA,CACA,sCAAA,CAFA,+BdmiCN,Cc5hCE,gCAKE,4BdiiCJ,CctiCE,gEAME,6BdgiCJ,CctiCE,gCAME,4BdgiCJ,CctiCE,sBAIE,6DAAA,CAGA,8BAAA,CAJA,eAAA,CAFA,aAAA,CACA,eAAA,CAMA,sCd8hCJ,CczhCI,iDACE,6CAAA,CACA,8Bd2hCN,Cc7hCI,8CACE,6CAAA,CACA,8Bd2hCN,Cc7hCI,wCACE,6CAAA,CACA,8Bd2hCN,CcvhCI,+BACE,UdyhCN,Ce5kCA,WAOE,2CAAA,CAGA,8CACE,CALF,gCAAA,CADA,aAAA,CAFA,MAAA,CAFA,uBAAA,CAAA,eAAA,CAEA,OAAA,CADA,KAAA,CAEA,SfmlCF,CexkCE,aAfF,WAgBI,Yf2kCF,CACF,CexkCE,mBACE,2BAAA,CACA,iEf0kCJ,CepkCE,mBACE,kDACE,CAEF,kEfokCJ,Ce9jCE,kBAEE,kBAAA,CADA,YAAA,CAEA,efgkCJ,Ce5jCE,mBAKE,kBAAA,CAGA,cAAA,CALA,YAAA,CAIA,uCAAA,CAHA,aAAA,CAHA,iBAAA,CAQA,uBAAA,CAHA,qBAAA,CAJA,SfqkCJ,Ce3jCI,yBACE,Uf6jCN,CezjCI,iCACE,oBf2jCN,CevjCI,uCAEE,uCAAA,CADA,Yf0jCN,CerjCI,2BACE,YAAA,CACA,afujCN,CK18BI,wCU/GA,2BAMI,YfujCN,CACF,CepjCM,iDAIE,iBAAA,CAHA,aAAA,CAEA,aAAA,CADA,UfwjCR,Ce1jCM,8CAIE,iBAAA,CAHA,aAAA,CAEA,aAAA,CADA,UfwjCR,Ce1jCM,wCAIE,iBAAA,CAHA,aAAA,CAEA,aAAA,CADA,UfwjCR,CKx+BI,mCUzEA,iCAII,YfijCN,CACF,Ce9iCM,wCACE,YfgjCR,Ce5iCM,+CACE,oBf8iCR,CKn/BI,sCUtDA,iCAII,YfyiCN,CACF,CepiCE,kBAEE,YAAA,CACA,cAAA,CAFA,iBAAA,CAIA,8DACE,CAFF,kBfuiCJ,CejiCI,oCAGE,SAAA,CAIA,mBAAA,CALA,6BAAA,CAEA,8DACE,CAJF,UfuiCN,Ce9hCM,8CACE,8BfgiCR,Ce3hCI,8BACE,ef6hCN,CexhCE,4BAGE,kBf6hCJ,CehiCE,4BAGE,iBf6hCJ,CehiCE,4BAIE,gBf4hCJ,CehiCE,4BAIE,iBf4hCJ,CehiCE,kBACE,WAAA,CAIA,eAAA,CAHA,aAAA,CAIA,kBf0hCJ,CevhCI,4CAGE,SAAA,CAIA,mBAAA,CALA,8BAAA,CAEA,8DACE,CAJF,Uf6hCN,CephCM,sDACE,6BfshCR,CelhCM,8DAGE,SAAA,CAIA,mBAAA,CALA,uBAAA,CAEA,8DACE,CAJF,SfwhCR,Ce7gCI,uCAGE,WAAA,CAFA,iBAAA,CACA,UfghCN,Ce1gCE,mBACE,YAAA,CACA,aAAA,CACA,cAAA,CAEA,+CACE,CAFF,kBf6gCJ,CevgCI,8DACE,WAAA,CACA,SAAA,CACA,oCfygCN,CelgCE,mBACE,YfogCJ,CKzjCI,mCUoDF,6BAQI,gBfogCJ,Ce5gCA,6BAQI,iBfogCJ,Ce5gCA,mBAKI,aAAA,CAEA,iBAAA,CADA,afsgCJ,CACF,CKjkCI,sCUoDF,6BAaI,kBfogCJ,CejhCA,6BAaI,mBfogCJ,CACF,CgB5uCA,MACE,0MAAA,CACA,gMAAA,CACA,yNhB+uCF,CgBzuCA,QACE,eAAA,CACA,ehB4uCF,CgBzuCE,eACE,aAAA,CAGA,eAAA,CADA,eAAA,CADA,eAAA,CAGA,sBhB2uCJ,CgBxuCI,+BACE,YhB0uCN,CgBvuCM,mCAEE,WAAA,CADA,UhB0uCR,CgBluCQ,6DAME,iBAAA,CALA,aAAA,CAGA,aAAA,CADA,cAAA,CAEA,kBAAA,CAHA,UhBwuCV,CgB1uCQ,0DAME,iBAAA,CALA,aAAA,CAGA,aAAA,CADA,cAAA,CAEA,kBAAA,CAHA,UhBwuCV,CgB1uCQ,oDAME,iBAAA,CALA,aAAA,CAGA,aAAA,CADA,cAAA,CAEA,kBAAA,CAHA,UhBwuCV,CgB7tCE,cAGE,eAAA,CAFA,QAAA,CACA,ShBguCJ,CgB3tCE,cACE,ehB6tCJ,CgB1tCI,sCACE,ehB4tCN,CgB7tCI,sCACE,chB4tCN,CgBvtCE,cAEE,kBAAA,CAKA,cAAA,CANA,YAAA,CAEA,6BAAA,CACA,iBAAA,CACA,eAAA,CAIA,uBAAA,CAHA,sBAAA,CAEA,sBhB0tCJ,CgBttC
I,sBACE,uChBwtCN,CgBptCI,oCACE,+BhBstCN,CgBltCI,0CACE,UhBotCN,CgBhtCI,yCACE,+BhBktCN,CgBntCI,sCACE,+BhBktCN,CgBntCI,gCACE,+BhBktCN,CgB9sCI,4BACE,uCAAA,CACA,oBhBgtCN,CgB5sCI,0CACE,YhB8sCN,CgB3sCM,yDAKE,6BAAA,CAJA,aAAA,CAEA,WAAA,CACA,qCAAA,CAAA,6BAAA,CAFA,UhBgtCR,CgBzsCM,kDACE,YhB2sCR,CgBtsCI,gBAEE,cAAA,CADA,YhBysCN,CgBnsCE,cACE,ahBqsCJ,CgBjsCE,gBACE,YhBmsCJ,CKjpCI,wCW3CA,0CASE,2CAAA,CAHA,YAAA,CACA,qBAAA,CACA,WAAA,CAJA,MAAA,CAFA,iBAAA,CAEA,OAAA,CADA,KAAA,CAEA,ShBksCJ,CgBvrCI,4DACE,eAAA,CACA,ehByrCN,CgB3rCI,yDACE,eAAA,CACA,ehByrCN,CgB3rCI,mDACE,eAAA,CACA,ehByrCN,CgBrrCI,gCAOE,qDAAA,CAHA,uCAAA,CAIA,cAAA,CANA,aAAA,CAGA,kBAAA,CAFA,wBAAA,CAFA,iBAAA,CAKA,kBhByrCN,CgBprCM,wDAGE,UhB0rCR,CgB7rCM,wDAGE,WhB0rCR,CgB7rCM,8CAIE,aAAA,CAEA,aAAA,CACA,YAAA,CANA,iBAAA,CACA,SAAA,CAGA,YhBwrCR,CgBnrCQ,oDAIE,6BAAA,CAKA,UAAA,CARA,aAAA,CAEA,WAAA,CAEA,2CAAA,CAAA,mCAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CANA,UhB4rCV,CgBhrCM,8CAEE,2CAAA,CACA,gEACE,CAHF,eAAA,CAIA,4BAAA,CACA,kBhBirCR,CgB9qCQ,2DACE,YhBgrCV,CgB3qCM,8CAGE,2CAAA,CAFA,gCAAA,CACA,ehB8qCR,CgBzqCM,yCAIE,aAAA,CADA,UAAA,CAEA,YAAA,CACA,aAAA,CALA,iBAAA,CAEA,WAAA,CADA,ShB+qCR,CgBtqCI,+BACE,MhBwqCN,CgBpqCI,+BAEE,4DAAA,CADA,ShBuqCN,CgBnqCM,qDACE,+BhBqqCR,CgBlqCQ,gFACE,+BhBoqCV,CgBrqCQ,6EACE,+BhBoqCV,CgBrqCQ,uEACE,+BhBoqCV,CgB9pCI,+BACE,YAAA,CACA,mBhBgqCN,CgB7pCM,uDAGE,mBhBgqCR,CgBnqCM,uDAGE,kBhBgqCR,CgBnqCM,6CAIE,gBAAA,CAFA,aAAA,CADA,YhBkqCR,CgB5pCQ,mDAIE,6BAAA,CAKA,UAAA,CARA,aAAA,CAEA,WAAA,CAEA,2CAAA,CAAA,mCAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CANA,UhBqqCV,CgBrpCM,+CACE,mBhBupCR,CgB/oCM,4CAEE,wBAAA,CADA,ehBkpCR,CgB9oCQ,oEACE,mBhBgpCV,CgBjpCQ,oEACE,oBhBgpCV,CgB5oCQ,4EACE,iBhB8oCV,CgB/oCQ,4EACE,kBhB8oCV,CgB1oCQ,oFACE,mBhB4oCV,CgB7oCQ,oFACE,oBhB4oCV,CgBxoCQ,4FACE,mBhB0oCV,CgB3oCQ,4FACE,oBhB0oCV,CgBnoCE,mBACE,wBhBqoCJ,CgBjoCE,wBACE,YAAA,CAEA,SAAA,CADA,0BAAA,CAEA,oEhBmoCJ,CgB9nCI,kCACE,2BhBgoCN,CgB3nCE,gCAEE,SAAA,CADA,uBAAA,CAEA,qEhB6nCJ,CgBxnCI,8CAEE,kCAAA,CAAA,0BhBynCN,CACF,CK/xCI,wCW8KA,0CACE,YhBonCJ,CgBjnCI,yDACE,UhBmnCN,CgB/mCI,wDACE,YhBinCN,CgB7mCI,kDACE,YhB+mCN,CgB1mCE,gBAIE,iDAAA,CADA,gCAAA,CAFA,aAAA,CACA,ehB8mCJ,CACF,CK51CM,6DWuPF,6CACE,YhBwmCJ,CgBrmCI,4DACE,UhBumCN,CgBnmCI,2DACE,YhBqmCN,CgBjmCI,qDACE,YhBmmCN,CACF,CKp1CI,mCWyPA,kCAME,qCAAA,CACA,qDAAA,CANA,uBAAA,CAAA,eAAA,CACA,KAAA,CAGA,ShB8lCJ,CgBzlCI,6CACE,uBhB2lCN,CgBvlCI,gDACE,YhBylCN,CACF,CKn2CI,sCW7JJ,QA6aI,oDhBulCF,CgBplCE,gCAME,qCAAA,CACA,qDAAA,CANA,uBAAA,CAAA,eAAA,CACA,KAAA,CAGA,ShBslCJ,CgBjlCI,8CACE,uBhBmlCN,CgBzkCE,sEACE,YhB8kCJ,CgB1kCE,6DACE,ahB4kCJ,CgB7kCE,0DACE,ahB4kCJ,CgB7kCE,oDACE,ahB4kCJ,CgBxkCE,6CACE,YhB0kCJ,CgBtkCE,uBACE,aAAA,CACA,ehBwkCJ,CgBrkCI,kCACE,ehBukCN,CgBnkCI,qCACE,eAAA,CACA,mBhBqkCN,CgBlkCM,mDACE,mBhBokCR,CgBhkCM,mDACE,YhBkkCR,CgB7jCI,+BACE,ahB+jCN,CgB5jCM,2DACE,ShB8jCR,CgBxjCE,cAGE,kBAAA,CADA,YAAA,CAEA,+CACE,CAJF,WhB6jCJ,CgBrjCI,wBACE,wBhBujCN,CgBnjCI,oBACE,uDhBqjCN,CgBjjCI,oBAKE,6BAAA,CAKA,UAAA,CATA,oBAAA,CAEA,WAAA,CAGA,2CAAA,CAAA,mCAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CALA,qBAAA,CAFA,UhB2jCN,CgB/iCI,0JAEE,uBhBgjCN,CgBliCI,+HACE,YhBwiCN,CgBriCM,oDACE,aAAA,CACA,ShBuiCR,CgBpiCQ,kEAOE,qCAAA,CACA,qDAAA,CAFA,eAAA,CAFA,YAAA,CACA,eAAA,CAJA,uBAAA,CAAA,eAAA,CACA,KAAA,CACA,ShB2iCV,CgBniCU,4FACE,mBhBqiCZ,CgBjiCU,gFACE,YhBmiCZ,CgB3hCI,2CACE,ahB6hCN,CgB1hCM,iFACE,mBhB4hCR,CgB7hCM,iFACE,kBhB4hCR,CgBnhCI,mFACE,ehBqhCN,CgBlhCM,iGACE,ShBohCR,CgB/gCI,qFAGE,mDhBihCN,CgBphCI,qFAGE,oDhBihCN,CgBphCI,2EACE,aAAA,CACA,oBhBkhCN,CgB9gCM,0FACE,YhBghCR,CACF,CiBroDA,MACE,igBjBwoDF,CiBloDA,WACE,iBjBqoDF,CKv+CI,mCY/JJ,WAK
I,ejBqoDF,CACF,CiBloDE,kBACE,YjBooDJ,CiBhoDE,oBAEE,SAAA,CADA,SjBmoDJ,CKh+CI,wCYpKF,8BAQI,YjB0oDJ,CiBlpDA,8BAQI,ajB0oDJ,CiBlpDA,oBAYI,2CAAA,CACA,kBAAA,CAHA,WAAA,CACA,eAAA,CAOA,mBAAA,CAZA,iBAAA,CACA,SAAA,CAOA,uBAAA,CACA,4CACE,CAPF,UjByoDJ,CiB7nDI,+DACE,SAAA,CACA,oCjB+nDN,CACF,CKtgDI,mCYjJF,8BAiCI,MjBioDJ,CiBlqDA,8BAiCI,OjBioDJ,CiBlqDA,oBAoCI,0BAAA,CACA,cAAA,CAFA,QAAA,CAJA,cAAA,CACA,KAAA,CAMA,sDACE,CALF,OjBgoDJ,CiBtnDI,+DAME,YAAA,CACA,SAAA,CACA,4CACE,CARF,UjB2nDN,CACF,CKrgDI,wCYxGA,+DAII,mBjB6mDN,CACF,CKnjDM,6DY/DF,+DASI,mBjB6mDN,CACF,CKxjDM,6DY/DF,+DAcI,mBjB6mDN,CACF,CiBxmDE,kBAEE,kCAAA,CAAA,0BjBymDJ,CKvhDI,wCYpFF,4BAQI,MjBgnDJ,CiBxnDA,4BAQI,OjBgnDJ,CiBxnDA,kBAWI,QAAA,CAGA,SAAA,CAFA,eAAA,CANA,cAAA,CACA,KAAA,CAMA,wBAAA,CAEA,qGACE,CANF,OAAA,CADA,SjB+mDJ,CiBlmDI,4BACE,yBjBomDN,CiBhmDI,6DAEE,WAAA,CAEA,SAAA,CADA,uBAAA,CAEA,sGACE,CALF,UjBsmDN,CACF,CKlkDI,mCYjEF,4BA2CI,WjBgmDJ,CiB3oDA,4BA2CI,UjBgmDJ,CiB3oDA,kBA6CI,eAAA,CAHA,iBAAA,CAIA,8CAAA,CAFA,ajB+lDJ,CACF,CKjmDM,6DYOF,6DAII,ajB0lDN,CACF,CKhlDI,sCYfA,6DASI,ajB0lDN,CACF,CiBrlDE,iBAIE,2CAAA,CACA,0BAAA,CAFA,aAAA,CAFA,iBAAA,CAKA,2CACE,CALF,SjB2lDJ,CK7lDI,mCYAF,iBAaI,0BAAA,CACA,mBAAA,CAFA,ajBulDJ,CiBllDI,uBACE,0BjBolDN,CACF,CiBhlDI,4DAEE,2CAAA,CACA,6BAAA,CACA,8BAAA,CAHA,gCjBqlDN,CiB7kDE,4BAKE,mBAAA,CAAA,oBjBklDJ,CiBvlDE,4BAKE,mBAAA,CAAA,oBjBklDJ,CiBvlDE,kBAQE,gBAAA,CAFA,eAAA,CAFA,WAAA,CAHA,iBAAA,CAMA,sBAAA,CAJA,UAAA,CADA,SjBqlDJ,CiB5kDI,+BACE,qBjB8kDN,CiB1kDI,kEAEE,uCjB2kDN,CiBvkDI,6BACE,YjBykDN,CK7mDI,wCYaF,kBA8BI,eAAA,CADA,aAAA,CADA,UjB0kDJ,CACF,CKvoDI,mCYgCF,4BAmCI,mBjB0kDJ,CiB7mDA,4BAmCI,oBjB0kDJ,CiB7mDA,kBAoCI,aAAA,CACA,ejBwkDJ,CiBrkDI,+BACE,uCjBukDN,CiBnkDI,mCACE,gCjBqkDN,CiBjkDI,6DACE,kBjBmkDN,CiBhkDM,wJAEE,uCjBikDR,CACF,CiB3jDE,iBAIE,cAAA,CAHA,oBAAA,CAEA,aAAA,CAEA,kCACE,CAJF,YjBgkDJ,CiBxjDI,uBACE,UjB0jDN,CiBtjDI,yCAGE,UjByjDN,CiB5jDI,yCAGE,WjByjDN,CiB5jDI,+BACE,iBAAA,CACA,SAAA,CAEA,SjBwjDN,CiBrjDM,6CACE,oBjBujDR,CK1pDI,wCY2FA,yCAcI,UjBsjDN,CiBpkDE,yCAcI,WjBsjDN,CiBpkDE,+BAaI,SjBujDN,CiBnjDM,+CACE,YjBqjDR,CACF,CKtrDI,mCY8GA,+BAwBI,mBjBojDN,CiBjjDM,8CACE,YjBmjDR,CACF,CiB7iDE,8BAGE,WjBijDJ,CiBpjDE,8BAGE,UjBijDJ,CiBpjDE,oBAKE,mBAAA,CAJA,iBAAA,CACA,SAAA,CAEA,SjBgjDJ,CKlrDI,wCY8HF,8BAUI,WjB+iDJ,CiBzjDA,8BAUI,UjB+iDJ,CiBzjDA,oBASI,SjBgjDJ,CACF,CiB5iDI,gCACE,iBjBkjDN,CiBnjDI,gCACE,kBjBkjDN,CiBnjDI,sBAEE,uCAAA,CAEA,SAAA,CADA,oBAAA,CAEA,+DjB8iDN,CiBziDM,yCAEE,uCAAA,CADA,YjB4iDR,CiBviDM,yFAGE,SAAA,CACA,mBAAA,CAFA,kBjB0iDR,CiBriDQ,8FACE,UjBuiDV,CiBhiDE,8BAOE,mBAAA,CAAA,oBjBuiDJ,CiB9iDE,8BAOE,mBAAA,CAAA,oBjBuiDJ,CiB9iDE,oBAIE,kBAAA,CAIA,yCAAA,CALA,YAAA,CAMA,eAAA,CAHA,WAAA,CAKA,SAAA,CAVA,iBAAA,CACA,KAAA,CAUA,uBAAA,CAFA,kBAAA,CALA,UjByiDJ,CK5uDI,mCY8LF,8BAgBI,mBjBmiDJ,CiBnjDA,8BAgBI,oBjBmiDJ,CiBnjDA,oBAiBI,ejBkiDJ,CACF,CiB/hDI,+DACE,SAAA,CACA,0BjBiiDN,CiB5hDE,6BAKE,+BjB+hDJ,CiBpiDE,0DAME,gCjB8hDJ,CiBpiDE,6BAME,+BjB8hDJ,CiBpiDE,mBAIE,eAAA,CAHA,iBAAA,CAEA,UAAA,CADA,SjBkiDJ,CK3uDI,wCYuMF,mBAWI,QAAA,CADA,UjB+hDJ,CACF,CKpwDI,mCY0NF,mBAiBI,SAAA,CADA,UAAA,CAEA,sBjB8hDJ,CiB3hDI,8DACE,8BAAA,CACA,SjB6hDN,CACF,CiBxhDE,uBAKE,kCAAA,CAAA,0BAAA,CAFA,2CAAA,CAFA,WAAA,CACA,eAAA,CAOA,kBjBshDJ,CiBnhDI,iEAZF,uBAaI,uBjBshDJ,CACF,CKjzDM,6DY6QJ,uBAkBI,ajBshDJ,CACF,CKhyDI,sCYuPF,uBAuBI,ajBshDJ,CACF,CKryDI,mCYuPF,uBA4BI,YAAA,CAEA,yDAAA,CADA,oBjBuhDJ,CiBnhDI,kEACE,ejBqhDN,CiBjhDI,6BACE,+CjBmhDN,CiB/gDI,0CAEE,YAAA,CADA,WjBkhDN,CiB7gDI,gDACE,oDjB+gDN,CiB5gDM,sDACE,0CjB8gDR,CACF,CiBvgDA,kBACE,gCAAA,CACA,qBjB0gDF,CiBvgDE,wBAKE,qDAAA,CAHA,uCAAA,CACA,gBAAA,CACA,kBAAA,CAHA,eAAA,CAKA,uBjBygDJ,CKz0DI,mCY0TF,kCAUI,mBjBygDJ,CiBnhDA,kCAUI,oBjBygDJ,CACF,CiBrgDE,wBAGE,eAAA,CAFA,QAAA,CACA,SAAA,CAGA,w
BAAA,CAAA,qBAAA,CAAA,gBjBsgDJ,CiBlgDE,wBACE,yDjBogDJ,CiBjgDI,oCACE,ejBmgDN,CiB9/CE,wBACE,aAAA,CACA,YAAA,CAEA,uBAAA,CADA,gCjBigDJ,CiB7/CI,mDACE,uDjB+/CN,CiBhgDI,gDACE,uDjB+/CN,CiBhgDI,0CACE,uDjB+/CN,CiB3/CI,gDACE,mBjB6/CN,CiBx/CE,gCAGE,+BAAA,CAGA,cAAA,CALA,aAAA,CAGA,gBAAA,CACA,YAAA,CAHA,mBAAA,CAQA,uBAAA,CAHA,2CjB2/CJ,CKh3DI,mCY8WF,0CAcI,mBjBw/CJ,CiBtgDA,0CAcI,oBjBw/CJ,CACF,CiBr/CI,2DAEE,uDAAA,CADA,+BjBw/CN,CiBz/CI,wDAEE,uDAAA,CADA,+BjBw/CN,CiBz/CI,kDAEE,uDAAA,CADA,+BjBw/CN,CiBn/CI,wCACE,YjBq/CN,CiBh/CI,wDACE,YjBk/CN,CiB9+CI,oCACE,WjBg/CN,CiB3+CE,2BAGE,eAAA,CADA,eAAA,CADA,iBjB++CJ,CKv4DI,mCYuZF,qCAOI,mBjB6+CJ,CiBp/CA,qCAOI,oBjB6+CJ,CACF,CiBv+CM,8DAGE,eAAA,CADA,eAAA,CAEA,eAAA,CAHA,ejB4+CR,CiBn+CE,kCAEE,MjBy+CJ,CiB3+CE,kCAEE,OjBy+CJ,CiB3+CE,wBAME,uCAAA,CAFA,aAAA,CACA,YAAA,CAJA,iBAAA,CAEA,YjBw+CJ,CKv4DI,wCY4ZF,wBAUI,YjBq+CJ,CACF,CiBl+CI,8BAIE,6BAAA,CAKA,UAAA,CARA,oBAAA,CAEA,WAAA,CAEA,+CAAA,CAAA,uCAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CANA,UjB2+CN,CiBj+CM,wCACE,oBjBm+CR,CiB79CE,yBAGE,gBAAA,CADA,eAAA,CAEA,eAAA,CAHA,ajBk+CJ,CiB39CE,0BASE,2BAAA,CACA,oBAAA,CALA,uCAAA,CAJA,mBAAA,CAKA,gBAAA,CACA,eAAA,CAJA,aAAA,CADA,eAAA,CAEA,eAAA,CAIA,sBjB+9CJ,CK56DI,wCYqcF,0BAeI,oBAAA,CADA,ejB89CJ,CACF,CK39DM,6DY8eJ,0BAqBI,oBAAA,CADA,ejB89CJ,CACF,CiB19CI,+BAEE,wBAAA,CADA,yBjB69CN,CiBv9CE,yBAEE,gBAAA,CACA,iBAAA,CAFA,ajB29CJ,CiBr9CE,uBAEE,wBAAA,CADA,+BjBw9CJ,CkB9nEA,WACE,iBAAA,CACA,SlBioEF,CkB9nEE,kBAOE,2CAAA,CACA,mBAAA,CACA,8BAAA,CAHA,gCAAA,CAHA,QAAA,CAEA,gBAAA,CADA,YAAA,CAOA,SAAA,CAVA,iBAAA,CACA,sBAAA,CAQA,mCAAA,CAEA,oElBgoEJ,CkB1nEI,+DACE,gBAAA,CAEA,SAAA,CADA,+BAAA,CAEA,sFACE,CADF,8ElB4nEN,CkBhoEI,4DACE,gBAAA,CAEA,SAAA,CADA,+BAAA,CAEA,mFACE,CADF,8ElB4nEN,CkBhoEI,sDACE,gBAAA,CAEA,SAAA,CADA,+BAAA,CAEA,8ElB4nEN,CkBrnEI,wBAUE,+BAAA,CAAA,8CAAA,CAFA,6BAAA,CAAA,8BAAA,CACA,YAAA,CAEA,UAAA,CANA,QAAA,CAFA,QAAA,CAIA,kBAAA,CADA,iBAAA,CALA,iBAAA,CACA,KAAA,CAEA,OlB8nEN,CkBlnEE,iBAOE,mBAAA,CAFA,eAAA,CACA,oBAAA,CAJA,QAAA,CADA,kBAAA,CAGA,aAAA,CADA,SlBwnEJ,CkBhnEE,iBACE,kBlBknEJ,CkB9mEE,2BAGE,kBAAA,CAAA,oBlBonEJ,CkBvnEE,2BAGE,mBAAA,CAAA,mBlBonEJ,CkBvnEE,iBAKE,cAAA,CAJA,aAAA,CAGA,YAAA,CAKA,uBAAA,CAHA,2CACE,CALF,UlBqnEJ,CkB3mEI,4CACE,+BlB6mEN,CkB9mEI,yCACE,+BlB6mEN,CkB9mEI,mCACE,+BlB6mEN,CkBzmEI,uBACE,qDlB2mEN,CmB/rEA,YAIE,qBAAA,CADA,aAAA,CAGA,gBAAA,CALA,uBAAA,CAAA,eAAA,CACA,UAAA,CAGA,anBmsEF,CmB/rEE,aATF,YAUI,YnBksEF,CACF,CKphEI,wCc3KF,+BAMI,anBssEJ,CmB5sEA,+BAMI,cnBssEJ,CmB5sEA,qBAWI,2CAAA,CAHA,aAAA,CAEA,WAAA,CANA,cAAA,CACA,KAAA,CAOA,uBAAA,CACA,iEACE,CALF,aAAA,CAFA,SnBqsEJ,CmB1rEI,mEACE,8BAAA,CACA,6BnB4rEN,CmBzrEM,6EACE,8BnB2rER,CmBtrEI,6CAEE,QAAA,CAAA,MAAA,CACA,QAAA,CAEA,eAAA,CAJA,iBAAA,CACA,OAAA,CAEA,qBAAA,CAFA,KnB2rEN,CACF,CKnkEI,sCctKJ,YAuDI,QnBsrEF,CmBnrEE,mBACE,WnBqrEJ,CmBjrEE,6CACE,UnBmrEJ,CACF,CmB/qEE,uBACE,YAAA,CACA,OnBirEJ,CKllEI,mCcjGF,uBAMI,QnBirEJ,CmB9qEI,8BACE,WnBgrEN,CmB5qEI,qCACE,anB8qEN,CmB1qEI,+CACE,kBnB4qEN,CACF,CmBvqEE,wBAUE,uBAAA,CANA,kCAAA,CAAA,0BAAA,CAHA,cAAA,CACA,eAAA,CASA,yDAAA,CAFA,oBnBsqEJ,CmBjqEI,8BACE,+CnBmqEN,CmB/pEI,2CAEE,YAAA,CADA,WnBkqEN,CmB7pEI,iDACE,oDnB+pEN,CmB5pEM,uDACE,0CnB8pER,CmBhpEE,wCAGE,wBACE,qBnBgpEJ,CmB5oEE,6BACE,kCnB8oEJ,CmB/oEE,6BACE,iCnB8oEJ,CACF,CK1mEI,wCc5BF,YAME,0BAAA,CADA,QAAA,CAEA,SAAA,CANA,cAAA,CACA,KAAA,CAMA,sDACE,CALF,OAAA,CADA,SnB+oEF,CmBpoEE,4CAEE,WAAA,CACA,SAAA,CACA,4CACE,CAJF,UnByoEJ,CACF,CoBtzEA,iBACE,GACE,QpBwzEF,CoBrzEA,GACE,apBuzEF,CACF,CoBnzEA,gBACE,GAEE,SAAA,CADA,0BpBszEF,CoBlzEA,IACE,SpBozEF,CoBjzEA,GAEE,SAAA,CADA,uBpBozEF,CACF,CoB3yEA,MACE,mgBAAA,CACA,oiBAAA,CACA,0nBAAA,CACA,mhBpB6yEF,CoBvyEA,WAOE,kCAAA,CAAA,0BAAA,CANA,aAAA,CACA,gBAAA,CA
CA,eAAA,CAEA,uCAAA,CAGA,uBAAA,CAJA,kBpB6yEF,CoBtyEE,iBACE,UpBwyEJ,CoBpyEE,iBACE,oBAAA,CAEA,aAAA,CACA,qBAAA,CAFA,UpBwyEJ,CoBnyEI,+BAEE,iBpBqyEN,CoBvyEI,+BAEE,kBpBqyEN,CoBvyEI,qBACE,gBpBsyEN,CoBjyEI,kDACE,iBpBoyEN,CoBryEI,kDACE,kBpBoyEN,CoBryEI,kDAEE,iBpBmyEN,CoBryEI,kDAEE,kBpBmyEN,CoB9xEE,iCAGE,iBpBmyEJ,CoBtyEE,iCAGE,kBpBmyEJ,CoBtyEE,uBACE,oBAAA,CACA,6BAAA,CAEA,eAAA,CACA,sBAAA,CACA,qBpBgyEJ,CoB5xEE,kBACE,YAAA,CAMA,gBAAA,CALA,SAAA,CAMA,oBAAA,CAJA,gBAAA,CAKA,WAAA,CAHA,eAAA,CADA,SAAA,CAFA,UpBoyEJ,CoB3xEI,iDACE,4BpB6xEN,CoBxxEE,iBACE,eAAA,CACA,sBpB0xEJ,CoBvxEI,gDACE,2BpByxEN,CoBrxEI,kCAIE,kBpB6xEN,CoBjyEI,kCAIE,iBpB6xEN,CoBjyEI,wBAME,6BAAA,CAIA,UAAA,CATA,oBAAA,CAEA,YAAA,CAIA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CAJA,uBAAA,CAHA,WpB+xEN,CoBnxEI,iCACE,apBqxEN,CoBjxEI,iCACE,gDAAA,CAAA,wCpBmxEN,CoB/wEI,+BACE,8CAAA,CAAA,sCpBixEN,CoB7wEI,+BACE,8CAAA,CAAA,sCpB+wEN,CoB3wEI,sCACE,qDAAA,CAAA,6CpB6wEN,CqBp6EA,SASE,2CAAA,CAFA,gCAAA,CAHA,aAAA,CAIA,eAAA,CAFA,aAAA,CADA,UAAA,CAFA,SrB26EF,CqBl6EE,aAZF,SAaI,YrBq6EF,CACF,CK1vEI,wCgBzLJ,SAkBI,YrBq6EF,CACF,CqBl6EE,iBACE,mBrBo6EJ,CqBh6EE,yBAEE,iBrBs6EJ,CqBx6EE,yBAEE,kBrBs6EJ,CqBx6EE,eAME,eAAA,CADA,eAAA,CAJA,QAAA,CAEA,SAAA,CACA,kBrBo6EJ,CqB95EE,eACE,oBAAA,CACA,aAAA,CACA,kBAAA,CAAA,mBrBg6EJ,CqB35EE,eAOE,kCAAA,CAAA,0BAAA,CANA,aAAA,CAEA,eAAA,CADA,gBAAA,CAMA,UAAA,CAJA,uCAAA,CACA,oBAAA,CAIA,8DrB45EJ,CqBv5EI,iEAEE,aAAA,CACA,SrBw5EN,CqB35EI,8DAEE,aAAA,CACA,SrBw5EN,CqB35EI,wDAEE,aAAA,CACA,SrBw5EN,CqBn5EM,2CACE,qBrBq5ER,CqBt5EM,2CACE,qBrBw5ER,CqBz5EM,2CACE,qBrB25ER,CqB55EM,2CACE,qBrB85ER,CqB/5EM,2CACE,oBrBi6ER,CqBl6EM,2CACE,qBrBo6ER,CqBr6EM,2CACE,qBrBu6ER,CqBx6EM,2CACE,qBrB06ER,CqB36EM,4CACE,qBrB66ER,CqB96EM,4CACE,oBrBg7ER,CqBj7EM,4CACE,qBrBm7ER,CqBp7EM,4CACE,qBrBs7ER,CqBv7EM,4CACE,qBrBy7ER,CqB17EM,4CACE,qBrB47ER,CqB77EM,4CACE,oBrB+7ER,CqBz7EI,gCAEE,SAAA,CADA,yBAAA,CAEA,wCrB27EN,CsBxgFA,MACE,wStB2gFF,CsBlgFE,qBAEE,mBAAA,CADA,kBtBsgFJ,CsBjgFE,8BAEE,iBtB4gFJ,CsB9gFE,8BAEE,gBtB4gFJ,CsB9gFE,oBAUE,+CAAA,CACA,oBAAA,CAVA,oBAAA,CAKA,gBAAA,CADA,eAAA,CAGA,qBAAA,CADA,eAAA,CAJA,kBAAA,CACA,uBAAA,CAKA,qBtBqgFJ,CsBhgFI,0BAGE,uCAAA,CAFA,aAAA,CACA,YAAA,CAEA,6CtBkgFN,CsB7/EM,gEAGE,0CAAA,CADA,+BtB+/ER,CsBz/EI,yBACE,uBtB2/EN,CsBn/EI,gCAME,oDAAA,CAMA,UAAA,CAXA,oBAAA,CAEA,YAAA,CACA,iBAAA,CAGA,qCAAA,CAAA,6BAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CACA,iCAAA,CANA,0BAAA,CAHA,WtB+/EN,CsBj/EI,6DACE,0CtBm/EN,CsBp/EI,0DACE,0CtBm/EN,CsBp/EI,oDACE,0CtBm/EN,CuB5jFA,iBACE,GACE,uDAAA,CACA,oBvB+jFF,CuB5jFA,IACE,6BAAA,CACA,kBvB8jFF,CuB3jFA,GACE,wBAAA,CACA,oBvB6jFF,CACF,CuBrjFA,MACE,wBvBujFF,CuBjjFA,YAwBE,kCAAA,CAAA,0BAAA,CALA,2CAAA,CACA,mBAAA,CACA,8BAAA,CAJA,gCAAA,CACA,sCAAA,CAfA,+IACE,CAYF,8BAAA,CASA,SAAA,CAxBA,iBAAA,CACA,uBAAA,CAoBA,4BAAA,CAIA,uDACE,CAZF,6BAAA,CADA,SvB4jFF,CuB1iFE,oBAGE,SAAA,CADA,uBAAA,CAEA,2EACE,CAJF,SvB+iFJ,CuBriFE,4DACE,sCvBuiFJ,CuBxiFE,yDACE,sCvBuiFJ,CuBxiFE,mDACE,sCvBuiFJ,CuBniFE,mBAEE,gBAAA,CADA,avBsiFJ,CuBliFI,2CACE,YvBoiFN,CuBhiFI,0CACE,evBkiFN,CuB1hFA,eACE,eAAA,CAEA,YAAA,CADA,kBvB8hFF,CuB1hFE,yBACE,avB4hFJ,CuBxhFE,6BACE,oBAAA,CAGA,iBvBwhFJ,CuBphFE,sBAOE,cAAA,CAFA,sCAAA,CADA,eAAA,CADA,YAAA,CAGA,YAAA,CALA,iBAAA,CAOA,wBAAA,CAAA,qBAAA,CAAA,gBAAA,CANA,SvB4hFJ,CuBnhFI,qCACE,UAAA,CACA,uBvBqhFN,CuBlhFM,gEACE,UvBohFR,CuBrhFM,6DACE,UvBohFR,CuBrhFM,uDACE,UvBohFR,CuB5gFI,4BAYE,oDAAA,CACA,iBAAA,CAIA,UAAA,CARA,YAAA,CANA,YAAA,CAOA,cAAA,CACA,cAAA,CAVA,iBAAA,CACA,KAAA,CAYA,2CACE,CARF,wBAAA,CACA,6BAAA,CAJA,UvBuhFN,CuBvgFM,4CAGE,8CACE,2BvBugFR,CACF,CuBngFM,gDAIE,cAAA,CAHA,2CvBsgFR,CuB9/EI,2BAEE,sCAAA,CADA,iBvBigFN,CuB5/EI,qFACE,+BvB8/EN,CuB//EI,kFACE,+
BvB8/EN,CuB//EI,4EACE,+BvB8/EN,CuB3/EM,2FACE,0CvB6/ER,CuB9/EM,wFACE,0CvB6/ER,CuB9/EM,kFACE,0CvB6/ER,CuBx/EI,0CAGE,cAAA,CADA,eAAA,CADA,SvB4/EN,CuBt/EI,8CACE,oBAAA,CACA,evBw/EN,CuBr/EM,qDAME,mCAAA,CALA,oBAAA,CACA,mBAAA,CAEA,qBAAA,CACA,iDAAA,CAFA,qBvB0/ER,CuBn/EQ,iBAVF,qDAWI,WvBs/ER,CuBn/EQ,mEACE,mCvBq/EV,CACF,CwBntFA,kBAKE,exB+tFF,CwBpuFA,kBAKE,gBxB+tFF,CwBpuFA,QASE,2CAAA,CACA,oBAAA,CAEA,8BAAA,CALA,uCAAA,CAHA,aAAA,CAIA,eAAA,CAGA,YAAA,CALA,mBAAA,CALA,cAAA,CACA,UAAA,CAWA,yBAAA,CACA,mGACE,CAZF,SxBiuFF,CwB/sFE,aArBF,QAsBI,YxBktFF,CACF,CwB/sFE,kBACE,wBxBitFJ,CwB7sFE,gBAEE,SAAA,CAEA,mBAAA,CAHA,+BAAA,CAEA,uBxBgtFJ,CwB5sFI,0BACE,8BxB8sFN,CwBzsFE,mCAEE,0CAAA,CADA,+BxB4sFJ,CwB7sFE,gCAEE,0CAAA,CADA,+BxB4sFJ,CwB7sFE,0BAEE,0CAAA,CADA,+BxB4sFJ,CwBvsFE,YACE,oBAAA,CACA,oBxBysFJ,CyB7vFA,oBACE,GACE,mBzBgwFF,CACF,CyBxvFA,MACE,wfzB0vFF,CyBpvFA,YACE,aAAA,CAEA,eAAA,CADA,azBwvFF,CyBpvFE,+BAOE,kBAAA,CAAA,kBzBqvFJ,CyB5vFE,+BAOE,iBAAA,CAAA,mBzBqvFJ,CyB5vFE,qBAQE,aAAA,CAEA,cAAA,CADA,YAAA,CARA,iBAAA,CAKA,UzBsvFJ,CyB/uFI,qCAIE,iBzBuvFN,CyB3vFI,qCAIE,kBzBuvFN,CyB3vFI,2BAKE,6BAAA,CAKA,UAAA,CATA,oBAAA,CAEA,YAAA,CAGA,yCAAA,CAAA,iCAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CAPA,WzByvFN,CyB5uFE,kBAUE,2CAAA,CACA,mBAAA,CACA,8BAAA,CAJA,gCAAA,CACA,oBAAA,CAJA,kBAAA,CADA,YAAA,CASA,SAAA,CANA,aAAA,CADA,SAAA,CALA,iBAAA,CAgBA,4BAAA,CAfA,UAAA,CAYA,+CACE,CAZF,SzB0vFJ,CyBzuFI,gEACE,gBAAA,CACA,SAAA,CACA,8CACE,CADF,sCzB2uFN,CyB9uFI,6DACE,gBAAA,CACA,SAAA,CACA,2CACE,CADF,sCzB2uFN,CyB9uFI,uDACE,gBAAA,CACA,SAAA,CACA,sCzB2uFN,CyBruFI,wBAGE,oCACE,gCzBquFN,CyBjuFI,2CACE,czBmuFN,CACF,CyB9tFE,kBACE,kBzBguFJ,CyB5tFE,4BAGE,kBAAA,CAAA,oBzBmuFJ,CyBtuFE,4BAGE,mBAAA,CAAA,mBzBmuFJ,CyBtuFE,kBAME,cAAA,CALA,aAAA,CAIA,YAAA,CAKA,uBAAA,CAHA,2CACE,CAJF,kBAAA,CAFA,UzBouFJ,CyBztFI,6CACE,+BzB2tFN,CyB5tFI,0CACE,+BzB2tFN,CyB5tFI,oCACE,+BzB2tFN,CyBvtFI,wBACE,qDzBytFN,C0B1zFA,MAEI,uWAAA,CAAA,8WAAA,CAAA,sPAAA,CAAA,8xBAAA,CAAA,0MAAA,CAAA,gbAAA,CAAA,gMAAA,CAAA,iQAAA,CAAA,0VAAA,CAAA,6aAAA,CAAA,8SAAA,CAAA,gM1Bm1FJ,C0Bv0FE,4CAQE,8CAAA,CACA,2BAAA,CACA,mBAAA,CACA,8BAAA,CANA,mCAAA,CAHA,iBAAA,CAIA,gBAAA,CAHA,iBAAA,CACA,eAAA,CAGA,uB1B80FJ,C0Bv0FI,aAdF,4CAeI,e1B20FJ,CACF,C0Bv0FI,gDACE,qB1B00FN,C0Bt0FI,gHAEE,iBAAA,CADA,c1B00FN,C0B30FI,0GAEE,iBAAA,CADA,c1B00FN,C0B30FI,8FAEE,iBAAA,CADA,c1B00FN,C0Br0FI,4FACE,iB1Bw0FN,C0Bp0FI,kFACE,e1Bu0FN,C0Bn0FI,0FACE,Y1Bs0FN,C0Bl0FI,8EACE,mB1Bq0FN,C0Bh0FE,sEAME,iBAAA,CAAA,mB1Bw0FJ,C0B90FE,sEAME,kBAAA,CAAA,kB1Bw0FJ,C0B90FE,sEAUE,uB1Bo0FJ,C0B90FE,sEAUE,wB1Bo0FJ,C0B90FE,sEAWE,4B1Bm0FJ,C0B90FE,4IAYE,6B1Bk0FJ,C0B90FE,sEAYE,4B1Bk0FJ,C0B90FE,kDAQE,0BAAA,CACA,WAAA,CAFA,eAAA,CAHA,eAAA,CACA,oBAAA,CAAA,iBAAA,CAHA,iB1B40FJ,C0B/zFI,kFACE,e1Bk0FN,C0B9zFI,oFAGE,U1By0FN,C0B50FI,oFAGE,W1By0FN,C0B50FI,gEAME,wBCsIU,CDjIV,UAAA,CANA,WAAA,CAEA,kDAAA,CAAA,0CAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CATA,iBAAA,CACA,UAAA,CAEA,U1Bw0FN,C0B7zFI,4DACE,4D1Bg0FN,C0B3yFE,iEACE,oB1B8yFJ,C0B/yFE,2DACE,oB1B8yFJ,C0B/yFE,+CACE,oB1B8yFJ,C0B1yFE,wEACE,0B1B6yFJ,C0B9yFE,kEACE,0B1B6yFJ,C0B9yFE,sDACE,0B1B6yFJ,C0B1yFI,+EACE,wBAnBG,CAoBH,kDAAA,CAAA,0C1B4yFN,C0B9yFI,yEACE,wBAnBG,CAoBH,0C1B4yFN,C0B9yFI,6DACE,wBAnBG,CAoBH,kDAAA,CAAA,0C1B4yFN,C0BxyFI,8EACE,a1B0yFN,C0B3yFI,wEACE,a1B0yFN,C0B3yFI,4DACE,a1B0yFN,C0B1zFE,oFACE,oB1B6zFJ,C0B9zFE,8EACE,oB1B6zFJ,C0B9zFE,kEACE,oB1B6zFJ,C0BzzFE,2FACE,0B1B4zFJ,C0B7zFE,qFACE,0B1B4zFJ,C0B7zFE,yEACE,0B1B4zFJ,C0BzzFI,kGACE,wBAnBG,CAoBH,sDAAA,CAAA,8C1B2zFN,C0B7zFI,4FACE,wBAnBG,CAoBH,8C1B2zFN,C0B7zFI,gFACE,wBAnBG,CAoBH,sDAAA,CAAA,8C1B2zFN,C0BvzFI,iGACE,a1ByzFN,C0B1zFI,2FACE,a1ByzFN,C0B1zFI,+EACE,a1ByzFN,C0Bz0FE,uE
ACE,oB1B40FJ,C0B70FE,iEACE,oB1B40FJ,C0B70FE,qDACE,oB1B40FJ,C0Bx0FE,8EACE,0B1B20FJ,C0B50FE,wEACE,0B1B20FJ,C0B50FE,4DACE,0B1B20FJ,C0Bx0FI,qFACE,wBAnBG,CAoBH,kDAAA,CAAA,0C1B00FN,C0B50FI,+EACE,wBAnBG,CAoBH,0C1B00FN,C0B50FI,mEACE,wBAnBG,CAoBH,kDAAA,CAAA,0C1B00FN,C0Bt0FI,oFACE,a1Bw0FN,C0Bz0FI,8EACE,a1Bw0FN,C0Bz0FI,kEACE,a1Bw0FN,C0Bx1FE,iFACE,oB1B21FJ,C0B51FE,2EACE,oB1B21FJ,C0B51FE,+DACE,oB1B21FJ,C0Bv1FE,wFACE,0B1B01FJ,C0B31FE,kFACE,0B1B01FJ,C0B31FE,sEACE,0B1B01FJ,C0Bv1FI,+FACE,wBAnBG,CAoBH,iDAAA,CAAA,yC1By1FN,C0B31FI,yFACE,wBAnBG,CAoBH,yC1By1FN,C0B31FI,6EACE,wBAnBG,CAoBH,iDAAA,CAAA,yC1By1FN,C0Br1FI,8FACE,a1Bu1FN,C0Bx1FI,wFACE,a1Bu1FN,C0Bx1FI,4EACE,a1Bu1FN,C0Bv2FE,iFACE,oB1B02FJ,C0B32FE,2EACE,oB1B02FJ,C0B32FE,+DACE,oB1B02FJ,C0Bt2FE,wFACE,0B1By2FJ,C0B12FE,kFACE,0B1By2FJ,C0B12FE,sEACE,0B1By2FJ,C0Bt2FI,+FACE,wBAnBG,CAoBH,qDAAA,CAAA,6C1Bw2FN,C0B12FI,yFACE,wBAnBG,CAoBH,6C1Bw2FN,C0B12FI,6EACE,wBAnBG,CAoBH,qDAAA,CAAA,6C1Bw2FN,C0Bp2FI,8FACE,a1Bs2FN,C0Bv2FI,wFACE,a1Bs2FN,C0Bv2FI,4EACE,a1Bs2FN,C0Bt3FE,gFACE,oB1By3FJ,C0B13FE,0EACE,oB1By3FJ,C0B13FE,8DACE,oB1By3FJ,C0Br3FE,uFACE,0B1Bw3FJ,C0Bz3FE,iFACE,0B1Bw3FJ,C0Bz3FE,qEACE,0B1Bw3FJ,C0Br3FI,8FACE,wBAnBG,CAoBH,sDAAA,CAAA,8C1Bu3FN,C0Bz3FI,wFACE,wBAnBG,CAoBH,8C1Bu3FN,C0Bz3FI,4EACE,wBAnBG,CAoBH,sDAAA,CAAA,8C1Bu3FN,C0Bn3FI,6FACE,a1Bq3FN,C0Bt3FI,uFACE,a1Bq3FN,C0Bt3FI,2EACE,a1Bq3FN,C0Br4FE,wFACE,oB1Bw4FJ,C0Bz4FE,kFACE,oB1Bw4FJ,C0Bz4FE,sEACE,oB1Bw4FJ,C0Bp4FE,+FACE,0B1Bu4FJ,C0Bx4FE,yFACE,0B1Bu4FJ,C0Bx4FE,6EACE,0B1Bu4FJ,C0Bp4FI,sGACE,wBAnBG,CAoBH,qDAAA,CAAA,6C1Bs4FN,C0Bx4FI,gGACE,wBAnBG,CAoBH,6C1Bs4FN,C0Bx4FI,oFACE,wBAnBG,CAoBH,qDAAA,CAAA,6C1Bs4FN,C0Bl4FI,qGACE,a1Bo4FN,C0Br4FI,+FACE,a1Bo4FN,C0Br4FI,mFACE,a1Bo4FN,C0Bp5FE,mFACE,oB1Bu5FJ,C0Bx5FE,6EACE,oB1Bu5FJ,C0Bx5FE,iEACE,oB1Bu5FJ,C0Bn5FE,0FACE,0B1Bs5FJ,C0Bv5FE,oFACE,0B1Bs5FJ,C0Bv5FE,wEACE,0B1Bs5FJ,C0Bn5FI,iGACE,wBAnBG,CAoBH,qDAAA,CAAA,6C1Bq5FN,C0Bv5FI,2FACE,wBAnBG,CAoBH,6C1Bq5FN,C0Bv5FI,+EACE,wBAnBG,CAoBH,qDAAA,CAAA,6C1Bq5FN,C0Bj5FI,gGACE,a1Bm5FN,C0Bp5FI,0FACE,a1Bm5FN,C0Bp5FI,8EACE,a1Bm5FN,C0Bn6FE,0EACE,oB1Bs6FJ,C0Bv6FE,oEACE,oB1Bs6FJ,C0Bv6FE,wDACE,oB1Bs6FJ,C0Bl6FE,iFACE,0B1Bq6FJ,C0Bt6FE,2EACE,0B1Bq6FJ,C0Bt6FE,+DACE,0B1Bq6FJ,C0Bl6FI,wFACE,wBAnBG,CAoBH,oDAAA,CAAA,4C1Bo6FN,C0Bt6FI,kFACE,wBAnBG,CAoBH,4C1Bo6FN,C0Bt6FI,sEACE,wBAnBG,CAoBH,oDAAA,CAAA,4C1Bo6FN,C0Bh6FI,uFACE,a1Bk6FN,C0Bn6FI,iFACE,a1Bk6FN,C0Bn6FI,qEACE,a1Bk6FN,C0Bl7FE,gEACE,oB1Bq7FJ,C0Bt7FE,0DACE,oB1Bq7FJ,C0Bt7FE,8CACE,oB1Bq7FJ,C0Bj7FE,uEACE,0B1Bo7FJ,C0Br7FE,iEACE,0B1Bo7FJ,C0Br7FE,qDACE,0B1Bo7FJ,C0Bj7FI,8EACE,wBAnBG,CAoBH,iDAAA,CAAA,yC1Bm7FN,C0Br7FI,wEACE,wBAnBG,CAoBH,yC1Bm7FN,C0Br7FI,4DACE,wBAnBG,CAoBH,iDAAA,CAAA,yC1Bm7FN,C0B/6FI,6EACE,a1Bi7FN,C0Bl7FI,uEACE,a1Bi7FN,C0Bl7FI,2DACE,a1Bi7FN,C0Bj8FE,oEACE,oB1Bo8FJ,C0Br8FE,8DACE,oB1Bo8FJ,C0Br8FE,kDACE,oB1Bo8FJ,C0Bh8FE,2EACE,0B1Bm8FJ,C0Bp8FE,qEACE,0B1Bm8FJ,C0Bp8FE,yDACE,0B1Bm8FJ,C0Bh8FI,kFACE,wBAnBG,CAoBH,qDAAA,CAAA,6C1Bk8FN,C0Bp8FI,4EACE,wBAnBG,CAoBH,6C1Bk8FN,C0Bp8FI,gEACE,wBAnBG,CAoBH,qDAAA,CAAA,6C1Bk8FN,C0B97FI,iFACE,a1Bg8FN,C0Bj8FI,2EACE,a1Bg8FN,C0Bj8FI,+DACE,a1Bg8FN,C0Bh9FE,wEACE,oB1Bm9FJ,C0Bp9FE,kEACE,oB1Bm9FJ,C0Bp9FE,sDACE,oB1Bm9FJ,C0B/8FE,+EACE,0B1Bk9FJ,C0Bn9FE,yEACE,0B1Bk9FJ,C0Bn9FE,6DACE,0B1Bk9FJ,C0B/8FI,sFACE,wBAnBG,CAoBH,mDAAA,CAAA,2C1Bi9FN,C0Bn9FI,gFACE,wBAnBG,CAoBH,2C1Bi9FN,C0Bn9FI,oEACE,wBAnBG,CAoBH,mDAAA,CAAA,2C1Bi9FN,C0B78FI,qFACE,a1B+8FN,C0Bh9FI,+EACE,a1B+8FN,C0Bh9FI,mEACE,a1B+8FN,C4BjnGA,MACE,wM5BonGF,C4B3mGE,sBACE,uCAAA,CACA,gB5B8mGJ,C4B3mGI,mCACE,a5B6mGN,C4B9mGI,mCACE,c5B6mGN,C4BzmGM,4BACE,sB5B2mGR,C4BxmGQ,mCACE,gC5B0mGV,C4BtmGQ,2DAEE,SAAA,CADA,uBAAA,CAEA,e5BwmGV,C4BpmGQ,0EAEE,SAAA,CADA,uB5BumGV,C4B
xmGQ,uEAEE,SAAA,CADA,uB5BumGV,C4BxmGQ,iEAEE,SAAA,CADA,uB5BumGV,C4BlmGQ,yCACE,Y5BomGV,C4B7lGE,0BAEE,eAAA,CADA,e5BgmGJ,C4B5lGI,+BACE,oB5B8lGN,C4BzlGE,gDACE,Y5B2lGJ,C4BvlGE,8BAEE,+BAAA,CADA,oBAAA,CAGA,WAAA,CAGA,SAAA,CADA,4BAAA,CAEA,4DACE,CAJF,0B5B2lGJ,C4BllGI,aAdF,8BAeI,+BAAA,CAEA,SAAA,CADA,uB5BslGJ,CACF,C4BllGI,wCACE,6B5BolGN,C4BhlGI,oCACE,+B5BklGN,C4B9kGI,qCAIE,6BAAA,CAKA,UAAA,CARA,oBAAA,CAEA,YAAA,CAEA,2CAAA,CAAA,mCAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CANA,W5BulGN,C4B1kGQ,mDACE,oB5B4kGV,C6B1rGE,kCAEE,iB7BgsGJ,C6BlsGE,kCAEE,kB7BgsGJ,C6BlsGE,wBAGE,yCAAA,CAFA,oBAAA,CAGA,SAAA,CACA,mC7B6rGJ,C6BxrGI,aAVF,wBAWI,Y7B2rGJ,CACF,C6BvrGE,mFAEE,SAAA,CACA,2CACE,CADF,mC7ByrGJ,C6B5rGE,gFAEE,SAAA,CACA,wCACE,CADF,mC7ByrGJ,C6B5rGE,0EAEE,SAAA,CACA,mC7ByrGJ,C6BnrGE,mFAEE,+B7BqrGJ,C6BvrGE,gFAEE,+B7BqrGJ,C6BvrGE,0EAEE,+B7BqrGJ,C6BjrGE,oBACE,yBAAA,CACA,uBAAA,CAGA,yE7BirGJ,CKljGI,sCwBrHE,qDACE,uB7B0qGN,CACF,C6BrqGE,0CACE,yB7BuqGJ,C6BxqGE,uCACE,yB7BuqGJ,C6BxqGE,iCACE,yB7BuqGJ,C6BnqGE,sBACE,0B7BqqGJ,C8BhuGE,2BACE,a9BmuGJ,CK9iGI,wCyBtLF,2BAKI,e9BmuGJ,CACF,C8BhuGI,6BAEE,0BAAA,CAAA,2BAAA,CACA,eAAA,CACA,iBAAA,CAHA,yBAAA,CAAA,sBAAA,CAAA,iB9BquGN,C8B/tGM,2CACE,kB9BiuGR,C+BlvGE,kDACE,kCAAA,CAAA,0B/BqvGJ,C+BtvGE,+CACE,0B/BqvGJ,C+BtvGE,yCACE,kCAAA,CAAA,0B/BqvGJ,C+BjvGE,uBACE,4C/BmvGJ,C+B/uGE,uBACE,4C/BivGJ,C+B7uGE,4BACE,qC/B+uGJ,C+B5uGI,mCACE,a/B8uGN,C+B1uGI,kCACE,a/B4uGN,C+BvuGE,0BAKE,eAAA,CAJA,aAAA,CACA,YAAA,CAEA,aAAA,CADA,kBAAA,CAAA,mB/B2uGJ,C+BtuGI,uCACE,e/BwuGN,C+BpuGI,sCACE,kB/BsuGN,CgCrxGA,MACE,8LhCwxGF,CgC/wGE,oBACE,iBAAA,CAEA,gBAAA,CADA,ahCmxGJ,CgC/wGI,wCACE,uBhCixGN,CgC7wGI,gCAEE,eAAA,CADA,gBhCgxGN,CgCzwGM,wCACE,mBhC2wGR,CgCrwGE,8BAGE,oBhC0wGJ,CgC7wGE,8BAGE,mBhC0wGJ,CgC7wGE,8BAIE,4BhCywGJ,CgC7wGE,4DAKE,6BhCwwGJ,CgC7wGE,8BAKE,4BhCwwGJ,CgC7wGE,oBAME,cAAA,CALA,aAAA,CACA,ehC2wGJ,CgCpwGI,kCACE,uCAAA,CACA,oBhCswGN,CgClwGI,wCAEE,uCAAA,CADA,YhCqwGN,CgChwGI,oCAGE,WhC4wGN,CgC/wGI,oCAGE,UhC4wGN,CgC/wGI,0BAME,6BAAA,CAOA,UAAA,CARA,WAAA,CAEA,yCAAA,CAAA,iCAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CATA,iBAAA,CACA,UAAA,CASA,sBAAA,CACA,yBAAA,CARA,UhC2wGN,CgC/vGM,oCACE,wBhCiwGR,CgC5vGI,4BACE,YhC8vGN,CgCzvGI,4CACE,YhC2vGN,CiC90GE,qDACE,mBAAA,CACA,cAAA,CACA,uBjCi1GJ,CiCp1GE,kDACE,mBAAA,CACA,cAAA,CACA,uBjCi1GJ,CiCp1GE,4CACE,mBAAA,CACA,cAAA,CACA,uBjCi1GJ,CiC90GI,yDAGE,iBAAA,CADA,eAAA,CADA,ajCk1GN,CiCn1GI,sDAGE,iBAAA,CADA,eAAA,CADA,ajCk1GN,CiCn1GI,gDAGE,iBAAA,CADA,eAAA,CADA,ajCk1GN,CkCx1GE,gCACE,sClC21GJ,CkC51GE,6BACE,sClC21GJ,CkC51GE,uBACE,sClC21GJ,CkCx1GE,cACE,yClC01GJ,CkC90GE,4DACE,oClCg1GJ,CkCj1GE,yDACE,oClCg1GJ,CkCj1GE,mDACE,oClCg1GJ,CkCx0GE,6CACE,qClC00GJ,CkC30GE,0CACE,qClC00GJ,CkC30GE,oCACE,qClC00GJ,CkCh0GE,oDACE,oClCk0GJ,CkCn0GE,iDACE,oClCk0GJ,CkCn0GE,2CACE,oClCk0GJ,CkCzzGE,gDACE,qClC2zGJ,CkC5zGE,6CACE,qClC2zGJ,CkC5zGE,uCACE,qClC2zGJ,CkCtzGE,gCACE,kClCwzGJ,CkCzzGE,6BACE,kClCwzGJ,CkCzzGE,uBACE,kClCwzGJ,CkClzGE,qCACE,sClCozGJ,CkCrzGE,kCACE,sClCozGJ,CkCrzGE,4BACE,sClCozGJ,CkC7yGE,yCACE,sClC+yGJ,CkChzGE,sCACE,sClC+yGJ,CkChzGE,gCACE,sClC+yGJ,CkCxyGE,yCACE,qClC0yGJ,CkC3yGE,sCACE,qClC0yGJ,CkC3yGE,gCACE,qClC0yGJ,CkCjyGE,gDACE,qClCmyGJ,CkCpyGE,6CACE,qClCmyGJ,CkCpyGE,uCACE,qClCmyGJ,CkC3xGE,6CACE,sClC6xGJ,CkC9xGE,0CACE,sClC6xGJ,CkC9xGE,oCACE,sClC6xGJ,CkClxGE,yDACE,qClCoxGJ,CkCrxGE,sDACE,qClCoxGJ,CkCrxGE,gDACE,qClCoxGJ,CkC/wGE,iCAGE,mBAAA,CAFA,gBAAA,CACA,gBlCkxGJ,CkCpxGE,8BAGE,mBAAA,CAFA,gBAAA,CACA,gBlCkxGJ,CkCpxGE,wBAGE,mBAAA,CAFA,gBAAA,CACA,gBlCkxGJ,CkC9wGE,eACE,4ClCgxGJ,CkC7wGE,eACE,4ClC+wGJ,CkC3wGE,gBAIE,wCAAA,CAHA,aAAA,CACA,wBAAA,CACA,wBlC8wGJ,CkCzwGE,yBAOE,wCAAA,CACA,+DAAA,CACA,4
BAAA,CACA,6BAAA,CARA,iBAAA,CAIA,eAAA,CADA,eAAA,CAFA,cAAA,CACA,oCAAA,CAHA,iBlCoxGJ,CkCxwGI,6BACE,YlC0wGN,CkCvwGM,kCACE,wBAAA,CACA,yBlCywGR,CkCnwGE,iCAWE,wCAAA,CACA,+DAAA,CAFA,uCAAA,CAGA,0BAAA,CAPA,UAAA,CAJA,oBAAA,CAMA,2BAAA,CADA,2BAAA,CAEA,2BAAA,CARA,uBAAA,CAAA,eAAA,CAaA,wBAAA,CAAA,qBAAA,CAAA,gBAAA,CATA,SlC4wGJ,CkC1vGE,sBACE,iBAAA,CACA,iBlC4vGJ,CkCpvGI,sCACE,gBlCsvGN,CkClvGI,gDACE,YlCovGN,CkC1uGA,gBACE,iBlC6uGF,CkCzuGE,uCACE,aAAA,CACA,SlC2uGJ,CkC7uGE,oCACE,aAAA,CACA,SlC2uGJ,CkC7uGE,8BACE,aAAA,CACA,SlC2uGJ,CkCtuGE,mBACE,YlCwuGJ,CkCnuGE,oBACE,QlCquGJ,CkCjuGE,4BACE,WAAA,CACA,SAAA,CACA,elCmuGJ,CkChuGI,0CACE,YlCkuGN,CkC5tGE,yBAIE,wCAAA,CAEA,+BAAA,CADA,4BAAA,CAFA,eAAA,CADA,oDAAA,CAKA,wBAAA,CAAA,qBAAA,CAAA,gBlC8tGJ,CkC1tGE,2BAEE,+DAAA,CADA,2BlC6tGJ,CkCztGI,+BACE,uCAAA,CACA,gBlC2tGN,CkCttGE,sBACE,MAAA,CACA,WlCwtGJ,CkCntGA,aACE,alCstGF,CkC5sGE,4BAEE,aAAA,CADA,YlCgtGJ,CkC5sGI,wDAEE,2BAAA,CADA,wBlC+sGN,CkCzsGE,+BAKE,2CAAA,CAEA,+BAAA,CADA,gCAAA,CADA,sBAAA,CAJA,mBAAA,CAEA,gBAAA,CADA,alCgtGJ,CkCxsGI,qCAEE,UAAA,CACA,UAAA,CAFA,alC4sGN,CK70GI,wC6BgJF,8BACE,iBlCisGF,CkCvrGE,wSAGE,elC6rGJ,CkCzrGE,sCAEE,mBAAA,CACA,eAAA,CADA,oBAAA,CADA,kBAAA,CAAA,mBlC6rGJ,CACF,CDphHI,kDAIE,+BAAA,CACA,8BAAA,CAFA,aAAA,CADA,QAAA,CADA,iBC0hHN,CD3hHI,+CAIE,+BAAA,CACA,8BAAA,CAFA,aAAA,CADA,QAAA,CADA,iBC0hHN,CD3hHI,yCAIE,+BAAA,CACA,8BAAA,CAFA,aAAA,CADA,QAAA,CADA,iBC0hHN,CDlhHI,uBAEE,uCAAA,CADA,cCqhHN,CDh+GM,iHAEE,WAlDkB,CAiDlB,kBC2+GR,CD5+GM,6HAEE,WAlDkB,CAiDlB,kBCu/GR,CDx/GM,6HAEE,WAlDkB,CAiDlB,kBCmgHR,CDpgHM,oHAEE,WAlDkB,CAiDlB,kBC+gHR,CDhhHM,0HAEE,WAlDkB,CAiDlB,kBC2hHR,CD5hHM,uHAEE,WAlDkB,CAiDlB,kBCuiHR,CDxiHM,uHAEE,WAlDkB,CAiDlB,kBCmjHR,CDpjHM,6HAEE,WAlDkB,CAiDlB,kBC+jHR,CDhkHM,yCAEE,WAlDkB,CAiDlB,kBCmkHR,CDpkHM,yCAEE,WAlDkB,CAiDlB,kBCukHR,CDxkHM,0CAEE,WAlDkB,CAiDlB,kBC2kHR,CD5kHM,uCAEE,WAlDkB,CAiDlB,kBC+kHR,CDhlHM,wCAEE,WAlDkB,CAiDlB,kBCmlHR,CDplHM,sCAEE,WAlDkB,CAiDlB,kBCulHR,CDxlHM,wCAEE,WAlDkB,CAiDlB,kBC2lHR,CD5lHM,oCAEE,WAlDkB,CAiDlB,kBC+lHR,CDhmHM,2CAEE,WAlDkB,CAiDlB,kBCmmHR,CDpmHM,qCAEE,WAlDkB,CAiDlB,kBCumHR,CDxmHM,oCAEE,WAlDkB,CAiDlB,kBC2mHR,CD5mHM,kCAEE,WAlDkB,CAiDlB,kBC+mHR,CDhnHM,qCAEE,WAlDkB,CAiDlB,kBCmnHR,CDpnHM,mCAEE,WAlDkB,CAiDlB,kBCunHR,CDxnHM,qCAEE,WAlDkB,CAiDlB,kBC2nHR,CD5nHM,wCAEE,WAlDkB,CAiDlB,kBC+nHR,CDhoHM,sCAEE,WAlDkB,CAiDlB,kBCmoHR,CDpoHM,2CAEE,WAlDkB,CAiDlB,kBCuoHR,CD5nHM,iCAEE,WAPkB,CAMlB,iBC+nHR,CDhoHM,uCAEE,WAPkB,CAMlB,iBCmoHR,CDpoHM,mCAEE,WAPkB,CAMlB,iBCuoHR,CmCztHA,MACE,qMAAA,CACA,mMnC4tHF,CmCntHE,wBAKE,mBAAA,CAHA,YAAA,CACA,qBAAA,CACA,YAAA,CAHA,iBnC0tHJ,CmChtHI,8BAGE,QAAA,CACA,SAAA,CAHA,iBAAA,CACA,OnCotHN,CmC/sHM,qCACE,0BnCitHR,CmClrHE,2BAKE,uBAAA,CADA,+DAAA,CAHA,YAAA,CACA,cAAA,CACA,aAAA,CAGA,oBnCorHJ,CmCjrHI,aATF,2BAUI,gBnCorHJ,CACF,CmCjrHI,cAGE,+BACE,iBnCirHN,CmC9qHM,sCAOE,oCAAA,CALA,QAAA,CAWA,UAAA,CATA,aAAA,CAEA,UAAA,CAHA,MAAA,CAFA,iBAAA,CAOA,2CAAA,CACA,qCACE,CAEF,kDAAA,CAPA,+BnCsrHR,CACF,CmCzqHI,8CACE,YnC2qHN,CmCvqHI,iCAQE,+BAAA,CACA,6BAAA,CALA,uCAAA,CAMA,cAAA,CATA,aAAA,CAKA,gBAAA,CADA,eAAA,CAFA,8BAAA,CAWA,+BAAA,CAHA,2CACE,CALF,kBAAA,CALA,UnCmrHN,CmCpqHM,aAII,6CACE,OnCmqHV,CmCpqHQ,8CACE,OnCsqHV,CmCvqHQ,8CACE,OnCyqHV,CmC1qHQ,8CACE,OnC4qHV,CmC7qHQ,8CACE,OnC+qHV,CmChrHQ,8CACE,OnCkrHV,CmCnrHQ,8CACE,OnCqrHV,CmCtrHQ,8CACE,OnCwrHV,CmCzrHQ,8CACE,OnC2rHV,CmC5rHQ,+CACE,QnC8rHV,CmC/rHQ,+CACE,QnCisHV,CmClsHQ,+CACE,QnCosHV,CmCrsHQ,+CACE,QnCusHV,CmCxsHQ,+CACE,QnC0sHV,CmC3sHQ,+CACE,QnC6sHV,CmC9sHQ,+CACE,QnCgtHV,CmCjtHQ,+CACE,QnCmtHV,CmCptHQ,+CACE,QnCstHV,CmCvtHQ,+CACE,QnCytHV,CmC1tHQ,+CACE,QnC4tHV,CACF,CmCvtHM,uCACE,+BnCytHR,CmCntHE,4BACE,UnCqtHJ,CmCltHI,aAJF,4BAKI,gBnCqtHJ,CACF,CmCjtHE,0BACE,YnCmtHJ,CmChtHI,aAJF,0
BAKI,anCmtHJ,CmC/sHM,sCACE,OnCitHR,CmCltHM,uCACE,OnCotHR,CmCrtHM,uCACE,OnCutHR,CmCxtHM,uCACE,OnC0tHR,CmC3tHM,uCACE,OnC6tHR,CmC9tHM,uCACE,OnCguHR,CmCjuHM,uCACE,OnCmuHR,CmCpuHM,uCACE,OnCsuHR,CmCvuHM,uCACE,OnCyuHR,CmC1uHM,wCACE,QnC4uHR,CmC7uHM,wCACE,QnC+uHR,CmChvHM,wCACE,QnCkvHR,CmCnvHM,wCACE,QnCqvHR,CmCtvHM,wCACE,QnCwvHR,CmCzvHM,wCACE,QnC2vHR,CmC5vHM,wCACE,QnC8vHR,CmC/vHM,wCACE,QnCiwHR,CmClwHM,wCACE,QnCowHR,CmCrwHM,wCACE,QnCuwHR,CmCxwHM,wCACE,QnC0wHR,CACF,CmCpwHI,+FAEE,QnCswHN,CmCnwHM,yGACE,wBAAA,CACA,yBnCswHR,CmC7vHM,2DAEE,wBAAA,CACA,yBAAA,CAFA,QnCiwHR,CmC1vHM,iEACE,QnC4vHR,CmCzvHQ,qLAGE,wBAAA,CACA,yBAAA,CAFA,QnC6vHV,CmCvvHQ,6FACE,wBAAA,CACA,yBnCyvHV,CmCpvHM,yDACE,kBnCsvHR,CmCjvHI,sCACE,QnCmvHN,CmC9uHE,2BAEE,iBAAA,CAKA,kBAAA,CADA,uCAAA,CAEA,cAAA,CAPA,aAAA,CAGA,YAAA,CACA,gBAAA,CAKA,mBAAA,CADA,gCAAA,CANA,WnCuvHJ,CmC7uHI,iCAEE,uDAAA,CADA,+BnCgvHN,CmC3uHI,iCAIE,6BAAA,CAQA,UAAA,CAXA,aAAA,CAEA,WAAA,CAKA,8CAAA,CAAA,sCAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CANA,+CACE,CAJF,UnCqvHN,CmCtuHE,4BAME,yEACE,CALF,YAAA,CAGA,aAAA,CAFA,qBAAA,CAUA,mBAAA,CAZA,iBAAA,CAWA,wBAAA,CARA,YnC4uHJ,CmChuHI,sCACE,wBnCkuHN,CmC9tHI,oCACE,SnCguHN,CmC5tHI,kCAGE,wEACE,CAFF,mBAAA,CADA,OnCguHN,CmCttHM,uDACE,8CAAA,CAAA,sCnCwtHR,CKx0HI,wC8B8HF,wDAGE,kBnC+sHF,CmCltHA,wDAGE,mBnC+sHF,CmCltHA,8CAEE,eAAA,CADA,eAAA,CAGA,iCnC8sHF,CmC1sHE,8DACE,mBnC6sHJ,CmC9sHE,8DACE,kBnC6sHJ,CmC9sHE,oDAEE,UnC4sHJ,CmCxsHE,8EAEE,kBnC2sHJ,CmC7sHE,8EAEE,mBnC2sHJ,CmC7sHE,8EAGE,kBnC0sHJ,CmC7sHE,8EAGE,mBnC0sHJ,CmC7sHE,oEACE,UnC4sHJ,CmCtsHE,8EAEE,mBnCysHJ,CmC3sHE,8EAEE,kBnCysHJ,CmC3sHE,8EAGE,mBnCwsHJ,CmC3sHE,8EAGE,kBnCwsHJ,CmC3sHE,oEACE,UnC0sHJ,CACF,CmC5rHE,cAHF,olDAII,+BnC+rHF,CmC5rHE,g8GACE,sCnC8rHJ,CACF,CmCzrHA,4sDACE,uDnC4rHF,CmCxrHA,wmDACE,anC2rHF,CoCxiIA,MACE,mVAAA,CAEA,4VpC4iIF,CoCliIE,4BAEE,oBAAA,CADA,iBpCsiIJ,CoCjiII,sDAGE,SpCmiIN,CoCtiII,sDAGE,UpCmiIN,CoCtiII,4CACE,iBAAA,CACA,SpCoiIN,CoC9hIE,+CAEE,SAAA,CADA,UpCiiIJ,CoC5hIE,kDAGE,WpCsiIJ,CoCziIE,kDAGE,YpCsiIJ,CoCziIE,wCAME,qDAAA,CAKA,UAAA,CANA,aAAA,CAEA,0CAAA,CAAA,kCAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CATA,iBAAA,CACA,SAAA,CAEA,YpCqiIJ,CoC1hIE,gEACE,wBTyWa,CSxWb,mDAAA,CAAA,2CpC4hIJ,CqC9kIA,QACE,8DAAA,CAGA,+CAAA,CACA,iEAAA,CACA,oDAAA,CACA,sDAAA,CACA,mDrC+kIF,CqC3kIA,SAEE,kBAAA,CADA,YrC+kIF,CKt7HI,mCiChKA,8BACE,UtC8lIJ,CsC/lIE,8BACE,WtC8lIJ,CsC/lIE,8BAIE,kBtC2lIJ,CsC/lIE,8BAIE,iBtC2lIJ,CsC/lIE,oBAKE,mBAAA,CAFA,YAAA,CADA,atC6lIJ,CsCvlII,kCACE,WtC0lIN,CsC3lII,kCACE,UtC0lIN,CsC3lII,kCAEE,iBAAA,CAAA,ctCylIN,CsC3lII,kCAEE,aAAA,CAAA,kBtCylIN,CACF","file":"main.css"} \ No newline at end of file diff --git a/assets/stylesheets/palette.2505c338.min.css b/assets/stylesheets/palette.2505c338.min.css new file mode 100644 index 0000000..3c005dd --- /dev/null +++ b/assets/stylesheets/palette.2505c338.min.css @@ -0,0 +1 @@ +@media 
screen{[data-md-color-scheme=slate]{--md-hue:232;--md-default-fg-color:hsla(var(--md-hue),75%,95%,1);--md-default-fg-color--light:hsla(var(--md-hue),75%,90%,0.62);--md-default-fg-color--lighter:hsla(var(--md-hue),75%,90%,0.32);--md-default-fg-color--lightest:hsla(var(--md-hue),75%,90%,0.12);--md-default-bg-color:hsla(var(--md-hue),15%,21%,1);--md-default-bg-color--light:hsla(var(--md-hue),15%,21%,0.54);--md-default-bg-color--lighter:hsla(var(--md-hue),15%,21%,0.26);--md-default-bg-color--lightest:hsla(var(--md-hue),15%,21%,0.07);--md-code-fg-color:hsla(var(--md-hue),18%,86%,1);--md-code-bg-color:hsla(var(--md-hue),15%,15%,1);--md-code-hl-color:#4287ff26;--md-code-hl-number-color:#e6695b;--md-code-hl-special-color:#f06090;--md-code-hl-function-color:#c973d9;--md-code-hl-constant-color:#9383e2;--md-code-hl-keyword-color:#6791e0;--md-code-hl-string-color:#2fb170;--md-code-hl-name-color:var(--md-code-fg-color);--md-code-hl-operator-color:var(--md-default-fg-color--light);--md-code-hl-punctuation-color:var(--md-default-fg-color--light);--md-code-hl-comment-color:var(--md-default-fg-color--light);--md-code-hl-generic-color:var(--md-default-fg-color--light);--md-code-hl-variable-color:var(--md-default-fg-color--light);--md-typeset-color:var(--md-default-fg-color);--md-typeset-a-color:var(--md-primary-fg-color);--md-typeset-mark-color:#4287ff4d;--md-typeset-kbd-color:hsla(var(--md-hue),15%,94%,0.12);--md-typeset-kbd-accent-color:hsla(var(--md-hue),15%,94%,0.2);--md-typeset-kbd-border-color:hsla(var(--md-hue),15%,14%,1);--md-typeset-table-color:hsla(var(--md-hue),75%,95%,0.12);--md-admonition-fg-color:var(--md-default-fg-color);--md-admonition-bg-color:var(--md-default-bg-color);--md-footer-bg-color:hsla(var(--md-hue),15%,12%,0.87);--md-footer-bg-color--dark:hsla(var(--md-hue),15%,10%,1);--md-shadow-z1:0 0.2rem 0.5rem #0003,0 0 0.05rem #0000001a;--md-shadow-z2:0 0.2rem 0.5rem #0000004d,0 0 0.05rem #00000040;--md-shadow-z3:0 0.2rem 0.5rem #0006,0 0 0.05rem #00000059}[data-md-color-scheme=slate] img[src$="#gh-light-mode-only"],[data-md-color-scheme=slate] img[src$="#only-light"]{display:none}[data-md-color-scheme=slate] img[src$="#gh-dark-mode-only"],[data-md-color-scheme=slate] img[src$="#only-dark"]{display:initial}[data-md-color-scheme=slate][data-md-color-primary=pink]{--md-typeset-a-color:#ed5487}[data-md-color-scheme=slate][data-md-color-primary=purple]{--md-typeset-a-color:#bd78c9}[data-md-color-scheme=slate][data-md-color-primary=deep-purple]{--md-typeset-a-color:#a682e3}[data-md-color-scheme=slate][data-md-color-primary=indigo]{--md-typeset-a-color:#6c91d5}[data-md-color-scheme=slate][data-md-color-primary=teal]{--md-typeset-a-color:#00ccb8}[data-md-color-scheme=slate][data-md-color-primary=green]{--md-typeset-a-color:#71c174}[data-md-color-scheme=slate][data-md-color-primary=deep-orange]{--md-typeset-a-color:#ff9575}[data-md-color-scheme=slate][data-md-color-primary=brown]{--md-typeset-a-color:#c7846b}[data-md-color-scheme=slate][data-md-color-primary=black],[data-md-color-scheme=slate][data-md-color-primary=blue-grey],[data-md-color-scheme=slate][data-md-color-primary=grey],[data-md-color-scheme=slate][data-md-color-primary=white]{--md-typeset-a-color:#6c91d5}[data-md-color-switching] *,[data-md-color-switching] :after,[data-md-color-switching] 
:before{transition-duration:0ms!important}}[data-md-color-accent=red]{--md-accent-fg-color:#ff1947;--md-accent-fg-color--transparent:#ff19471a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=pink]{--md-accent-fg-color:#f50056;--md-accent-fg-color--transparent:#f500561a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=purple]{--md-accent-fg-color:#df41fb;--md-accent-fg-color--transparent:#df41fb1a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=deep-purple]{--md-accent-fg-color:#7c4dff;--md-accent-fg-color--transparent:#7c4dff1a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=indigo]{--md-accent-fg-color:#526cfe;--md-accent-fg-color--transparent:#526cfe1a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=blue]{--md-accent-fg-color:#4287ff;--md-accent-fg-color--transparent:#4287ff1a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=light-blue]{--md-accent-fg-color:#0091eb;--md-accent-fg-color--transparent:#0091eb1a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=cyan]{--md-accent-fg-color:#00bad6;--md-accent-fg-color--transparent:#00bad61a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=teal]{--md-accent-fg-color:#00bda4;--md-accent-fg-color--transparent:#00bda41a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=green]{--md-accent-fg-color:#00c753;--md-accent-fg-color--transparent:#00c7531a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=light-green]{--md-accent-fg-color:#63de17;--md-accent-fg-color--transparent:#63de171a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=lime]{--md-accent-fg-color:#b0eb00;--md-accent-fg-color--transparent:#b0eb001a;--md-accent-bg-color:#000000de;--md-accent-bg-color--light:#0000008a}[data-md-color-accent=yellow]{--md-accent-fg-color:#ffd500;--md-accent-fg-color--transparent:#ffd5001a;--md-accent-bg-color:#000000de;--md-accent-bg-color--light:#0000008a}[data-md-color-accent=amber]{--md-accent-fg-color:#fa0;--md-accent-fg-color--transparent:#ffaa001a;--md-accent-bg-color:#000000de;--md-accent-bg-color--light:#0000008a}[data-md-color-accent=orange]{--md-accent-fg-color:#ff9100;--md-accent-fg-color--transparent:#ff91001a;--md-accent-bg-color:#000000de;--md-accent-bg-color--light:#0000008a}[data-md-color-accent=deep-orange]{--md-accent-fg-color:#ff6e42;--md-accent-fg-color--transparent:#ff6e421a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-primary=red]{--md-primary-fg-color:#ef5552;--md-primary-fg-color--light:#e57171;--md-primary-fg-color--dark:#e53734;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=pink]{--md-primary-fg-color:#e92063;--md-primary-fg-color--light:#ec417a;--md-primary-fg-color--dark:#c3185d;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=purple]{--md-primary-fg-color:#ab47bd;--md-primary-fg-color--light:#bb69c9;--md-primary-fg-color--dark:#8c24a8;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=deep-purple]{--md-primary-fg-color:#7e56c2;--md-primary-fg-color--light:#9574cd;--md-primary-fg-color--dark:#673ab6;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-prim
ary=indigo]{--md-primary-fg-color:#4051b5;--md-primary-fg-color--light:#5d6cc0;--md-primary-fg-color--dark:#303fa1;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=blue]{--md-primary-fg-color:#2094f3;--md-primary-fg-color--light:#42a5f5;--md-primary-fg-color--dark:#1975d2;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=light-blue]{--md-primary-fg-color:#02a6f2;--md-primary-fg-color--light:#28b5f6;--md-primary-fg-color--dark:#0287cf;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=cyan]{--md-primary-fg-color:#00bdd6;--md-primary-fg-color--light:#25c5da;--md-primary-fg-color--dark:#0097a8;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=teal]{--md-primary-fg-color:#009485;--md-primary-fg-color--light:#26a699;--md-primary-fg-color--dark:#007a6c;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=green]{--md-primary-fg-color:#4cae4f;--md-primary-fg-color--light:#68bb6c;--md-primary-fg-color--dark:#398e3d;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=light-green]{--md-primary-fg-color:#8bc34b;--md-primary-fg-color--light:#9ccc66;--md-primary-fg-color--dark:#689f38;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=lime]{--md-primary-fg-color:#cbdc38;--md-primary-fg-color--light:#d3e156;--md-primary-fg-color--dark:#b0b52c;--md-primary-bg-color:#000000de;--md-primary-bg-color--light:#0000008a}[data-md-color-primary=yellow]{--md-primary-fg-color:#ffec3d;--md-primary-fg-color--light:#ffee57;--md-primary-fg-color--dark:#fbc02d;--md-primary-bg-color:#000000de;--md-primary-bg-color--light:#0000008a}[data-md-color-primary=amber]{--md-primary-fg-color:#ffc105;--md-primary-fg-color--light:#ffc929;--md-primary-fg-color--dark:#ffa200;--md-primary-bg-color:#000000de;--md-primary-bg-color--light:#0000008a}[data-md-color-primary=orange]{--md-primary-fg-color:#ffa724;--md-primary-fg-color--light:#ffa724;--md-primary-fg-color--dark:#fa8900;--md-primary-bg-color:#000000de;--md-primary-bg-color--light:#0000008a}[data-md-color-primary=deep-orange]{--md-primary-fg-color:#ff6e42;--md-primary-fg-color--light:#ff8a66;--md-primary-fg-color--dark:#f4511f;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=brown]{--md-primary-fg-color:#795649;--md-primary-fg-color--light:#8d6e62;--md-primary-fg-color--dark:#5d4037;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=grey]{--md-primary-fg-color:#757575;--md-primary-fg-color--light:#9e9e9e;--md-primary-fg-color--dark:#616161;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3;--md-typeset-a-color:#4051b5}[data-md-color-primary=blue-grey]{--md-primary-fg-color:#546d78;--md-primary-fg-color--light:#607c8a;--md-primary-fg-color--dark:#455a63;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3;--md-typeset-a-color:#4051b5}[data-md-color-primary=light-green]:not([data-md-color-scheme=slate]){--md-typeset-a-color:#72ad2e}[data-md-color-primary=lime]:not([data-md-color-scheme=slate]){--md-typeset-a-color:#8b990a}[data-md-color-primary=yellow]:not([data-md-color-scheme=slate]){--md-typeset-a-color:#b8a500}[data-md-color-primary=amber]:not([data-md-color-scheme=slate]){--md-typeset-a-color:#d19d00}[data-md-color-primary=orange]:not([data-md-color-scheme=slate]){--md-typeset-a-color:#e68a00}[data-md-color-prima
ry=white]{--md-primary-fg-color:#fff;--md-primary-fg-color--light:#ffffffb3;--md-primary-fg-color--dark:#00000012;--md-primary-bg-color:#000000de;--md-primary-bg-color--light:#0000008a;--md-typeset-a-color:#4051b5}[data-md-color-primary=white] .md-button{color:var(--md-typeset-a-color)}[data-md-color-primary=white] .md-button--primary{background-color:var(--md-typeset-a-color);border-color:var(--md-typeset-a-color);color:#fff}@media screen and (min-width:60em){[data-md-color-primary=white] .md-search__form{background-color:#00000012}[data-md-color-primary=white] .md-search__form:hover{background-color:#00000052}[data-md-color-primary=white] .md-search__input+.md-search__icon{color:#000000de}}@media screen and (min-width:76.25em){[data-md-color-primary=white] .md-tabs{border-bottom:.05rem solid #00000012}}[data-md-color-primary=black]{--md-primary-fg-color:#000;--md-primary-fg-color--light:#0000008a;--md-primary-fg-color--dark:#000;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3;--md-typeset-a-color:#4051b5}[data-md-color-primary=black] .md-button{color:var(--md-typeset-a-color)}[data-md-color-primary=black] .md-button--primary{background-color:var(--md-typeset-a-color);border-color:var(--md-typeset-a-color);color:#fff}[data-md-color-primary=black] .md-header{background-color:#000}@media screen and (max-width:59.9375em){[data-md-color-primary=black] .md-nav__source{background-color:#000000de}}@media screen and (min-width:60em){[data-md-color-primary=black] .md-search__form{background-color:#ffffff1f}[data-md-color-primary=black] .md-search__form:hover{background-color:#ffffff4d}}@media screen and (max-width:76.1875em){html [data-md-color-primary=black] .md-nav--primary .md-nav__title[for=__drawer]{background-color:#000}}@media screen and (min-width:76.25em){[data-md-color-primary=black] .md-tabs{background-color:#000}} \ No newline at end of file diff --git a/assets/stylesheets/palette.2505c338.min.css.map b/assets/stylesheets/palette.2505c338.min.css.map new file mode 100644 index 0000000..3aec190 --- /dev/null +++ b/assets/stylesheets/palette.2505c338.min.css.map @@ -0,0 +1 @@ 
+{"version":3,"sources":["src/assets/stylesheets/palette/_scheme.scss","../../../src/assets/stylesheets/palette.scss","src/assets/stylesheets/palette/_accent.scss","src/assets/stylesheets/palette/_primary.scss","src/assets/stylesheets/utilities/_break.scss"],"names":[],"mappings":"AA2BA,cAGE,6BAKE,YAAA,CAGA,mDAAA,CACA,6DAAA,CACA,+DAAA,CACA,gEAAA,CACA,mDAAA,CACA,6DAAA,CACA,+DAAA,CACA,gEAAA,CAGA,gDAAA,CACA,gDAAA,CAGA,4BAAA,CACA,iCAAA,CACA,kCAAA,CACA,mCAAA,CACA,mCAAA,CACA,kCAAA,CACA,iCAAA,CACA,+CAAA,CACA,6DAAA,CACA,gEAAA,CACA,4DAAA,CACA,4DAAA,CACA,6DAAA,CAGA,6CAAA,CAGA,+CAAA,CAGA,iCAAA,CAGA,uDAAA,CACA,6DAAA,CACA,2DAAA,CAGA,yDAAA,CAGA,mDAAA,CACA,mDAAA,CAGA,qDAAA,CACA,wDAAA,CAGA,0DAAA,CAKA,8DAAA,CAKA,0DCxDF,CD6DE,kHAEE,YC3DJ,CD+DE,gHAEE,eC7DJ,CDoFE,yDACE,4BClFJ,CDiFE,2DACE,4BC/EJ,CD8EE,gEACE,4BC5EJ,CD2EE,2DACE,4BCzEJ,CDwEE,yDACE,4BCtEJ,CDqEE,0DACE,4BCnEJ,CDkEE,gEACE,4BChEJ,CD+DE,0DACE,4BC7DJ,CD4DE,2OACE,4BCjDJ,CDwDA,+FAGE,iCCtDF,CACF,CCjDE,2BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCD6CN,CCvDE,4BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDoDN,CC9DE,8BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCD2DN,CCrEE,mCACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDkEN,CC5EE,8BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDyEN,CCnFE,4BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDgFN,CC1FE,kCACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDuFN,CCjGE,4BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCD8FN,CCxGE,4BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDqGN,CC/GE,6BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCD4GN,CCtHE,mCACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDmHN,CC7HE,4BACE,4BAAA,CACA,2CAAA,CAIE,8BAAA,CACA,qCD6HN,CCpIE,8BACE,4BAAA,CACA,2CAAA,CAIE,8BAAA,CACA,qCDoIN,CC3IE,6BACE,yBAAA,CACA,2CAAA,CAIE,8BAAA,CACA,qCD2IN,CClJE,8BACE,4BAAA,CACA,2CAAA,CAIE,8BAAA,CACA,qCDkJN,CCzJE,mCACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDsJN,CE3JE,4BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwJN,CEnKE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFgKN,CE3KE,+BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwKN,CEnLE,oCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFgLN,CE3LE,+BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwLN,CEnME,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFgMN,CE3ME,mCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwMN,CEnNE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFgNN,CE3NE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwNN,CEnOE,8BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFgON,CE3OE,oCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwON,CEnPE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAIE,+BAAA,CACA,sCFmPN,CE3PE,+BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAIE,+BAAA,CACA,sCF2PN,CEnQE,8BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAIE,+BAAA,CACA,sCFmQN,CE3QE,+BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAIE,+BAAA,CACA,sCF2QN,CEnRE,oCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFgRN,CE3RE,8BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwRN,CEnSE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCAAA,CAKA,4BF4RN,CE5SE,kCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCAAA,CAKA,4BFqSN,CEtRE,sEACE,4BFyRJ,CE1RE,+DACE,4BF6RJ,CE9RE,iEACE,4BFiSJ,CElSE,gEACE,4BFqSJ,CEtSE,iEACE,4BFySJ,CEhSA,8BACE,0BAAA,CACA,sCAAA,CACA,qCAAA,CACA,+BAAA,CACA,sCAAA,CAGA,4BFiSF,CE9RE,yCACE,+BFgSJ,CE7RI,kDAEE,0CAAA,CACA,sCAAA,CAFA,UFiSN,CG7MI,mCD1EA,+CACE,0BF0RJ,CEvRI,qDACE,0BFyRN,CEpRE,iEACE,eFsRJ,CACF,CGxNI,sCDvDA,uCACE,oCFkRJ,CACF,CEzQA,8BACE,0BAAA,CACA,sCAAA,CACA,gCAAA,CACA,0BAAA,CACA,sCAAA,CAGA,4BF0QF,CEvQE,yCACE,+BFyQJ,CEtQI,kDAEE,0CAAA,CACA,sCAAA,CAFA,UF0QN,CEnQE,yCACE,qBFqQJ,CG9NI,wCDhCA,8CACE,0BFiQJ,CACF,CGtPI,mCDJA,+CACE,0BF6PJ,CE1PI,qDACE,0BF4PN,CACF,CG
3OI,wCDTA,iFACE,qBFuPJ,CACF,CGnQI,sCDmBA,uCACE,qBFmPJ,CACF","file":"palette.css"} \ No newline at end of file diff --git a/assets/workshop-enroll.png b/assets/workshop-enroll.png new file mode 100644 index 0000000..77b0456 Binary files /dev/null and b/assets/workshop-enroll.png differ diff --git a/breakout/breakout.md b/breakout/breakout.md new file mode 100644 index 0000000..85c86c6 --- /dev/null +++ b/breakout/breakout.md @@ -0,0 +1,126 @@ +# Breakout prompts & dedicated working space + +## Dedicated working space + +Each team will have a room that has been reserved for their use at all team times. Those rooms will shift for each breakout time, shown below. However, your team is also welcome to explore the building and find other spaces that make you more comfortable or creative. For example, you may want to check out: + + - The SEEC Cafe dining area (north end of the building, lots of windows in the eating room!) + - The SEEC lobby (both north and south) + - The Earth Lab conference room (S340). Note that this room may sometimes be reserved by Earth Lab staff. + - The southern end of the first floor, S148 + - Outside if it is sunny! The grass or SEEC courtyard! +You are also welcome to use other rooms if they are available, but please be aware that other classes, study groups, or workshops may have reserved them and kick you out. + +| Day 1 | Team | Dedicated Space | +|--------------|-------|-----------------| +| Morning team time | Team 1 | S221 | +| | Team 2 | S149 | +| | Team 3 | C325 | +| | Team 4 | S240 | +| | Team 5 | Viz Studio (S372A) | +| | Team 6 | Viz Studio (S372B) | +| Afternoon team time | Team 1 | Viz Studio (S372B) | +| | Team 2 | S221 | +| | Team 3 | S149 | +| | Team 4 | C325 | +| | Team 5 | S240 | +| | Team 6 | Viz Studio (S372A) | + +| Day 2 | Team | Dedicated Space | +|--------------|-------|-----------------| +| Morning team time | Team 1 | Viz Studio (S372A) | +| | Team 2 | Viz Studio (S372B) | +| | Team 3 | S221 | +| | Team 4 | S149 | +| | Team 5 | C325 | +| | Team 6 | S240 | +| Afternoon team time | Team 1 | S240 | +| | Team 2 | Viz Studio (S372A) | +| | Team 3 | Viz Studio (S372B) | +| | Team 4 | S221 | +| | Team 5 | S149 | +| | Team 6 | C325 | + +| Day 3 | Team | Dedicated Space | +|--------------|-------|-----------------| +| Morning team time | Team 1 | C325 | +| | Team 2 | S240 | +| | Team 3 | Viz Studio (S372A) | +| | Team 4 | Viz Studio (S372B) | +| | Team 5 | S221 | +| | Team 6 | S149 | + + +## Breakout Prompts + +For ease of access to breakout group prompts throughout the codefest. + +### Breakout #0: Virtual meeting #3 + +Introduce yourselves! Please briefly share: + + - Your preferred name and where you are currently based + - A skill or area of expertise that you feel you are bringing to the table + - Something you are worried about regarding the codefest + - Something that brings you joy + - What is a topic that you are excited to investigate for two days related to forest carbon in the Southern Rocky mountains? + - What datasets are you excited to potentially use? + +*(Tuesday morning you will have ~2.5 hours to continue brainstorming, with a draft question ready by noon! So don't stress, this is just a first opportunity to get a sense of what your team is generally excited about.)* + +### Breakout #1: Day 1 morning team time + +*In-person introductions* + + - Who are you and why are you excited to be here? 
+ +*Establish team norms* + + - Note-taking and documenting the flow of ideas + - Expectations for work outside of official event hours + +*Brainstorm* + + - What will your team project be for the next 2.5 days?! This should be a specific scientific question related to forest carbon in the Southern Rocky Mountains that you think is potentially answerable (at least in a very rough form) by the end of the event. + - Think about… + - What are you each excited about and what skills do you have around the table that can be leveraged? + - What datasets are you familiar with and/or excited to work with? Spin up some instances and get familiar with the data! + - Evaluation criteria (linked on the website!) + +*Bring back* + + - One spokesperson to talk for 1 minute + - Your specific, answerable scientific question + - One ‘need’ that you see, whether that is help accessing an additional dataset, guidance on a dataset already available, or just your first step to get cracking + + +### Breakout #2: Day 1 afternoon team time + +Establish: + + - How are you going to divide work, responsibilities, and code workflows? + - How are you going to manage people working in different coding languages? + +Take time to explore the datasets you intend to use and make sure you know how to work with and visualize them. + +Map out an initial workflow. What are the steps you will need to take to get from start to 'finish'? + +Begin work! + +### Breakout #3: Day 2 morning team time + +Code, code, code! + +Focus on concrete, tractable problems, and don't get sucked into unneccessary coding or debugging. Is there an easier or faster way to answer your question? + +Graphics and deliverables are your friend! Demonstrate the progress you're making and remember to document what you are doing and WHY you're making the decisions you are. + +Keep your repo up to date! + +### Breakout #4: Day 2 afternoon team time + +Prepare your deliverables. What figures are necessary to tell the story of your project? What do you want in your presentation and on your team website? + +### Breakout #5: Day 3 morning team time + +Finalize all deliverables, push them to your website & GitHub, and finalize your presentation! \ No newline at end of file diff --git a/breakout/index.html b/breakout/index.html new file mode 100644 index 0000000..a4d0a13 --- /dev/null +++ b/breakout/index.html @@ -0,0 +1,1600 @@ + + + + + + + + + + + + + + + + + + + + + + Breakout prompts & dedicated working space - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    + + + + + + +
    + + + + + + + +
    + +
    + + + + +
    +
    + + + +
    +
    +
    + + + + + + + + +
    +
    +
    + + + + +
    +
    + + + + + + + + + +

    Breakout prompts & dedicated working space

    +

    Dedicated working space

    +

Each team will have a room reserved for its use during all team times. Those rooms will shift for each breakout time, as shown below. However, your team is also welcome to explore the building and find other spaces that make you more comfortable or creative. For example, you may want to check out:

    +
      +
    • The SEEC Cafe dining area (north end of the building, lots of windows in the eating room!)
    • +
    • The SEEC lobby (both north and south)
    • +
    • The Earth Lab conference room (S340). Note that this room may sometimes be reserved by Earth Lab staff.
    • +
    • The southern end of the first floor, S148
    • +
    • Outside if it is sunny! The grass or SEEC courtyard!
    • +

You are also welcome to use other rooms if they are available, but please be aware that other classes, study groups, or workshops may have reserved them and could ask you to leave.
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Day 1TeamDedicated Space
    Morning team timeTeam 1S221
    Team 2S149
    Team 3C325
    Team 4S240
    Team 5Viz Studio (S372A)
    Team 6Viz Studio (S372B)
    Afternoon team timeTeam 1Viz Studio (S372B)
    Team 2S221
    Team 3S149
    Team 4C325
    Team 5S240
    Team 6Viz Studio (S372A)
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Day 2TeamDedicated Space
    Morning team timeTeam 1Viz Studio (S372A)
    Team 2Viz Studio (S372B)
    Team 3S221
    Team 4S149
    Team 5C325
    Team 6S240
    Afternoon team timeTeam 1S240
    Team 2Viz Studio (S372A)
    Team 3Viz Studio (S372B)
    Team 4S221
    Team 5S149
    Team 6C325
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Day 3TeamDedicated Space
    Morning team timeTeam 1C325
    Team 2S240
    Team 3Viz Studio (S372A)
    Team 4Viz Studio (S372B)
    Team 5S221
    Team 6S149
    +

    Breakout Prompts

    +

    The breakout group prompts are collected here for easy access throughout the codefest.

    +

    Breakout #0: Virtual meeting #3

    +

    Introduce yourselves! Please briefly share:

    +
      +
    • Your preferred name and where you are currently based
    • +
    • A skill or area of expertise that you feel you are bringing to the table
    • +
    • Something you are worried about regarding the codefest
    • +
    • Something that brings you joy
    • +
    • What is a topic that you are excited to investigate for two days related to forest carbon in the Southern Rocky Mountains?
    • +
    • What datasets are you excited to potentially use?
    • +
    +

    (Tuesday morning you will have ~2.5 hours to continue brainstorming, with a draft question ready by noon! So don't stress; this is just a first opportunity to get a sense of what your team is generally excited about.)

    +

    Breakout #1: Day 1 morning team time

    +

    In-person introductions

    +
      +
    • Who are you and why are you excited to be here?
    • +
    +

    Establish team norms

    +
      +
    • Note-taking and documenting the flow of ideas
    • +
    • Expectations for work outside of official event hours
    • +
    +

    Brainstorm

    +
      +
    • What will your team project be for the next 2.5 days?! This should be a specific scientific question related to forest carbon in the Southern Rocky Mountains that you think is potentially answerable (at least in a very rough form) by the end of the event.
    • +
    • Think about…
        +
      • What are you each excited about and what skills do you have around the table that can be leveraged?
      • +
      • What datasets are you familiar with and/or excited to work with? Spin up some instances and get familiar with the data!
      • +
      • Evaluation criteria (linked on the website!)
      • +
      +
    • +
    +

    Bring back

    +
      +
    • One spokesperson to talk for 1 minute
    • +
    • Your specific, answerable scientific question
    • +
    • One ‘need’ that you see, whether that is help accessing an additional dataset, guidance on a dataset already available, or just your first step to get cracking
    • +
    +

    Breakout #2: Day 1 afternoon team time

    +

    Establish:

    +
      +
    • How are you going to divide work, responsibilities, and code workflows?
    • +
    • How are you going to manage people working in different coding languages?
    • +
    +

    Take time to explore the datasets you intend to use and make sure you know how to work with and visualize them.

    +

    Map out an initial workflow. What are the steps you will need to take to get from start to 'finish'?

    +

    Begin work!

    +

    Breakout #3: Day 2 morning team time

    +

    Code, code, code!

    +

    Focus on concrete, tractable problems, and don't get sucked into unnecessary coding or debugging. Is there an easier or faster way to answer your question?

    +

    Graphics and deliverables are your friend! Demonstrate the progress you're making and remember to document what you are doing and WHY you're making the decisions you are.

    +

    Keep your repo up to date!

    +

    Breakout #4: Day 2 afternoon team time

    +

    Prepare your deliverables. What figures are necessary to tell the story of your project? What do you want in your presentation and on your team website?

    +

    Breakout #5: Day 3 morning team time

    +

    Finalize all deliverables, push them to your website & GitHub, and finalize your presentation!

    + +
    +
    + + + Last update: + 2024-09-23 + + +
    + + + + + + +
    +
    + + +
    + + + +
    + + + +
    +
    +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/collaborating-on-the-cloud/cyverse-instructions/cyverse-instructions.md b/collaborating-on-the-cloud/cyverse-instructions/cyverse-instructions.md new file mode 100644 index 0000000..f17af07 --- /dev/null +++ b/collaborating-on-the-cloud/cyverse-instructions/cyverse-instructions.md @@ -0,0 +1,133 @@ +# Connecting to Cyverse and GitHub + +## Log in to Cyverse + +1. Go to the Cyverse user account website [https://user.cyverse.org/](https://user.cyverse.org/) + +image + +2. Click `Sign up` (if you do not already have an account). When you make this account, please use the email that you have been using to communicate with our team regarding the event. That email is attached to our CyVerse workshop. + + image + +3. Log in to Cyverse [https://user.cyverse.org/](https://user.cyverse.org/) with your new account. + + image + +4. From your account, go to the navigation bar at left and select 'Workshops' + +![user portal](../assets/cyverse-user-portal.png) + +5. From the workshop page, find the workshop titled "Forest Carbon Codefest". It should look like this: + +![workshop tile](../assets/fcc-workshop-tile.png) + +6. Click on the tile, and then on the page for the workshop, click, "Enroll" at upper right. You should be enrolled automatically if you are using the email you have given our team. + +![workshop enroll](../assets/workshop-enroll.png) + + +7. Head over to the Cyverse Discovery Environment by clicking on 'Services' at the upper right and then 'Discovery Environment' under 'My Services'. + +![user portal](../assets/cyverse-user-portal.png) + + + You should now see the Discovery Environment: + + image + + + +## Open up an analysis with the hackathon environment (Jupyter Lab) + +1. From the Cyverse Discovery Environment, click on `Apps` in the left menu + + image + +2. Select `JupyterLab ESIIL` + +![select jupyterlab esiil](../assets/select-j-esiil.png) + +3. Configure and launch your analysis - the defaults are fine for now: + +![defaults1](../assets/defaults1.png) + + image + +![defaults3](../assets/defaults3.png) + +4. Click `Go to analysis`: + + image + +5. Now you should see Jupyter Lab! + image + +## Set up your GitHub credentials + +### If you would prefer to follow a video instead of a written outline, we have prepared a video here: + + + + +1. From Jupyter Lab, click on the GitHub icon on the left menu: + + image + +2. Click `Clone a Repository`: + + image + +3. Paste the link to the innovation-summit-utils [https://github.com/CU-ESIIL/innovation-summit-utils.git](https://github.com/CU-ESIIL/innovation-summit-utils.git) and click `Clone`: + + image + + +4. You should now see the `innovation-summit-utils` folder in your directory tree (provided you haven't changed directories from the default `/home/jovyan/data-store` + + image + +5. Go into the `innovation-summit-utils` folder: + + image + +6. open up the `create_github_keypair.ipynb` notebook by double-clicking: + +![create github keypair](../assets/create-github-keypair.png) + +7. Select the default kernel + + image + +8. Now you should see the notebook open. Click the `play` button at the top. You will be prompted to enter your GitHub username and email: + + image + + image + + image + +9. You should now see your Public Key. Copy the WHOLE LINE including `ssh-ed25519` at the beginning and the `jovyan@...` at the end +image + +10. Go to your GitHub settings page (you may need to log in to GitHub first): + + image + +11. Select `SSH and GPG keys` + + image + +12. 
Select `New SSH key` + + image + +13. Give your key a descriptive name, paste your ENTIRE public key in the `Key` input box, and click `Add SSH Key`. You may need to re-authenticate with your password or two-factor authentication.: + + image + +14. You should now see your new SSH key in your `Authentication Keys` list! Now you will be able to clone private repositories and push changes to GitHub from your Cyverse analysis! + + image + +> NOTE! Your GitHub authentication is ONLY for the analysis you're working with right now. You will be able to use it as long as you want there, but once you start a new analysis you will need to go through this process again. Feel free to delete keys from old analyses that have been shut down. \ No newline at end of file diff --git a/collaborating-on-the-cloud/cyverse-instructions/index.html b/collaborating-on-the-cloud/cyverse-instructions/index.html new file mode 100644 index 0000000..ff4b596 --- /dev/null +++ b/collaborating-on-the-cloud/cyverse-instructions/index.html @@ -0,0 +1,1472 @@ + + + + + + + + + + + + + + + + + + + + + + Connecting to Cyverse and GitHub - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    + + + + + + +
    + + + + + + + +
    + +
    + + + + +
    +
    + + + +
    +
    +
    + + + + + + + + +
    +
    +
    + + + + +
    +
    + + + + + + + + + +

    Connecting to Cyverse and GitHub

    +

    Log in to Cyverse

    +
      +
    1. Go to the Cyverse user account website https://user.cyverse.org/
    2. +
    +

    image

    +
      +
    2. Click Sign up (if you do not already have an account). When you make this account, please use the email that you have been using to communicate with our team regarding the event. That email is attached to our CyVerse workshop.
    2. +
    +

    image

    +
      +
    3. Log in to Cyverse https://user.cyverse.org/ with your new account.
    2. +
    +

    image

    +
      +
    4. From your account, go to the navigation bar at left and select 'Workshops'
    2. +
    +

    user portal

    +
      +
    5. From the workshop page, find the workshop titled "Forest Carbon Codefest". It should look like this:
    2. +
    +

    workshop tile

    +
      +
    6. Click on the tile, and then on the page for the workshop, click "Enroll" at upper right. You should be enrolled automatically if you are using the email you have given our team.
    2. +
    +

    workshop enroll

    +
      +
    7. Head over to the Cyverse Discovery Environment by clicking on 'Services' at the upper right and then 'Discovery Environment' under 'My Services'.
    2. +
    +

    user portal

    +

    You should now see the Discovery Environment:

    +

    image

    +

    Open up an analysis with the hackathon environment (Jupyter Lab)

    +
      +
    1. From the Cyverse Discovery Environment, click on Apps in the left menu
    2. +
    +

    image

    +
      +
    2. Select JupyterLab ESIIL
    2. +
    +

    select jupyterlab esiil

    +
      +
    3. Configure and launch your analysis - the defaults are fine for now:
    2. +
    +

    defaults1

    +

    image

    +

    defaults3

    +
      +
    4. Click Go to analysis:
    2. +
    +

    image

    +
      +
    5. Now you should see Jupyter Lab! + image
    2. +
    +

    Set up your GitHub credentials

    +

    If you would prefer to follow a video instead of a written outline, we have prepared a video here:

    +

    + +

    +
      +
    1. From Jupyter Lab, click on the GitHub icon on the left menu:
    2. +
    +

    image

    +
      +
    2. Click Clone a Repository:
    2. +
    +

    image

    +
      +
    3. Paste the link to the innovation-summit-utils repository https://github.com/CU-ESIIL/innovation-summit-utils.git and click Clone:
    2. +
    +

    image

    +
      +
    4. You should now see the innovation-summit-utils folder in your directory tree (provided you haven't changed directories from the default /home/jovyan/data-store).
    2. +
    +

    image

    +
      +
    5. Go into the innovation-summit-utils folder:
    2. +
    +

    image

    +
      +
    6. Open up the create_github_keypair.ipynb notebook by double-clicking:
    2. +
    +

    create github keypair

    +
      +
    7. Select the default kernel
    2. +
    +

    image

    +
      +
    8. Now you should see the notebook open. Click the play button at the top. You will be prompted to enter your GitHub username and email:
    2. +
    +

    image

    +

    image

    +

    image

    +
      +
    9. You should now see your Public Key. Copy the WHOLE LINE including ssh-ed25519 at the beginning and the jovyan@... at the end

      image

    10. Go to your GitHub settings page (you may need to log in to GitHub first):

      image

    11. Select SSH and GPG keys

      image

    12. Select New SSH key

      image

    13. Give your key a descriptive name, paste your ENTIRE public key in the Key input box, and click Add SSH Key. You may need to re-authenticate with your password or two-factor authentication.

      image

    14. You should now see your new SSH key in your Authentication Keys list! Now you will be able to clone private repositories and push changes to GitHub from your Cyverse analysis!

      image
    +
    +

    NOTE! Your GitHub authentication is ONLY for the analysis you're working with right now. You will be able to use it as long as you want there, but once you start a new analysis you will need to go through this process again. Feel free to delete keys from old analyses that have been shut down.
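
    If you ever want to create a key by hand from the Terminal instead of running the notebook, the steps look roughly like this. This is a sketch of what the helper notebook automates, not its exact code, and the email comment is a placeholder:

```bash
# Generate an ed25519 keypair on the instance (no passphrase here for
# simplicity); replace the comment with your own email address.
ssh-keygen -t ed25519 -C "you@example.com" -f ~/.ssh/id_ed25519 -N ""

# Print the public key and copy the ENTIRE line into GitHub's
# "New SSH key" form (Settings -> SSH and GPG keys).
cat ~/.ssh/id_ed25519.pub
```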

    +
    + +
    +
    + + + Last update: + 2024-09-23 + + +
    + + + + + + +
    +
    + + +
    + + + +
    + + + +
    +
    +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/collaborating-on-the-cloud/cyverse_data_management/cyverse_data_management.md b/collaborating-on-the-cloud/cyverse_data_management/cyverse_data_management.md new file mode 100644 index 0000000..b9fc7ec --- /dev/null +++ b/collaborating-on-the-cloud/cyverse_data_management/cyverse_data_management.md @@ -0,0 +1,52 @@ +# Cyverse data management + +## Cloud-to-instance data access + +The best and most efficient way to access most data from within your Cyverse instance is via APIs, VSI, or STAC. Examples of such data access can be found throughout the data library. This is the preferred method of data access since it keeps data on the cloud, puts it directly on your instance, and then the data is removed upon instance termination. Note that any data you want to keep must be moved off the instance and to the Cyverse data store prior to instance termination (see below, "Saving data from your instance to the data store"). + +## Pre-downloaded data on Cyverse data store + +Some data can be time consuming or frustrating to access. Or, you or one of your teammates may just be much more comfortable working with data that has effectively been 'downloaded locally'. In an attempt to streamline your projects, the ESIIL and Earth Lab teams have loaded a set of data onto the Cyverse data store, which can be read from your Cyverse instance. + +Pre-downloaded data for the Forest Carbon Codefest can be found in the Cyverse data store at [this link.](https://de.cyverse.org/data/ds/iplant/home/shared/earthlab/forest_carbon_codefest?type=folder&resourceId=74dd0094-8d46-11ee-a930-90e2ba675364) + +The path directory to this location from within a Cyverse instance is: +``` +~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest +``` +Note that, while data CAN be read on your instance directly from the data store, it is usually best to move the data to your instance prior to reading and processing the data. Having the data directly on your instance will dramatically improve processing time and performance. (see below, "Moving data from the data store to your instance") + +## Moving data from the data store to your instance + +Use the terminal command line interface on your instance to move data from the data store to your instance (whether that is pre-downloaded data or data that you have saved to your team folder). The home directory of your instance is: +``` +/home/jovyan +``` +To do so, open the Terminal from your launcher + +![launch terminal](../assets/launch-terminal.png) + +Then, use the 'cp' command to copy data from the data store to your instance. Use the flag -r if you are moving an entire directory or directory structure. + +The command is in the form: +``` +cp -r data-store-location new-location-on-instance +``` +For example, the below command will move the entire LCMAP_SR_1985-2021 directory to a new data directory on your instance: +``` +cp -r ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/LCMAP_SR_1985_2021 /home/jovyan/data/ +``` +## Saving data from your instance to the data store + +Any data or outputs that you want to keep, such as newly derived datasets or figures, must be moved off the instance and to the Cyverse data store prior to instance termination. To do so, you will follow the same steps as in "Moving data from the data store to your instance" (see above), but with the directories in the command reversed. 
+ +All team outputs should be stored in the subdirectories named TeamX in this directory: +``` +~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Team_outputs +``` +Each team has their own directory; make sure you are saving to the correct one! + +For example, if you were on Team1 and wanted to save a figures directory, you could use the below command: +``` +cp -r /home/jovyan/figures ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Team_outputs/Team1/ +``` \ No newline at end of file diff --git a/collaborating-on-the-cloud/cyverse_data_management/index.html b/collaborating-on-the-cloud/cyverse_data_management/index.html new file mode 100644 index 0000000..e50d0d4 --- /dev/null +++ b/collaborating-on-the-cloud/cyverse_data_management/index.html @@ -0,0 +1,1381 @@ + + + + + + + + + + + + + + + + + + + + + + Cyverse data management - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    + + + + + + +
    + + + + + + + +
    + +
    + + + + +
    +
    + + + +
    +
    +
    + + + + + + + + +
    +
    +
    + + + + +
    +
    + + + + + + + + + +

    Cyverse data management

    +

    Cloud-to-instance data access

    +

    The best and most efficient way to access most data from within your Cyverse instance is via APIs, VSI (GDAL virtual file systems), or STAC. Examples of such data access can be found throughout the data library. This is the preferred method of data access because the data stay in the cloud, stream directly to your instance, and are removed when the instance is terminated. Note that any data you want to keep must be moved off the instance and into the Cyverse data store prior to instance termination (see below, "Saving data from your instance to the data store").
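
    As a rough illustration of this streaming approach (a sketch only; the URL below is a placeholder, not one of the event datasets), GDAL's /vsicurl/ virtual file system lets tools read a cloud-hosted file over HTTP without copying it to the instance:

```bash
# Sketch only: stream metadata from a hypothetical cloud-hosted GeoTIFF
# without downloading the file, using GDAL's /vsicurl/ virtual file system.
gdalinfo /vsicurl/https://example.org/data/some_cog.tif

# Analysis libraries built on GDAL accept the same path, e.g.
# terra::rast("/vsicurl/https://example.org/data/some_cog.tif") in R,
# so the data are read over HTTP instead of being stored on the instance.
```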

    +

    Pre-downloaded data on Cyverse data store

    +

    Some data can be time consuming or frustrating to access. Or, you or one of your teammates may just be much more comfortable working with data that has effectively been 'downloaded locally'. In an attempt to streamline your projects, the ESIIL and Earth Lab teams have loaded a set of data onto the Cyverse data store, which can be read from your Cyverse instance.

    +

    Pre-downloaded data for the Forest Carbon Codefest can be found in the Cyverse data store at this link.

    +

    The directory path to this location from within a Cyverse instance is: +

    ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest
    +
    +Note that, while data CAN be read on your instance directly from the data store, it is usually best to move the data to your instance before reading and processing it. Having the data directly on your instance will dramatically improve processing time and performance (see below, "Moving data from the data store to your instance").

    +

    Moving data from the data store to your instance

    +

    Use the terminal command line interface on your instance to move data from the data store to your instance (whether that is pre-downloaded data or data that you have saved to your team folder). The home directory of your instance is: +

    /home/jovyan
    +
    +To do so, open the Terminal from your launcher

    +

    launch terminal

    +

    Then, use the 'cp' command to copy data from the data store to your instance. Use the flag -r if you are moving an entire directory or directory structure.

    +

    The command is in the form: +

    cp -r data-store-location new-location-on-instance
    +
    +For example, the below command will move the entire LCMAP_SR_1985-2021 directory to a new data directory on your instance: +
    cp -r ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/LCMAP_SR_1985_2021 /home/jovyan/data/
    +

    +

    Saving data from your instance to the data store

    +

    Any data or outputs that you want to keep, such as newly derived datasets or figures, must be moved off the instance and to the Cyverse data store prior to instance termination. To do so, you will follow the same steps as in "Moving data from the data store to your instance" (see above), but with the directories in the command reversed.

    +

    All team outputs should be stored in the subdirectories named TeamX in this directory: +

    ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Team_outputs
    +
    +Each team has their own directory; make sure you are saving to the correct one!

    +

    For example, if you were on Team1 and wanted to save a figures directory, you could use the below command: +

    cp -r /home/jovyan/figures ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Team_outputs/Team1/
    +

    + +
    +
    + + + Last update: + 2024-09-23 + + +
    + + + + + + +
    +
    + + +
    + + + +
    + + + +
    +
    +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/collaborating-on-the-cloud/github-basics/github-basics.md b/collaborating-on-the-cloud/github-basics/github-basics.md new file mode 100644 index 0000000..de2f280 --- /dev/null +++ b/collaborating-on-the-cloud/github-basics/github-basics.md @@ -0,0 +1,420 @@ +Github essentials +================ + +## I. Introduction (2 minutes) + +### A. Brief overview of GitHub: + +GitHub is a web-based platform that provides version control and +collaboration features using Git, a distributed version control system. +It enables developers to work together on projects, track changes to +code, and efficiently manage different versions of the project. GitHub +is widely used in the software development industry and is an essential +tool for collaborative projects and maintaining code quality. + +### B. Introduce GitHub Desktop and JupyterHub GitHub widget: + +GitHub Desktop is a graphical user interface (GUI) application that +simplifies working with Git and GitHub by providing a more visual and +intuitive way to manage repositories, branches, commits, and other Git +features. JupyterHub GitHub widget, on the other hand, is a built-in +widget that integrates Git and GitHub functionality directly into +Jupyter notebooks, allowing users to perform version control and +collaboration tasks within the Jupyter environment. Both tools help +streamline the process of working with GitHub and make it more +accessible to users with varying levels of experience with Git and +version control. + +#### 1. Download GitHub Desktop + +##### Step 1: Download GitHub Desktop + +Go to the GitHub Desktop download page: https://desktop.github.com/ + +Click on the “Download for Windows” or “Download for macOS” button, +depending on your operating system. The download should start +automatically. + +##### Step 2: Install GitHub Desktop + +For Windows: + +Locate the downloaded installer file (usually in the Downloads folder) +and double-click on it to run the installer. + +Follow the installation instructions that appear on the screen, +accepting the default settings or customizing them as desired. + +Once the installation is complete, GitHub Desktop will launch +automatically. For macOS: + +Locate the downloaded .zip file (usually in the Downloads folder) and +double-click on it to extract the GitHub Desktop application. + +Drag the extracted “GitHub Desktop” application into the “Applications” +folder. + +Open the “Applications” folder and double-click on “GitHub Desktop” to +launch the application. + +##### Step 3: Set up GitHub Desktop + +When GitHub Desktop launches for the first time, you will be prompted to +sign in with your GitHub account. If you don’t have one, you can create +one at https://github.com/join. + +Enter your GitHub username (or email) and password, and click on “Sign +in.” + +You will then be prompted to configure Git. Enter your name and email +address, which will be used for your commit messages. Click “Continue” +when you’re done. Choose whether you want to submit usage data to help +improve GitHub Desktop. Click “Finish” to complete the setup. + +Now, you have successfully installed and set up GitHub Desktop. You can +start using it to clone repositories, make changes, commit, and sync +with the remote repositories on GitHub. + +#### 1. 
Download GitHub for JupyterHub cloud service + +### Step 1: Accessing JupyterHub on the cloud + +Visit the JupyterHub cloud service you want to use (e.g., Binder, Google +Colab, or a custom JupyterHub deployment provided by your organization). + +Sign in with your credentials or authenticate using a third-party +service if required. + +### Step 2: Launch a new Jupyter Notebook or open an existing one + +Click on the “New” button (usually located in the top right corner) and +select “Python” to create a new Jupyter Notebook or open an existing one +from the file browser. + +Once the notebook is open, you will see the Jupyter Notebook interface +with the familiar cells for writing and executing code. + +### Step 3: Install and enable the JupyterLab Git extension + +In your Jupyter Notebook, create a new code cell and run the following +command to install the JupyterLab Git extension: + +!pip install jupyterlab-git + +Restart the Jupyter Notebook server for the changes to take effect. + +### Step 4: Using the JupyterHub GitHub widget + +In the Jupyter Notebook interface, you should now see a Git icon on the +left sidebar. Click on it to open the GitHub widget. + +To clone a repository, click on the “+” icon in the GitHub widget and +enter the repository URL. This will clone the repository into your +JupyterHub workspace. You can now navigate through the cloned +repository, make changes, and use the GitHub widget to stage, commit, +and push your changes back to the remote repository. + +To create and manage branches, use the branch icon in the GitHub widget. +You can create new branches, switch between branches, and merge branches +using this interface. + +To sync your local repository with the remote repository, use the “Pull” +and “Push” buttons in the GitHub widget. + +Now, you know how to access and use the JupyterHub GitHub widget running +on the cloud. This allows you to work with Git and GitHub directly from +your Jupyter Notebook interface, streamlining your workflow and making +collaboration easier. + +### C. GitHub in Rstudio: + +Integrating GitHub with RStudio allows users to manage their Git +repositories and collaborate on projects directly within the RStudio +environment. It offers similar functionality to GitHub Desktop but +caters specifically to R users working within RStudio. By configuring +RStudio to work with Git, creating or opening RStudio projects, and +linking projects to GitHub repositories, users can enjoy a seamless +workflow for version control and collaboration. RStudio’s Git pane +enables users to stage, commit, and push changes to remote repositories, +as well as manage branches and sync local repositories with remote ones, +providing a comprehensive solution for R developers working with GitHub. + +#### Step 1: Install Git + +Before integrating GitHub with RStudio, you need to have Git installed +on your computer. Visit the official Git website (https://git-scm.com/) +to download and install the latest version of Git for your operating +system. + +#### Step 2: Configure RStudio to work with Git + +Open RStudio. + +Go to “Tools” \> “Global Options” in the top menu. In the “Global +Options” window, click on the “Git/SVN” tab. + +Check that the “Git executable” field is pointing to the correct +location of the installed Git. If not, click “Browse” and navigate to +the location of the Git executable file (usually found in the “bin” +folder of the Git installation directory). + +Click “OK” to save the changes. 
+ +#### Step 3: Create or open an RStudio project + +To create a new RStudio project, go to “File” \> “New Project” in the +top menu. You can either create a new directory or choose an existing +one for your project. + +To open an existing RStudio project, go to “File” \> “Open Project” and +navigate to the project’s “.Rproj” file. + +#### Step 4: Link your RStudio project to a GitHub repository + +In the RStudio project, go to the “Tools” menu and select “Version +Control” \> “Project Setup.” + +In the “Project Setup” window, select “Git” as the version control +system and click “OK.” + +A new “.git” folder will be created in your project directory, +initializing it as a Git repository. Commit any changes you have made so +far by clicking on the “Commit” button in the “Git” pane in RStudio. + +To link your local repository to a remote GitHub repository, go to your +GitHub account and create a new repository. + +Copy the remote repository’s URL (e.g., +“https://github.com/username/repository.git”). + +In RStudio, open the “Shell” by going to “Tools” \> “Shell.” + +In the shell, run the following command to add the remote repository: + +git remote add origin https://github.com/username/repository.git + +Replace the URL with the one you copied from your GitHub repository. + +Push your changes to the remote repository by running the following +command in the shell: + +git push -u origin master + +Now, your RStudio project is linked to a GitHub repository. You can use +the “Git” pane in RStudio to stage, commit, and push changes to the +remote repository, as well as manage branches and sync your local +repository with the remote one. + +By integrating GitHub with RStudio, you can streamline your workflow, +collaborate more effectively with your team, and manage your Git +repositories directly from the RStudio interface. + +## II. GitHub Basics (4 minutes) + +### A. Repository: + +A repository, often abbreviated as “repo,” is the fundamental building +block of GitHub. It is a storage space for your project files, including +the code, documentation, and other related resources. Each repository +also contains the complete history of all changes made to the project +files, which is crucial for effective version control. Repositories can +be public, allowing anyone to access and contribute, or private, +restricting access to specific collaborators. + +### B. Fork and Clone: + +Forking and cloning are two essential operations for working with +repositories on GitHub. Forking creates a personal copy of someone +else’s repository under your GitHub account, enabling you to make +changes to the project without affecting the original repo. Cloning, on +the other hand, is the process of downloading a remote repository to +your local machine for offline development. In GitHub Desktop, you can +clone a repository by selecting “Clone a repository from the Internet” +and entering the repository URL. In JupyterHub GitHub widget, you can +clone a repository by entering the repo URL in the “Clone Repository” +section of the widget. + +### C. Branches: + +Branches are a critical aspect of Git version control, as they allow you +to create multiple parallel versions of your project within a single +repository. This is particularly useful when working on new features or +bug fixes, as it prevents changes from interfering with the main (or +“master”) branch until they are ready to be merged. 
Creating a new +branch in GitHub Desktop can be done by clicking the “Current Branch” +dropdown and selecting “New Branch.” In JupyterHub GitHub widget, you +can create a new branch by clicking the “New Branch” button in the +“Branches” section of the widget. + +### D. Replace ‘master’ with ‘main’: + +In recent years, there has been a growing awareness of the importance of +inclusive language in technology. One such example is the use of the +term “master” in the context of the default branch in a GitHub +repository. The term “master” has historical connections to the +“master/slave” file structure, which evokes an unsavory colonial past +associated with slavery. In light of this, many developers and +organizations have begun to replace the term “master” with more neutral +terms, such as “main.” We encourage you to follow this practice and +change the default branch name in your repositories from “master” to +“main” or another suitable alternative. This small change can help +promote a more inclusive and welcoming environment within the technology +community. + +## III. Collaboration and Version Control (5 minutes) + +### A. Commits: + +Commits are snapshots of your project’s changes at a specific point in +time, serving as the fundamental building blocks of Git’s version +control system. Commits make it possible to track changes, revert to +previous versions, and collaborate with others. In GitHub Desktop, you +can make a commit by staging the changes you want to include, adding a +descriptive commit message, and clicking “Commit to \[branch_name\].” In +JupyterHub GitHub widget, you can create a commit by selecting the files +with changes, entering a commit message, and clicking the “Commit” +button. + +### B. Push: + +In GitHub, “push” is a fundamental operation in the version control +process that transfers commits from your local repository to a remote +repository, such as the one hosted on GitHub. When you push changes, you +synchronize the remote repository with the latest updates made to your +local repository, making those changes accessible to other collaborators +working on the same project. This operation ensures that the remote +repository reflects the most recent state of your work and allows your +team members to stay up to date with your changes. Pushing is an +essential step in distributed version control systems like Git, as it +promotes efficient collaboration among multiple contributors and +provides a centralized location for tracking the project’s history and +progress. + +In GitHub, the concepts of “commit” and “push” represent two distinct +steps in the version control process. A “commit” is the action of saving +changes to your local repository. When you commit changes, you create a +snapshot of your work, accompanied by a unique identifier and an +optional descriptive message. Commits allow you to track the progress of +your work over time and make it easy to revert to a previous state if +necessary. On the other hand, “push” is the action of transferring your +local commits to a remote repository, such as the one hosted on GitHub. +Pushing makes your changes accessible to others collaborating on the +same project and ensures that the remote repository stays up to date +with your local repository. In summary, committing saves changes +locally, while pushing synchronizes those changes with a remote +repository, allowing for seamless collaboration among multiple +contributors. + +### C. 
Pull Requests: + +Pull requests are a collaboration feature on GitHub that enables +developers to propose changes to a repository, discuss those changes, +and ultimately merge them into the main branch. To create a pull +request, you must first push your changes to a branch on your fork of +the repository. Then, using either GitHub Desktop or JupyterHub GitHub +widget, you can navigate to the original repository, click the “Pull +Request” tab, and create a new pull request. After the pull request is +reviewed and approved, it can be merged into the main branch. + +### D. Merging and Resolving Conflicts: + +Merging is the process of combining changes from one branch into +another. This is typically done when a feature or bugfix has been +completed and is ready to be integrated into the main branch. Conflicts +can arise during the merging process if the same lines of code have been +modified in both branches. To resolve conflicts, you must manually +review the changes and decide which version to keep. In GitHub Desktop, +you can merge branches by selecting the target branch and choosing +“Merge into Current Branch.” Conflicts will be highlighted, and you can +edit the files to resolve them before committing the changes. In +JupyterHub GitHub widget, you can merge branches by selecting the target +branch in the “Branches” section and clicking the “Merge” button. If +conflicts occur, the widget will prompt you to resolve them before +completing the merge. + +## IV. Additional Features (2 minutes) + +### A. Issues and Project Management: + +Issues are a powerful feature in GitHub that allows developers to track +and manage bugs, enhancements, and other tasks within a project. Issues +can be assigned to collaborators, labeled for easy organization, and +linked to specific commits or pull requests. They provide a centralized +location for discussing and addressing project-related concerns, +fostering collaboration and transparent communication among team +members. Using issues effectively can significantly improve the overall +management and organization of your projects. + +### B. GitHub Pages: + +GitHub Pages is a service offered by GitHub that allows you to host +static websites directly from a repository. By creating a new branch +named “gh-pages” in your repository and adding the necessary files +(HTML, CSS, JavaScript, etc.), GitHub will automatically build and +deploy your website to a publicly accessible URL. This is particularly +useful for showcasing project documentation, creating personal +portfolios, or hosting project demos. With GitHub Pages, you can take +advantage of the version control and collaboration features of GitHub +while easily sharing your work with others. + +## V. Conclusion (2 minutes) + +### A. Recap of the essentials of GitHub: + +In this brief introduction, we have covered the essentials of GitHub, +including the basics of repositories, forking, cloning, branching, +commits, pull requests, merging, and resolving conflicts. We have also +discussed additional features like issues for project management and +GitHub Pages for hosting websites directly from a repository. + +### B. Encourage further exploration and learning: + +While this introduction provides a solid foundation for understanding +and using GitHub, there is still much more to learn and explore. As you +continue to use GitHub in your projects, you will discover new features +and workflows that can enhance your productivity and collaboration. 
We +encourage you to dive deeper into the platform and experiment with +different tools and techniques. + +### C. Share resources for learning more about GitHub: + +There are many resources available for learning more about GitHub and +expanding your skills. Some popular resources include GitHub Guides +(https://guides.github.com/), which offers a collection of tutorials and +best practices, the official GitHub documentation +(https://docs.github.com/), and various online tutorials and courses. By +engaging with these resources and participating in the GitHub community, +you can further develop your understanding of the platform and become a +more proficient user. + +## V. Conclusion (2 minutes) + +### A. Recap of the essentials of GitHub: + +In this brief introduction, we have covered the essentials of GitHub, +including the basics of repositories, forking, cloning, branching, +commits, pull requests, merging, and resolving conflicts. We have also +discussed additional features like issues for project management and +GitHub Pages for hosting websites directly from a repository. + +### B. Encourage further exploration and learning: + +While this introduction provides a solid foundation for understanding +and using GitHub, there is still much more to learn and explore. As you +continue to use GitHub in your projects, you will discover new features +and workflows that can enhance your productivity and collaboration. We +encourage you to dive deeper into the platform and experiment with +different tools and techniques. + +### C. Share resources for learning more about GitHub: + +There are many resources available for learning more about GitHub and +expanding your skills. Some popular resources include GitHub Guides +(https://guides.github.com/), which offers a collection of tutorials and +best practices, the official GitHub documentation +(https://docs.github.com/), and various online tutorials and courses. By +engaging with these resources and participating in the GitHub community, +you can further develop your understanding of the platform and become a +more proficient user. + +By Ty Tuff, ESIIL \ No newline at end of file diff --git a/collaborating-on-the-cloud/github-basics/index.html b/collaborating-on-the-cloud/github-basics/index.html new file mode 100644 index 0000000..9bb93d3 --- /dev/null +++ b/collaborating-on-the-cloud/github-basics/index.html @@ -0,0 +1,1664 @@ + + + + + + + + + + + + + + + + + + + + + + Github essentials - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    + + + + + + +
    + + + + + + + +
    + +
    + + + + +
    +
    + + + +
    +
    +
    + + + + + + + + +
    +
    +
    + + + + +
    +
    + + + + + + + + + +

    Github essentials

    +

    I. Introduction (2 minutes)

    +

    A. Brief overview of GitHub:

    +

    GitHub is a web-based platform that provides version control and +collaboration features using Git, a distributed version control system. +It enables developers to work together on projects, track changes to +code, and efficiently manage different versions of the project. GitHub +is widely used in the software development industry and is an essential +tool for collaborative projects and maintaining code quality.

    +

    B. Introduce GitHub Desktop and JupyterHub GitHub widget:

    +

    GitHub Desktop is a graphical user interface (GUI) application that +simplifies working with Git and GitHub by providing a more visual and +intuitive way to manage repositories, branches, commits, and other Git +features. The JupyterHub GitHub widget, on the other hand, is a +JupyterLab extension that integrates Git and GitHub functionality +directly into the Jupyter environment, allowing users to perform version +control and collaboration tasks alongside their notebooks. Both tools +help streamline the process of working with GitHub and make it more +accessible to users with varying levels of experience with Git and +version control.

    +

    1. Download GitHub Desktop

    +
    Step 1: Download GitHub Desktop
    +

    Go to the GitHub Desktop download page: https://desktop.github.com/

    +

    Click on the “Download for Windows” or “Download for macOS” button, +depending on your operating system. The download should start +automatically.

    +
    Step 2: Install GitHub Desktop
    +

    For Windows:

    +

    Locate the downloaded installer file (usually in the Downloads folder) +and double-click on it to run the installer.

    +

    Follow the installation instructions that appear on the screen, +accepting the default settings or customizing them as desired.

    +

    Once the installation is complete, GitHub Desktop will launch +automatically. For macOS:

    +

    Locate the downloaded .zip file (usually in the Downloads folder) and +double-click on it to extract the GitHub Desktop application.

    +

    Drag the extracted “GitHub Desktop” application into the “Applications” +folder.

    +

    Open the “Applications” folder and double-click on “GitHub Desktop” to +launch the application.

    +
    Step 3: Set up GitHub Desktop
    +

    When GitHub Desktop launches for the first time, you will be prompted to +sign in with your GitHub account. If you don’t have one, you can create +one at https://github.com/join.

    +

    Enter your GitHub username (or email) and password, and click on “Sign +in.”

    +

    You will then be prompted to configure Git. Enter your name and email +address, which will be used for your commit messages. Click “Continue” +when you’re done. Choose whether you want to submit usage data to help +improve GitHub Desktop. Click “Finish” to complete the setup.
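
    If you prefer to check or set this identity from a terminal instead of
    the GitHub Desktop dialog, a minimal equivalent (substitute your own
    name and email) is:

    git config --global user.name "Your Name"
    git config --global user.email "you@example.com"
    git config --global --list    # verify the values future commits will use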

    +

    Now, you have successfully installed and set up GitHub Desktop. You can +start using it to clone repositories, make changes, commit, and sync +with the remote repositories on GitHub.

    +

    1. Download GitHub for JupyterHub cloud service

    +

    Step 1: Accessing JupyterHub on the cloud

    +

    Visit the Jupyter-based cloud service you want to use (e.g., Binder, Google +Colab, or a custom JupyterHub deployment provided by your organization).

    +

    Sign in with your credentials or authenticate using a third-party +service if required.

    +

    Step 2: Launch a new Jupyter Notebook or open an existing one

    +

    Click on the “New” button (usually located in the top right corner) and +select “Python” to create a new Jupyter Notebook or open an existing one +from the file browser.

    +

    Once the notebook is open, you will see the Jupyter Notebook interface +with the familiar cells for writing and executing code.

    +

    Step 3: Install and enable the JupyterLab Git extension

    +

    In your Jupyter Notebook, create a new code cell and run the following +command to install the JupyterLab Git extension:

    +

    !pip install jupyterlab-git

    +

    Restart the Jupyter Notebook server for the changes to take effect.

    +

    Step 4: Using the JupyterHub GitHub widget

    +

    In the Jupyter Notebook interface, you should now see a Git icon on the +left sidebar. Click on it to open the GitHub widget.

    +

    To clone a repository, click on the “+” icon in the GitHub widget and +enter the repository URL. This will clone the repository into your +JupyterHub workspace. You can now navigate through the cloned +repository, make changes, and use the GitHub widget to stage, commit, +and push your changes back to the remote repository.

    +

    To create and manage branches, use the branch icon in the GitHub widget. +You can create new branches, switch between branches, and merge branches +using this interface.

    +

    To sync your local repository with the remote repository, use the “Pull” +and “Push” buttons in the GitHub widget.

    +

    Now, you know how to access and use the JupyterHub GitHub widget running +on the cloud. This allows you to work with Git and GitHub directly from +your Jupyter Notebook interface, streamlining your workflow and making +collaboration easier.

    +

    C. GitHub in RStudio:

    +

    Integrating GitHub with RStudio allows users to manage their Git +repositories and collaborate on projects directly within the RStudio +environment. It offers similar functionality to GitHub Desktop but +caters specifically to R users working within RStudio. By configuring +RStudio to work with Git, creating or opening RStudio projects, and +linking projects to GitHub repositories, users can enjoy a seamless +workflow for version control and collaboration. RStudio’s Git pane +enables users to stage, commit, and push changes to remote repositories, +as well as manage branches and sync local repositories with remote ones, +providing a comprehensive solution for R developers working with GitHub.

    +

    Step 1: Install Git

    +

    Before integrating GitHub with RStudio, you need to have Git installed +on your computer. Visit the official Git website (https://git-scm.com/) +to download and install the latest version of Git for your operating +system.

    +

    Step 2: Configure RStudio to work with Git

    +

    Open RStudio.

    +

    Go to “Tools” > “Global Options” in the top menu. In the “Global +Options” window, click on the “Git/SVN” tab.

    +

    Check that the “Git executable” field is pointing to the correct +location of the installed Git. If not, click “Browse” and navigate to +the location of the Git executable file (usually found in the “bin” +folder of the Git installation directory).

    +

    Click “OK” to save the changes.

    +

    Step 3: Create or open an RStudio project

    +

    To create a new RStudio project, go to “File” > “New Project” in the +top menu. You can either create a new directory or choose an existing +one for your project.

    +

    To open an existing RStudio project, go to “File” > “Open Project” and +navigate to the project’s “.Rproj” file.

    + +

    In the RStudio project, go to the “Tools” menu and select “Version +Control” > “Project Setup.”

    +

    In the “Project Setup” window, select “Git” as the version control +system and click “OK.”

    +

    A new “.git” folder will be created in your project directory, +initializing it as a Git repository. Commit any changes you have made so +far by clicking on the “Commit” button in the “Git” pane in RStudio.

    +

    To link your local repository to a remote GitHub repository, go to your +GitHub account and create a new repository.

    +

    Copy the remote repository’s URL (e.g., +“https://github.com/username/repository.git”).

    +

    In RStudio, open the “Shell” by going to “Tools” > “Shell.”

    +

    In the shell, run the following command to add the remote repository:

    +

    git remote add origin https://github.com/username/repository.git

    +

    Replace the URL with the one you copied from your GitHub repository.

    +

    Push your changes to the remote repository by running the following +command in the shell:

    +

    git push -u origin master

    +

    Now, your RStudio project is linked to a GitHub repository. You can use +the “Git” pane in RStudio to stage, commit, and push changes to the +remote repository, as well as manage branches and sync your local +repository with the remote one.

    +

    By integrating GitHub with RStudio, you can streamline your workflow, +collaborate more effectively with your team, and manage your Git +repositories directly from the RStudio interface.

    +

    II. GitHub Basics (4 minutes)

    +

    A. Repository:

    +

    A repository, often abbreviated as “repo,” is the fundamental building +block of GitHub. It is a storage space for your project files, including +the code, documentation, and other related resources. Each repository +also contains the complete history of all changes made to the project +files, which is crucial for effective version control. Repositories can +be public, allowing anyone to access and contribute, or private, +restricting access to specific collaborators.

    +

    B. Fork and Clone:

    +

    Forking and cloning are two essential operations for working with +repositories on GitHub. Forking creates a personal copy of someone +else’s repository under your GitHub account, enabling you to make +changes to the project without affecting the original repo. Cloning, on +the other hand, is the process of downloading a remote repository to +your local machine for offline development. In GitHub Desktop, you can +clone a repository by selecting “Clone a repository from the Internet” +and entering the repository URL. In JupyterHub GitHub widget, you can +clone a repository by entering the repo URL in the “Clone Repository” +section of the widget.
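
    For reference, the same clone step can be performed from a terminal.
    This is a minimal sketch; the repository URLs are placeholders for your
    fork and for the original repository:

    git clone https://github.com/your-username/example-repo.git   # download your fork
    cd example-repo
    git remote add upstream https://github.com/original-owner/example-repo.git   # track the original repo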

    +

    C. Branches:

    +

    Branches are a critical aspect of Git version control, as they allow you +to create multiple parallel versions of your project within a single +repository. This is particularly useful when working on new features or +bug fixes, as it prevents changes from interfering with the main (or +“master”) branch until they are ready to be merged. Creating a new +branch in GitHub Desktop can be done by clicking the “Current Branch” +dropdown and selecting “New Branch.” In JupyterHub GitHub widget, you +can create a new branch by clicking the “New Branch” button in the +“Branches” section of the widget.
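
    The equivalent branch operations on the command line look like the
    following sketch (the branch name is illustrative):

    git branch                     # list existing branches
    git checkout -b new-feature    # create and switch to a new branch
    git checkout main              # switch back to the main branch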

    +

    D. Replace ‘master’ with ‘main’:

    +

    In recent years, there has been a growing awareness of the importance of +inclusive language in technology. One such example is the use of the +term “master” for the default branch in a GitHub repository. The term +has historical connections to “master/slave” terminology used elsewhere +in computing, which evokes the history of slavery. In light of this, +many developers and organizations have begun to replace the term +“master” with more neutral terms, such as “main.” We encourage you to +follow this practice and change the default branch name in your +repositories from “master” to “main” or another suitable alternative. +This small change can help promote a more inclusive and welcoming +environment within the technology community.
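
    For an existing local repository, the rename takes only a few commands.
    This is a sketch; on GitHub you also need to switch the default branch
    in the repository settings before deleting the old one:

    git branch -m master main          # rename the local branch
    git push -u origin main            # publish the renamed branch
    git push origin --delete master    # remove the old branch once the default has changed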

    +

    III. Collaboration and Version Control (5 minutes)

    +

    A. Commits:

    +

    Commits are snapshots of your project’s changes at a specific point in +time, serving as the fundamental building blocks of Git’s version +control system. Commits make it possible to track changes, revert to +previous versions, and collaborate with others. In GitHub Desktop, you +can make a commit by staging the changes you want to include, adding a +descriptive commit message, and clicking “Commit to [branch_name].” In +JupyterHub GitHub widget, you can create a commit by selecting the files +with changes, entering a commit message, and clicking the “Commit” +button.

    +

    B. Push:

    +

    In GitHub, “push” is a fundamental operation in the version control +process that transfers commits from your local repository to a remote +repository, such as the one hosted on GitHub. When you push changes, you +synchronize the remote repository with the latest updates made to your +local repository, making those changes accessible to other collaborators +working on the same project. This operation ensures that the remote +repository reflects the most recent state of your work and allows your +team members to stay up to date with your changes. Pushing is an +essential step in distributed version control systems like Git, as it +promotes efficient collaboration among multiple contributors and +provides a centralized location for tracking the project’s history and +progress.

    +

    In GitHub, the concepts of “commit” and “push” represent two distinct +steps in the version control process. A “commit” is the action of saving +changes to your local repository. When you commit changes, you create a +snapshot of your work, accompanied by a unique identifier and an +optional descriptive message. Commits allow you to track the progress of +your work over time and make it easy to revert to a previous state if +necessary. On the other hand, “push” is the action of transferring your +local commits to a remote repository, such as the one hosted on GitHub. +Pushing makes your changes accessible to others collaborating on the +same project and ensures that the remote repository stays up to date +with your local repository. In summary, committing saves changes +locally, while pushing synchronizes those changes with a remote +repository, allowing for seamless collaboration among multiple +contributors.
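
    As a compact command-line illustration of the commit-then-push sequence
    described above (the file name, commit message, and branch are
    placeholders):

    git add analysis.R                              # stage the changed file
    git commit -m "Describe what changed and why"   # save a snapshot locally
    git push origin main                            # synchronize the remote repository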

    +

    C. Pull Requests:

    +

    Pull requests are a collaboration feature on GitHub that enables +developers to propose changes to a repository, discuss those changes, +and ultimately merge them into the main branch. To create a pull +request, you must first push your changes to a branch on your fork of +the repository. Then, using either GitHub Desktop or JupyterHub GitHub +widget, you can navigate to the original repository, click the “Pull +Request” tab, and create a new pull request. After the pull request is +reviewed and approved, it can be merged into the main branch.
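
    If you also use the optional GitHub CLI (gh), a pull request can be
    opened from the terminal after pushing your branch; this sketch assumes
    gh is installed and the branch name is illustrative:

    git push -u origin new-feature
    gh pr create --base main --head new-feature --title "Add new feature" --body "Short description of the change"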

    +

    D. Merging and Resolving Conflicts:

    +

    Merging is the process of combining changes from one branch into +another. This is typically done when a feature or bugfix has been +completed and is ready to be integrated into the main branch. Conflicts +can arise during the merging process if the same lines of code have been +modified in both branches. To resolve conflicts, you must manually +review the changes and decide which version to keep. In GitHub Desktop, +you can merge branches by selecting the target branch and choosing +“Merge into Current Branch.” Conflicts will be highlighted, and you can +edit the files to resolve them before committing the changes. In +JupyterHub GitHub widget, you can merge branches by selecting the target +branch in the “Branches” section and clicking the “Merge” button. If +conflicts occur, the widget will prompt you to resolve them before +completing the merge.
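
    Behind both interfaces is an ordinary Git merge. A minimal command-line
    version, with illustrative branch and file names, looks like this; when
    a conflict occurs, Git marks the competing lines in the affected file:

    git checkout main
    git merge new-feature          # may stop and report conflicts
    # a conflicted file contains markers such as:
    # <<<<<<< HEAD
    # (version from main)
    # =======
    # (version from new-feature)
    # >>>>>>> new-feature
    git add resolved_file.R        # stage the file after editing out the markers
    git commit                     # complete the merge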

    +

    IV. Additional Features (2 minutes)

    +

    A. Issues and Project Management:

    +

    Issues are a powerful feature in GitHub that allows developers to track +and manage bugs, enhancements, and other tasks within a project. Issues +can be assigned to collaborators, labeled for easy organization, and +linked to specific commits or pull requests. They provide a centralized +location for discussing and addressing project-related concerns, +fostering collaboration and transparent communication among team +members. Using issues effectively can significantly improve the overall +management and organization of your projects.

    +

    B. GitHub Pages:

    +

    GitHub Pages is a service offered by GitHub that allows you to host +static websites directly from a repository. By creating a new branch +named “gh-pages” in your repository and adding the necessary files +(HTML, CSS, JavaScript, etc.), GitHub will automatically build and +deploy your website to a publicly accessible URL. This is particularly +useful for showcasing project documentation, creating personal +portfolios, or hosting project demos. With GitHub Pages, you can take +advantage of the version control and collaboration features of GitHub +while easily sharing your work with others.
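
    A minimal sketch of publishing a site from a gh-pages branch on the
    command line (the page content is a placeholder; depending on your
    repository settings, Pages can also be served from the main branch):

    git checkout --orphan gh-pages                        # start an empty branch for the site
    git rm -rf .                                          # clear the working tree on this branch
    echo "<h1>Project documentation</h1>" > index.html    # add a placeholder page
    git add index.html
    git commit -m "Initial GitHub Pages site"
    git push -u origin gh-pages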

    +

    V. Conclusion (2 minutes)

    +

    A. Recap of the essentials of GitHub:

    +

    In this brief introduction, we have covered the essentials of GitHub, +including the basics of repositories, forking, cloning, branching, +commits, pull requests, merging, and resolving conflicts. We have also +discussed additional features like issues for project management and +GitHub Pages for hosting websites directly from a repository.

    +

    B. Encourage further exploration and learning:

    +

    While this introduction provides a solid foundation for understanding +and using GitHub, there is still much more to learn and explore. As you +continue to use GitHub in your projects, you will discover new features +and workflows that can enhance your productivity and collaboration. We +encourage you to dive deeper into the platform and experiment with +different tools and techniques.

    +

    C. Share resources for learning more about GitHub:

    +

    There are many resources available for learning more about GitHub and +expanding your skills. Some popular resources include GitHub Guides +(https://guides.github.com/), which offers a collection of tutorials and +best practices, the official GitHub documentation +(https://docs.github.com/), and various online tutorials and courses. By +engaging with these resources and participating in the GitHub community, +you can further develop your understanding of the platform and become a +more proficient user.

    +

    By Ty Tuff, ESIIL

    + +
    +
    + + + Last update: + 2024-09-23 + + +
    + + + + + + +
    +
    + + +
    + + + +
    + + + +
    +
    +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/collaborating-on-the-cloud/markdown_basics/index.html b/collaborating-on-the-cloud/markdown_basics/index.html new file mode 100644 index 0000000..15f6924 --- /dev/null +++ b/collaborating-on-the-cloud/markdown_basics/index.html @@ -0,0 +1,2099 @@ + + + + + + + + + + + + + + + + + + + + + + Markdown for the Modern Researcher at ESIIL - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    + + + + + + +
    + + + + + + + +
    + +
    + + + + +
    +
    + + + +
    +
    +
    + + + + + + + + +
    +
    +
    + + + + +
    +
    + + + + + + + + + +

    Markdown for the Modern Researcher at ESIIL

    +

    Introduction

    +
      +
    • Overview of Markdown's relevance and utility in modern research.
    • +
    • How Markdown streamlines documentation in diverse scientific and coding environments.
    • +
    +

    Section 1: Mastering Markdown Syntax

    +
      +
    • Objective: Equip researchers with a thorough understanding of Markdown syntax and its diverse applications.
    • +
    • Topics Covered:
    • +
    • Fundamentals of Text Formatting (headings, lists, bold, italics)
    • +
    • Advanced Structures (tables, blockquotes)
    • +
    • Integrating Multimedia (image and video links)
    • +
    • Diagrams with Mermaid (creating flowcharts, mind maps, timelines)
    • +
    • Interactive Elements (hyperlinks, embedding interactive content)
    • +
    • Activities:
    • +
    • Crafting a Markdown document with various formatting elements.
    • +
    • Developing diagrams using Mermaid for research presentations.
    • +
    • Embedding multimedia elements in a Markdown document for enhanced communication.
    • +
    +

    Section 2: Markdown in Research Tools

    +
      +
    • Objective: Showcase the integration of Markdown in RStudio and Jupyter Notebooks for scientific documentation.
    • +
    • Topics Covered:
    • +
    • Implementing Markdown in RStudio (R Markdown, knitting to HTML/PDF)
    • +
    • Utilizing Markdown in Jupyter Notebooks (code and Markdown cells)
    • +
    • Best practices for documenting research code
    • +
    • Including code outputs and visualizations in documentation
    • +
    • Activities:
    • +
    • Creating and sharing an R Markdown document with annotated research data.
    • +
    • Building a comprehensive Jupyter Notebook with integrated Markdown annotations.
    • +
    +

    Section 3: Disseminating Research with Markdown and GitHub Pages

    +
      +
    • Objective: Teach researchers how to publish and manage Markdown-based documentation as web pages.
    • +
    • Topics Covered:
    • +
    • Setting up a GitHub repository for hosting documentation
    • +
    • Transforming Markdown files into web-friendly formats
    • +
    • Customizing web page layouts and themes
    • +
    • Advanced features using Jekyll
    • +
    • Version control and content management for documentation
    • +
    • Activities:
    • +
    • Publishing a research project documentation on GitHub Pages.
    • +
    • Applying custom themes and layouts to enhance online documentation.
    • +
    +

    Conclusion

    +
      +
    • Review of Markdown's role in enhancing research efficiency and clarity.
    • +
    • Encouraging the integration of Markdown into daily research activities for improved documentation and dissemination.
    • +
    +

    Additional Resources

    +
      +
    • Curated list of advanced Markdown tutorials, guides for GitHub Pages, and Jekyll resources for researchers.
    • +
    +

    Section 1: Mastering Markdown Syntax

    +

    1. Fundamentals of Text Formatting

    +
      +
    • Headings: Use # for different levels of headings.
    • +
    • +

      Heading Level 1

      +
    • +
    • +

      Heading Level 2

      +
    • +
    • +

      Heading Level 3

      +
    • +
    • +

      Lists: Bulleted lists use asterisks, numbers for ordered lists.

      +
    • +
    • Item 1
    • +
    • Item 2
        +
      • Subitem 2.1
      • +
      • Subitem 2.2
      • +
      +
    • +
    • +
        +
      1. First item
      2. +
      +
    • +
    • +
        +
      1. Second item
      2. +
      +
    • +
    • +

      Bold and Italics: Use asterisks or underscores.

      +
    • +
    • Bold Text
    • +
    • Italic Text
    • +
    +

    2. Advanced Structures

    +
      +
    • Tables: Create tables using dashes and pipes.
    • +
    • + + + + + + + + + + + + + + + + + + + + +
      Header 1Header 2Header 3
      Row 1DataData
      Row 2DataData
      +
    • +
    • +

      Add a ":" to the dashes to change column alignment. Here the : is added on the right side of the dashes in the first column, which right-aligns that column (use :--- for left alignment and :---: for centering). + | Header 1 | Header 2 | Header 3 | + |---------:|--------- |----------| + | Row 1 | Data | Data | + | Row 2 | Data | Data |

      +
    • +
    • + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
      ANALYTICSENR
      EINVIRONMENT
      VELOPMOCOMUN
      EGAGELLAHCNE
      RATADEVELOPW
      EITSITNEICSR
      SOIGOLOIBHTL
      AHTLAEWEGNEL
      TITSITNEICSN
      IEESREHTOENI
      CSLLAHCEGLAN
      EGALLEHCNEIC
      +
    • +
    • +

      If you hit the boundaries of Markdown's capabilities, you can start to add HTML directly. Remember, this entire exercise is ultimately translated to HTML.

      +
    • +
    +

    Sudoku Puzzle +Fill in the blank cells with numbers from 1 to 9, such that each row, column, and 3x3 subgrid contains all the numbers from 1 to 9 without repetition.

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    537
    6195
    986
    863
    4831
    726
    628
    4195
    879
    + + + + + + + + + + +
    534678912
    672195348
    198342567
    859761423
    426853791
    713924856
    961537284
    287419635
    345286179
    + +
      +
    • Blockquotes: Use > for blockquotes.
    • +
    • +
      +

      This is a blockquote.

      +
      +
    • +
    • +
      +

      It can span multiple lines.

      +
      +
    • +
    +

    3. Integrating Multimedia

    +
      +
    • Images: Add images using the format ![alt text](image_url).
    • +
    • +

      Markdown Logo

      +
    • +
    • +

      Videos: Embed videos using HTML in Markdown.

      +
    • +
    • <iframe width="560" height="315" src="https://www.youtube.com/embed/dQw4w9WgXcQ" frameborder="0" allowfullscreen></iframe>
    • +
    +

    4. Diagrams with Mermaid

    +
      +
    • Flowcharts:
    • +
    +
        graph TD
    +    A[Start] --> B[Analyze Data]
    +    B --> C{Is Data Large?}
    +    C -->|Yes| D[Apply Big Data Solutions]
    +    C -->|No| E[Use Traditional Methods]
    +    D --> F[Machine Learning]
    +    E --> G[Statistical Analysis]
    +    F --> H{Model Accurate?}
    +    G --> I[Report Results]
    +    H -->|Yes| J[Deploy Model]
    +    H -->|No| K[Refine Model]
    +    J --> L[Monitor Performance]
    +    K --> F
    +    L --> M[End: Success]
    +    I --> N[End: Report Generated]
    +    style A fill:#f9f,stroke:#333,stroke-width:2px
    +    style M fill:#9f9,stroke:#333,stroke-width:2px
    +    style N fill:#9f9,stroke:#333,stroke-width:2px
    +
      +
    • +

      Mind Maps: +

          mindmap
      +  root((ESIIL))
      +    section Data Sources
      +      Satellite Imagery
      +        ::icon(fa fa-satellite)
      +      Remote Sensing Data
      +        Drones
      +        Aircraft
      +      On-ground Sensors
      +        Weather Stations
      +        IoT Devices
      +      Open Environmental Data
      +        Public Datasets
      +        ::icon(fa fa-database)
      +    section Research Focus
      +      Climate Change Analysis
      +        Ice Melt Patterns
      +        Sea Level Rise
      +      Biodiversity Monitoring
      +        Species Distribution
      +        Habitat Fragmentation
      +      Geospatial Analysis Techniques
      +        Machine Learning Models
      +        Predictive Analytics
      +    section Applications
      +      Conservation Strategies
      +        ::icon(fa fa-leaf)
      +      Urban Planning
      +        Green Spaces
      +      Disaster Response
      +        Flood Mapping
      +        Wildfire Tracking
      +    section Tools and Technologies
      +      GIS Software
      +        QGIS
      +        ArcGIS
      +      Programming Languages
      +        Python
      +        R
      +      Cloud Computing Platforms
      +        AWS
      +        Google Earth Engine
      +      Data Visualization
      +        D3.js
      +        Tableau

      +
    • +
    • +

      Timelines:

      +
    • +
    +
    gantt
    +    title ESIIL Year 2 Project Schedule
    +    dateFormat  YYYY-MM-DD
    +    section CI
    +    Sovereign OASIS via private jupiterhubs :2024-08-01, 2024-10-30
    +    OASIS documentation                    :2024-09-15, 70d
    +    Data cube OASIS via cyverse account    :2024-09-15, 100d
    +    Integrate with ESIIL User Management system :2024-08-01, 2024-11-30
    +    Build badges to deploy DE from mkdoc   :2024-09-01, 2024-12-15
    +    Streamline Github ssh key management   :2024-10-01, 2024-12-31
    +    Cyverse support (R proxy link)         :2024-11-01, 2024-12-31
    +    Cyverse use summary and statistics     :2024-08-01, 2024-12-15
    +
    +    section CI Consultation and Education
    +    Conferences/Invited talks              :2024-08-01, 2024-12-31
    +    Office hours                           :2024-08-15, 2024-12-15
    +    Proposals                              :2024-09-01, 2024-11-15
    +    Private lessons                        :2024-09-15, 2024-11-30
    +    Pre-event trainings                    :2024-10-01, 2024-12-15
    +    Textbook development w/ education team :2024-08-01, 2024-12-15
    +    Train the trainers / group lessons     :2024-08-15, 2024-11-30
    +    Tribal engagement                      :2024-09-01, 2024-12-15
    +    Ethical Space training                 :2024-09-15, 2024-12-31
    +
    +    section CI Design and Build
    +    Data library (repository)              :2024-08-01, 2024-10-30
    +    Analytics library (repository)         :2024-08-15, 2024-11-15
    +    Containers (repository)                :2024-09-01, 2024-11-30
    +    Cloud infrastructure templates (repository) :2024-09-15, 2024-12-15
    +    Tribal resilience Data Cube            :2024-10-01, 2024-12-31
    +
    
    +%%{init: { 'logLevel': 'debug', 'theme': 'base', 'gitGraph': {'rotateCommitLabel': true}} }%%
    +gitGraph
    +  commit id: "Start from template"
    +  branch c1
    +  commit id: "Set up SSH key pair"
    +  commit id: "Modify _config.yml for GitHub Pages"
    +  commit id: "Initial website structure"
    +  commit id: "Add new markdown pages"
    +  commit id: "Update navigation tree"
    +  commit id: "Edit existing pages"
    +  commit id: "Delete old markdown pages"
    +  commit id: "Finalize website updates"
    +  commit id: "Add new markdown pages"
    +  commit id: "Update navigation tree"
    +checkout c1
    +
    +  branch b1
    +
    +  commit
    +  commit
    +  checkout c1
    +  merge b1
    +
    %%{init: {"quadrantChart": {"chartWidth": 400, "chartHeight": 400}, "themeVariables": {"quadrant1TextFill": "#ff0000"} }}%%
    +quadrantChart
    +  x-axis Urgent --> Not Urgent
    +  y-axis Not Important --> "Important ❤"
    +  quadrant-1 Plan
    +  quadrant-2 Do
    +  quadrant-3 Delegate
    +  quadrant-4 Delete
    +
    timeline
    +    title Major Events in Environmental Science and Data Science
    +    section Environmental Science
    +        19th century : Foundations in Ecology and Conservation
    +        1962 : Publication of 'Silent Spring' by Rachel Carson
    +        1970 : First Earth Day
    +        1987 : Brundtland Report introduces Sustainable Development
    +        1992 : Rio Earth Summit
    +        2015 : Paris Agreement on Climate Change
    +    section Data Science
    +        1960s-1970s : Development of Database Management Systems
    +        1980s : Emergence of Data Warehousing
    +        1990s : Growth of the World Wide Web and Data Mining
    +        2000s : Big Data and Predictive Analytics
    +        2010s : AI and Machine Learning Revolution
    +        2020s : Integration of AI in Environmental Research
    +
    erDiagram
    +    CAR ||--o{ NAMED-DRIVER : allows
    +    CAR {
    +        string registrationNumber
    +        string make
    +        string model
    +    }
    +    PERSON ||--o{ NAMED-DRIVER : is
    +    PERSON {
    +        string firstName
    +        string lastName
    +        int age
    +    }
    +
    ---
    +config:
    +  sankey:
    +    showValues: false
    +---
    +sankey-beta
    +
    +NASA Data,Big Data Harmonization,100
    +    Satellite Imagery,Big Data Harmonization,80
    +    Open Environmental Data,Big Data Harmonization,70
    +    Remote Sensing Data,Big Data Harmonization,90
    +    Big Data Harmonization, Data Analysis and Integration,340
    +    Data Analysis and Integration,Climate Change Research,100
    +    Data Analysis and Integration,Biodiversity Monitoring,80
    +    Data Analysis and Integration,Geospatial Mapping,60
    +    Data Analysis and Integration,Urban Planning,50
    +    Data Analysis and Integration,Disaster Response,50
    +

    5. Interactive Elements

    +
      +
    • Hyperlinks: Use the format [link text](URL).
    • +
    • Google
    • +
    • +

      Play Tetris

      +
    • +
    • +

      Embedding Interactive Content: Use HTML tags or specific platform embed codes.

      +
    • +
    • <iframe src="https://example.com/interactive-content" width="600" height="400"></iframe>
    • +
    +

    6. Math Notation

    +

    Markdown can be combined with LaTeX for mathematical notation, useful in environmental data science for expressing statistical distributions, coordinate systems, and more. This requires a Markdown renderer with LaTeX support (like MathJax or KaTeX).

    +
      +
    • Inline Math: Use single dollar signs for inline math expressions, for example to represent the normal distribution.
    • +
    +

    Example: The probability density function of the normal distribution is given by \(f(x|\mu,\sigma) = \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}\).

    +
      +
    • Display Math: Use double dollar signs for standalone equations.
    • +
    +

    Example: + $$ + f(x|\mu,\sigma) = \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2} + $$

    +
      +
    • Common LaTeX Elements for Environmental Data Science:
    • +
    • Statistical Distributions:
        +
      • Normal Distribution: \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2} for \(\frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}\)
      • +
      • Poisson Distribution: P(k; \lambda) = \frac{\lambda^k e^{-\lambda}}{k!} for \(P(k; \lambda) = \frac{\lambda^k e^{-\lambda}}{k!}\)
      • +
      +
    • +
    • Coordinate Systems:
        +
      • Spherical Coordinates: (r, \theta, \phi) for \((r, \theta, \phi)\)
      • +
      • Cartesian Coordinates: (x, y, z) for \((x, y, z)\)
      • +
      +
    • +
    • Geospatial Equations:
        +
      • Haversine Formula for Distance: a = \sin^2\left(\frac{\Delta\phi}{2}\right) + \cos(\phi_1)\cos(\phi_2)\sin^2\left(\frac{\Delta\lambda}{2}\right) for \(a = \sin^2\left(\frac{\Delta\phi}{2}\right) + \cos(\phi_1)\cos(\phi_2)\sin^2\left(\frac{\Delta\lambda}{2}\right)\)
      • +
      +
    • +
    +

    Note: The rendering of these equations as formatted math will depend on your Markdown viewer's LaTeX capabilities.

    +

    7. Effective Citations in Markdown

    +

    Inline Citations

    +
      +
    • Objective: Learn how to use inline citations in Markdown.
    • +
    • Example Usage:
    • +
    • Inline citation of a single work:
        +
      • Some text with an inline citation. [@jones:envstudy:2020]
      • +
      +
    • +
    • Inline citation with specific page or section:
        +
      • More text with a specific section cited. [See @jones:envstudy:2020, §4.2]
      • +
      +
    • +
    • Contrasting views:
        +
      • Discussion of a topic with a contrasting view. [Contra @smith:climatechange:2019, p. 78]
      • +
      +
    • +
    +

    Footnote Citations

    +
      +
    • Objective: Understand how to use footnote citations in Markdown.
    • +
    • Example Usage:
    • +
    • Citing with a footnote:
        +
      • Some statement in the text.1
      • +
      +
    • +
    • Multiple references to the same footnote:
        +
      • Another statement referring to the same source.1
      • +
      +
    • +
    • A different citation:
        +
      • Additional comment with a new citation.2
      • +
      +
    • +
    +

    Creating Footnotes

    +
      +
    • Example Syntax:
    • +
    • +
    • +
    +
    +
    +
      +
    1. +

      First reference details. Example: Emma Jones, "Environmental Study," Nature Journal, May 2020, https://nature-journal.com/envstudy2020

      +
    2. +
    3. +

      Second reference details. Example: David Smith, "Climate Change Controversies," Science Daily, August 2019, https://sciencedaily.com/climatechange2019

      +
    4. +
    +
    + +
    +
    + + + Last update: + 2024-09-23 + + +
    + + + + + + +
    +
    + + +
    + + + +
    + + + +
    +
    +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/collaborating-on-the-cloud/markdown_basics/markdown_basics.md b/collaborating-on-the-cloud/markdown_basics/markdown_basics.md new file mode 100644 index 0000000..fa69ea4 --- /dev/null +++ b/collaborating-on-the-cloud/markdown_basics/markdown_basics.md @@ -0,0 +1,434 @@ +# Markdown for the Modern Researcher at ESIIL + +## Introduction + +- Overview of Markdown's relevance and utility in modern research. +- How Markdown streamlines documentation in diverse scientific and coding environments. + +## Section 1: Mastering Markdown Syntax + +- **Objective:** Equip researchers with a thorough understanding of Markdown syntax and its diverse applications. +- **Topics Covered:** + - Fundamentals of Text Formatting (headings, lists, bold, italics) + - Advanced Structures (tables, blockquotes) + - Integrating Multimedia (image and video links) + - Diagrams with Mermaid (creating flowcharts, mind maps, timelines) + - Interactive Elements (hyperlinks, embedding interactive content) +- **Activities:** + - Crafting a Markdown document with various formatting elements. + - Developing diagrams using Mermaid for research presentations. + - Embedding multimedia elements in a Markdown document for enhanced communication. + +## Section 2: Markdown in Research Tools + +- **Objective:** Showcase the integration of Markdown in RStudio and Jupyter Notebooks for scientific documentation. +- **Topics Covered:** + - Implementing Markdown in RStudio (R Markdown, knitting to HTML/PDF) + - Utilizing Markdown in Jupyter Notebooks (code and Markdown cells) + - Best practices for documenting research code + - Including code outputs and visualizations in documentation +- **Activities:** + - Creating and sharing an R Markdown document with annotated research data. + - Building a comprehensive Jupyter Notebook with integrated Markdown annotations. + +## Section 3: Disseminating Research with Markdown and GitHub Pages + +- **Objective:** Teach researchers how to publish and manage Markdown-based documentation as web pages. +- **Topics Covered:** + - Setting up a GitHub repository for hosting documentation + - Transforming Markdown files into web-friendly formats + - Customizing web page layouts and themes + - Advanced features using Jekyll + - Version control and content management for documentation +- **Activities:** + - Publishing a research project documentation on GitHub Pages. + - Applying custom themes and layouts to enhance online documentation. + +## Conclusion + +- Review of Markdown's role in enhancing research efficiency and clarity. +- Encouraging the integration of Markdown into daily research activities for improved documentation and dissemination. + +## Additional Resources + +- Curated list of advanced Markdown tutorials, guides for GitHub Pages, and Jekyll resources for researchers. + + + + +## Section 1: Mastering Markdown Syntax + +### 1. Fundamentals of Text Formatting + +- **Headings**: Use `#` for different levels of headings. + - # Heading Level 1 + - ## Heading Level 2 + - ### Heading Level 3 + +- **Lists**: Bulleted lists use asterisks, numbers for ordered lists. + - Item 1 + - Item 2 + - Subitem 2.1 + - Subitem 2.2 + - 1. First item + - 2. Second item + +- **Bold and Italics**: Use asterisks or underscores. + - **Bold Text** + - *Italic Text* + +### 2. Advanced Structures + +- **Tables**: Create tables using dashes and pipes. 
+ - | Header 1 | Header 2 | Header 3 | + |----------|----------|----------| + | Row 1 | Data | Data | + | Row 2 | Data | Data | + - Add a ":"" to change text justification. Here the : is added on the left for left justification. + | Header 1 | Header 2 | Header 3 | + |---------:|--------- |----------| + | Row 1 | Data | Data | + | Row 2 | Data | Data | + + - | | | | | | | | | | | | | + |---|---|---|---|---|---|---|---|---|---|---|---| + | A | N | A | L | Y | T | I | C | S | E | N | R | + | E | I | N | V | I | R | O | N | M | E | N | T | + | V | E | L | O | P | M | O | C | O | M | U | N | + | E | G | A | G | E | L | L | A | H | C | N | E | + | R | A | T | A | D | E | V | E | L | O | P | W | + | E | I | T | S | I | T | N | E | I | C | S | R | + | S | O | I | G | O | L | O | I | B | H | T | L | + | A | H | T | L | A | E | W | E | G | N | E | L | + | T | I | T | S | I | T | N | E | I | C | S | N | + | I | E | E | S | R | E | H | T | O | E | N | I | + | C | S | L | L | A | H | C | E | G | L | A | N | + | E | G | A | L | L | E | H | C | N | E | I | C | + + - If you hit the boundaries of Markdown's capabilities, you can start to add html directly. Remember, this entire exercisse is to translate to html. + +**Sudoku Puzzle** +Fill in the blank cells with numbers from 1 to 9, such that each row, column, and 3x3 subgrid contains all the numbers from 1 to 9 without repetition. + +| | | | | | | | | | +|---|---|---|---|---|---|---|---|---| +| 5 | 3 | | | 7 | | | | | +| 6 | | | 1 | 9 | 5 | | | | +| | 9 | 8 | | | | | 6 | | +| 8 | | | | 6 | | | | 3 | +| 4 | | | 8 | | 3 | | | 1 | +| 7 | | | | 2 | | | | 6 | +| | 6 | | | | | 2 | 8 | | +| | | | 4 | 1 | 9 | | | 5 | +| | | | | 8 | | | 7 | 9 | + + + + + + + + + + + + + + +
    534678912
    672195348
    198342567
    859761423
    426853791
    713924856
    961537284
    287419635
    345286179
    + + + + + +- **Blockquotes**: Use `>` for blockquotes. + - > This is a blockquote. + - > It can span multiple lines. + +### 3. Integrating Multimedia + +- **Images**: Add images using the format `![alt text](image_url)`. + - ![Markdown Logo](https://example.com/markdown-logo.png) + +- **Videos**: Embed videos using HTML in Markdown. + - `` + +### 4. Diagrams with Mermaid + +- **Flowcharts**: + +```mermaid + graph TD + A[Start] --> B[Analyze Data] + B --> C{Is Data Large?} + C -->|Yes| D[Apply Big Data Solutions] + C -->|No| E[Use Traditional Methods] + D --> F[Machine Learning] + E --> G[Statistical Analysis] + F --> H{Model Accurate?} + G --> I[Report Results] + H -->|Yes| J[Deploy Model] + H -->|No| K[Refine Model] + J --> L[Monitor Performance] + K --> F + L --> M[End: Success] + I --> N[End: Report Generated] + style A fill:#f9f,stroke:#333,stroke-width:2px + style M fill:#9f9,stroke:#333,stroke-width:2px + style N fill:#9f9,stroke:#333,stroke-width:2px +``` + +- **Mind Maps**: +```mermaid + mindmap + root((ESIIL)) + section Data Sources + Satellite Imagery + ::icon(fa fa-satellite) + Remote Sensing Data + Drones + Aircraft + On-ground Sensors + Weather Stations + IoT Devices + Open Environmental Data + Public Datasets + ::icon(fa fa-database) + section Research Focus + Climate Change Analysis + Ice Melt Patterns + Sea Level Rise + Biodiversity Monitoring + Species Distribution + Habitat Fragmentation + Geospatial Analysis Techniques + Machine Learning Models + Predictive Analytics + section Applications + Conservation Strategies + ::icon(fa fa-leaf) + Urban Planning + Green Spaces + Disaster Response + Flood Mapping + Wildfire Tracking + section Tools and Technologies + GIS Software + QGIS + ArcGIS + Programming Languages + Python + R + Cloud Computing Platforms + AWS + Google Earth Engine + Data Visualization + D3.js + Tableau +``` + +- **Timelines**: + +```mermaid +gantt + title ESIIL Year 2 Project Schedule + dateFormat YYYY-MM-DD + section CI + Sovereign OASIS via private jupiterhubs :2024-08-01, 2024-10-30 + OASIS documentation :2024-09-15, 70d + Data cube OASIS via cyverse account :2024-09-15, 100d + Integrate with ESIIL User Management system :2024-08-01, 2024-11-30 + Build badges to deploy DE from mkdoc :2024-09-01, 2024-12-15 + Streamline Github ssh key management :2024-10-01, 2024-12-31 + Cyverse support (R proxy link) :2024-11-01, 2024-12-31 + Cyverse use summary and statistics :2024-08-01, 2024-12-15 + + section CI Consultation and Education + Conferences/Invited talks :2024-08-01, 2024-12-31 + Office hours :2024-08-15, 2024-12-15 + Proposals :2024-09-01, 2024-11-15 + Private lessons :2024-09-15, 2024-11-30 + Pre-event trainings :2024-10-01, 2024-12-15 + Textbook development w/ education team :2024-08-01, 2024-12-15 + Train the trainers / group lessons :2024-08-15, 2024-11-30 + Tribal engagement :2024-09-01, 2024-12-15 + Ethical Space training :2024-09-15, 2024-12-31 + + section CI Design and Build + Data library (repository) :2024-08-01, 2024-10-30 + Analytics library (repository) :2024-08-15, 2024-11-15 + Containers (repository) :2024-09-01, 2024-11-30 + Cloud infrastructure templates (repository) :2024-09-15, 2024-12-15 + Tribal resilience Data Cube :2024-10-01, 2024-12-31 +``` + +```mermaid + +%%{init: { 'logLevel': 'debug', 'theme': 'base', 'gitGraph': {'rotateCommitLabel': true}} }%% +gitGraph + commit id: "Start from template" + branch c1 + commit id: "Set up SSH key pair" + commit id: "Modify _config.yml for GitHub Pages" + commit id: "Initial website 
structure" + commit id: "Add new markdown pages" + commit id: "Update navigation tree" + commit id: "Edit existing pages" + commit id: "Delete old markdown pages" + commit id: "Finalize website updates" + commit id: "Add new markdown pages" + commit id: "Update navigation tree" +checkout c1 + + branch b1 + + commit + commit + checkout c1 + merge b1 +``` + +```mermaid +%%{init: {"quadrantChart": {"chartWidth": 400, "chartHeight": 400}, "themeVariables": {"quadrant1TextFill": "#ff0000"} }}%% +quadrantChart + x-axis Urgent --> Not Urgent + y-axis Not Important --> "Important ❤" + quadrant-1 Plan + quadrant-2 Do + quadrant-3 Delegate + quadrant-4 Delete +``` + + +```mermaid +timeline + title Major Events in Environmental Science and Data Science + section Environmental Science + 19th century : Foundations in Ecology and Conservation + 1962 : Publication of 'Silent Spring' by Rachel Carson + 1970 : First Earth Day + 1987 : Brundtland Report introduces Sustainable Development + 1992 : Rio Earth Summit + 2015 : Paris Agreement on Climate Change + section Data Science + 1960s-1970s : Development of Database Management Systems + 1980s : Emergence of Data Warehousing + 1990s : Growth of the World Wide Web and Data Mining + 2000s : Big Data and Predictive Analytics + 2010s : AI and Machine Learning Revolution + 2020s : Integration of AI in Environmental Research +``` + + + + +```mermaid +erDiagram + CAR ||--o{ NAMED-DRIVER : allows + CAR { + string registrationNumber + string make + string model + } + PERSON ||--o{ NAMED-DRIVER : is + PERSON { + string firstName + string lastName + int age + } +``` + +```mermaid +--- +config: + sankey: + showValues: false +--- +sankey-beta + +NASA Data,Big Data Harmonization,100 + Satellite Imagery,Big Data Harmonization,80 + Open Environmental Data,Big Data Harmonization,70 + Remote Sensing Data,Big Data Harmonization,90 + Big Data Harmonization, Data Analysis and Integration,340 + Data Analysis and Integration,Climate Change Research,100 + Data Analysis and Integration,Biodiversity Monitoring,80 + Data Analysis and Integration,Geospatial Mapping,60 + Data Analysis and Integration,Urban Planning,50 + Data Analysis and Integration,Disaster Response,50 +``` + + +### 5. Interactive Elements + +- **Hyperlinks**: Use the format `[link text](URL)`. + - [Google](https://www.google.com) + - [Play Tetris](https://tetris.com/play-tetris) + +- **Embedding Interactive Content**: Use HTML tags or specific platform embed codes. + - `` + + + + +### 6. Math Notation + +Markdown can be combined with LaTeX for mathematical notation, useful in environmental data science for expressing statistical distributions, coordinate systems, and more. This requires a Markdown renderer with LaTeX support (like MathJax or KaTeX). + +- **Inline Math**: Use single dollar signs for inline math expressions. Representing the normal distribution. + + Example: The probability density function of the normal distribution is given by $f(x|\mu,\sigma) = \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}$.` + +- **Display Math**: Use double dollar signs for standalone equations. 
+ + Example: + $$ + f(x|\mu,\sigma) = \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2} + $$ + + +- **Common LaTeX Elements for Environmental Data Science**: + - **Statistical Distributions**: + - Normal Distribution: `\frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}` for $\frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}$ + - Poisson Distribution: `P(k; \lambda) = \frac{\lambda^k e^{-\lambda}}{k!}` for $P(k; \lambda) = \frac{\lambda^k e^{-\lambda}}{k!}$ + - **Coordinate Systems**: + - Spherical Coordinates: `(r, \theta, \phi)` for $(r, \theta, \phi)$ + - Cartesian Coordinates: `(x, y, z)` for $(x, y, z)$ + - **Geospatial Equations**: + - Haversine Formula for Distance: `a = \sin^2\left(\frac{\Delta\phi}{2}\right) + \cos(\phi_1)\cos(\phi_2)\sin^2\left(\frac{\Delta\lambda}{2}\right)` for $a = \sin^2\left(\frac{\Delta\phi}{2}\right) + \cos(\phi_1)\cos(\phi_2)\sin^2\left(\frac{\Delta\lambda}{2}\right)$ + +Note: The rendering of these equations as formatted math will depend on your Markdown viewer's LaTeX capabilities. + + + +### 7. Effective Citations in Markdown + +## Inline Citations + +- **Objective:** Learn how to use inline citations in Markdown. +- **Example Usage:** + - Inline citation of a single work: + - Some text with an inline citation. [@jones:envstudy:2020] + - Inline citation with specific page or section: + - More text with a specific section cited. [See @jones:envstudy:2020, §4.2] + - Contrasting views: + - Discussion of a topic with a contrasting view. [Contra @smith:climatechange:2019, p. 78] + +## Footnote Citations + +- **Objective:** Understand how to use footnote citations in Markdown. +- **Example Usage:** + - Citing with a footnote: + - Some statement in the text.[^1] + - Multiple references to the same footnote: + - Another statement referring to the same source.[^1] + - A different citation: + - Additional comment with a new citation.[^2] + +## Creating Footnotes + +- **Example Syntax:** + - [^1]: First reference details. Example: Emma Jones, "Environmental Study," Nature Journal, May 2020, https://nature-journal.com/envstudy2020. + - [^2]: Second reference details. Example: David Smith, "Climate Change Controversies," Science Daily, August 2019, https://sciencedaily.com/climatechange2019. + + diff --git a/countries_plot.png b/countries_plot.png new file mode 100644 index 0000000..4516259 Binary files /dev/null and b/countries_plot.png differ diff --git a/data-library/Pull_Sentinal2_l2_data/Pull_Sentinal2_l2_data.md b/data-library/Pull_Sentinal2_l2_data/Pull_Sentinal2_l2_data.md new file mode 100644 index 0000000..a138cf0 --- /dev/null +++ b/data-library/Pull_Sentinal2_l2_data/Pull_Sentinal2_l2_data.md @@ -0,0 +1,466 @@ +Pulling Sentinal 2 data +================ +Ty Tuff, ESIIL Data Scientist +2023-10-27 + +# Set Java Options + +``` r +# Run these Java options before anything else. +options(java.parameters = "-Xmx64G") +options(timeout = max(600, getOption("timeout"))) +``` + +# R libraries and global setting. 
+ +``` r +#library(Rcpp) +library(sf) +library(gdalcubes) +library(rstac) +library(gdalUtils) +library(terra) +library(rgdal) +library(reshape2) +library(osmdata) +library(terra) +library(dplyr) +#library(glue) +library(stars) +library(ggplot2) +library(colorspace) +library(geos) +#library(glue) +library(osmdata) +library(ggthemes) +library(tidyr) +gdalcubes_options(parallel = 8) + +sf::sf_extSoftVersion() +``` + + GEOS GDAL proj.4 GDAL_with_GEOS USE_PROJ_H + "3.11.0" "3.5.3" "9.1.0" "true" "true" + PROJ + "9.1.0" + +``` r +gdalcubes_gdal_has_geos() +``` + + [1] TRUE + +# Start timer + +``` r +start <- Sys.time() +``` + +# Set color palette + +``` r +library(ggtern) +our_yellow <- rgb2hex(r = 253, g = 201, b = 51) +our_green <- rgb2hex(r = 10, g = 84, b = 62) +our_grey <- rgb2hex(r = 92, g = 96, b = 95) +our_white <- rgb2hex(r = 255, g = 255, b = 255) +``` + +# Load area of interest + +``` r +# Read the shapefile into an sf object +aoi_total <- st_read("/Users/ty/Documents/Github/Southern_California_Edison_Fire_Risk/SCE_Fire_Zone_V2/SCE_Fire_Zone_V2.shp") %>% + st_as_sf() +``` + + Reading layer `SCE_Fire_Zone_V2' from data source + `/Users/ty/Documents/Github/Southern_California_Edison_Fire_Risk/SCE_Fire_Zone_V2/SCE_Fire_Zone_V2.shp' + using driver `ESRI Shapefile' + Simple feature collection with 12 features and 5 fields + Geometry type: POLYGON + Dimension: XY + Bounding box: xmin: 176062.4 ymin: 3674043 xmax: 764123.1 ymax: 4254012 + Projected CRS: NAD83 / UTM zone 11N + +``` r +# Plot the entire spatial dataset +plot(aoi_total) +``` + +![](Pull_Sentinal2_l2_data_files/figure-gfm/load_aoi_polygon-1.png) + +``` r +# Filter the dataset to obtain the geometry with OBJECTID 5 +aoi <- aoi_total %>% + filter(OBJECTID == 5) + +# Obtain and plot the bounding box of the filtered geometry +shape_bbox <- st_bbox(aoi) +plot(aoi) +``` + +![](Pull_Sentinal2_l2_data_files/figure-gfm/load_aoi_polygon-2.png) + +``` r +# Transform the filtered geometry to EPSG:4326 and store its bounding box +aoi %>% st_transform("EPSG:4326") %>% + st_bbox() -> bbox_4326 + +# Transform the filtered geometry to EPSG:32618 and store its bounding box +aoi %>% st_transform("EPSG:32618") %>% + st_bbox() -> bbox_32618 +``` + +# Arrange STAC collection + +In this code chunk, the primary goal is to search for and obtain +satellite imagery data. The data source being tapped into is a +SpatioTemporal Asset Catalog (STAC) provided by an online service +(earth-search by Element84). Here’s a breakdown: + +A connection is established with the STAC service, searching +specifically within the “sentinel-s2-l2a-cogs” collection. -The search +is spatially constrained to a bounding box (bbox_4326) and temporally +limited to a range of one day, between May 15 and May 16, 2021. -Once +the search is conducted, the desired assets or spectral bands from the +returned satellite images are defined, ranging from Band 1 (B01) to Band +12 (B12) and including the Scene Classification Layer (SCL). -These +bands are then organized into an image collection for further processing +or analysis. 
+ +``` r +# Initialize STAC connection +s = stac("https://earth-search.aws.element84.com/v0") + +# Search for Sentinel-2 images within specified bounding box and date range +items = s %>% + stac_search(collections = "sentinel-s2-l2a-cogs", + bbox = c(bbox_4326["xmin"], + bbox_4326["ymin"], + bbox_4326["xmax"], + bbox_4326["ymax"]), + datetime = "2021-05-15/2021-05-16") %>% + post_request() %>% + items_fetch(progress = FALSE) + +# Print number of found items +length(items$features) +``` + + [1] 12 + +``` r +# Prepare the assets for analysis +library(gdalcubes) +assets = c("B01", "B02", "B03", "B04", "B05", "B06", + "B07", + "B08", "B8A", "B09", "B11", "B12", "SCL") +s2_collection = stac_image_collection(items$features, asset_names = assets) + +# Display the image collection +s2_collection +``` + + Image collection object, referencing 12 images with 13 bands + Images: + name left top bottom right + 1 S2B_11SNS_20210515_1_L2A -117.0002 33.43957 32.44372 -115.8191 + 2 S2B_11SPS_20210515_1_L2A -115.9361 33.43490 32.42937 -114.7436 + 3 S2B_11SQS_20210515_0_L2A -114.8732 33.42092 32.41918 -113.9566 + 4 S2B_12STB_20210515_0_L2A -114.2244 33.40433 32.61015 -113.9559 + 5 S2B_11SNT_20210515_0_L2A -117.0002 34.34164 33.34577 -115.8066 + 6 S2B_11SPT_20210515_0_L2A -115.9253 34.33683 33.33091 -114.7198 + datetime srs + 1 2021-05-15T18:35:13 EPSG:32611 + 2 2021-05-15T18:35:10 EPSG:32611 + 3 2021-05-15T18:35:06 EPSG:32611 + 4 2021-05-15T18:35:01 EPSG:32612 + 5 2021-05-15T18:34:59 EPSG:32611 + 6 2021-05-15T18:34:55 EPSG:32611 + [ omitted 6 images ] + + Bands: + name offset scale unit nodata image_count + 1 B01 0 1 12 + 2 B02 0 1 12 + 3 B03 0 1 12 + 4 B04 0 1 12 + 5 B05 0 1 12 + 6 B06 0 1 12 + 7 B07 0 1 12 + 8 B08 0 1 12 + 9 B09 0 1 12 + 10 B11 0 1 12 + 11 B12 0 1 12 + 12 B8A 0 1 12 + 13 SCL 0 1 12 + +# Define view window + +In this code chunk, a ‘view’ on the previously obtained satellite image +collection is being defined. Think of this as setting up a specific lens +or perspective to look at the satellite data: + +-The view is set to the coordinate reference system EPSG:32618. -Spatial +resolution is defined as 100x100 meters. -Temporal resolution is defined +monthly (P1M), even though the actual range is only one day. -When there +are multiple values in a grid cell or timeframe, they are aggregated +using the median value. -If any resampling is needed, the nearest +neighbor method is used (near). -The spatial and temporal extents are +constrained to specific values. -By defining this view, it allows for +consistent analysis and visualization of the image collection within the +specified spatial and temporal resolutions and extents. 
+ +``` r +# Define a specific view on the satellite image collection +v = cube_view( + srs = "EPSG:32618", + dx = 100, + dy = 100, + dt = "P1M", + aggregation = "median", + resampling = "near", + extent = list( + t0 = "2021-05-15", + t1 = "2021-05-16", + left = bbox_32618["xmin"], + right = bbox_32618["xmax"], + top = bbox_32618["ymax"], + bottom = bbox_32618["ymin"] + ) +) + +# Display the defined view +v +``` + + A data cube view object + + Dimensions: + low high count pixel_size + t 2021-05-01 2021-05-31 1 P1M + y 4471226.41402451 4741326.41402451 2701 100 + x -3463720.00044994 -3191420.00044994 2723 100 + + SRS: "EPSG:32618" + Temporal aggregation method: "median" + Spatial resampling method: "near" + +# Pull data + +In this chunk, the primary aim is to transform and prepare satellite +imagery data for analysis: + +-The current time is stored in variable a for tracking the time taken by +the process. -The previously defined ‘view’ on the satellite imagery, v, +is used to create a raster cube, a multi-dimensional array containing +the satellite data. This raster cube contains spatial, spectral, and +temporal data. -The desired spectral bands are selected. -The data is +limited to a specific area of interest, aoi. -The band names are renamed +to their respective wavelengths in nanometers for clarity. -A subset of +the data, comprising 50,000 random samples, is selected. -Unwanted +columns are removed, and the dataset is transformed into a long format, +where each row represents a particular date and wavelength combination. +-The entire process duration is computed by taking the difference +between the end time (b) and the start time (a). -The transformed +dataset y is then displayed. + +``` r +# Record start time +a <- Sys.time() + +# Transform the satellite image collection into a raster cube +x <- s2_collection %>% + raster_cube(v) %>% + select_bands(c("B01", "B02", "B03", "B04", + "B05", "B06", "B07", "B08", + "B8A", "B09", "B11", "B12")) %>% + extract_geom(aoi) %>% + rename( + "time" = "time", + "443" = "B01", + "490" = "B02", + "560" = "B03", + "665" = "B04", + "705" = "B05", + "740" = "B06", + "783" = "B07", + "842" = "B08", + "865" = "B8A", + "940" = "B09", + "1610" = "B11", + "2190" = "B12" + ) + +# Sample, transform and prepare data for analysis +y <- x %>% + slice_sample(n = 50000) %>% + select(-FID) %>% + pivot_longer(!time, names_to = "wavelength_nm", values_to = "reflectance") %>% + mutate(wavelength_nm = as.numeric(wavelength_nm)) + +# Record end time and compute duration +b <- Sys.time() +processing_time <- difftime(b, a) + +# Display the processing time and transformed dataset +processing_time +``` + + Time difference of 1.23593 mins + +``` r +y +``` + + # A tibble: 600,000 × 3 + time wavelength_nm reflectance + + 1 2021-05-01 443 1855 + 2 2021-05-01 490 2255 + 3 2021-05-01 560 2884 + 4 2021-05-01 665 3711 + 5 2021-05-01 705 3990 + 6 2021-05-01 740 4009 + 7 2021-05-01 783 4078 + 8 2021-05-01 842 4219 + 9 2021-05-01 865 4060 + 10 2021-05-01 940 4120 + # ℹ 599,990 more rows + +# Base plot + +``` r +# Set custom colors for the plot +our_green <- "#4CAF50" +our_white <- "#FFFFFF" +our_yellow <- "#FFEB3B" + +# Create a 2D density plot +day_density <- ggplot(data = y, aes(x = wavelength_nm, y = reflectance, group = time)) + + stat_smooth(color = our_green, fill = "lightgrey") + + geom_density2d(colour = "black", bins = 10, alpha = 0.1) + + stat_density2d(aes(alpha = ..level.., fill = ..level..), + linewidth = 2, bins = 10, geom = "polygon") + + scale_fill_gradient(low = 
our_white, high = our_yellow) + + scale_alpha(range = c(0.00, 0.8), guide = FALSE) + + theme_tufte() + + xlab("wavelength") + + ylab("reflectance") + + ylim(0, 16000) + + theme( + aspect.ratio = 5/14, + axis.text.x = element_text(angle = 90, vjust = 0.5, hjust = 1, + colour = c("darkblue", "blue", "green", "red", + "darkred", "darkred", "darkred", "darkred", + "darkred", "black", "black", "black", "black")), + axis.title.x = element_blank(), + axis.title.y = element_blank(), + plot.margin = margin(t = 30, r = 10, b = 40, l = 18) + ) + + scale_x_continuous(breaks = c(443, 490, 560, 665, 705, 740, 783, 842, 865, 940, 1610, 2190)) + +# Display the plot +day_density +``` + +![](Pull_Sentinal2_l2_data_files/figure-gfm/base_plot-1.png) + +# Inlay 1 - geographic zone + +``` r +guide_map <- ggplot(data= aoi_total) + + geom_sf(fill=our_yellow, color=our_white) + + geom_sf(data= aoi, fill=our_green, color=our_white) + + theme_tufte()+ + ggtitle("Zone 5")+ + theme(axis.text.x=element_blank(), #remove x axis labels + axis.ticks.x=element_blank(), #remove x axis ticks + axis.text.y=element_blank(), #remove y axis labels + axis.ticks.y=element_blank() #remove y axis ticks, bg=none + )+ theme(plot.title = element_text(hjust=0.8, vjust = -2)) +guide_map +``` + +![](Pull_Sentinal2_l2_data_files/figure-gfm/aoi_inlay-1.png) + +# Inlay 2 - date text + +``` r +library(geosphere) +aoi_total |> st_centroid() |> st_transform(crs="+proj=longlat") |> st_coordinates() |> colMeans() -> lat_long + +daylength_line <- daylength(lat = lat_long[2], 1:365) + +daylengths <- data.frame(time= 1:365, daylength = daylength_line) + +library(lubridate) + +# Create a template date object +date <- as.POSIXlt("2021-05-15") + +doy <- format(date, format = "%j") |> as.numeric() + +display_date <- format(date, format="%e %B %Y ") +``` + +# Inlay 3 - daylength + +``` r +date_inlay <- ggplot(data=daylengths) + + + ggtitle("Daylength")+ + geom_ribbon(aes(x=time, ymin=daylength, ymax=15), fill=our_grey, alpha=0.5) + + geom_ribbon(aes(x=time, ymax=daylength, ymin=9), fill=our_yellow, alpha=1) + + geom_hline(yintercept=12, color=our_white) + + geom_vline(xintercept=doy, color=our_green, size=1) + + theme_tufte() + + ylim(9,15) + + theme(axis.text.y=element_blank(), + axis.ticks.y=element_blank(), + axis.title.y=element_blank(), + axis.title.x=element_blank(), + axis.text.x=element_blank(), + axis.ticks.x=element_blank()) + theme(plot.title = element_text(hjust=0.5, vjust = 0)) +date_inlay +``` + +![](Pull_Sentinal2_l2_data_files/figure-gfm/date_inlay-1.png) + +# Ensemble map assembly + +``` r +library(cowplot) +library(magick) +map_overlay <- ggdraw(day_density) + + draw_plot(guide_map, x = 1.08, y = 1, hjust = 1, vjust = 1, width = 0.3, height = 0.3)+ + draw_plot(date_inlay, x = 1, y = 0.35, hjust = 1, vjust = 1, width = 0.1, height = 0.25)+ + geom_text(aes(x=1, y=0.08, label=display_date, hjust = 1), color=our_grey, cex=3, fontface='bold') + + # draw_image("Ty_powerline_plots/Southern_California_Edison_Logo.png", x = -0.24, y = 0.38, scale=.3)+ + # draw_image("Ty_powerline_plots/earthlab_logo.png", x = -0.38, y = 0.38, scale=.25)+ + geom_text(aes(x=0.4, y=.9, label="Spectral library - Monthly average"), color=our_green, hjust = 0, cex=8, fontface='bold') + + geom_text(aes(x=0.01, y=.04, + label="Created by ESIIL (T. Tuff) for Fall Hackathon -- October 2023. 
Sentinel 2 Data from 'https://earth-search.aws.element84.com/v0'"), color=our_grey, hjust = 0, cex=3) + +geom_text(aes(x=0.4, y=.1, label="wavelength (nm)"), color=our_grey, hjust = 0, cex=4, fontface='bold') + +geom_text(aes(x=0.01, y=.5,angle = 90, label="reflectance"), color=our_grey, hjust = 0, cex=4, fontface='bold') +map_overlay +``` + +![](Pull_Sentinal2_l2_data_files/figure-gfm/map_assembly-1.png) + +# Save map + +``` r +ggsave(map_overlay, file="day_density_15_May_2021_zone_5.png", bg="white", dpi = 600, width = 12, + height = 5) +``` + +# End timer + +``` r +end <- Sys.time() +difftime(end,start) +``` + + Time difference of 3.2202 mins diff --git a/data-library/Pull_Sentinal2_l2_data/index.html b/data-library/Pull_Sentinal2_l2_data/index.html new file mode 100644 index 0000000..bbf5879 --- /dev/null +++ b/data-library/Pull_Sentinal2_l2_data/index.html @@ -0,0 +1,1745 @@ + + + + + + + + + + + + + + + + + + + + + + Pulling Sentinal 2 data - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Pulling Sentinel 2 data

    +

Ty Tuff, ESIIL Data Scientist, 2023-10-27

    +

    Set Java Options

    +
    # Run these Java options before anything else.
    +options(java.parameters = "-Xmx64G")
    +options(timeout = max(600, getOption("timeout")))
    +
    +

R libraries and global settings.

    +
    #library(Rcpp)
    +library(sf)
    +library(gdalcubes)
    +library(rstac)
    +library(gdalUtils)
    +library(terra)
    +library(rgdal)
    +library(reshape2)
    +library(osmdata)
    +library(terra)
    +library(dplyr)
    +#library(glue)
    +library(stars)
    +library(ggplot2)
    +library(colorspace)
    +library(geos)
    +#library(glue)
    +library(osmdata)
    +library(ggthemes)
    +library(tidyr)
    +gdalcubes_options(parallel = 8)
    +
    +sf::sf_extSoftVersion()
    +
    +
              GEOS           GDAL         proj.4 GDAL_with_GEOS     USE_PROJ_H 
    +      "3.11.0"        "3.5.3"        "9.1.0"         "true"         "true" 
    +          PROJ 
    +       "9.1.0"
    +
    +
    gdalcubes_gdal_has_geos()
    +
    +
    [1] TRUE
    +
    +

    Start timer

    +
    start <- Sys.time()
    +
    +

    Set color palette

    +
    library(ggtern)
    +our_yellow <- rgb2hex(r = 253, g = 201, b = 51)
    +our_green <- rgb2hex(r = 10, g = 84, b = 62)
    +our_grey <- rgb2hex(r = 92, g = 96, b = 95)
    +our_white <- rgb2hex(r = 255, g = 255, b = 255)
    +
    +

    Load area of interest

    +
    # Read the shapefile into an sf object
    +aoi_total <- st_read("/Users/ty/Documents/Github/Southern_California_Edison_Fire_Risk/SCE_Fire_Zone_V2/SCE_Fire_Zone_V2.shp") %>% 
    +  st_as_sf()
    +
    +
    Reading layer `SCE_Fire_Zone_V2' from data source 
    +  `/Users/ty/Documents/Github/Southern_California_Edison_Fire_Risk/SCE_Fire_Zone_V2/SCE_Fire_Zone_V2.shp' 
    +  using driver `ESRI Shapefile'
    +Simple feature collection with 12 features and 5 fields
    +Geometry type: POLYGON
    +Dimension:     XY
    +Bounding box:  xmin: 176062.4 ymin: 3674043 xmax: 764123.1 ymax: 4254012
    +Projected CRS: NAD83 / UTM zone 11N
    +
    +
    # Plot the entire spatial dataset
    +plot(aoi_total)
    +
    +

    +
    # Filter the dataset to obtain the geometry with OBJECTID 5
    +aoi <- aoi_total %>%
    +  filter(OBJECTID == 5)
    +
    +# Obtain and plot the bounding box of the filtered geometry
    +shape_bbox <- st_bbox(aoi)
    +plot(aoi)
    +
    +

    +
    # Transform the filtered geometry to EPSG:4326 and store its bounding box
    +aoi %>% st_transform("EPSG:4326") %>%
    +  st_bbox() -> bbox_4326
    +
    +# Transform the filtered geometry to EPSG:32618 and store its bounding box
    +aoi %>% st_transform("EPSG:32618") %>%
    +  st_bbox() -> bbox_32618
    +
    +

    Arrange STAC collection

    +

In this code chunk, the primary goal is to search for and obtain satellite imagery data. The data source being tapped into is a SpatioTemporal Asset Catalog (STAC) provided by an online service (earth-search by Element84). Here’s a breakdown:

    +

• A connection is established with the STAC service, searching specifically within the “sentinel-s2-l2a-cogs” collection.
• The search is spatially constrained to a bounding box (bbox_4326) and temporally limited to a single day, May 15 to May 16, 2021.
• Once the search is conducted, the desired assets (spectral bands) from the returned satellite images are defined, ranging from Band 1 (B01) to Band 12 (B12) and including the Scene Classification Layer (SCL).
• These bands are then organized into an image collection for further processing or analysis.

    +
    # Initialize STAC connection
    +s = stac("https://earth-search.aws.element84.com/v0")
    +
    +# Search for Sentinel-2 images within specified bounding box and date range
    +items = s %>%
    +    stac_search(collections = "sentinel-s2-l2a-cogs",
    +                bbox = c(bbox_4326["xmin"], 
    +                         bbox_4326["ymin"],
    +                         bbox_4326["xmax"], 
    +                         bbox_4326["ymax"]), 
    +                datetime = "2021-05-15/2021-05-16") %>%
    +    post_request() %>%
    +    items_fetch(progress = FALSE)
    +
    +# Print number of found items
    +length(items$features)
    +
    +
    [1] 12
    +
    +
    # Prepare the assets for analysis
    +library(gdalcubes)
    +assets = c("B01", "B02", "B03", "B04", "B05", "B06", 
    +           "B07", 
    +           "B08", "B8A", "B09", "B11", "B12", "SCL")
    +s2_collection = stac_image_collection(items$features, asset_names = assets)
    +
    +# Display the image collection
    +s2_collection
    +
    +
    Image collection object, referencing 12 images with 13 bands
    +Images:
    +                      name      left      top   bottom     right
    +1 S2B_11SNS_20210515_1_L2A -117.0002 33.43957 32.44372 -115.8191
    +2 S2B_11SPS_20210515_1_L2A -115.9361 33.43490 32.42937 -114.7436
    +3 S2B_11SQS_20210515_0_L2A -114.8732 33.42092 32.41918 -113.9566
    +4 S2B_12STB_20210515_0_L2A -114.2244 33.40433 32.61015 -113.9559
    +5 S2B_11SNT_20210515_0_L2A -117.0002 34.34164 33.34577 -115.8066
    +6 S2B_11SPT_20210515_0_L2A -115.9253 34.33683 33.33091 -114.7198
    +             datetime        srs
    +1 2021-05-15T18:35:13 EPSG:32611
    +2 2021-05-15T18:35:10 EPSG:32611
    +3 2021-05-15T18:35:06 EPSG:32611
    +4 2021-05-15T18:35:01 EPSG:32612
    +5 2021-05-15T18:34:59 EPSG:32611
    +6 2021-05-15T18:34:55 EPSG:32611
    +[ omitted 6 images ]
    +
    +Bands:
    +   name offset scale unit nodata image_count
    +1   B01      0     1                      12
    +2   B02      0     1                      12
    +3   B03      0     1                      12
    +4   B04      0     1                      12
    +5   B05      0     1                      12
    +6   B06      0     1                      12
    +7   B07      0     1                      12
    +8   B08      0     1                      12
    +9   B09      0     1                      12
    +10  B11      0     1                      12
    +11  B12      0     1                      12
    +12  B8A      0     1                      12
    +13  SCL      0     1                      12
    +
    +

    Define view window

    +

In this code chunk, a ‘view’ on the previously obtained satellite image collection is being defined. Think of this as setting up a specific lens or perspective to look at the satellite data:

    +

• The view is set to the coordinate reference system EPSG:32618.
• Spatial resolution is defined as 100 x 100 meters.
• Temporal resolution is defined as monthly (P1M), even though the actual range is only one day.
• When there are multiple values in a grid cell or time step, they are aggregated using the median value.
• If any resampling is needed, the nearest neighbor method (“near”) is used.
• The spatial and temporal extents are constrained to specific values.
Defining this view allows consistent analysis and visualization of the image collection within the specified spatial and temporal resolutions and extents.

    +
    # Define a specific view on the satellite image collection
    +v = cube_view(
    +    srs = "EPSG:32618", 
    +    dx = 100, 
    +    dy = 100, 
    +    dt = "P1M", 
    +    aggregation = "median", 
    +    resampling = "near",
    +    extent = list(
    +        t0 = "2021-05-15", 
    +        t1 = "2021-05-16",
    +        left = bbox_32618["xmin"], 
    +        right = bbox_32618["xmax"],
    +        top = bbox_32618["ymax"], 
    +        bottom = bbox_32618["ymin"]
    +    )
    +)
    +
    +# Display the defined view
    +v
    +
    +
    A data cube view object
    +
    +Dimensions:
    +                low              high count pixel_size
    +t        2021-05-01        2021-05-31     1        P1M
    +y  4471226.41402451  4741326.41402451  2701        100
    +x -3463720.00044994 -3191420.00044994  2723        100
    +
    +SRS: "EPSG:32618"
    +Temporal aggregation method: "median"
    +Spatial resampling method: "near"
    +
    +

    Pull data

    +

In this chunk, the primary aim is to transform and prepare satellite imagery data for analysis:

    +

• The current time is stored in variable a for tracking the time taken by the process.
• The previously defined ‘view’ on the satellite imagery, v, is used to create a raster cube, a multi-dimensional array containing the spatial, spectral, and temporal satellite data.
• The desired spectral bands are selected.
• The data is limited to a specific area of interest, aoi.
• The band names are renamed to their respective wavelengths in nanometers for clarity.
• A subset of the data, comprising 50,000 random samples, is selected.
• Unwanted columns are removed, and the dataset is transformed into a long format, where each row represents a particular date and wavelength combination.
• The entire process duration is computed as the difference between the end time (b) and the start time (a).
• The transformed dataset y is then displayed.

    +
    # Record start time
    +a <- Sys.time()
    +
    +# Transform the satellite image collection into a raster cube
    +x <- s2_collection %>%
    +    raster_cube(v) %>%
    +    select_bands(c("B01", "B02", "B03", "B04", 
    +                   "B05", "B06", "B07", "B08", 
    +                   "B8A", "B09", "B11", "B12")) %>%
    +    extract_geom(aoi) %>%
    +    rename(
    +        "time" = "time",
    +        "443" = "B01",
    +        "490" = "B02",
    +        "560" = "B03",
    +        "665" = "B04",
    +        "705" = "B05",
    +        "740" = "B06",
    +        "783" = "B07",
    +        "842" = "B08",
    +        "865" = "B8A",
    +        "940" = "B09",
    +        "1610" = "B11",
    +        "2190" = "B12"
    +    )
    +
    +# Sample, transform and prepare data for analysis
    +y <- x %>%
    +    slice_sample(n = 50000) %>%
    +    select(-FID) %>%
    +    pivot_longer(!time, names_to = "wavelength_nm", values_to = "reflectance") %>%
    +    mutate(wavelength_nm = as.numeric(wavelength_nm))
    +
    +# Record end time and compute duration
    +b <- Sys.time()
    +processing_time <- difftime(b, a)
    +
    +# Display the processing time and transformed dataset
    +processing_time
    +
    +
    Time difference of 1.23593 mins
    +
    +
    y
    +
    +
    # A tibble: 600,000 × 3
    +   time       wavelength_nm reflectance
    +   <chr>              <dbl>       <dbl>
    + 1 2021-05-01           443        1855
    + 2 2021-05-01           490        2255
    + 3 2021-05-01           560        2884
    + 4 2021-05-01           665        3711
    + 5 2021-05-01           705        3990
    + 6 2021-05-01           740        4009
    + 7 2021-05-01           783        4078
    + 8 2021-05-01           842        4219
    + 9 2021-05-01           865        4060
    +10 2021-05-01           940        4120
    +# ℹ 599,990 more rows
    +
    +

    Base plot

    +
    # Set custom colors for the plot
    +our_green <- "#4CAF50"
    +our_white <- "#FFFFFF"
    +our_yellow <- "#FFEB3B"
    +
    +# Create a 2D density plot
    +day_density <- ggplot(data = y, aes(x = wavelength_nm, y = reflectance, group = time)) + 
    +  stat_smooth(color = our_green, fill = "lightgrey") +
    +  geom_density2d(colour = "black", bins = 10, alpha = 0.1) +
    +  stat_density2d(aes(alpha = ..level.., fill = ..level..), 
    +                 linewidth = 2, bins = 10, geom = "polygon") + 
    +  scale_fill_gradient(low = our_white, high = our_yellow) +
    +  scale_alpha(range = c(0.00, 0.8), guide = FALSE) +
    +  theme_tufte() +
    +  xlab("wavelength") +
    +  ylab("reflectance") +
    +  ylim(0, 16000) +
    +  theme(
    +    aspect.ratio = 5/14, 
    +    axis.text.x = element_text(angle = 90, vjust = 0.5, hjust = 1, 
    +                               colour = c("darkblue", "blue", "green", "red", 
    +                                          "darkred", "darkred", "darkred", "darkred", 
    +                                          "darkred", "black", "black", "black", "black")),
    +    axis.title.x = element_blank(),
    +    axis.title.y = element_blank(),
    +    plot.margin = margin(t = 30, r = 10, b = 40, l = 18)
    +  ) +
    +  scale_x_continuous(breaks = c(443, 490, 560, 665, 705, 740, 783, 842, 865, 940, 1610, 2190))
    +
    +# Display the plot
    +day_density
    +
    +

    +

    Inlay 1 - geographic zone

    +
    guide_map <- ggplot(data= aoi_total) +
    +  geom_sf(fill=our_yellow, color=our_white) +
    +  geom_sf(data= aoi, fill=our_green, color=our_white) +
    +  theme_tufte()+
    +  ggtitle("Zone 5")+
    +  theme(axis.text.x=element_blank(), #remove x axis labels
    +        axis.ticks.x=element_blank(), #remove x axis ticks
    +        axis.text.y=element_blank(),  #remove y axis labels
    +        axis.ticks.y=element_blank()  #remove y axis ticks, bg=none
    +        )+ theme(plot.title = element_text(hjust=0.8, vjust = -2))
    +guide_map
    +
    +

    +

    Inlay 2 - date text

    +
    library(geosphere)
    +aoi_total |> st_centroid()  |> st_transform(crs="+proj=longlat") |> st_coordinates() |> colMeans() -> lat_long
    +
    +daylength_line <- daylength(lat = lat_long[2], 1:365)
    +
    +daylengths <- data.frame(time= 1:365, daylength = daylength_line)
    +
    +library(lubridate)
    +
    +# Create a template date object
    +date <- as.POSIXlt("2021-05-15")
    +
    +doy <- format(date, format = "%j") |> as.numeric()
    +
    +display_date <- format(date, format="%e %B %Y   ")
    +
    +

    Inlay 3 - daylength

    +
    date_inlay <- ggplot(data=daylengths) + 
    +
    +  ggtitle("Daylength")+
    +  geom_ribbon(aes(x=time, ymin=daylength, ymax=15), fill=our_grey, alpha=0.5) +
    +  geom_ribbon(aes(x=time, ymax=daylength, ymin=9), fill=our_yellow, alpha=1) +
    +  geom_hline(yintercept=12, color=our_white) +
    + geom_vline(xintercept=doy, color=our_green, size=1) +
    +  theme_tufte() +
    +  ylim(9,15) +
    +  theme(axis.text.y=element_blank(),
    +        axis.ticks.y=element_blank(),
    +        axis.title.y=element_blank(),
    +        axis.title.x=element_blank(),
    +        axis.text.x=element_blank(),
    +        axis.ticks.x=element_blank()) + theme(plot.title = element_text(hjust=0.5, vjust = 0))
    +date_inlay
    +
    +

    +

    Ensemble map assembly

    +
    library(cowplot)
    +library(magick)
    +map_overlay <- ggdraw(day_density) + 
    +  draw_plot(guide_map, x = 1.08, y = 1, hjust = 1, vjust = 1, width = 0.3, height = 0.3)+
    +  draw_plot(date_inlay, x = 1, y = 0.35, hjust = 1, vjust = 1, width = 0.1, height = 0.25)+
    +  geom_text(aes(x=1, y=0.08, label=display_date, hjust = 1), color=our_grey, cex=3, fontface='bold') +
    + # draw_image("Ty_powerline_plots/Southern_California_Edison_Logo.png", x = -0.24, y = 0.38, scale=.3)+
    + # draw_image("Ty_powerline_plots/earthlab_logo.png", x = -0.38, y = 0.38, scale=.25)+
    +  geom_text(aes(x=0.4, y=.9, label="Spectral library - Monthly average"), color=our_green, hjust = 0, cex=8, fontface='bold') +
    +  geom_text(aes(x=0.01, y=.04, 
    +    label="Created by ESIIL (T. Tuff) for Fall Hackathon -- October 2023. Sentinel 2 Data from 'https://earth-search.aws.element84.com/v0'"), color=our_grey, hjust  = 0, cex=3) +
    +geom_text(aes(x=0.4, y=.1, label="wavelength (nm)"), color=our_grey, hjust = 0, cex=4, fontface='bold') +
    +geom_text(aes(x=0.01, y=.5,angle = 90, label="reflectance"), color=our_grey, hjust = 0, cex=4, fontface='bold')
    +map_overlay
    +
    +

    +

    Save map

    +
    ggsave(map_overlay, file="day_density_15_May_2021_zone_5.png", bg="white", dpi = 600, width = 12,
    +  height = 5)
    +
    +

    End timer

    +
    end <- Sys.time()
    +difftime(end,start)
    +
    +
    Time difference of 3.2202 mins
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/data-library/disturbance-stack/disturbance-stack.md b/data-library/disturbance-stack/disturbance-stack.md new file mode 100644 index 0000000..ee75394 --- /dev/null +++ b/data-library/disturbance-stack/disturbance-stack.md @@ -0,0 +1,47 @@ +# Earth Lab Disturbance Stack derived from Landfire + +The CU Boulder Earth Lab has integrated annual (1999-2020) disturbance presence data from Landfire with a new index of hotter drought into an easily managed raster data stack. + +To accelerate your access to this dataset, the ESIIL team has made disturbance stack data for the Southern Rockies available on the Cyverse data store at the below directory: + +``` +~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/disturbance +``` + +The stack data is in two versions, full and simplified. + +The full version (dist_stack_Southern_Rockies.tif) has the below values: + +| Code | Landfire disturbance status | Hotter-drought status | +|------|-----------------------------|------------------------------------------------| +| 0 | none | no hotter-drought/fewer than 4 thresholds exceeded | +| 1 | fire | no hotter-drought/fewer than 4 thresholds exceeded | +| 2 | insect/disease | no hotter-drought/fewer than 4 thresholds exceeded | +| 3 | other Landfire disturbance | no hotter-drought/fewer than 4 thresholds exceeded | +| 4 | none | hotter-drought with 4 thresholds exceeded | +| 5 | fire | hotter-drought with 4 thresholds exceeded | +| 6 | insects/disease | hotter-drought with 4 thresholds exceeded | +| 7 | other Landfire disturbance | hotter-drought with 4 thresholds exceeded | +| 8 | none | hotter-drought with 5 thresholds exceeded | +| 9 | fire | hotter-drought with 5 thresholds exceeded | +| 10 | insects/disease | hotter-drought with 5 thresholds exceeded | +| 11 | other Landfire disturbance | hotter-drought with 5 thresholds exceeded | +| 12 | none | hotter-drought with 6 thresholds exceeded | +| 13 | fire | hotter-drought with 6 thresholds exceeded | +| 14 | insects/disease | hotter-drought with 6 thresholds exceeded | +| 15 | other Landfire disturbance | hotter-drought with 6 thresholds exceeded | + + +The simplified version (simple_dist_stack_Southern_Rockies.tif) has the below values, and only includes the most extreme hot drought: + +| Code | Landfire disturbance status | Hotter-drought status | +|------|-----------------------------|------------------------------------------------| +| 0 | none | no hotter-drought/fewer than 6 thresholds exceeded | +| 1 | fire | no hotter-drought/fewer than 6 thresholds exceeded | +| 2 | insect/disease | no hotter-drought/fewer than 6 thresholds exceeded | +| 3 | none | hotter-drought with 6 thresholds exceeded | +| 4 | fire | hotter-drought with 6 thresholds exceeded | +| 5 | insect/disease | hhotter-drought with 6 thresholds exceeded | + + +Additional MODIS data is best accessed via VSI or STAC. \ No newline at end of file diff --git a/data-library/disturbance-stack/index.html b/data-library/disturbance-stack/index.html new file mode 100644 index 0000000..f65b4f4 --- /dev/null +++ b/data-library/disturbance-stack/index.html @@ -0,0 +1,1490 @@ + + + + + + + + + + + + + + + + + + + + + + Earth Lab Disturbance Stack derived from Landfire - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Earth Lab Disturbance Stack derived from Landfire

    +

    The CU Boulder Earth Lab has integrated annual (1999-2020) disturbance presence data from Landfire with a new index of hotter drought into an easily managed raster data stack.

    +

    To accelerate your access to this dataset, the ESIIL team has made disturbance stack data for the Southern Rockies available on the Cyverse data store at the below directory:

    +
    ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/disturbance
    +
    +

    The stack data is in two versions, full and simplified.

    +

    The full version (dist_stack_Southern_Rockies.tif) has the below values:

| Code | Landfire disturbance status | Hotter-drought status |
|------|-----------------------------|------------------------------------------------|
| 0 | none | no hotter-drought/fewer than 4 thresholds exceeded |
| 1 | fire | no hotter-drought/fewer than 4 thresholds exceeded |
| 2 | insect/disease | no hotter-drought/fewer than 4 thresholds exceeded |
| 3 | other Landfire disturbance | no hotter-drought/fewer than 4 thresholds exceeded |
| 4 | none | hotter-drought with 4 thresholds exceeded |
| 5 | fire | hotter-drought with 4 thresholds exceeded |
| 6 | insects/disease | hotter-drought with 4 thresholds exceeded |
| 7 | other Landfire disturbance | hotter-drought with 4 thresholds exceeded |
| 8 | none | hotter-drought with 5 thresholds exceeded |
| 9 | fire | hotter-drought with 5 thresholds exceeded |
| 10 | insects/disease | hotter-drought with 5 thresholds exceeded |
| 11 | other Landfire disturbance | hotter-drought with 5 thresholds exceeded |
| 12 | none | hotter-drought with 6 thresholds exceeded |
| 13 | fire | hotter-drought with 6 thresholds exceeded |
| 14 | insects/disease | hotter-drought with 6 thresholds exceeded |
| 15 | other Landfire disturbance | hotter-drought with 6 thresholds exceeded |
    +

    The simplified version (simple_dist_stack_Southern_Rockies.tif) has the below values, and only includes the most extreme hot drought:

| Code | Landfire disturbance status | Hotter-drought status |
|------|-----------------------------|------------------------------------------------|
| 0 | none | no hotter-drought/fewer than 6 thresholds exceeded |
| 1 | fire | no hotter-drought/fewer than 6 thresholds exceeded |
| 2 | insect/disease | no hotter-drought/fewer than 6 thresholds exceeded |
| 3 | none | hotter-drought with 6 thresholds exceeded |
| 4 | fire | hotter-drought with 6 thresholds exceeded |
| 5 | insect/disease | hotter-drought with 6 thresholds exceeded |
    +

    Additional MODIS data is best accessed via VSI or STAC.
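For convenience, here is a minimal R sketch of how one might read the simplified stack and attach the class labels from the tables above. The Cyverse path and file name come from this page; reading the file directly with terra and the per-layer frequency tabulation are assumptions, not a prescribed workflow.

``` r
# Minimal sketch: load the simplified disturbance stack and label its codes.
# Assumes the Cyverse path and file name shown above and that terra can read the file directly.
library(terra)
library(dplyr)

dist_path <- "~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/disturbance/simple_dist_stack_Southern_Rockies.tif"
dist_stack <- terra::rast(dist_path)

# Labels for the simplified codes (0-5), taken from the table above
code_labels <- data.frame(
  value = 0:5,
  label = c("none / <6 thresholds", "fire / <6 thresholds", "insect-disease / <6 thresholds",
            "none / hotter-drought (6)", "fire / hotter-drought (6)", "insect-disease / hotter-drought (6)")
)

# Tabulate pixel counts per class for the first layer and attach the labels
terra::freq(dist_stack[[1]]) |>
  dplyr::left_join(code_labels, by = "value")
```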

    + + + + + + + + + + \ No newline at end of file diff --git a/data-library/drought/drought.md b/data-library/drought/drought.md new file mode 100644 index 0000000..ebb0b7d --- /dev/null +++ b/data-library/drought/drought.md @@ -0,0 +1,16 @@ +# Drought Indices + +There are a wide variety of drought indices and variables used to describe various forms of drought. This data is best accessed via VSI and STAC to enable climate data summarization at the desired temporal and spatial resolution. + +To accelerate your access to basic drought data, the ESIIL team has made annual averages of SPEI and PDSI for the Southern Rockies available on the Cyverse data store at the below directory: + +``` +~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Disturbance/drought +``` + +SPEI, or the Standardised Precipitation-Evapotranspiration Index, is a multiscalar drought index based on climatic data. It can be used for determining the onset, duration and magnitude of drought conditions with respect to normal conditions in a variety of natural and managed systems such as crops, ecosystems, rivers, water resources, etc. [An overview of SPEI is available here](https://spei.csic.es/). The pre-compiled datasets are at the 30 day, 1 year, and 5 year time scales and are from [the TerraClimate dataset](https://www.climatologylab.org/terraclimate.html). + +PDSI, or the Palmer Drought Severity Index, uses readily available temperature and precipitation data to estimate relative dryness. However, it is not multiscalar. [An overview of PDSI from NCAR is here](https://climatedataguide.ucar.edu/climate-data/palmer-drought-severity-index-pdsi). The pre-compiled dataset is from [the TerraClimate dataset](https://www.climatologylab.org/terraclimate.html). + + + diff --git a/data-library/drought/index.html b/data-library/drought/index.html new file mode 100644 index 0000000..256ab43 --- /dev/null +++ b/data-library/drought/index.html @@ -0,0 +1,1356 @@ + + + + + + + + + + + + + + + + + + + + + + Drought Indices - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Drought Indices

    +

A wide variety of drought indices and variables are used to describe various forms of drought. These data are best accessed via VSI and STAC to enable climate data summarization at the desired temporal and spatial resolution.

    +

    To accelerate your access to basic drought data, the ESIIL team has made annual averages of SPEI and PDSI for the Southern Rockies available on the Cyverse data store at the below directory:

    +
    ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Disturbance/drought
    +
    +

    SPEI, or the Standardised Precipitation-Evapotranspiration Index, is a multiscalar drought index based on climatic data. It can be used for determining the onset, duration and magnitude of drought conditions with respect to normal conditions in a variety of natural and managed systems such as crops, ecosystems, rivers, water resources, etc. An overview of SPEI is available here. The pre-compiled datasets are at the 30 day, 1 year, and 5 year time scales and are from the TerraClimate dataset.

    +

    PDSI, or the Palmer Drought Severity Index, uses readily available temperature and precipitation data to estimate relative dryness. However, it is not multiscalar. An overview of PDSI from NCAR is here. The pre-compiled dataset is from the TerraClimate dataset.
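As a starting point, the sketch below shows one way to load whatever pre-compiled SPEI and PDSI rasters sit in that directory using terra. The directory path comes from this page; the `.tif` pattern and the commented zonal-summary call are assumptions, since the exact file names and formats are not listed here.

``` r
# Minimal sketch: list and load the pre-compiled drought rasters.
# The ".tif" pattern is an assumption; adjust it if the files are stored in another format.
library(terra)

drought_dir <- "~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Disturbance/drought"
drought_files <- list.files(drought_dir, pattern = "\\.tif$", full.names = TRUE)

# Read each file as a SpatRaster and inspect its layer names (e.g., years)
drought_rasters <- lapply(drought_files, terra::rast)
names(drought_rasters) <- basename(drought_files)
lapply(drought_rasters, names)

# Example zonal summary over a polygon of interest (aoi, an sf object you supply):
# terra::extract(drought_rasters[[1]], terra::vect(aoi), fun = mean, na.rm = TRUE)
```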

    + + + + + + + + + + \ No newline at end of file diff --git a/data-library/epa-ecoregions/epa-ecoregions.md b/data-library/epa-ecoregions/epa-ecoregions.md new file mode 100644 index 0000000..5f763f4 --- /dev/null +++ b/data-library/epa-ecoregions/epa-ecoregions.md @@ -0,0 +1,70 @@ +# EPA Ecoregions + +EPA ecoregions are a convenient spatial framework for ecosystem regions used by the United States Environmental Protection Agency. Full details on EPA ecoregions [can be found here.](https://www.epa.gov/eco-research/ecoregions) + +A Roman numeral classification scheme has been adopted for different hierarchical levels of ecoregions, ranging from general regions to more detailed: + +- Level I - 12 ecoregions in the continental U.S. +- Level II - 25 ecoregions in the continental U.S. +- Level III -105 ecoregions in the continental U.S. +- Level IV - 967 ecoregions in the conterminous U.S. + +Instructions for accessing spatial EPA ecoregion data can be found in the script code/create-data-library/access_epa_ecoregions.R. The script is also copied below: + +``` r +# This brief script demonstrates how to access level 3 and 4 EPA ecoregions for North America. +# Directly accessing the files via VSI is recommended, as this uses cloud-hosted data. +# A version for downloading the zipped files is also provided in case for some reason you need the actual files. + +# ESIIL, February 2024 +# Tyler L. McIntosh + +####### ACCESS SHAPEFILES DIRECTLY VIA VSI ######### + +require(glue) +require(sf) + +epa_l3 <- glue::glue( + "/vsizip/vsicurl/", #magic remote connection + "https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip", #copied link to download location + "/us_eco_l3.shp") |> #path inside zip file + sf::st_read() +epa_l4 <- glue::glue( + "/vsizip/vsicurl/", #magic remote connection + "https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l4.zip", #copied link to download location + "/us_eco_l4_no_st.shp") |> #path inside zip file + sf::st_read() + + + +######### DOWNLOAD ZIPPED DATA FILES ######### + +#Set up directory +directory <- "~/data/ecoregions" +if (!dir.exists(directory)) { + dir.create(directory) +} + +#Avoid download timeout +options(timeout = max(1000, getOption("timeout"))) + +#URLs for downloads +epaUrls <- c("https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip", + "https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l4.zip") +destFiles <- file.path(directory, basename(epaUrls)) + +#Download +mapply(FUN = function(url, destfile) {download.file(url = url, + destfile = destfile, + mode = "wb")}, + url = epaUrls, + destfile = destFiles) + +#Unzip downloaded files +mapply(FUN = function(destfile, exdir) {unzip(zipfile = destfile, + files = NULL, + exdir = exdir)}, + destfile = destFiles, + exdir = gsub(pattern = ".zip", replacement = "", x = destFiles)) + +``` \ No newline at end of file diff --git a/data-library/epa-ecoregions/index.html b/data-library/epa-ecoregions/index.html new file mode 100644 index 0000000..07436c3 --- /dev/null +++ b/data-library/epa-ecoregions/index.html @@ -0,0 +1,1414 @@ + + + + + + + + + + + + + + + + + + + + + + EPA Ecoregions - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    EPA Ecoregions

    +

    EPA ecoregions are a convenient spatial framework for ecosystem regions used by the United States Environmental Protection Agency. Full details on EPA ecoregions can be found here.

    +

A Roman numeral classification scheme has been adopted for the hierarchical levels of ecoregions, ranging from general to more detailed regions:

    +
• Level I - 12 ecoregions in the continental U.S.
• Level II - 25 ecoregions in the continental U.S.
• Level III - 105 ecoregions in the continental U.S.
• Level IV - 967 ecoregions in the conterminous U.S.
    +

    Instructions for accessing spatial EPA ecoregion data can be found in the script code/create-data-library/access_epa_ecoregions.R. The script is also copied below:

    +
    # This brief script demonstrates how to access level 3 and 4 EPA ecoregions for North America.
    +# Directly accessing the files via VSI is recommended, as this uses cloud-hosted data.
    +# A version for downloading the zipped files is also provided in case for some reason you need the actual files.
    +
    +# ESIIL, February 2024
    +# Tyler L. McIntosh
    +
    +####### ACCESS SHAPEFILES DIRECTLY VIA VSI #########
    +
    +require(glue)
    +require(sf)
    +
    +epa_l3 <- glue::glue(
    +  "/vsizip/vsicurl/", #magic remote connection
    +  "https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip", #copied link to download location
    +  "/us_eco_l3.shp") |> #path inside zip file
    +  sf::st_read()
    +epa_l4 <- glue::glue(
    +  "/vsizip/vsicurl/", #magic remote connection
    +  "https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l4.zip", #copied link to download location
    +  "/us_eco_l4_no_st.shp") |> #path inside zip file
    +  sf::st_read()
    +
    +
    +
    +######### DOWNLOAD ZIPPED DATA FILES #########
    +
    +#Set up directory
    +directory <- "~/data/ecoregions"
    +if (!dir.exists(directory)) {
    +  dir.create(directory)
    +}
    +
    +#Avoid download timeout
    +options(timeout = max(1000, getOption("timeout")))
    +
    +#URLs for downloads
    +epaUrls <- c("https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip",
    +             "https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l4.zip")
    +destFiles <- file.path(directory, basename(epaUrls))
    +
    +#Download
    +mapply(FUN = function(url, destfile) {download.file(url = url,
    +                                                    destfile = destfile,
    +                                                    mode = "wb")},
    +       url = epaUrls,
    +       destfile = destFiles)
    +
    +#Unzip downloaded files
    +mapply(FUN = function(destfile, exdir) {unzip(zipfile = destfile,
    +                                              files = NULL,
    +                                              exdir = exdir)},
    +       destfile = destFiles,
    +       exdir = gsub(pattern = ".zip", replacement = "", x = destFiles))
    +
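A quick usage example of the VSI approach above: once `epa_l3` is loaded, a single ecoregion can be pulled out and mapped. The `US_L3NAME` attribute name is an assumption; check `names(epa_l3)` if your copy of the shapefile differs.

``` r
# Filter the level 3 ecoregions to one region and plot it (sketch, not part of the original script)
library(dplyr)
library(ggplot2)

southern_rockies <- epa_l3 |>
  dplyr::filter(US_L3NAME == "Southern Rockies")  # US_L3NAME is assumed; verify with names(epa_l3)

ggplot(southern_rockies) +
  geom_sf(fill = "darkgreen", color = "white") +
  theme_minimal()
```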
    + + + + + + + + + + \ No newline at end of file diff --git a/data-library/esiil-data-library/esiil-data-library.md b/data-library/esiil-data-library/esiil-data-library.md new file mode 100644 index 0000000..600ee24 --- /dev/null +++ b/data-library/esiil-data-library/esiil-data-library.md @@ -0,0 +1,78 @@ +# ESIIL Data Libraries + +ESIIL has compiled additional data libraries for use at summits and hackathons. Link to those data libraries are available here, along with a summary of their current contents (February 2024). + +## General ESIIL Data Library + +[Our data library](https://cu-esiil.github.io/data-library/) features a diverse range of datasets, each with its own dedicated web page. To help you get started, we provide easy-to-use R and Python code snippets for downloading and working with each dataset. For more advanced users, we also offer comprehensive tutorials and vignettes tailored to individual datasets. Explore our rich collection and unlock the power of environmental data for your research today! + +### Data Contents +- EDS in Indian Country + - Global native homelands + - USA federal tribal reservations + - All types of tribal land in USA +- Solving water + - NEON Aquatic instrument data + - EPA water quality + - USGS water services +- Data librarianship + - Public libraries survey +- Cutting-edge remote sensing + - NEON hyperspectral data + - Lidar-based canopy height + - Multispectral sentinel-2 on AWS +- Nature-based solutions and human development + - Open street map +- Extreme events and hazards + - EPA air quality data + - Fire Event Delineation (FIRED) + - US National Incident Management System + - Uranium mines +- Spatial scale + - Spatial occurrence as points +- Ecological forecasting + - National Ecological Observation Network (NEON) + - USA phenology network + - Forecasting NEON data + - NEON lidar after fire +- Data harmonization + - Data cataloged with publications + - NEON and LTER + - NEON lidar and organismal data +- Food supply + - UN Food and Agriculture +- Social justice + - Redlining + - Congressional voting +- Data science in decision making and policy + - US Census + - FDIC failed banks list +- AI in environmental data science + - WeatherBench +- Math, modeling, statistics + - NEON tick pathogen data + - Everglades food network + - Mammal primate association network +- EDS education + - Education statistics + - Nonprofit explorer + +## MosAIc Data Library + +The data library from ESIIL's MosAIc Hackathon [is located here](https://cu-esiil.github.io/hackathon2023_datacube/). This data library contains some similar content to the general ESIIL data library, in addition to extra resources on cloud collaboration and AI. + +### Data Contents +- Flood event inventory +- Flood event area (polygons) +- River geography +- River and basin features +- NEON Lakes +- NEON Rivers +- EPA water quality +- USGS Water Services +- Global Species Occurrence +- NEON LIDAR +- NEON biogeochemistry +- Open Street Map +- US Census +- Remote sensing \ No newline at end of file diff --git a/data-library/esiil-data-library/index.html b/data-library/esiil-data-library/index.html new file mode 100644 index 0000000..3bfaff4 --- /dev/null +++ b/data-library/esiil-data-library/index.html @@ -0,0 +1,1424 @@ + + + + + + + + + + + + + + + + + + + + + + ESIIL Data Libraries - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    ESIIL Data Libraries

    +

ESIIL has compiled additional data libraries for use at summits and hackathons. Links to those data libraries are available here, along with a summary of their current contents (February 2024).

    +

    General ESIIL Data Library

    +

    Our data library features a diverse range of datasets, each with its own dedicated web page. To help you get started, we provide easy-to-use R and Python code snippets for downloading and working with each dataset. For more advanced users, we also offer comprehensive tutorials and vignettes tailored to individual datasets. Explore our rich collection and unlock the power of environmental data for your research today!

    +

    Data Contents

    +
• EDS in Indian Country
    • Global native homelands
    • USA federal tribal reservations
    • All types of tribal land in USA
• Solving water
    • NEON Aquatic instrument data
    • EPA water quality
    • USGS water services
• Data librarianship
    • Public libraries survey
• Cutting-edge remote sensing
    • NEON hyperspectral data
    • Lidar-based canopy height
    • Multispectral sentinel-2 on AWS
• Nature-based solutions and human development
    • Open street map
• Extreme events and hazards
    • EPA air quality data
    • Fire Event Delineation (FIRED)
    • US National Incident Management System
    • Uranium mines
• Spatial scale
    • Spatial occurrence as points
• Ecological forecasting
    • National Ecological Observation Network (NEON)
    • USA phenology network
    • Forecasting NEON data
    • NEON lidar after fire
• Data harmonization
    • Data cataloged with publications
    • NEON and LTER
    • NEON lidar and organismal data
• Food supply
    • UN Food and Agriculture
• Social justice
    • Redlining
    • Congressional voting
• Data science in decision making and policy
    • US Census
    • FDIC failed banks list
• AI in environmental data science
    • WeatherBench
• Math, modeling, statistics
    • NEON tick pathogen data
    • Everglades food network
    • Mammal primate association network
• EDS education
    • Education statistics
    • Nonprofit explorer
    +

    MosAIc Data Library

    +

    The data library from ESIIL's MosAIc Hackathon is located here. This data library contains some similar content to the general ESIIL data library, in addition to extra resources on cloud collaboration and AI.

    +

    Data Contents

    +
• Flood event inventory
• Flood event area (polygons)
• River geography
• River and basin features
• NEON Lakes
• NEON Rivers
• EPA water quality
• USGS Water Services
• Global Species Occurrence
• NEON LIDAR
• NEON biogeochemistry
• Open Street Map
• US Census
• Remote sensing
    + + + + + + + + + + \ No newline at end of file diff --git a/data-library/fia/fia.md b/data-library/fia/fia.md new file mode 100644 index 0000000..58645f9 --- /dev/null +++ b/data-library/fia/fia.md @@ -0,0 +1,240 @@ +# Forest Inventory and Analysis Database (FIA or FIADB) + +## Database description + +The Forest Inventory and Analysis (FIA) program of the USDA Forest Service Research and Development Branch collects, processes, analyzes, and reports on data necessary for assessing the extent and condition of forest resources in the United States. + +This data is collected at the plot level across the US, and includes information such as tree quantity and identifications, downed woody materials, tree regeneration, and more. If you are looking for spatially continuous data, TreeMap is a data product derived from FIA data and uses machine learning algorithms to assign each forested pixel across the US with the id of the FIA plot that best matches it. + +[This is an overview of the FIA program.](https://www.fs.usda.gov/research/programs/fia) + +[This is the most recent user guide for the FIADB.](https://www.fs.usda.gov/research/understory/forest-inventory-and-analysis-database-user-guide-phase-2) + +## Prepared data access functions + +FIA data is available from the [FIA DataMart](https://apps.fs.usda.gov/fia/datamart/datamart.html). + +Two R functions have been prepared for your use in downloading FIA data directly to your cloud instance. Those functions can be found at code/create-data-library/download_fia.R + +The functions are also copied here: + +``` r +# This script contains functions to download both individual +# FIA data csv files as well as bulk download data types. The two key functions +# described are fia_download_individual_data_files and fia_bulk_download_data_files + +# ESIIL, February 2024 +# Tyler L. McIntosh + +options(timeout = 300) + +################################ +# DOWNLOAD INDIVIDUAL FIA DATASETS +# +# This function will download individual FIA datasets requested and return the filenames +# It will create a new subdirectory for the files, "fia_individual_data_files". +# If you want to bulk download data by type, use function fia_bulk_download_data_files +# Note that you may want to change your environment's download timeout option to allow longer downloads +# (e.g. options(timeout = 300)) +# +#### PARAMETERS #### +# state_abbreviations : a vector of state abbreviations as strings (e.g. c("CO", "WY", "NM")) +# file_suffixes : a vector of data file oracle table names (e.g. 
c("DWM_VISIT", "COUNTY") from https://www.fs.usda.gov/research/understory/forest-inventory-and-analysis-database-user-guide-phase-2 +# directory : the directory in which to store the data (a new subdirectory will be created for the new files) +# +#### Example call to the function and read of the data #### +# downloaded_files <- fia_download_individual_data_files( +# state_abbreviations = c("CO"), +# file_suffixes = c("DWM_VISIT", "COUNTY"), +# directory = "~/data") +# data_list <- downloaded_files |> lapply(readr::read_csv) +# names(data_list) <- basename(downloaded_files) +# +fia_download_individual_data_files <- function(state_abbreviations, file_suffixes, directory) { + + #Ensure directory exists + if (!dir.exists(directory)) { + dir.create(directory) + } + + base_url <- "https://apps.fs.usda.gov/fia/datamart/CSV/" + + # Define the subdirectory path + subdirectory_path <- file.path(directory, "fia_individual_data_files") + + # Create the subdirectory if it does not exist + if (!dir.exists(subdirectory_path)) { + dir.create(subdirectory_path, recursive = TRUE) + } + + downloaded_files <- c() # Initialize an empty vector to store downloaded filenames + + for (state in state_abbreviations) { + for (suffix in file_suffixes) { + # Replace underscores with spaces to match the naming convention in the URL + url_suffix <- gsub("_", " ", suffix) + url_suffix <- gsub(" ", "_", toupper(url_suffix)) # URL seems to be uppercase + + # Construct the URL and filename using the subdirectory path + url <- paste0(base_url, state, "_", url_suffix, ".csv") + filename <- paste0(subdirectory_path, "/", state, "_", suffix, ".csv") + + # Attempt to download the file + tryCatch({ + download.file(url, destfile = filename, mode = "wb") + downloaded_files <- c(downloaded_files, filename) # Add the filename to the vector + message("Downloaded ", filename) + }, error = function(e) { + message("Failed to download ", url, ": ", e$message) + }) + } + } + + return(downloaded_files) # Return the vector of downloaded filenames +} + + +################################ +# BULK DOWNLOAD FIA DATASETS +# +# This function will bulk download FIA datasets requested into associated subdirectories and return the filenames +# as a named list of vectors, where each vector contains the files included in that bulk data set. +# All bulk data subdirectories will be put into a directory called 'fia_bulk_data_files' +# Note that you may want to change your environment's download timeout option to allow longer downloads +# (e.g. options(timeout = 300)) +# +#### PARAMETERS #### +# state_abbreviations : a vector of state abbreviations as strings (e.g. c("CO", "WY", "NM")) +# directory : the directory in which to store the data +# bulk_data_types : a vector of bulk download mappings as strings (e.g. 
c("location level", "plot")) +# Available data mappings are: + # "location level" + # "tree level" + # "invasives and understory vegetation" + # "down woody material" + # "tree regeneration" + # "ground cover" + # "soils" + # "population" + # "plot" + # "reference" +# Full descriptions of each of these data mappings can be found at the FIA user guide, +# with each mapping associated with a different chapter of tables: +# https://www.fs.usda.gov/research/understory/forest-inventory-and-analysis-database-user-guide-phase-2 +# +#### Example call to the function for multiple bulk data types and read in the data #### +# downloaded_files <- fia_bulk_download_data_files( +# state = c("CO"), +# directory = "~/data", +# bulk_data_types = c("down woody material", "plot") +# ) +# data_list_dwm <- downloaded_files$`down woody material`|> lapply(readr::read_csv) +# names(data_list_dwm) <- basename(downloaded_files$`down woody material`) +# +fia_bulk_download_data_files <- function(state, directory, bulk_data_types) { + + #Ensure directory exists + if (!dir.exists(directory)) { + dir.create(directory) + } + + # Map bulk data types to their corresponding file suffixes + bulk_data_mappings <- list( + "down woody material" = c( + "DWM_VISIT", "DWM_COARSE_WOODY_DEBRIS", "DWM_DUFF_LITTER_FUEL", + "DWM_FINE_WOODY_DEBRIS", "DWM_MICROPLOT_FUEL", "DWM_RESIDUAL_PILE", + "DWM_TRANSECT_SEGMENT", "COND_DWM_CALC" + ), + "location level" = c( + "SURVEY", "PROJECT", "COUNTY", "PLOT", "COND", + "SUBPLOT", "SUBP_COND", + #"BOUNDARY", + "SUBP_COND_CHNG_MTRX" + ), + "tree level" = c( + "TREE", "WOODLAND_STEMS", "GRM_COMPONENT", + "GRM_THRESHOLD", "GRM_MIDPT", "GRM_BEGIN", + "GRM_ESTN", "BEGINEND", "SEEDLING", "SITETREE" + ), + "invasives and understory vegetation" = c( + "INVASIVE_SUBPLOT_SPP", "P2VEG_SUBPLOT_SPP", "P2VEG_SUBP_STRUCTURE" + ), + "tree regeneration" = c( + "PLOT_REGEN", "SUBPLOT_REGEN", "SEEDLING_REGEN" + ), + "ground cover" = c( + "GRND_CVR", "GRND_LYR_FNCTL_GRP", "GRND_LYR_MICROQUAD" + ), + "soils" = c( + "SUBP_SOIL_SAMPLE_LOC", "SUBP_SOIL_SAMPLE_LAYER" + ), + "population" = c( + "POP_ESTN_UNIT", "POP_EVAL", "POP_EVAL_ATTRIBUTE", + "POP_EVAL_GRP", "POP_EVAL_TYP", "POP_PLOT_STRATUM_ASSGN", + "POP_STRATUM" + ), + "plot" = c( + "PLOTGEOM", "PLOTSNAP" + ), + "reference" = c( + "REF_POP_ATTRIBUTE", "REF_POP_EVAL_TYP_DESCR", "REF_FOREST_TYPE", + "REF_FOREST_TYPE_GROUP", "REF_SPECIES", "REF_PLANT_DICTIONARY", + "REF_SPECIES_GROUP", "REF_INVASIVE_SPECIES", "REF_HABTYP_DESCRIPTION", + "REF_HABTYP_PUBLICATION", "REF_CITATION", "REF_FIADB_VERSION", + "REF_STATE_ELEV", "REF_UNIT", "REF_RESEARCH_STATION", + "REF_NVCS_HIERARCHY_STRICT", "REF_NVCS_LEVEL_1_CODES", + "REF_NVCS_LEVEL_2_CODES", "REF_NVCS_LEVEL_3_CODES", + "REF_NVCS_LEVEL_4_CODES", "REF_NVCS_LEVEL_5_CODES", + "REF_NVCS_LEVEL_6_CODES", "REF_NVCS_LEVEL_7_CODES", + "REF_NVCS_LEVEL_8_CODES", "REF_AGENT", "REF_DAMAGE_AGENT", + "REF_DAMAGE_AGENT_GROUP", "REF_FVS_VAR_NAME", "REF_FVS_LOC_NAME", + "REF_OWNGRP_CD", "REF_DIFFERENCE_TEST_PER_ACRE", + "REF_DIFFERENCE_TEST_TOTALS", "REF_EQUATION_TABLE", "REF_SEQN", + "REF_GRM_TYPE", "REF_INTL_TO_DOYLE_FACTOR", "REF_TREE_CARBON_RATIO_DEAD", + "REF_TREE_DECAY_PROP", "REF_TREE_STAND_DEAD_CR_PROP", "REF_GRND_LYR" + ) + ) + # Initialize a named list to store the filenames for each bulk data type + all_downloaded_files <- setNames(vector("list", length(bulk_data_types)), bulk_data_types) + + # Define and create the main bulk data directory + main_bulk_dir <- file.path(directory, "fia_bulk_data_files") + if 
(!dir.exists(main_bulk_dir)) { + dir.create(main_bulk_dir, recursive = TRUE) + } + + # Loop through each bulk data type + for (bulk_data_type in bulk_data_types) { + # Check if the bulk data type is known + if (!bulk_data_type %in% names(bulk_data_mappings)) { + stop("Unknown bulk data type: ", bulk_data_type) + } + + # Create a subdirectory name by replacing spaces with underscores + subdirectory <- gsub(" ", "_", bulk_data_type) + subdirectory_path <- file.path(main_bulk_dir, subdirectory) + + # Create the subdirectory if it does not exist + if (!dir.exists(subdirectory_path)) { + dir.create(subdirectory_path, recursive = TRUE) + } + + # Retrieve the correct set of file suffixes for the current bulk data type + file_suffixes <- bulk_data_mappings[[bulk_data_type]] + + # Call the download function for each file suffix and save in the new subdirectory + downloaded_files <- download_data_files( + state_abbreviations = state, + file_suffixes = file_suffixes, + location = subdirectory_path + ) + + # Store the downloaded filenames in the named list under the current bulk data type + all_downloaded_files[[bulk_data_type]] <- downloaded_files + } + + # Return the named list of vectors with filenames + return(all_downloaded_files) +} + +``` \ No newline at end of file diff --git a/data-library/fia/index.html b/data-library/fia/index.html new file mode 100644 index 0000000..ce21de8 --- /dev/null +++ b/data-library/fia/index.html @@ -0,0 +1,1577 @@ + + + + + + + + + + + + + + + + + + + + + + Forest Inventory and Analysis Database (FIA or FIADB) - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Forest Inventory and Analysis Database (FIA or FIADB)

    +

    Database description

    +

    The Forest Inventory and Analysis (FIA) program of the USDA Forest Service Research and Development Branch collects, processes, analyzes, and reports on data necessary for assessing the extent and condition of forest resources in the United States.

    +

    These data are collected at the plot level across the US and include information such as tree counts and species identifications, downed woody material, tree regeneration, and more. If you are looking for spatially continuous data, TreeMap is a data product derived from FIA data that uses machine learning algorithms to assign each forested pixel across the US the ID of the FIA plot that best matches it.

    +

    This is an overview of the FIA program.

    +

    This is the most recent user guide for the FIADB.

    +

    Prepared data access functions

    +

    FIA data is available from the FIA DataMart.

    +

    Two R functions have been prepared for your use in downloading FIA data directly to your cloud instance. Those functions can be found at code/create-data-library/download_fia.R

    +

    The functions are also copied here:

    +
    # This script contains functions to download both individual
    +# FIA data csv files as well as bulk download data types. The two key functions
    +# described are fia_download_individual_data_files and fia_bulk_download_data_files
    +
    +# ESIIL, February 2024
    +# Tyler L. McIntosh
    +
    +options(timeout = 300)
    +
    +################################
    +# DOWNLOAD INDIVIDUAL FIA DATASETS
    +#
    +# This function will download individual FIA datasets requested and return the filenames
    +# It will create a new subdirectory for the files, "fia_individual_data_files".
    +# If you want to bulk download data by type, use function fia_bulk_download_data_files
    +# Note that you may want to change your environment's download timeout option to allow longer downloads
    +# (e.g. options(timeout = 300))
    +#
    +#### PARAMETERS ####
    +# state_abbreviations : a vector of state abbreviations as strings (e.g. c("CO", "WY", "NM"))
    +# file_suffixes : a vector of data file oracle table names (e.g. c("DWM_VISIT", "COUNTY") from https://www.fs.usda.gov/research/understory/forest-inventory-and-analysis-database-user-guide-phase-2
    +# directory : the directory in which to store the data (a new subdirectory will be created for the new files)
    +#
    +#### Example call to the function and read of the data ####
    +# downloaded_files <- fia_download_individual_data_files(
    +#   state_abbreviations = c("CO"),
    +#   file_suffixes = c("DWM_VISIT", "COUNTY"),
    +#   directory = "~/data")
    +# data_list <- downloaded_files |> lapply(readr::read_csv)
    +# names(data_list) <- basename(downloaded_files)
    +#
    +fia_download_individual_data_files <- function(state_abbreviations, file_suffixes, directory) {
    +
    +  #Ensure directory exists
    +  if (!dir.exists(directory)) {
    +    dir.create(directory)
    +  }
    +
    +  base_url <- "https://apps.fs.usda.gov/fia/datamart/CSV/"
    +
    +  # Define the subdirectory path
    +  subdirectory_path <- file.path(directory, "fia_individual_data_files")
    +
    +  # Create the subdirectory if it does not exist
    +  if (!dir.exists(subdirectory_path)) {
    +    dir.create(subdirectory_path, recursive = TRUE)
    +  }
    +
    +  downloaded_files <- c()  # Initialize an empty vector to store downloaded filenames
    +
    +  for (state in state_abbreviations) {
    +    for (suffix in file_suffixes) {
    +      # Normalize the table suffix to uppercase (underscores preserved) to match the DataMart file naming
    +      url_suffix <- gsub("_", " ", suffix)
    +      url_suffix <- gsub(" ", "_", toupper(url_suffix)) # DataMart filenames appear to be uppercase
    +
    +      # Construct the URL and filename using the subdirectory path
    +      url <- paste0(base_url, state, "_", url_suffix, ".csv")
    +      filename <- paste0(subdirectory_path, "/", state, "_", suffix, ".csv")
    +
    +      # Attempt to download the file
    +      tryCatch({
    +        download.file(url, destfile = filename, mode = "wb")
    +        downloaded_files <- c(downloaded_files, filename)  # Add the filename to the vector
    +        message("Downloaded ", filename)
    +      }, error = function(e) {
    +        message("Failed to download ", url, ": ", e$message)
    +      })
    +    }
    +  }
    +
    +  return(downloaded_files)  # Return the vector of downloaded filenames
    +}
    +
    +
    +################################
    +# BULK DOWNLOAD FIA DATASETS
    +#
    +# This function will bulk download FIA datasets requested into associated subdirectories and return the filenames
    +# as a named list of vectors, where each vector contains the files included in that bulk data set.
    +# All bulk data subdirectories will be put into a directory called 'fia_bulk_data_files'
    +# Note that you may want to change your environment's download timeout option to allow longer downloads
    +# (e.g. options(timeout = 300))
    +#
    +#### PARAMETERS ####
    +# state_abbreviations : a vector of state abbreviations as strings (e.g. c("CO", "WY", "NM"))
    +# directory : the directory in which to store the data
    +# bulk_data_types : a vector of bulk download mappings as strings (e.g. c("location level", "plot")) 
    +#       Available data mappings are:
    +          # "location level"
    +          # "tree level"
    +          # "invasives and understory vegetation"
    +          # "down woody material"
    +          # "tree regeneration"
    +          # "ground cover"
    +          # "soils"
    +          # "population"
    +          # "plot"
    +          # "reference"
    +#       Full descriptions of each of these data mappings can be found at the FIA user guide,
    +#       with each mapping associated with a different chapter of tables:
    +#          https://www.fs.usda.gov/research/understory/forest-inventory-and-analysis-database-user-guide-phase-2
    +# 
    +#### Example call to the function for multiple bulk data types and read in the data ####
    +# downloaded_files <- fia_bulk_download_data_files(
    +#   state = c("CO"),
    +#   directory = "~/data",
    +#   bulk_data_types = c("down woody material", "plot")
    +# )
    +# data_list_dwm <- downloaded_files$`down woody material`|> lapply(readr::read_csv)
    +# names(data_list_dwm) <- basename(downloaded_files$`down woody material`)
    +#
    +fia_bulk_download_data_files <- function(state, directory, bulk_data_types) {
    +
    +  #Ensure directory exists
    +  if (!dir.exists(directory)) {
    +    dir.create(directory)
    +  }
    +
    +  # Map bulk data types to their corresponding file suffixes
    +  bulk_data_mappings <- list(
    +    "down woody material" = c(
    +      "DWM_VISIT", "DWM_COARSE_WOODY_DEBRIS", "DWM_DUFF_LITTER_FUEL",
    +      "DWM_FINE_WOODY_DEBRIS", "DWM_MICROPLOT_FUEL", "DWM_RESIDUAL_PILE",
    +      "DWM_TRANSECT_SEGMENT", "COND_DWM_CALC"
    +    ),
    +    "location level" = c(
    +      "SURVEY", "PROJECT", "COUNTY", "PLOT", "COND",
    +      "SUBPLOT", "SUBP_COND", 
    +      #"BOUNDARY", 
    +      "SUBP_COND_CHNG_MTRX"
    +    ),
    +    "tree level" = c(
    +      "TREE", "WOODLAND_STEMS", "GRM_COMPONENT",
    +      "GRM_THRESHOLD", "GRM_MIDPT", "GRM_BEGIN",
    +      "GRM_ESTN", "BEGINEND", "SEEDLING", "SITETREE"
    +    ),
    +    "invasives and understory vegetation" = c(
    +      "INVASIVE_SUBPLOT_SPP", "P2VEG_SUBPLOT_SPP", "P2VEG_SUBP_STRUCTURE"
    +    ),
    +    "tree regeneration" = c(
    +      "PLOT_REGEN", "SUBPLOT_REGEN", "SEEDLING_REGEN"
    +    ),
    +    "ground cover" = c(
    +      "GRND_CVR", "GRND_LYR_FNCTL_GRP", "GRND_LYR_MICROQUAD"
    +    ),
    +    "soils" = c(
    +      "SUBP_SOIL_SAMPLE_LOC", "SUBP_SOIL_SAMPLE_LAYER"
    +    ),
    +    "population" = c(
    +      "POP_ESTN_UNIT", "POP_EVAL", "POP_EVAL_ATTRIBUTE",
    +      "POP_EVAL_GRP", "POP_EVAL_TYP", "POP_PLOT_STRATUM_ASSGN",
    +      "POP_STRATUM"
    +    ),
    +    "plot" = c(
    +      "PLOTGEOM", "PLOTSNAP"
    +    ),
    +    "reference" = c(
    +      "REF_POP_ATTRIBUTE", "REF_POP_EVAL_TYP_DESCR", "REF_FOREST_TYPE",
    +      "REF_FOREST_TYPE_GROUP", "REF_SPECIES", "REF_PLANT_DICTIONARY",
    +      "REF_SPECIES_GROUP", "REF_INVASIVE_SPECIES", "REF_HABTYP_DESCRIPTION",
    +      "REF_HABTYP_PUBLICATION", "REF_CITATION", "REF_FIADB_VERSION",
    +      "REF_STATE_ELEV", "REF_UNIT", "REF_RESEARCH_STATION",
    +      "REF_NVCS_HIERARCHY_STRICT", "REF_NVCS_LEVEL_1_CODES",
    +      "REF_NVCS_LEVEL_2_CODES", "REF_NVCS_LEVEL_3_CODES",
    +      "REF_NVCS_LEVEL_4_CODES", "REF_NVCS_LEVEL_5_CODES",
    +      "REF_NVCS_LEVEL_6_CODES", "REF_NVCS_LEVEL_7_CODES",
    +      "REF_NVCS_LEVEL_8_CODES", "REF_AGENT", "REF_DAMAGE_AGENT",
    +      "REF_DAMAGE_AGENT_GROUP", "REF_FVS_VAR_NAME", "REF_FVS_LOC_NAME",
    +      "REF_OWNGRP_CD", "REF_DIFFERENCE_TEST_PER_ACRE",
    +      "REF_DIFFERENCE_TEST_TOTALS", "REF_EQUATION_TABLE", "REF_SEQN",
    +      "REF_GRM_TYPE", "REF_INTL_TO_DOYLE_FACTOR", "REF_TREE_CARBON_RATIO_DEAD",
    +      "REF_TREE_DECAY_PROP", "REF_TREE_STAND_DEAD_CR_PROP", "REF_GRND_LYR"
    +    )
    +  )
    +  # Initialize a named list to store the filenames for each bulk data type
    +  all_downloaded_files <- setNames(vector("list", length(bulk_data_types)), bulk_data_types)
    +
    +  # Define and create the main bulk data directory
    +  main_bulk_dir <- file.path(directory, "fia_bulk_data_files")
    +  if (!dir.exists(main_bulk_dir)) {
    +    dir.create(main_bulk_dir, recursive = TRUE)
    +  }
    +
    +  # Loop through each bulk data type
    +  for (bulk_data_type in bulk_data_types) {
    +    # Check if the bulk data type is known
    +    if (!bulk_data_type %in% names(bulk_data_mappings)) {
    +      stop("Unknown bulk data type: ", bulk_data_type)
    +    }
    +
    +    # Create a subdirectory name by replacing spaces with underscores
    +    subdirectory <- gsub(" ", "_", bulk_data_type)
    +    subdirectory_path <- file.path(main_bulk_dir, subdirectory)
    +
    +    # Create the subdirectory if it does not exist
    +    if (!dir.exists(subdirectory_path)) {
    +      dir.create(subdirectory_path, recursive = TRUE)
    +    }
    +
    +    # Retrieve the correct set of file suffixes for the current bulk data type
    +    file_suffixes <- bulk_data_mappings[[bulk_data_type]]
    +
    +    # Call the individual-file download function (defined above) for each file suffix
    +    # and save the results in the new subdirectory; note that it will create its own
    +    # "fia_individual_data_files" folder nested inside that subdirectory
    +    downloaded_files <- fia_download_individual_data_files(
    +      state_abbreviations = state,
    +      file_suffixes = file_suffixes,
    +      directory = subdirectory_path
    +    )
    +
    +    # Store the downloaded filenames in the named list under the current bulk data type
    +    all_downloaded_files[[bulk_data_type]] <- downloaded_files
    +  }
    +
    +  # Return the named list of vectors with filenames
    +  return(all_downloaded_files)
    +}
    +
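    As a quick usage sketch mirroring the commented examples above (the directory path and state choice are placeholders, and readr is assumed to be installed), you might bulk-download the "plot" chapter tables for Colorado and read them in:

```
# Hypothetical usage: bulk-download the "plot" chapter tables for Colorado and read them in
downloaded <- fia_bulk_download_data_files(
  state = c("CO"),
  directory = "~/data",
  bulk_data_types = c("plot")
)

# Read each downloaded CSV into a named list of data frames
plot_tables <- downloaded$plot |> lapply(readr::read_csv)
names(plot_tables) <- basename(downloaded$plot)
```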
    + + + + + + + + + + \ No newline at end of file diff --git a/data-library/fire-cbi/fire-cbi.md b/data-library/fire-cbi/fire-cbi.md new file mode 100644 index 0000000..739ed89 --- /dev/null +++ b/data-library/fire-cbi/fire-cbi.md @@ -0,0 +1,13 @@ +# Fire severity: Composite Burn Index (CBI) + +The Composite Burn Index (CBI) is a commonly used and ecologically meaningful measure of fire severity. Unlike some other measures of fire burn severity (e.g. MTBS fire severity), CBI is more readily comparable across large regions. + +To calculate this stack of CBI data the ESIIL team used the method described in [Parks et al. (2019)](https://www.mdpi.com/2072-4292/11/14/1735), which uses random forests regression to calculate CBI based on Relativized Burn Ratio (RBR), latitude, climatic water deficit, and other factors. RBR was calculated using pre- and post-fire image composites of Landsat 4-9 imagery (Collection 2) during the growing season. A correction was applied to the CBI estimates to prevent overprediction at low values (Parks et al., 2019). + +This dataset has a layer for each year of data, with NA values at any location that was unburned during that year. Fire disturbance events documented in the Landfire fire events database will appear in these rasters. + +The data is pre-loaded onto the Cyverse data store and is located in the below file: + +``` +~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Disturbance/SR_landfire_fire_events_cbi_bc.tif +``` \ No newline at end of file diff --git a/data-library/fire-cbi/index.html b/data-library/fire-cbi/index.html new file mode 100644 index 0000000..07571bc --- /dev/null +++ b/data-library/fire-cbi/index.html @@ -0,0 +1,1356 @@ + + + + + + + + + + + + + + + + + + + + + + Fire severity: Composite Burn Index (CBI) - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Fire severity: Composite Burn Index (CBI)

    +

    The Composite Burn Index (CBI) is a commonly used and ecologically meaningful measure of fire severity. Unlike some other measures of fire burn severity (e.g. MTBS fire severity), CBI is more readily comparable across large regions.

    +

    To calculate this stack of CBI data, the ESIIL team used the method described in Parks et al. (2019), which uses random forest regression to calculate CBI based on the Relativized Burn Ratio (RBR), latitude, climatic water deficit, and other factors. RBR was calculated using pre- and post-fire image composites of Landsat 4-9 imagery (Collection 2) during the growing season. A correction was applied to the CBI estimates to prevent overprediction at low values (Parks et al., 2019).

    +

    This dataset has a layer for each year of data, with NA values at any location that was unburned during that year. Fire disturbance events documented in the LANDFIRE fire events database will appear in these rasters.

    +

    The data is pre-loaded onto the Cyverse data store and is located in the below file:

    +
    ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Disturbance/SR_landfire_fire_events_cbi_bc.tif
    +
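    A minimal sketch for loading the stack with terra is below; it assumes you have copied the file to your instance first, and the year-to-layer mapping should be confirmed with names() on your own instance:

```
require(terra)

# Load the multi-layer CBI stack (path shown above; adjust if you copied it to your instance)
cbi <- terra::rast("~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Disturbance/SR_landfire_fire_events_cbi_bc.tif")

# One layer per year; unburned locations are NA
terra::nlyr(cbi)
names(cbi)

# Plot the first layer
terra::plot(cbi[[1]])
```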
    + + + + + + + + + + \ No newline at end of file diff --git a/data-library/gedi/gedi.md b/data-library/gedi/gedi.md new file mode 100644 index 0000000..2311ee2 --- /dev/null +++ b/data-library/gedi/gedi.md @@ -0,0 +1,123 @@ +# GEDI data overview + +The Global Ecosystem Dynamics Investigation (GEDI) is a joint mission between NASA and the University of Maryland, with the instrument installed aboard the International Space Station. Data acquired using the instrument’s three lasers are used to construct detailed three-dimensional (3D) maps of forest canopy height and the distribution of branches and leaves. By accurately measuring forests in 3D, GEDI data play an important role in understanding the amounts of biomass and carbon forests store and how much they lose when disturbed – vital information for understanding Earth’s carbon cycle and how it is changing. GEDI data also can be used to study plant and animal habitats and biodiversity, and how these change over time. + +[The GEDI homepage is located here](https://gedi.umd.edu/). + +GEDI data is collected in footprints of ~25m along the track of the sensor. Each footprint is separated by 60m. GEDI footprint based aboveground biomass density (Mg/ha) over the Southern Rocky Mountains have been downloaded by Dr. Nayani Ilangakoon and placed on the Cyverse data store at the below path. The data are from 2019-2022, and are in the form of tiled CSV files. + +``` +~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/GEDI +``` + +Brief scripts in both R and Python are available in the GitHub repository demonstrating how to access and manipulate the data. The R script is copied below. + +``` r +### This file reads, filter basedo on qulaity flag and ecoregion, and plots GEDI biomass data in csv format. +# ESIIL, 2024 +# Nayani Ilangakoon + +# Load necessary libraries +library(readr) # For read_csv +library(dplyr) # For data manipulation +library(ggplot2) # For plotting +library(tidyr) # For data tidying +library(forcats) + +############### +# NOTE: This script is reading the data directly from the data store. It is only actually opening and processing a single csv +# If you want to use all of the GEDI data that has been made available for your use, you will want to move it +# to your cyverse instance to improve performance +############### + +# Define the root path to the data drive +ROOT_PATH <- "~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest" +# Create the path to the GEDI data by appending the directory name to the root path +indir <- file.path(ROOT_PATH, "GEDI/GEDI_SR_footprint_data/GEDI_biomass_SR") + +# List the contents of the indir directory +list.files(indir) + +# List all files that end with .csv in indir +polyfiles <- list.files(indir, pattern = "\\.csv$", full.names = TRUE) + +# Print the list of .csv files +polyfiles + +out_csv <- file.path(indir, "recovery_treat_bms_64.csv") + + +# Reading the csv file created in the last step +l4a_df <- read_csv(out_csv) + +# Assign "NA" to the values that needs to be discarded. 
+l4a_df <- l4a_df %>% + mutate(agbd = if_else(agbd == -9999,NA_real_,agbd)) + +l4a_df <- na.omit(l4a_df) + + + +# MCD12Q1 PFT types +pft_legend <- c('Water Bodies', 'Evergreen Needleleaf Trees', 'Evergreen Broadleaf Trees', + 'Deciduous Needleleaf Trees', 'Deciduous Broadleaf Trees', 'Shrub', 'Grass', + 'Cereal Croplands', 'Broadleaf Croplands', 'Urban and Built-up Lands', + 'Permanent Snow and Ice', 'Barren', 'Unclassified') + +# label PFT classes with numbers +names(pft_legend) <- as.character(0:12) + +# Creating mask with good quality shots and trees/shrubs pft class +mask <- l4a_df$l4_quality_flag == 1 & l4a_df$`land_cover_data/pft_class` <= 5 + +# Filter the dataframe based on the mask +filtered_df <- l4a_df[mask, ] + +# Transforming the PFT class to a factor with labels +filtered_df$`land_cover_data/pft_class` <- factor(filtered_df$`land_cover_data/pft_class`, + levels = names(pft_legend), labels = pft_legend) + +# Plotting the distribution of GEDI L4A AGBD estimates by PFTs +ggplot(filtered_df, aes(x = agbd, fill = `land_cover_data/pft_class`)) + + geom_histogram(bins = 30, alpha = 0.6, position = "identity") + + scale_fill_manual(values = rainbow(length(unique(filtered_df$`land_cover_data/pft_class`)))) + + labs(title = 'Distribution of GEDI L4A AGBD estimates by PFTs (Plant Functional Types) in ACA in 2020', + x = 'agbd (Mg / ha)', y = 'Frequency') + + theme_minimal() + + guides(fill = guide_legend(title = "PFT Class")) + + theme(legend.position = "bottom") + +# Saving the plot +ggsave("test.png", width = 15, height = 5, units = "in") + + + +# Assuming l4a_df and mask have been defined as before + +# Binning the elevation data +l4a_df <- l4a_df %>% + mutate(elev_bin = cut(elev_lowestmode, breaks = seq(0, 5000, by = 500))) + +# Ensure PFT class is a factor with proper labels +l4a_df$`land_cover_data/pft_class` <- factor(l4a_df$`land_cover_data/pft_class`, + levels = names(pft_legend), labels = pft_legend) + +# Filtering the dataframe based on mask and ensure it is applied correctly +filtered_df <- l4a_df %>% + filter(mask) + +# Creating the boxplot +g <- ggplot(filtered_df, aes(x = elev_bin, y = agbd)) + + geom_boxplot() + + facet_wrap(~`land_cover_data/pft_class`, scales = "free", labeller = labeller(`land_cover_data/pft_class` = as_labeller(pft_legend))) + + theme(axis.text.x = element_text(angle = 90, hjust = 1)) + + labs(x = "Elevation (m)", y = "agbd", title = "AGBD by Elevation and PFT Class") + + theme_minimal() + +# Print the plot +print(g) + +# Save the plot +ggsave("agbd_category.png", plot = g, width = 15, height = 10, units = "in") + +``` \ No newline at end of file diff --git a/data-library/gedi/index.html b/data-library/gedi/index.html new file mode 100644 index 0000000..3875e67 --- /dev/null +++ b/data-library/gedi/index.html @@ -0,0 +1,1463 @@ + + + + + + + + + + + + + + + + + + + + + + GEDI data overview - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    GEDI data overview

    +

    The Global Ecosystem Dynamics Investigation (GEDI) is a joint mission between NASA and the University of Maryland, with the instrument installed aboard the International Space Station. Data acquired using the instrument’s three lasers are used to construct detailed three-dimensional (3D) maps of forest canopy height and the distribution of branches and leaves. By accurately measuring forests in 3D, GEDI data play an important role in understanding the amounts of biomass and carbon forests store and how much they lose when disturbed – vital information for understanding Earth’s carbon cycle and how it is changing. GEDI data also can be used to study plant and animal habitats and biodiversity, and how these change over time.

    +

    The GEDI homepage is located here.

    +

    GEDI data is collected in footprints of ~25 m along the track of the sensor, with each footprint separated by 60 m. GEDI footprint-based aboveground biomass density (Mg/ha) estimates over the Southern Rocky Mountains have been downloaded by Dr. Nayani Ilangakoon and placed on the Cyverse data store at the below path. The data are from 2019-2022 and are in the form of tiled CSV files.

    +
    ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/GEDI
    +
    +

    Brief scripts in both R and Python are available in the GitHub repository demonstrating how to access and manipulate the data. The R script is copied below.

    +
    ### This file reads, filters based on quality flag and ecoregion, and plots GEDI biomass data in CSV format.
    +# ESIIL, 2024
    +# Nayani Ilangakoon
    +
    +# Load necessary libraries
    +library(readr) # For read_csv
    +library(dplyr) # For data manipulation
    +library(ggplot2) # For plotting
    +library(tidyr) # For data tidying
    +library(forcats)
    +
    +###############
    +# NOTE: This script is reading the data directly from the data store. It is only actually opening and processing a single csv
    +# If you want to use all of the GEDI data that has been made available for your use, you will want to move it
    +# to your cyverse instance to improve performance
    +###############
    +
    +# Define the root path to the data drive
    +ROOT_PATH <- "~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest"
    +# Create the path to the GEDI data by appending the directory name to the root path
    +indir <- file.path(ROOT_PATH, "GEDI/GEDI_SR_footprint_data/GEDI_biomass_SR")
    +
    +# List the contents of the indir directory
    +list.files(indir)
    +
    +# List all files that end with .csv in indir
    +polyfiles <- list.files(indir, pattern = "\\.csv$", full.names = TRUE)
    +
    +# Print the list of .csv files
    +polyfiles
    +
    +out_csv <- file.path(indir, "recovery_treat_bms_64.csv")
    +
    +
    +# Reading the csv file created in the last step
    +l4a_df <- read_csv(out_csv)
    +
    +# Assign NA to the values that need to be discarded.
    +l4a_df <- l4a_df %>%
    +  mutate(agbd = if_else(agbd == -9999,NA_real_,agbd))
    +
    +l4a_df <- na.omit(l4a_df)
    +
    +
    +
    +# MCD12Q1 PFT types
    +pft_legend <- c('Water Bodies', 'Evergreen Needleleaf Trees', 'Evergreen Broadleaf Trees', 
    +                'Deciduous Needleleaf Trees', 'Deciduous Broadleaf Trees', 'Shrub', 'Grass',
    +                'Cereal Croplands', 'Broadleaf Croplands', 'Urban and Built-up Lands', 
    +                'Permanent Snow and Ice', 'Barren', 'Unclassified')
    +
    +# label PFT classes with numbers
    +names(pft_legend) <- as.character(0:12)
    +
    +# Creating mask with good quality shots and trees/shrubs pft class
    +mask <- l4a_df$l4_quality_flag == 1 & l4a_df$`land_cover_data/pft_class` <= 5
    +
    +# Filter the dataframe based on the mask
    +filtered_df <- l4a_df[mask, ]
    +
    +# Transforming the PFT class to a factor with labels
    +filtered_df$`land_cover_data/pft_class` <- factor(filtered_df$`land_cover_data/pft_class`, 
    +                                                  levels = names(pft_legend), labels = pft_legend)
    +
    +# Plotting the distribution of GEDI L4A AGBD estimates by PFTs
    +ggplot(filtered_df, aes(x = agbd, fill = `land_cover_data/pft_class`)) +
    +  geom_histogram(bins = 30, alpha = 0.6, position = "identity") +
    +  scale_fill_manual(values = rainbow(length(unique(filtered_df$`land_cover_data/pft_class`)))) +
    +  labs(title = 'Distribution of GEDI L4A AGBD estimates by PFTs (Plant Functional Types) in ACA in 2020',
    +       x = 'agbd (Mg / ha)', y = 'Frequency') +
    +  theme_minimal() +
    +  guides(fill = guide_legend(title = "PFT Class")) +
    +  theme(legend.position = "bottom")
    +
    +# Saving the plot
    +ggsave("test.png", width = 15, height = 5, units = "in")
    +
    +
    +
    +# Assuming l4a_df and mask have been defined as before
    +
    +# Binning the elevation data
    +l4a_df <- l4a_df %>%
    +  mutate(elev_bin = cut(elev_lowestmode, breaks = seq(0, 5000, by = 500)))
    +
    +# Ensure PFT class is a factor with proper labels
    +l4a_df$`land_cover_data/pft_class` <- factor(l4a_df$`land_cover_data/pft_class`, 
    +                                             levels = names(pft_legend), labels = pft_legend)
    +
    +# Filtering the dataframe based on mask and ensure it is applied correctly
    +filtered_df <- l4a_df %>%
    +  filter(mask)
    +
    +# Creating the boxplot
    +g <- ggplot(filtered_df, aes(x = elev_bin, y = agbd)) +
    +  geom_boxplot() +
    +  facet_wrap(~`land_cover_data/pft_class`, scales = "free", labeller = labeller(`land_cover_data/pft_class` = as_labeller(pft_legend))) +
    +  theme(axis.text.x = element_text(angle = 90, hjust = 1)) +
    +  labs(x = "Elevation (m)", y = "agbd", title = "AGBD by Elevation and PFT Class") +
    +  theme_minimal()
    +
    +# Print the plot
    +print(g)
    +
    +# Save the plot
    +ggsave("agbd_category.png", plot = g, width = 15, height = 10, units = "in")
    +
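    The script above reads a single tile. If you want to work with all of the tiled CSVs (after copying them to your instance, as the note in the script suggests), a minimal sketch along these lines should work; it assumes the tiles share a consistent column structure, which is worth verifying:

```
# Read every tiled CSV listed in polyfiles and stack them into one data frame
library(readr)
library(dplyr)

all_tiles <- polyfiles |>
  lapply(readr::read_csv, show_col_types = FALSE) |>
  dplyr::bind_rows()

# Drop the -9999 no-data values, as in the single-tile example above
all_tiles <- all_tiles |>
  dplyr::mutate(agbd = dplyr::if_else(agbd == -9999, NA_real_, agbd)) |>
  tidyr::drop_na(agbd)
```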
    + + + + + + + + + + \ No newline at end of file diff --git a/data-library/landfire-events/index.html b/data-library/landfire-events/index.html new file mode 100644 index 0000000..8c1d731 --- /dev/null +++ b/data-library/landfire-events/index.html @@ -0,0 +1,1406 @@ + + + + + + + + + + + + + + + + + + + + + + LANDFIRE Public Events Geodatabase - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    LANDFIRE Public Events Geodatabase

    +

    From 'LANDFIRE Product Descriptions with References'

    +

    The LF National (LF 1.X) Public Events Geodatabase is a collection of recent natural disturbance and land management activities used to update existing vegetation and fuel layers during LF Program deliverables. Public Events exclude proprietary and/or sensitive data. This geodatabase includes three feature classes - Raw Events, Model Ready Events, and Exotics. The Public Raw and Model Ready Event feature classes include natural disturbance and vegetation/fuel treatment data. The Public Exotics feature class contains data on the occurrence of exotic or invasive plant species. There is also a look up table for the source code (lutSource_Code), an attribute found in all three feature classes. The source code is an LF internal code assigned to each data source. Consult the table "lutSource_Code" in the geodatabases for more information about the data sources included in, and excluded from, releases. The data compiled in the three feature classes are collected from disparate sources including federal, state, local, and private organizations. All data submitted to LF are evaluated for inclusion into the LF Events geodatabase. Acceptable Event data must meet the following minimum requirements to be included in the Events geodatabase: 1) be represented by a polygon on the landscape and have a defined spatial coordinate system, 2) have an acceptable event type (Appendix B) or exotic plant species, and 3) be attributed with year of occurrence or observation of the current data call.

    +

    Metadata

    +

    The LANDFIRE public events geodatabase contents description is available here.

    +

    This document describes how polygon data on disturbances and treatments are evaluated and processed into the LANDFIRE Events geodatabase.

    +

    The Raw and Model Ready Events Data Dictionary is available here.

    +

    Note that this is a large geodatabase (> 1 million polygons); we recommend filtering it as early as possible, either by attribute or spatially (see the spatial filtering sketch below the access script).

    +

    The relevant layers within the .gdb file are:

    +
      +
    • CONUS_230_PublicExotics
    • +
    • CONUS_230_PublicModelReadyEvents
    • +
    • CONUS_230_PublicRawEvents
    • +
    +

    Access

    +

    Storage location: +

    ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Disturbance/LF_Public_Events_1999_2022
    +

    +

    Example access script: +

    system("cp -r ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Disturbance/LF_Public_Events_1999_2022 ~/LF_Events") #move the data first!!
    +
    +
    +landfireEvents <- sf::st_read("~/LF_Events/LF_Public_Events_1999_2022.gdb",
    +                              layer = "CONUS_230_PublicModelReadyEvents")
    +
    +unique(landfireEvents$Event_Type)
    +
    +# [1] "Thinning"          "Other Mechanical"  "Prescribed Fire"   "Herbicide"        
    +# [5] "Clearcut"          "Harvest"           "Wildfire"          "Mastication"      
    +# [9] "Wildland Fire"     "Chemical"          "Development"       "Biological"       
    +# [13] "Weather"           "Planting"          "Reforestation"     "Insects"          
    +# [17] "Seeding"           "Disease"           "Wildland Fire Use" "Insects/Disease"  
    +# [21] "Insecticide"  
    +
    +landfireFireEvents <- landfireEvents |> dplyr::filter(Event_Type == "Wildfire" | 
    +                                                        Event_Type == "Wildland Fire Use" |
    +                                                        Event_Type == "Prescribed Fire" |
    +                                                        Event_Type == "Wildland Fire" |
    +                                                        Event_Type == "Fire")
    +
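    If you only need events for a particular area, you can also push a spatial filter into the read itself so the full set of more than 1 million polygons is never loaded. A minimal sketch using sf's wkt_filter argument is below; the area-of-interest file is hypothetical, and the AOI must be supplied in the same CRS as the geodatabase layer:

```
library(sf)

gdb_path <- "~/LF_Events/LF_Public_Events_1999_2022.gdb"

# Hypothetical area of interest - substitute your own polygon, transformed to the layer's CRS
aoi <- sf::st_read("~/my_aoi.gpkg")

# Convert the AOI to WKT for GDAL's spatial filter
aoi_wkt <- aoi |>
  sf::st_union() |>
  sf::st_as_text()

# wkt_filter asks GDAL to return only features intersecting the AOI
events_aoi <- sf::st_read(gdb_path,
                          layer = "CONUS_230_PublicModelReadyEvents",
                          wkt_filter = aoi_wkt)
```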

    + + + + + + + + + + \ No newline at end of file diff --git a/data-library/landfire-events/landfire-events.md b/data-library/landfire-events/landfire-events.md new file mode 100644 index 0000000..37756b0 --- /dev/null +++ b/data-library/landfire-events/landfire-events.md @@ -0,0 +1,71 @@ +# LANDFIRE Public Events Geodatabase + +*From ['LANDFIRE Product Descriptions with References'](https://landfire.gov/documents/LF_Data_Product_Descriptions_w-References2019.pdf)* + +The LF National (LF 1.X) Public Events Geodatabase is a collection of recent natural disturbance and +land management activities used to update existing vegetation and fuel layers during LF Program +deliverables. Public Events exclude proprietary and/or sensitive data. +This geodatabase includes three feature classes - Raw Events, Model Ready Events, and Exotics. The +Public Raw and Model Ready Event feature classes include natural disturbance and vegetation/fuel +treatment data. The Public Exotics feature class contains data on the occurrence of exotic or invasive +plant species. There is also a look up table for the source code (lutSource_Code), an attribute found in +all three feature classes. The source code is an LF internal code assigned to each data source. Consult +thetable“lutSource_Code” in thegeodatabases for more information about the data sources included +in, and excluded from, releases. +The data compiled in the three feature classes are collected from disparate sources including federal, +state, local, and private organizations. All data submitted to LF are evaluated for inclusion into the LF +Events geodatabase. Acceptable Event data must have the following minimum requirements to be +included in the Events geodatabase: + 1) be represented by a polygon on the landscape and have a defined spatial coordinate +system + 2) have an acceptable event type (Appendix B) or exotics plant species + 3) be attributed with year of occurrence or observation of the current data call. + + +## Metadata + +The LANDFIRE public events geodatabase contents description is available [here](https://landfire.gov/documents/LANDFIRE_2022_Public_Events_README.pdf). + +[This document](https://landfire.gov/documents/Disturbance_Data_Processing.pdf) provides a description of how polygon data of disturbans and treatments are evaluated and processed into the LANDFIRE Events geodatabase. + +The Raw and Model Ready Events Data Dictionary is available [here](https://landfire.gov/documents/LANDFIREEventsDataDictionary.pdf). + +Note that this is a large geodatabase (> 1 million polygons). Recommend filtering as soon as possible. + +The relevant layers within the .gdb file are: + + - CONUS_230_PublicExotics + - CONUS_230_PublicModelReadyEvents + - CONUS_230_PublicRawEvents + +## Access + +Storage location: +``` +~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Disturbance/LF_Public_Events_1999_2022 +``` + +Example access script: +``` + +system("cp -r ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Disturbance/LF_Public_Events_1999_2022 ~/LF_Events") #move the data first!! 
+ + +landfireEvents <- sf::st_read("~/LF_Events/LF_Public_Events_1999_2022.gdb", + layer = "CONUS_230_PublicModelReadyEvents") + +unique(landfireEvents$Event_Type) + +# [1] "Thinning" "Other Mechanical" "Prescribed Fire" "Herbicide" +# [5] "Clearcut" "Harvest" "Wildfire" "Mastication" +# [9] "Wildland Fire" "Chemical" "Development" "Biological" +# [13] "Weather" "Planting" "Reforestation" "Insects" +# [17] "Seeding" "Disease" "Wildland Fire Use" "Insects/Disease" +# [21] "Insecticide" + +landfireFireEvents <- landfireEvents |> dplyr::filter(Event_Type == "Wildfire" | + Event_Type == "Wildland Fire Use" | + Event_Type == "Prescribed Fire" | + Event_Type == "Wildland Fire" | + Event_Type == "Fire") +``` \ No newline at end of file diff --git a/data-library/lcmap/index.html b/data-library/lcmap/index.html new file mode 100644 index 0000000..f83303b --- /dev/null +++ b/data-library/lcmap/index.html @@ -0,0 +1,1427 @@ + + + + + + + + + + + + + + + + + + + + + + Land Change Monitoring, Assessment, and Projection - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Land Change Monitoring, Assessment, and Projection

    +

    Land Change Monitoring, Assessment, and Projection (LCMAP) represents a new generation of land cover mapping and change monitoring from the U.S. Geological Survey’s Earth Resources Observation and Science (EROS) Center. LCMAP answers a need for higher quality results at greater frequency with additional land cover and change variables than previous efforts. The USGS website for LCMAP is here.

    +

    Collection 1.3 of the LCMAP product contains 10 different science products (details here).

    +

    To accelerate your access to this dataset, the ESIIL team has made LCMAP 1.3 Primary Land Cover product (LCPRI) data for the Southern Rockies available on the Cyverse data store at the below directory:

    +
    ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/LCMAP_SR_1985_2021
    +
    +
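    A minimal sketch for reading the pre-staged annual LCPRI rasters with terra follows; it assumes the directory contains one GeoTIFF per year (confirm the filenames with list.files()) and that you have copied the folder to your instance as recommended elsewhere in this library:

```
require(terra)

lcmap_dir <- "~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/LCMAP_SR_1985_2021"

# List the annual rasters; the .tif pattern is an assumption - inspect the directory first
lcmap_files <- list.files(lcmap_dir, pattern = "\\.tif$", full.names = TRUE)

# Read all years into a single multi-layer SpatRaster and plot the first year
lcpri <- terra::rast(lcmap_files)
terra::plot(lcpri[[1]])
```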

    Additional LCMAP layers and products may be accessed via STAC and VSI (see below for example).

    +

    The script used to download the LCMAP data that is already staged is located in the GitHub repo at /code/create-data-library/LCMAP_Direct_Access-adapted.ipynb. The code is adapted from the LCMAP data access tutorial.

    +
    #Access LCMAP data from STAC
    +#Adapted from 'Download data from a STAC API using R, rstac, and GDAL'
    +#https://stacspec.org/en/tutorials/1-download-data-using-r/
    +
    +
    +require(glue)
    +require(sf)
    +require(terra)
    +require(rstac)
    +
    +
    +#Access ecoregions via VSI
    +epa_l3 <- glue::glue(
    +  "/vsizip/vsicurl/", #magic remote connection
    +  "https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip", #copied link to download location
    +  "/us_eco_l3.shp") |> #path inside zip file
    +  sf::st_read()
    +
    +#Get just S.Rockies and ensure that it is in EPSG:4326
    +southernRockies <- epa_l3 |>
    +  dplyr::filter(US_L3NAME == "Southern Rockies") |>
    +  dplyr::group_by(US_L3NAME) |>
    +  dplyr::summarize(geometry = sf::st_union(geometry)) |>
    +  sf::st_transform("EPSG:4326")
    +
    +bboxSR4326 <- sf::st_bbox(southernRockies)
    +
    +# Create a stac query for just the 2021 LCMAP data
    +stac_query <- rstac::stac(
    +  "https://planetarycomputer.microsoft.com/api/stac/v1"
    +) |>
    +  rstac::stac_search(
    +    collections = "usgs-lcmap-conus-v13",
    +    bbox = bboxSR4326,
    +    datetime = "2021-01-01/2021-12-31"
    +  ) |>
    +  rstac::get_request()
    +
    +#A function to get a vsicurl URL from a base URL
    +make_lcmap_vsicurl_url <- function(base_url) {
    +  paste0(
    +    "/vsicurl", 
    +    "?pc_url_signing=yes",
    +    "&pc_collection=usgs-lcmap-conus-v13",
    +    "&url=",
    +    base_url
    +  )
    +}
    +
    +lcpri_url <- make_lcmap_vsicurl_url(rstac::assets_url(stac_query, "lcpri"))
    +
    +#Pull the file
    +out_file <- tempfile(fileext = ".tif")
    +sf::gdal_utils(
    +  "warp",
    +  source = lcpri_url,
    +  destination = out_file,
    +  options = c(
    +    "-t_srs", sf::st_crs(southernRockies)$wkt,
    +    "-te", sf::st_bbox(southernRockies)
    +  )
    +)
    +
    +#Create the raster and plot!
    +terra::rast(out_file) |>
    +  terra::plot()
    +southernRockies |> 
    +  sf::st_geometry() |> 
    +  plot(lwd = 3, add = TRUE)
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/data-library/lcmap/lcmap.md b/data-library/lcmap/lcmap.md new file mode 100644 index 0000000..0ace7bb --- /dev/null +++ b/data-library/lcmap/lcmap.md @@ -0,0 +1,88 @@ +# Land Change Monitoring, Assessment, and Projection + +Land Change Monitoring, Assessment, and Projection (LCMAP) represents a new generation of land cover mapping and change monitoring from the U.S. Geological Survey’s Earth Resources Observation and Science (EROS) Center. LCMAP answers a need for higher quality results at greater frequency with additional land cover and change variables than previous efforts. [The USGS website for LCMAP is here.](https://www.usgs.gov/special-topics/lcmap) + +Collection 1.3 of the LCMAP product contains 10 different science products ([details here](https://www.usgs.gov/special-topics/lcmap/collection-13-conus-science-products)). + +To accelerate your access to this dataset, the ESIIL team has made LCMAP 1.3 Primary Land Cover product (LCPRI) data for the Southern Rockies available on the Cyverse data store at the below directory: + +``` +~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/LCMAP_SR_1985_2021 +``` + +Additional LCMAP layers and products may be accessed via STAC and VSI (see below for example). + +The script used to download the LCMAP data already available is located in the GitHub repo at /code/create-data-library/LCMAP_Direct_Access-adapted.ipynb. The code is from the LCMAP data access tutorial. + +``` + +#Access LCMAP data from STAC +#Adapted from 'Download data from a STAC API using R, rstac, and GDAL' +#https://stacspec.org/en/tutorials/1-download-data-using-r/ + + +require(glue) +require(sf) +require(terra) +require(rstac) + + +#Access ecoregiosn via VSI +epa_l3 <- glue::glue( + "/vsizip/vsicurl/", #magic remote connection + "https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip", #copied link to download location + "/us_eco_l3.shp") |> #path inside zip file + sf::st_read() + +#Get just S.Rockies and ensure that it is in EPSG:4326 +southernRockies <- epa_l3 |> + dplyr::filter(US_L3NAME == "Southern Rockies") |> + dplyr::group_by(US_L3NAME) |> + dplyr::summarize(geometry = sf::st_union(geometry)) |> + sf::st_transform("EPSG:4326") + +bboxSR4326 <- sf::st_bbox(southernRockies) + +# Create a stac query for just the 2021 LCMAP data +stac_query <- rstac::stac( + "https://planetarycomputer.microsoft.com/api/stac/v1" +) |> + rstac::stac_search( + collections = "usgs-lcmap-conus-v13", + bbox = bboxSR4326, + datetime = "2021-01-01/2021-12-31" + ) |> + rstac::get_request() + +#A function to get a vsicurl url form a base url +make_lcmap_vsicurl_url <- function(base_url) { + paste0( + "/vsicurl", + "?pc_url_signing=yes", + "&pc_collection=usgs-lcmap-conus-v13", + "&url=", + base_url + ) +} + +lcpri_url <- make_lcmap_vsicurl_url(rstac::assets_url(stac_query, "lcpri")) + +#Pull the file +out_file <- tempfile(fileext = ".tif") +sf::gdal_utils( + "warp", + source = lcpri_url, + destination = out_file, + options = c( + "-t_srs", sf::st_crs(southernRockies)$wkt, + "-te", sf::st_bbox(southernRockies) + ) +) + +#Create the raster and plot! 
+terra::rast(out_file) |> + terra::plot() +southernRockies |> + sf::st_geometry() |> + plot(lwd = 3, add = TRUE) +``` \ No newline at end of file diff --git a/data-library/modis-vcf/index.html b/data-library/modis-vcf/index.html new file mode 100644 index 0000000..d5e52ca --- /dev/null +++ b/data-library/modis-vcf/index.html @@ -0,0 +1,1356 @@ + + + + + + + + + + + + + + + + + + + + + + MODIS Vegetation Continuous Fields (VCF) - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    MODIS Vegetation Continuous Fields (VCF)

    +

    The MODIS VCF product is derived from imagery collected by the MODIS sensor. The dataset provides proportional estimates of varying cover types. It is developed from global training data derived using high-resolution imagery; the training data and phenological metrics are used with a regression tree to derive percent cover globally. The model is then used to estimate areal proportions of life form, leaf type, and leaf longevity.

    +

    MODIS Vegetation Continuous Fields (MOD44B) provides global sub-pixel estimates of three land cover components (percent tree cover; percent non-tree vegetation; and percent non-vegetated) at 250 m spatial resolution. NASA MODIS information here.

    +

    To accelerate your access to this dataset, the ESIIL team has made MODIS VCF data for the Southern Rockies available on the Cyverse data store at the below directory:

    +
    ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/modis-vcf
    +
    +
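    A minimal sketch for reading the pre-staged VCF layers with terra is below; it assumes the directory holds GeoTIFFs (check the actual filenames and band meanings with list.files() and names() first), and ideally that you have copied the folder to your instance:

```
require(terra)

vcf_dir <- "~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/modis-vcf"

# List the staged rasters; the .tif pattern is an assumption - inspect the directory first
vcf_files <- list.files(vcf_dir, pattern = "\\.tif$", full.names = TRUE)

# Load the first raster and plot it (layer contents depend on the staged files)
vcf <- terra::rast(vcf_files[1])
terra::plot(vcf)
```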

    Additional MODIS data is best accessed via VSI or STAC.

    + + + + + + + + + + \ No newline at end of file diff --git a/data-library/modis-vcf/modis-vcf.md b/data-library/modis-vcf/modis-vcf.md new file mode 100644 index 0000000..a59b63a --- /dev/null +++ b/data-library/modis-vcf/modis-vcf.md @@ -0,0 +1,13 @@ +# MODIS Vegetation Continuous Fields (VCF) + +The MODIS VCF product is derived from the MODIS satellite. The dataset provides proportional estimates of varying cover types. This data is developed from global training data derived using high-resolution imagery. The training data and phenological metrics are used with a regression tree to derive percent cover globally. The model is then used to estimate areal proportions of life form, leaf type, and leaf longevity. + +MODIS Vegetation Continuous Fields (MOD44B) provides global sub-pixel estimates of three land cover components (percent tree cover; percent non-tree vegetation; and percent non-vegetated) at 250 m spatial resolution. [NASA MODIS information here](https://modis-land.gsfc.nasa.gov/vcc.html). + +To accelerate your access to this dataset, the ESIIL team has made MODIS VCF data for the Southern Rockies available on the Cyverse data store at the below directory: + +``` +~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/modis-vcf +``` + +Additional MODIS data is best accessed via VSI or STAC. \ No newline at end of file diff --git a/data-library/mounting-via-vsi/index.html b/data-library/mounting-via-vsi/index.html new file mode 100644 index 0000000..018b35d --- /dev/null +++ b/data-library/mounting-via-vsi/index.html @@ -0,0 +1,1399 @@ + + + + + + + + + + + + + + + + + + + + + + Mounting data directly from a URL - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Mounting data directly from a URL

    +

    ESIIL, 2024. Tyler McIntosh

    +

    Data can be directly accessed from where it is hosted on the internet, without the need to download the entire file to your local machine.

    +

    For spatial data, special protocols from the GDAL library can be used.

    +

    The first part of enabling remote access is "vsicurl". VSI is GDAL's Virtual File System, and "vsicurl" is a virtual file system handler that allows access to files hosted on remote servers over protocols like HTTP, HTTPS, and FTP. When you prepend "vsicurl/" to a URL, GDAL reads the file directly from the remote location without downloading it entirely to the local disk. It's particularly useful for large files, as it only fetches the portions of the file needed for the current operation.

    +

    The second part of enabling remote access to a zipped file (the format of most large data files hosted online) is "vsizip". This is another virtual file system handler in GDAL that enables reading files inside zip archives as if they were unzipped, without the need to extract them manually. By using "vsizip/", you can directly access the contents of a zip file.

    +

    When combined, "/vsizip/vsicurl/" allows GDAL (and, subsequently, a package such as 'terra' or 'sf' in R, or similar Python packages) to access files inside of a zip archive on a remote server. The URL following this protocol specifies the remote location of the zip file, and the path after the URL specifies the particular file within the zip archive that you want to access.

    +

    Example

    +

    For example, you may have a URL to a spatial dataset that you want to use, such as "https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip", which you may have found on a website.

    +

    Figure out your archive contents

    +

    In order to open a specific file within the zip archive, you need to know the names of the files within the archive. You can either:

    +
      +
    • Download the archive once, view the data structure, and then access it remotely from then on, or, a better solution is to...
    • +
    • Access the contents of the zip file using GDAL from a command-line environment
    • +
    +

    To access the contents from a command-line environment, you would use a line of code like this: +

    gdalinfo /vsizip/vsicurl/https://example.com/data.zip
    +
    +Or, in our example: +
    gdalinfo /vsizip/vsicurl/https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip
    +

    +

    If you would like to do this without leaving your R or Python environment, you can use R or Python to execute command line calls:

    +

    R, using "system": +

    zip_url = "/vsizip//vsicurl/https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip"
    +system(paste("gdalinfo", zip_url))
    +

    +

    Python, using "subprocess.run": +

    import subprocess
    +
    +zip_url = "/vsizip//vsicurl/https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip"
    +subprocess.run(["gdalinfo", zip_url])
    +

    +

    This will tell you that the archive contains several files, one of which is "us_eco_l3.shp" - our shapefile of interest. (If there are subdirectories within the archive, repeat the process.)

    +

    Mounting the data

    +

    We now know the full path to our file of interest: +

    "/vsizip//vsicurl/https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip/us_eco_l3.shp"
    +

    +

    To mount the data, we simply feed this string to our spatial data package just as we would any other data location. For example, in R, we could do: +

    require(glue)
    +require(sf)
    +
    +epa_l3 <- glue::glue(
    +  "/vsizip/vsicurl/", #magic remote connection
    +  "https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip", #copied link to download location
    +  "/us_eco_l3.shp") |> #path inside zip file
    +  sf::st_read()
    +

    +

    From this point, we now have the data mounted in our epa_l3 variable, and can manipulate it as usual.

    +

    Note that, since vsicurl only fetches the portions of the file needed for an operation, the data mounts very quickly. Only once you attempt an operation that requires the entire dataset will it actually be fetched in full!

    + + + + + + + + + + \ No newline at end of file diff --git a/data-library/mounting-via-vsi/mounting-via-vsi.md b/data-library/mounting-via-vsi/mounting-via-vsi.md new file mode 100644 index 0000000..3089782 --- /dev/null +++ b/data-library/mounting-via-vsi/mounting-via-vsi.md @@ -0,0 +1,81 @@ +# Mounting data directly from a URL + +ESIIL, 2024 +Tyler McIntosh + +Data can be directly accessed from where it is hosted on the internet, without the need to download the entire file to your local machine. + +For spatial data, special protocols from the GDAL library can be used. + +The first part of enabling remote access is "vsicurl". VSI is GDAL's Virtual File System. This is a virtual file system handler allows access to files hosted on remote servers over protocols like HTTP, HTTPS, and FTP. When you prepend "vsicurl/" to a URL, GDAL reads the file directly from the remote location without downloading it entirely to the local disk. It's particularly useful for large files, as it only fetches the portions of the file needed for the current operation. + +The second part of enabling remote access to a zipped file (most large data files hosted online) is "vsizip". This is another virtual file system handler in GDAL that enables reading files inside zip archives as if they were unzipped, without the need to extract them manually. By using "vsizip/", you can directly access the contents of a zip file. + +When combined, "/vsizip/vsicurl/" allows GDAL (and, subsequently, a package such as 'terra' or 'sf' in R, or similar Python packages) to access files inside of a zip archive on a remote server. The URL following this protocol specifies the remote location of the zip file, and the path after the URL specifies the particular file within the zip archive that you want to access. + +## Example + +For example, you may have a url to a spatial dataset that you want to use, "https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip". You may have found this link on a website. + +### Figure out your archive contents + +In order to open a specific file within the zip archive, you need to know the names of the files within the archive. You can either: + + - Download the archive once, view the data structure, and then access it remotely from then on, or, a better solution is to... + - Access the contents of the zip file using GDAL from a command-line environment + +To access the contents from a command-line environment, you would use a line of code like this: +``` +gdalinfo /vsizip/vsicurl/https://example.com/data.zip +``` +Or, in our example: +``` +gdalinfo /vsizip/vsicurl/https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip +``` + +If you would like to do this without leaving your R or Python environment, you can use R or Python to execute command line calls: + +*R, using "system":* +``` +zip_url = "/vsizip//vsicurl/https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip" +system(paste("gdalinfo", zip_url)) +``` + +*Python, using "subprocess.run":* +``` +import subprocess + +zip_url = "/vsizip//vsicurl/https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip" +subprocess.run(["gdalinfo", zip_url]) +``` + +This will tell you that the archive contains several files, one of which is "us_eco_l3.shp" - our shapefile of interest. (If there were subdirectories within the directory, repeat the process). 
+ +### Mounting the data + +We now know the full path to our file of interest: +``` +"/vsizip//vsicurl/https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip/us_eco_l3.shp" +``` + +To mount the data, we simply feed this string to our spatial data package just as we would any other data location. For example, in R, we could do: +``` +require(glue) +require(sf) + +epa_l3 <- glue::glue( + "/vsizip/vsicurl/", #magic remote connection + "https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip", #copied link to download location + "/us_eco_l3.shp") |> #path inside zip file + sf::st_read() +``` + +From this point, we now have the data mounted in our epa_l3 variable, and can manipulate it as usual. + +Note that, since vsicurl only fetches the portions of the file needed for an operation, the data mounted very quickly. Only once you attempt an operation with the data that requires the entire dataset will it actually fetch the entire dataset! + + + + + + diff --git a/data-library/move-data-to-instance/index.html b/data-library/move-data-to-instance/index.html new file mode 100644 index 0000000..6aac6ca --- /dev/null +++ b/data-library/move-data-to-instance/index.html @@ -0,0 +1,1423 @@ + + + + + + + + + + + + + + + + + + + + + + Move and Save data - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Moving data to your instance from the data store

    +

    Some data has been pre-downloaded for you and stored on the CyVerse data store in order to help expedite your projects.

    +

While you CAN access that data directly on the data store, it is HIGHLY recommended that you first copy the data over to your instance (see "Cyverse data management" under "Collaborating on the cloud" for more information): working with the data is dramatically faster once it is stored locally on your instance.

    +

    Take, for instance, the treemap data.

    +

If we load and plot the data without moving it, it takes just a few seconds (~2.973 seconds). Not bad.

    require(terra)
    +require(tictoc)
    +tictoc::tic()
    +treemap <- terra::rast("~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap/treemap2016_southernrockies.tif")
    +terra::plot(treemap)
    +tictoc::toc()
    +

    +

However, if we load and plot the data after moving it, it takes less than a second (~0.302 seconds). Even better! This roughly 10x speedup will add up quickly as soon as you start working with the data more intensively.

    +
    require(terra)
    +require(tictoc)
    +system("cp -r ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap ~/TreeMap") #move the data first!!
    +tictoc::tic()
    +treemap <- terra::rast("~/TreeMap/treemap2016_southernrockies.tif")
    +terra::plot(treemap)
    +tictoc::toc()
    +
    +

    Takeaway: seriously, just copy the data over.
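To make this step idempotent, a minimal sketch (using the same paths as above) is to copy only when the local folder does not already exist, so repeated runs of your script skip the slow transfer:

``` r
src  <- "~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap"
dest <- "~/TreeMap"

# Copy the folder only once; later runs reuse the local copy
if (!dir.exists(dest)) {
  system(paste("cp -r", src, dest))
}
```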

    + + + + + + + + + + \ No newline at end of file diff --git a/data-library/move-data-to-instance/move-data-to-instance.md b/data-library/move-data-to-instance/move-data-to-instance.md new file mode 100644 index 0000000..f7fa80b --- /dev/null +++ b/data-library/move-data-to-instance/move-data-to-instance.md @@ -0,0 +1,31 @@ +# Moving data to your instance from the data store + +Some data has been pre-downloaded for you and stored on the CyVerse data store in order to help expedite your projects. + +While you CAN access that data directly on the data store, it is HIGHLY recommended that you copy the data over to your instance (see "Cyverse data management" under "Collaborating on the cloud" for more information). This is because your work with the data will be dramatically faster with it located on your instance. + +Take, for instance, the treemap data. + +If we load and plot the data without moving it, it takes just a few seconds (i.e. ~2.973 seconds). Not bad. +``` +require(terra) +require(tictoc) +tictoc::tic() +treemap <- terra::rast("~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap/treemap2016_southernrockies.tif") +terra::plot(treemap) +tictoc::toc() +``` + +However, if we load and plot the data after moving it, it takes less than a second (i.e. ~0.302 seconds). Even better! This 10x increase in speed will add up incredibly quickly as soon as you start working more intensively with the data. + +``` +require(terra) +require(tictoc) +system("cp -r ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap ~/TreeMap") #move the data first!! +tictoc::tic() +treemap <- terra::rast("~/TreeMap/treemap2016_southernrockies.tif") +terra::plot(treemap) +tictoc::toc() +``` + +Takeaway: seriously, just copy the data over. \ No newline at end of file diff --git a/data-library/stac_mount_save/index.html b/data-library/stac_mount_save/index.html new file mode 100644 index 0000000..9badc95 --- /dev/null +++ b/data-library/stac_mount_save/index.html @@ -0,0 +1,2164 @@ + + + + + + + + + + + + + + + + + + + + + + Stream data - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    The art of making a data cube

    +

    Ty Tuff, ESIIL Data Scientist +2023-10-27

    +
    #library(Rcpp)
    +library(sf)
    +library(gdalcubes)
    +library(rstac)
    +library(gdalUtils)
    +library(terra)
    +library(rgdal)
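+# (rgdal and gdalUtils have since been retired from CRAN; they do not appear to be called directly anywhere below, so both library() calls can be dropped on newer installs)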
    +library(reshape2)
    +library(osmdata)
    +library(terra)
    +library(dplyr)
    +library(stars)
    +library(ggplot2)
    +library(colorspace)
    +library(geos)
    +library(osmdata)
    +library(ggthemes)
    +library(tidyr)
    +gdalcubes_options(parallel = 8)
    +
    +sf::sf_extSoftVersion()
    +##           GEOS           GDAL         proj.4 GDAL_with_GEOS     USE_PROJ_H 
    +##       "3.11.0"        "3.5.3"        "9.1.0"         "true"         "true" 
    +##           PROJ 
    +##        "9.1.0"
    +gdalcubes_gdal_has_geos()
    +## [1] TRUE
    +
    +library(osmdata)
    +library(dplyr)
    +library(sf)
    +library(terra)
    +library(tidyterra)
    +library(glue)
    +library(ggplot2)
    +library(ggthemes)
    +library(stars)
    +library(magrittr)
    +library(landsat)
    +
    +

    The philosophy of moving data in the cloud

    +

    The philosophy of moving data in the cloud represents a paradigm shift +in how we approach data within our analytical processes. Instead of the +traditional method of transferring entire datasets to our local +environments, the cloud encourages a more efficient model: bring your +analysis to the data. This approach minimizes data movement and +leverages the cloud’s computational power and scalability. By utilizing +cloud-native tools and services, we can run our analyses directly on the +data where it resides, selectively accessing and processing only what is +necessary. This not only streamlines workflows but also significantly +reduces overheads related to data transfer and storage management. In +essence, the focus is on diverting computational resources to the data +rather than the cumbersome and resource-intensive practice of moving +large datasets to and fro.

    +

    ‘To Make’ or ‘To Take’ a photo

    +

    The distinction between making and taking a photograph lies in the +approach and intent behind the camera. Taking a photo is often a +reactive process, where the photographer captures moments as they +naturally unfold, seizing the spontaneity of life without alteration. +It’s a passive form of photography where the emphasis is on the right +timing and the natural interplay of elements within the frame. On the +other hand, making a photo is a proactive and deliberate act. It is akin +to craftsmanship, where a professional photographer starts with a +concept and utilizes a variety of tools and techniques to stage and +construct the desired scene. They actively manipulate lighting, +composition, and subjects to create a photograph that aligns with their +pre-visualized artistic vision. While both methods use a camera to +produce a photograph, making a photo involves a creation process, +whereas taking a photo is about finding the scene.

    +

    David Yarrow is a famous photographer who ‘makes’ his photographs. + +

    +

    What does it mean to ‘make’ a data cube?

    +

    The artistry of Ansel Adams’ photography serves as a compelling analogy +for the meticulous craft of building a data cube from cloud data sources +using tools like STAC and GDAL VSI. Just as Adams would survey the +vastness of a landscape, discerning the interplay of light and shadow +upon the mountains before him, a data architect surveys the expanse of +available data. In this analogy, the raw data are the majestic mountains +and sweeping landscapes waiting to be captured. The STAC collection acts +as the photographer’s deliberate choice of scene, pointing the camera +lens—our data tools—towards the most telling and coherent dataset.

    +

    Just as Adams’ +photographs are more than mere records of a landscape, but rather a +confluence of his vision, technique, and the scene’s natural beauty, so +too is the data cube more than the sum of its parts. It is the artful +synthesis of information, crafted and composed with the skill and intent +of an artist, producing not just a tool for analysis but a harmonized, +data-driven portrait of the world it represents. The builder of the data +cube is, indeed, an artist, and the data cube their masterpiece, +revealing not just data, but a story, a perspective, a landscape sewn +from the raw material of cloud-sourced information.

    +

    As Adams would adjust his viewfinder, setting the boundaries of his +photographic frame, the data builder sets the view window, filtering and +transferring relevant data to their own medium, akin to Adams’ film. +This is where the raw data is transformed, organized into the structured +form of a data frame or data cube, a process not unlike the careful +development of a photograph in a darkroom. Here, the data cube creator, +much like Adams with his careful dodging and burning, harmonizes +disparate elements into a cohesive whole, each decision reflecting an +intention and vision for the final product.

    +

    1) The Rat through the Snake Problem: Scalability with Cloud Computing

    +

    Just like a snake that swallows a rat, traditional computing systems +often struggle to process the large volumes of environmental data — +they’re constrained by their static hardware limitations. Cloud +computing introduces a python-esque capability: massive scalability. By +migrating to the cloud, we essentially make the snake bigger, allowing +it to handle larger “prey.” Scalable computers in the cloud can grow +with the demand, providing the necessary computational power to process +extensive datasets, which is vital in a field where data volumes are +increasing exponentially.

    +

    Raster through a snake

    +

    2) The Antelope through the Python Problem: Streamlining with GDAL VSI

    +

    As we scale up, we encounter a new challenge: trying to pass an antelope +through a python — a metaphor for the next level of complexity in data +processing. The sheer size and complexity of the data can become +overwhelming. This is where GDAL’s Virtual File System (VSI) becomes our +ecological adaptation. VSI allows us to access remote data transparently +and more efficiently. Instead of ingesting the entire “antelope,” VSI +enables the “python” to dynamically access and process only the parts of +the data it needs, when it needs them, much like constriction before +digestion. This selective access minimizes the need for local storage +and expedites the data handling process.

    +

    Antelope through a Python
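To make the VSI idea concrete, here is a minimal sketch in R that reuses the HydroSHEDS DEM URL from the "Mounting data" section below (the Boulder-area window is purely illustrative): opening the remote, zipped raster only reads its header, and cropping to a small window fetches just the portions of the file that overlap it.

``` r
library(terra)
library(glue)

# Open the remote, zipped DEM: only metadata is read at this point
dem <- glue(
  "/vsizip/vsicurl/",
  "https://data.hydrosheds.org/file/hydrosheds-v1-dem/hyd_na_dem_15s.zip",
  "/hyd_na_dem_15s.tif"
) |>
  terra::rast()

# Crop to a small lon/lat window (roughly Boulder County, Colorado);
# only the parts of the file covering this window are actually fetched
boulder_window <- terra::ext(-105.7, -105.05, 39.9, 40.3)
dem_boulder <- terra::crop(dem, boulder_window)
```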

    +

    3) Drinking from a Fire Hose: Accelerated Inference with AI and ML

    +

    Once we’ve enabled the flow of large amounts of digestible data, we +encounter the metaphorical challenge of drinking from a fire hose. The +data, now flowing and accessible, is immense and rapid — posing a +challenge not just to store and process, but to understand and derive +meaning from in real-time. This is where artificial intelligence (AI) +and machine learning (ML) step in. These technologies act as a +sophisticated filtration system, enabling us to drink safely and +beneficially from the torrent. AI and ML can analyze patterns, make +predictions, and infer insights at a pace that keeps up with the fast +stream of data, turning raw information into actionable knowledge.

    +

    Inference through a firehose

    +

    By addressing these three pivotal challenges with cloud computing, GDAL +VSI, and AI/ML, we not only manage to consume the data effectively but +also transform our capabilities in environmental data science. We can +move from mere data ingestion to meaningful data interpretation, all at +a scale and speed necessary for impactful environmental analysis.

    +

    Mounting data

    +

    A void-filled Digital Elevation Model (DEM) is a comprehensive +topographical representation where any missing data points, known as +voids, have been filled in. These voids can occur due to various +reasons, such as clouds or technical errors during data collection. In a +void-filled DEM, these gaps are interpolated or estimated using the +surrounding data to create a continuous, seamless surface model. This +process enhances the utility and accuracy of the DEM for hydrological +modeling, terrain analysis, and other geographical applications. The +HydroSHEDS website +(https://www.hydrosheds.org/hydrosheds-core-downloads) provides access +to high-quality, void-filled DEM datasets like the +DEM_continuous_CONUS_15s, which users can download and easily integrate +into spatial analysis workflows using tools such as ‘terra’ in R, +allowing for sophisticated environmental and geographical research and +planning.

    +
    # Record start time
    +a <- Sys.time()  
    +
    +# Create a string with the file path using glue, then download and read the DEM file as a raster object
    +
    +DEM_continuous_CONUS_15s <- glue(
    +  "/vsizip/vsicurl/", #magic remote connection 
    +  "https://data.hydrosheds.org/file/hydrosheds-v1-dem/hyd_na_dem_15s.zip", #copied link to download location
    +  "/hyd_na_dem_15s.tif") %>% #path inside zip file
    +  terra::rast()  
    +
    +# The 'glue' function constructs the file path string, which is then passed to 'terra::rast()' to read the DEM file into R as a raster layer. '/vsizip/vsicurl/' is a special GDAL virtual file system syntax that allows reading directly from a zipped file on a remote server.
    +
    +# Record end time and calculate the time difference
    +b <- Sys.time()  
    +difftime(b, a) 
    +## Time difference of 4.603666 secs
    +
    +# The resulting raster object is stored in 'DEM_continuous_CONUS_15s', which now contains the void-filled DEM data ready for use
    +
    +DEM_continuous_CONUS_15s  # Prints out the details of the 'DEM_continuous_CONUS_15s' raster object
    +## class       : SpatRaster 
    +## dimensions  : 13920, 20640, 1  (nrow, ncol, nlyr)
    +## resolution  : 0.004166667, 0.004166667  (x, y)
    +## extent      : -138, -52, 5, 63  (xmin, xmax, ymin, ymax)
    +## coord. ref. : lon/lat WGS 84 (EPSG:4326) 
    +## source      : hyd_na_dem_15s.tif 
    +## name        : Band_1
    +
    +# output is a SpatRaster, which is the object type associated with the 'terra' package. 
    +
    +

    Continuous DEM for North America

    +
    # Record start time
    +a <- Sys.time()
    +
    +ggplot() +
    +  geom_spatraster(data=DEM_continuous_CONUS_15s) +
    +  theme_tufte()
    +
    +

    +
    b <- Sys.time()
    +difftime(b, a)
    +## Time difference of 52.49061 secs
    +
    +

    Calculate Slope from that DEM

    +
    SLOPE_continuous_CONUS_15s <-  terra::terrain(DEM_continuous_CONUS_15s, "slope") 
    +
    +SLOPE_continuous_CONUS_15s
    +## class       : SpatRaster 
    +## dimensions  : 13920, 20640, 1  (nrow, ncol, nlyr)
    +## resolution  : 0.004166667, 0.004166667  (x, y)
    +## extent      : -138, -52, 5, 63  (xmin, xmax, ymin, ymax)
    +## coord. ref. : lon/lat WGS 84 (EPSG:4326) 
    +## source(s)   : memory
    +## name        :    slope 
    +## min value   :  0.00000 
    +## max value   : 56.98691
    +
    +
    # Record start time
    +a <- Sys.time()
    +
    +ggplot() +
    +  geom_spatraster(data=SLOPE_continuous_CONUS_15s) +
    +  theme_tufte()
    +
    +

    +
    b <- Sys.time()
    +difftime(b, a)
    +## Time difference of 3.859545 secs
    +
    +

    Calculate aspect from DEM

    +
    ASPECT_continuous_CONUS_15s <-  terra::terrain(DEM_continuous_CONUS_15s, "aspect") 
    +ASPECT_continuous_CONUS_15s
    +## class       : SpatRaster 
    +## dimensions  : 13920, 20640, 1  (nrow, ncol, nlyr)
    +## resolution  : 0.004166667, 0.004166667  (x, y)
    +## extent      : -138, -52, 5, 63  (xmin, xmax, ymin, ymax)
    +## coord. ref. : lon/lat WGS 84 (EPSG:4326) 
    +## source(s)   : memory
    +## name        : aspect 
    +## min value   :      0 
    +## max value   :    360
    +
    +
    # Record start time
    +a <- Sys.time()
    +
    +ggplot() +
    +  geom_spatraster(data=ASPECT_continuous_CONUS_15s) +
    +  theme_tufte()
    +
    +

    +
    b <- Sys.time()
    +difftime(b, a)
    +## Time difference of 3.650267 secs
    +
    +

    Create a cube from those layers!

    +
    mini_stack <- c(DEM_continuous_CONUS_15s, SLOPE_continuous_CONUS_15s,ASPECT_continuous_CONUS_15s)
    +mini_stack
    +## class       : SpatRaster 
    +## dimensions  : 13920, 20640, 3  (nrow, ncol, nlyr)
    +## resolution  : 0.004166667, 0.004166667  (x, y)
    +## extent      : -138, -52, 5, 63  (xmin, xmax, ymin, ymax)
    +## coord. ref. : lon/lat WGS 84 (EPSG:4326) 
    +## sources     : hyd_na_dem_15s.tif  
    +##               memory  
    +##               memory  
    +## names       : Band_1,    slope, aspect 
    +## min values  :     ? ,  0.00000,      0 
    +## max values  :     ? , 56.98691,    360
    +
    +

    Reproject and return the bounding box coordinates for our Area of +Interest

    +
    # Transform the filtered geometry to EPSG:4326 and store its bounding box
    +# Record start time
    +a <- Sys.time()
    +
    +DEM_continuous_CONUS_15s |>
    +stars::st_as_stars() |> 
    +  st_transform("EPSG:4326") |>
    +  st_bbox() -> bbox_4326
    +
    +
    +DEM_continuous_CONUS_15s |>
    +stars::st_as_stars() |> 
    +  st_transform("EPSG:32618") |>
    +  st_bbox() -> bbox_32618
    +
    +b <- Sys.time()
    +difftime(b, a)
    +## Time difference of 3.7653 mins
    +
    +

Get a polygon for Boulder County, reproject it, and return its bounding box. This gives us a smaller area to search for in the STAC catalog.

    +
    boulder_county <- getbb("boulder, co", format_out="sf_polygon")
    +
    +boulder_county$multipolygon |> 
    +  st_transform(crs =4326 ) |>
    +  st_bbox() -> bbox_4326_boulder
    +
    +boulder_county$multipolygon |> 
    +  st_transform(crs =32720 ) |>
    +  st_bbox() -> bbox_32720_boulder
    +
    +

    Get a polygon for the United States and crop it to be the same size as +the DEM above.

    +
    aoi <- getbb("United States", format_out="sf_polygon")
    +
    +conus <- aoi$multipolygon |>
    +  st_crop(bbox_4326)
    +
    +
    +ggplot(data=conus) +
    +  geom_sf()
    +
    +

    +

Search the STAC catalog.

    +

    STAC, or SpatioTemporal Asset Catalog, is an open-source specification +designed to standardize the way geospatial data is indexed and +discovered. Developed by Element 84 among others, it facilitates better +interoperability and sharing of geospatial assets by providing a common +language for describing them. STAC’s flexible design allows for easy +cataloging of data, making it simpler for individuals and systems to +search and retrieve geospatial information. By effectively organizing +data about the Earth’s spatial and temporal characteristics, STAC +enables users to harness the full power of the cloud and modern data +processing technologies, optimizing the way we access and analyze +environmental data on a global scale.

    +
     stac("https://earth-search.aws.element84.com/v1") |>
    +       get_request()
    +## ###STACCatalog
    +## - id: earth-search-aws
    +## - description: A STAC API of public datasets on AWS
    +## - field(s): stac_version, type, id, title, description, links, conformsTo
    +
    +

    Element 84’s Earth Search is a STAC compliant search and discovery API +that offers users access to a vast collection of geospatial open +datasets hosted on AWS. It serves as a centralized search catalog +providing standardized metadata for these open datasets, designed to be +freely used and integrated into various applications. Alongside the API, +Element 84 also provides a web application named Earth Search Console, +which is map-centric and allows users to explore and visualize the data +contained within the Earth Search API’s catalog. This suite of tools is +part of Element 84’s initiative to make geospatial data more accessible +and actionable for a wide range of users and applications.
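As an illustrative aside (not part of the original workflow), rstac's `collections()` helper can list what the Earth Search v1 endpoint opened above actually serves; note that the `collection_formats()` chunk that follows is a different concept, enumerating the image-collection formats that gdalcubes itself knows how to read.

``` r
library(rstac)

# List the collections available from Earth Search v1
earth_search <- stac("https://earth-search.aws.element84.com/v1")

cols <- earth_search |>
  collections() |>
  get_request()

# One id per collection (e.g. "sentinel-2-l2a")
vapply(cols$collections, function(x) x$id, character(1))
```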

    +
    collection_formats()
    +##    CHIRPS_v2_0_daily_p05_tif | Image collection format for CHIRPS v 2.0 daily
    +##                              | global precipitation dataset (0.05 degrees
    +##                              | resolution) from GeoTIFFs, expects list of .tif
    +##                              | or .tif.gz files as input. [TAGS: CHIRPS,
    +##                              | precipitation]
    +##  CHIRPS_v2_0_monthly_p05_tif | Image collection format for CHIRPS v 2.0 monthly
    +##                              | global precipitation dataset (0.05 degrees
    +##                              | resolution) from GeoTIFFs, expects list of .tif
    +##                              | or .tif.gz files as input. [TAGS: CHIRPS,
    +##                              | precipitation]
    +##            ESA_CCI_SM_ACTIVE | Collection format for ESA CCI soil moisture
    +##                              | active product (version 4.7) [TAGS: Soil
    +##                              | Moisture, ESA, CCI]
    +##           ESA_CCI_SM_PASSIVE | Collection format for ESA CCI soil moisture
    +##                              | passive product (version 4.7) [TAGS: Soil
    +##                              | Moisture, ESA, CCI]
    +##    GPM_IMERG_3B_DAY_GIS_V06A | Collection format for daily
    +##                              | IMERG_3B_DAY_GIS_V06A data [TAGS: Precipitation,
    +##                              | GPM, IMERG]
    +##                      L8_L1TP | Collection format for Landsat 8 Level 1 TP
    +##                              | product [TAGS: Landsat, USGS, Level 1, NASA]
    +##                        L8_SR | Collection format for Landsat 8 surface
    +##                              | reflectance product [TAGS: Landsat, USGS, Level
    +##                              | 2, NASA, surface reflectance]
    +##                        MAXAR | Preliminary collection format for MAXAR open
    +##                              | data, visual only (under development) [TAGS: ]
    +##                      MxD09GA | Collection format for selected bands from the
    +##                              | MODIS MxD09GA (Aqua and Terra) product [TAGS:
    +##                              | MODIS, surface reflectance]
    +##                      MxD10A2 | Collection format for selected bands from the
    +##                              | MODIS MxD10A2 (Aqua and Terra) v006 Snow Cover
    +##                              | product [TAGS: MODIS, Snow Cover]
    +##                      MxD11A1 | Collection format for selected bands from the
    +##                              | MODIS MxD11A2 (Aqua and Terra) v006 Land Surface
    +##                              | Temperature product [TAGS: MODIS, LST]
    +##                      MxD11A2 | Collection format for selected bands from the
    +##                              | MODIS MxD11A2 (Aqua and Terra) v006 Land Surface
    +##                              | Temperature product [TAGS: MODIS, LST]
    +##                      MxD13A2 | Collection format for selected bands from the
    +##                              | MODIS MxD13A2 (Aqua and Terra) product [TAGS:
    +##                              | MODIS, VI, NDVI, EVI]
    +##                      MxD13A3 | Collection format for selected bands from the
    +##                              | MODIS MxD13A3 (Aqua and Terra) product [TAGS:
    +##                              | MODIS, VI, NDVI, EVI]
    +##                      MxD13Q1 | Collection format for selected bands from the
    +##                              | MODIS MxD13Q1 (Aqua and Terra) product [TAGS:
    +##                              | MODIS, VI, NDVI, EVI]
    +##                      MxD14A2 | Collection format for the MODIS MxD14A2 (Aqua
    +##                              | and Terra) product [TAGS: MODIS, Fire]
    +## PlanetScope_3B_AnalyticMS_SR | Image collection format for PlanetScope 4-band
    +##                              | scenes [TAGS: PlanetScope, BOA, Surface
    +##                              | Reflectance]
    +##                Sentinel2_L1C | Image collection format for Sentinel 2 Level 1C
    +##                              | data as downloaded from the Copernicus Open
    +##                              | Access Hub, expects a list of file paths as
    +##                              | input. The format works on original ZIP
    +##                              | compressed as well as uncompressed imagery.
    +##                              | [TAGS: Sentinel, Copernicus, ESA, TOA]
    +##            Sentinel2_L1C_AWS | Image collection format for Sentinel 2 Level 1C
    +##                              | data in AWS [TAGS: Sentinel, Copernicus, ESA,
    +##                              | TOA]
    +##                Sentinel2_L2A | Image collection format for Sentinel 2 Level 2A
    +##                              | data as downloaded from the Copernicus Open
    +##                              | Access Hub, expects a list of file paths as
    +##                              | input. The format should work on original ZIP
    +##                              | compressed as well as uncompressed imagery.
    +##                              | [TAGS: Sentinel, Copernicus, ESA, BOA, Surface
    +##                              | Reflectance]
    +##          Sentinel2_L2A_THEIA | Image collection format for Sentinel 2 Level 2A
    +##                              | data as downloaded from Theia. [TAGS: Sentinel,
    +##                              | ESA, Flat Reflectance, Theia]
    +
    +

Building a STAC collection by aiming your camera at the landscape

    +

    Creating a STAC collection is akin to a photographer framing a shot; the +landscape is rich with diverse data, mirroring a scene bustling with +potential subjects, colors, and light. Just as a photographer selects a +portion of the vista to capture, focusing on elements that will compose +a compelling image, a data scientist must similarly navigate the vast +data terrain. They must ‘point their camera’ judiciously, ensuring that +the ‘frame’ encapsulates the precise data needed. This careful selection +is crucial, as it determines the relevance and quality of the data +collection, much like the photographer’s choice dictates the story a +photograph will tell.

    +

    +
    # Record start time
    +a <- Sys.time()
    +
    +# Initialize STAC connection
    +s = stac("https://earth-search.aws.element84.com/v0")
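+# Note: this is the legacy v0 endpoint, where the collection is named "sentinel-s2-l2a-cogs"; on the current v1 API (used earlier in this document) the equivalent collection id is "sentinel-2-l2a"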
    +
    +
    +# Search for Sentinel-2 images within specified bounding box and date range
    +#22 Million items
    +items = s |>
    +    stac_search(collections = "sentinel-s2-l2a-cogs",
    +                bbox = c(bbox_4326_boulder["xmin"], 
    +                         bbox_4326_boulder["ymin"],
    +                         bbox_4326_boulder["xmax"], 
    +                         bbox_4326_boulder["ymax"]), 
    +                datetime = "2021-05-15/2021-05-16") |>
    +    post_request() |>
    +    items_fetch(progress = FALSE)
    +
    +# Print number of found items
    +length(items$features)
    +## [1] 1
    +
    +# Prepare the assets for analysis
    +library(gdalcubes)
    +assets = c("B01", "B02", "B03", "B04", "B05", "B06", 
    +           "B07", 
    +           "B08", "B8A", "B09", "B11", "B12", "SCL")
    +s2_collection = stac_image_collection(items$features, asset_names = assets,
    +property_filter = function(x) {x[["eo:cloud_cover"]] < 20}) #all images with less than 20% clouds
    +
    +b <- Sys.time()
    +difftime(b, a)
    +## Time difference of 0.4706092 secs
    +
    +# Display the image collection
    +s2_collection
    +## Image collection object, referencing 1 images with 13 bands
    +## Images:
    +##                       name      left      top   bottom     right
    +## 1 S2B_13TDE_20210516_0_L2A -106.1832 40.65079 39.65576 -104.8846
    +##              datetime        srs
    +## 1 2021-05-16T18:02:54 EPSG:32613
    +## 
    +## Bands:
    +##    name offset scale unit nodata image_count
    +## 1   B01      0     1                       1
    +## 2   B02      0     1                       1
    +## 3   B03      0     1                       1
    +## 4   B04      0     1                       1
    +## 5   B05      0     1                       1
    +## 6   B06      0     1                       1
    +## 7   B07      0     1                       1
    +## 8   B08      0     1                       1
    +## 9   B09      0     1                       1
    +## 10  B11      0     1                       1
    +## 11  B12      0     1                       1
    +## 12  B8A      0     1                       1
    +## 13  SCL      0     1                       1
    +
    +

    Setting up your camera and film

    +

    The camera through which the data scientist frames the shot is +multifaceted, akin to the tools and processes they employ. The camera’s +film, analogous to the data cube, defines the resolution and dimensions +of the captured data, shaping how the final dataset will be utilized. +The lens and its settings—focus, aperture, and exposure—determine the +clarity, depth, and breadth of the captured information, much like the +algorithms and parameters set by the data scientist dictate the +granularity and scope of the data cube. The flash, like data enhancement +techniques, can illuminate hidden details, ensuring that the data cube, +the final product, is as informative and accurate as the landscape it +represents.

    +

    +
    # Record start time
    +a <- Sys.time()
    +
    +# Define a specific view on the satellite image collection
    +v = cube_view(
    +    srs = "EPSG:32720", #this is harder than expected. 
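+    # Careful: EPSG:32720 is UTM zone 20S, while the Sentinel-2 scene found above is natively EPSG:32613 (UTM zone 13N). Also, st_bbox() returns xmin, ymin, xmax, ymax, so the positional indices in 'extent' below mix eastings with northings -- both quirks contribute to the enormous x/y pixel counts reported in the output.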
    +    dx = 100, 
    +    dy = 100, 
    +    dt = "P1M", 
    +    aggregation = "median", 
    +    resampling = "near",
    +    extent = list(
    +        t0 = "2021-05-15", 
    +        t1 = "2021-05-16",
    +        left = bbox_32720_boulder[1], 
    +        right = bbox_32720_boulder[2],
    +        top = bbox_32720_boulder[4], 
    +        bottom = bbox_32720_boulder[3]
    +    )
    +)
    +
    +b <- Sys.time()
    +difftime(b, a)
    +## Time difference of 0.002738953 secs
    +
    +# Display the defined view
    +v
    +## A data cube view object
    +## 
    +## Dimensions:
    +##                 low             high  count pixel_size
    +## t        2021-05-01       2021-05-31      1        P1M
    +## y -3103099.52398788 15434400.4760121 185375        100
    +## x -3178878.98542359 15369521.0145764 185484        100
    +## 
    +## SRS: "EPSG:32720"
    +## Temporal aggregation method: "median"
    +## Spatial resampling method: "near"
    +
    +

    Take a picture!

    +

    Raster style

    +
    # Record start time
    +a <- Sys.time()
    +
    +s2_collection |>
    +    raster_cube(v) |>
    +    select_bands(c( "B04", "B05"))  |>
    +  apply_pixel(c("(B05-B04)/(B05+B04)"), names="NDVI") |>
    +  write_tif() |>
    +  raster::stack() -> x
    +x
    +## class      : RasterStack 
    +## dimensions : 185375, 185484, 34384096500, 1  (nrow, ncol, ncell, nlayers)
    +## resolution : 100, 100  (x, y)
    +## extent     : -3178879, 15369521, -3103100, 15434400  (xmin, xmax, ymin, ymax)
    +## crs        : +proj=utm +zone=20 +south +datum=WGS84 +units=m +no_defs 
    +## names      : NDVI
    +
    +b <- Sys.time()
    +difftime(b, a)
    +## Time difference of 4.132932 mins
    +
    +

    STARS style

    +
    # Record start time
    +a <- Sys.time()
    +
    +s2_collection |>
    +    raster_cube(v) |>
    +    select_bands(c("B04","B05"))  |>
    +  apply_pixel(c("(B05-B04)/(B05+B04)"), names="NDVI") |>
    +  stars::st_as_stars() -> y
    +
    +b <- Sys.time()
    +difftime(b, a)
    +## Time difference of 1.459866 mins
    +
    +y
    +## stars_proxy object with 1 attribute in 1 file(s):
    +## $NDVI
    +## [1] "[...]/filec5982c38536c.nc:NDVI"
    +## 
    +## dimension(s):
    +##      from     to   offset delta                refsys point
    +## x       1 185484 -3178879   100 WGS 84 / UTM zone 20S    NA
    +## y       1 185375 15434400  -100 WGS 84 / UTM zone 20S    NA
    +## time    1      1       NA    NA               POSIXct FALSE
    +##                       values x/y
    +## x                       NULL [x]
    +## y                       NULL [y]
    +## time [2021-05-01,2021-06-01)
    +
    +

    Extract data

    +
    # Record start time
    +a <- Sys.time()
    +
    +
    +x <- s2_collection |>
    +    raster_cube(v) |>
    +    select_bands(c("B01", "B02", "B03", "B04", 
    +                   "B05", "B06", "B07", "B08", 
    +                   "B8A", "B09", "B11", "B12")) |>
    +    extract_geom(boulder_county$multipolygon) |>
    +    rename(
    +        "time" = "time",
    +        "443" = "B01",
    +        "490" = "B02",
    +        "560" = "B03",
    +        "665" = "B04",
    +        "705" = "B05",
    +        "740" = "B06",
    +        "783" = "B07",
    +        "842" = "B08",
    +        "865" = "B8A",
    +        "940" = "B09",
    +        "1610" = "B11",
    +        "2190" = "B12"
    +    )
    +
    +b <- Sys.time()
    +difftime(b, a)
    +## Time difference of 1.699016 mins
    +
    +head(x)
    +##   FID       time   443   490   560   665   705   740   783   842  865  940 1610
    +## 1   1 2021-05-01 11096 10929 10224  9893  9956  9706  9715  9641 9511 8459 5682
    +## 2   1 2021-05-01 11631 11282 10550 10234 10288 10031 10032  9988 9828 9153 5802
    +## 3   1 2021-05-01 11900 11393 10666 10337 10398 10142 10138 10093 9927 9461 5754
    +## 4   1 2021-05-01 11406 10597  9928  9626  9694  9481  9516  9338 9336 8959 5726
    +## 5   1 2021-05-01 11399 10939 10237  9905  9978  9738  9746  9633 9555 8925 5831
    +## 6   1 2021-05-01 11600 11174 10462 10147 10209  9952  9960  9890 9760 9153 5773
    +##   2190
    +## 1 3917
    +## 2 3981
    +## 3 3937
    +## 4 4054
    +## 5 4097
    +## 6 3990
    +
    +

    Make a timeseries

    +
    # Record start time
    +a <- Sys.time()
    +
    +items <- s |>
    +    stac_search(collections = "sentinel-s2-l2a-cogs",
    +                bbox = c(-105.694362,   39.912886,  -105.052774,    40.262785),
    +                datetime = "2020-01-01/2022-12-31",
    +                limit = 500) %>% 
    +    post_request() 
    +
    +S2.mask = image_mask("SCL", values=c(3,8,9))
    +
    +col = stac_image_collection(items$features, asset_names = assets, 
    +                            property_filter = function(x) {x[["eo:cloud_cover"]] < 30})
    +
    +v = cube_view(srs = "EPSG:4326",  extent = list(t0 = "2020-01-01", t1 = "2022-12-31",
    +              left = -105.694362, right = -105.052774,  top = 40.262785, bottom = 39.912886),
    +              dx = 0.001, dy = 0.001, dt = "P1M", aggregation = "median", resampling = "bilinear")
    +
    +library(colorspace)
    +ndvi.col = function(n) {
    +  rev(sequential_hcl(n, "Green-Yellow"))
    +}
    +library(gdalcubes)
    +raster_cube(col, v, mask = S2.mask) |>
    +    select_bands(c("B04", "B08")) |>
    +    apply_pixel("(B08-B04)/(B08+B04)", "NDVI") |>
    +    gdalcubes::animate(col = ndvi.col, zlim=c(-0.2,1), key.pos = 1, save_as = "anim.gif", fps = 4)
    +## [1] "/Users/ty/Documents/Github/hackathon2023_datacube/docs/code_for_building_cube/anim.gif"
    +
    +b <- Sys.time()
    +difftime(b, a)
    +## Time difference of 4.716672 mins
    +
    +y
    +## stars_proxy object with 1 attribute in 1 file(s):
    +## $NDVI
    +## [1] "[...]/filec5982c38536c.nc:NDVI"
    +## 
    +## dimension(s):
    +##      from     to   offset delta                refsys point
    +## x       1 185484 -3178879   100 WGS 84 / UTM zone 20S    NA
    +## y       1 185375 15434400  -100 WGS 84 / UTM zone 20S    NA
    +## time    1      1       NA    NA               POSIXct FALSE
    +##                       values x/y
    +## x                       NULL [x]
    +## y                       NULL [y]
    +## time [2021-05-01,2021-06-01)
    +
    +

    +

Saving Data Cubes to Local Storage

There are occasions when we need to manipulate data cubes using other software. For such purposes, we can save data cubes to our local disk as individual netCDF files or as a series of GeoTIFF files. In the case of the latter, each temporal segment of the cube is saved as a separate (multiband) GeoTIFF file.

    +

    Both netCDF and GeoTIFF formats allow for file size reduction through +compression and data packing. This process involves transforming double +precision numbers into smaller integer values using a scale and offset, +which can be particularly useful for managing disk space (for more +details, refer to the ?write_ncdf and ?write_tif documentation).

    +
    gdalcubes_options(ncdf_compression_level = 1)
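+# 'cube' below stands for any gdalcubes data cube object, e.g. cube <- raster_cube(s2_collection, v) from the chunks above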
    +write_ncdf(cube, file.path("~/Desktop", basename(tempfile(fileext = ".nc"))))
    +gdalcubes_options(ncdf_compression_level = 0)
    +
    +

    write_tif() and write_ncdf() both return the path(s) to created file(s) +as a character vector.
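For example (a minimal sketch, again assuming a data cube object named `cube` as above), you can capture those paths and pass them to other software:

``` r
# Write one (multiband) GeoTIFF per time slice to a temporary directory
# and keep the returned file paths
tif_files <- write_tif(cube, dir = tempdir(), prefix = "NDVI_")
print(tif_files)  # character vector, one GeoTIFF path per time slice
```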

    +
    items_2020 <- s |>
    +    stac_search(collections = "sentinel-s2-l2a-cogs",
    +                bbox = c(-105.694362,   39.912886,  -105.052774,    40.262785),
    +                datetime = "2020-05-01/2020-06-30") |> 
    +    post_request() 
    +
    +items_2021 <- s |>
    +    stac_search(collections = "sentinel-s2-l2a-cogs",
    +                bbox = c(-105.694362,   39.912886,  -105.052774,    40.262785),
    +                datetime = "2021-05-01/2021-06-30") |> 
    +    post_request() 
    +
    +
    +col_2020 = stac_image_collection(items_2020$features, asset_names = assets)
    +col_2021 = stac_image_collection(items_2021$features, asset_names = assets)
    +
    +v_2020 = cube_view(srs = "EPSG:32720",  extent = list(t0 = "2020-05-01", t1 = "2020-06-30",
    +              left = bbox_32720_boulder["xmin"], right = bbox_32720_boulder["xmax"],  top = bbox_32720_boulder["ymax"], bottom = bbox_32720_boulder["ymin"]),
    +              dx = 100, dy = 100, dt = "P1D", aggregation = "median", resampling = "bilinear")
    +
    +v_2021 = cube_view(v_2020, extent = list(t0 = "2021-05-01", t1 = "2021-06-30"))
    +
    +
    +max_ndvi_mosaic <- function(col, v) {
    +    raster_cube(col, v) |>
    +    select_bands(c("B04", "B08")) |>
    +    apply_pixel(c("(B08-B04)/(B08+B04)"), names="NDVI") |>
    +    reduce_time("max(NDVI)")
    +}
    +
+suppressPackageStartupMessages(library(stars))
+# Convert each annual maximum-NDVI result to a stars object so that the
+# subtraction and masking below work (gdalcubes cubes do not support
+# arithmetic between cubes directly)
+max_ndvi_mosaic(col_2020, v_2020) |> st_as_stars() -> maxndvi_2020
+
+max_ndvi_mosaic(col_2021, v_2021) |> st_as_stars() -> maxndvi_2021
    +
    +maxndvi_2021
    +maxndvi_2020
    +
    +difference = maxndvi_2021 - maxndvi_2020
    +difference[difference > -0.15] = NA
+names(difference) <- "Difference of max NDVI (2021 - 2020)"
    +
    +
    flood_polygon_data3 <- glue("/vsizip/vsicurl/https://data.hydrosheds.org/file/hydrosheds-associated/gloric/GloRiC_v10_shapefile.zip/GloRiC_v10_shapefile/GloRiC_v10.shp") |>
    +  st_read() |>
    +  st_as_sf(coords = c("lon","lat"))
    +
    +flood_polygon_data3
    +
    +
    #st_read("/Users/ty/Downloads/GloRiC_v10_geodatabase/GloRiC_v10.gdb")
    +
    +flood_polygon_data3 <- glue("/vsizip/vsicurl/https://data.hydrosheds.org/file/hydrosheds-associated/gloric/GloRiC_v10_geodatabase.zip/GloRiC_v10_geodatabase/GloRiC_v10.gdb") |>
    +  st_read() |>
    +  st_as_sf(coords = c("lon","lat"))
    +
    +flood_polygon_data3
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/data-library/stac_mount_save/stac_mount_save.md b/data-library/stac_mount_save/stac_mount_save.md new file mode 100644 index 0000000..fa03684 --- /dev/null +++ b/data-library/stac_mount_save/stac_mount_save.md @@ -0,0 +1,871 @@ +The art of making a data cube +================ +Ty Tuff, ESIIL Data Scientist +2023-10-27 + +``` r + +#library(Rcpp) +library(sf) +library(gdalcubes) +library(rstac) +library(gdalUtils) +library(terra) +library(rgdal) +library(reshape2) +library(osmdata) +library(terra) +library(dplyr) +library(stars) +library(ggplot2) +library(colorspace) +library(geos) +library(osmdata) +library(ggthemes) +library(tidyr) +gdalcubes_options(parallel = 8) + +sf::sf_extSoftVersion() +## GEOS GDAL proj.4 GDAL_with_GEOS USE_PROJ_H +## "3.11.0" "3.5.3" "9.1.0" "true" "true" +## PROJ +## "9.1.0" +gdalcubes_gdal_has_geos() +## [1] TRUE + +library(osmdata) +library(dplyr) +library(sf) +library(terra) +library(tidyterra) +library(glue) +library(ggplot2) +library(ggthemes) +library(stars) +library(magrittr) +library(landsat) +``` + +# The philosophy of moving data in the cloud + +The philosophy of moving data in the cloud represents a paradigm shift +in how we approach data within our analytical processes. Instead of the +traditional method of transferring entire datasets to our local +environments, the cloud encourages a more efficient model: bring your +analysis to the data. This approach minimizes data movement and +leverages the cloud’s computational power and scalability. By utilizing +cloud-native tools and services, we can run our analyses directly on the +data where it resides, selectively accessing and processing only what is +necessary. This not only streamlines workflows but also significantly +reduces overheads related to data transfer and storage management. In +essence, the focus is on diverting computational resources to the data +rather than the cumbersome and resource-intensive practice of moving +large datasets to and fro. + +## ‘To Make’ or ‘To Take’ a photo + +The distinction between making and taking a photograph lies in the +approach and intent behind the camera. Taking a photo is often a +reactive process, where the photographer captures moments as they +naturally unfold, seizing the spontaneity of life without alteration. +It’s a passive form of photography where the emphasis is on the right +timing and the natural interplay of elements within the frame. On the +other hand, making a photo is a proactive and deliberate act. It is akin +to craftsmanship, where a professional photographer starts with a +concept and utilizes a variety of tools and techniques to stage and +construct the desired scene. They actively manipulate lighting, +composition, and subjects to create a photograph that aligns with their +pre-visualized artistic vision. While both methods use a camera to +produce a photograph, making a photo involves a creation process, +whereas taking a photo is about finding the scene. + +David Yarrow is a famous photographer who ‘makes’ his photographs. +![](../assets/esiil_content/stac_mount_save/David-Yarrow-sorrel-sky-gallery-Photographic-Print-Cindys-Shotgun-Wedding.png) +![](../assets/esiil_content/stac_mount_save/bison.png) + +## What does it mean to ‘make’ a data cube? + +The artistry of Ansel Adams’ photography serves as a compelling analogy +for the meticulous craft of building a data cube from cloud data sources +using tools like STAC and GDAL VSI. 
Just as Adams would survey the +vastness of a landscape, discerning the interplay of light and shadow +upon the mountains before him, a data architect surveys the expanse of +available data. In this analogy, the raw data are the majestic mountains +and sweeping landscapes waiting to be captured. The STAC collection acts +as the photographer’s deliberate choice of scene, pointing the camera +lens—our data tools—towards the most telling and coherent dataset. + +![](../assets/stac_mount_save/Ansel_Adams_datacube.png) Just as Adams’ +photographs are more than mere records of a landscape, but rather a +confluence of his vision, technique, and the scene’s natural beauty, so +too is the data cube more than the sum of its parts. It is the artful +synthesis of information, crafted and composed with the skill and intent +of an artist, producing not just a tool for analysis but a harmonized, +data-driven portrait of the world it represents. The builder of the data +cube is, indeed, an artist, and the data cube their masterpiece, +revealing not just data, but a story, a perspective, a landscape sewn +from the raw material of cloud-sourced information. + +As Adams would adjust his viewfinder, setting the boundaries of his +photographic frame, the data builder sets the view window, filtering and +transferring relevant data to their own medium, akin to Adams’ film. +This is where the raw data is transformed, organized into the structured +form of a data frame or data cube, a process not unlike the careful +development of a photograph in a darkroom. Here, the data cube creator, +much like Adams with his careful dodging and burning, harmonizes +disparate elements into a cohesive whole, each decision reflecting an +intention and vision for the final product. + +## 1) The Rat through the Snake Problem: Scalability with Cloud Computing + +Just like a snake that swallows a rat, traditional computing systems +often struggle to process the large volumes of environmental data — +they’re constrained by their static hardware limitations. Cloud +computing introduces a python-esque capability: massive scalability. By +migrating to the cloud, we essentially make the snake bigger, allowing +it to handle larger “prey.” Scalable computers in the cloud can grow +with the demand, providing the necessary computational power to process +extensive datasets, which is vital in a field where data volumes are +increasing exponentially. + +![Raster through a snake](../assets/esiil_content/stac_mount_save/mouseinsnake.png) + +## 2) The Antelope through the Python Problem: Streamlining with GDAL VSI + +As we scale up, we encounter a new challenge: trying to pass an antelope +through a python — a metaphor for the next level of complexity in data +processing. The sheer size and complexity of the data can become +overwhelming. This is where GDAL’s Virtual File System (VSI) becomes our +ecological adaptation. VSI allows us to access remote data transparently +and more efficiently. Instead of ingesting the entire “antelope,” VSI +enables the “python” to dynamically access and process only the parts of +the data it needs, when it needs them, much like constriction before +digestion. This selective access minimizes the need for local storage +and expedites the data handling process. 
+ +![Antelope through a Python](../assets/esiil_content/stac_mount_save/antelopeinpython.png) + +## 3) Drinking from a Fire Hose: Accelerated Inference with AI and ML + +Once we’ve enabled the flow of large amounts of digestible data, we +encounter the metaphorical challenge of drinking from a fire hose. The +data, now flowing and accessible, is immense and rapid — posing a +challenge not just to store and process, but to understand and derive +meaning from in real-time. This is where artificial intelligence (AI) +and machine learning (ML) step in. These technologies act as a +sophisticated filtration system, enabling us to drink safely and +beneficially from the torrent. AI and ML can analyze patterns, make +predictions, and infer insights at a pace that keeps up with the fast +stream of data, turning raw information into actionable knowledge. + +![Inference through a firehose](../assets/esiil_content/stac_mount_save/drink-firehose.png) + +By addressing these three pivotal challenges with cloud computing, GDAL +VSI, and AI/ML, we not only manage to consume the data effectively but +also transform our capabilities in environmental data science. We can +move from mere data ingestion to meaningful data interpretation, all at +a scale and speed necessary for impactful environmental analysis. + +### Mounting data + +A void-filled Digital Elevation Model (DEM) is a comprehensive +topographical representation where any missing data points, known as +voids, have been filled in. These voids can occur due to various +reasons, such as clouds or technical errors during data collection. In a +void-filled DEM, these gaps are interpolated or estimated using the +surrounding data to create a continuous, seamless surface model. This +process enhances the utility and accuracy of the DEM for hydrological +modeling, terrain analysis, and other geographical applications. The +HydroSHEDS website +(https://www.hydrosheds.org/hydrosheds-core-downloads) provides access +to high-quality, void-filled DEM datasets like the +DEM_continuous_CONUS_15s, which users can download and easily integrate +into spatial analysis workflows using tools such as ‘terra’ in R, +allowing for sophisticated environmental and geographical research and +planning. + +``` r +# Record start time +a <- Sys.time() + +# Create a string with the file path using glue, then download and read the DEM file as a raster object + +DEM_continuous_CONUS_15s <- glue( + "/vsizip/vsicurl/", #magic remote connection + "https://data.hydrosheds.org/file/hydrosheds-v1-dem/hyd_na_dem_15s.zip", #copied link to download location + "/hyd_na_dem_15s.tif") %>% #path inside zip file + terra::rast() + +# The 'glue' function constructs the file path string, which is then passed to 'terra::rast()' to read the DEM file into R as a raster layer. '/vsizip/vsicurl/' is a special GDAL virtual file system syntax that allows reading directly from a zipped file on a remote server. + +# Record end time and calculate the time difference +b <- Sys.time() +difftime(b, a) +## Time difference of 4.603666 secs + +# The resulting raster object is stored in 'DEM_continuous_CONUS_15s', which now contains the void-filled DEM data ready for use + +DEM_continuous_CONUS_15s # Prints out the details of the 'DEM_continuous_CONUS_15s' raster object +## class : SpatRaster +## dimensions : 13920, 20640, 1 (nrow, ncol, nlyr) +## resolution : 0.004166667, 0.004166667 (x, y) +## extent : -138, -52, 5, 63 (xmin, xmax, ymin, ymax) +## coord. ref. 
: lon/lat WGS 84 (EPSG:4326) +## source : hyd_na_dem_15s.tif +## name : Band_1 + +# output is a SpatRaster, which is the object type associated with the 'terra' package. +``` + +Continuous DEM for North America + +``` r +# Record start time +a <- Sys.time() + +ggplot() + + geom_spatraster(data=DEM_continuous_CONUS_15s) + + theme_tufte() +``` + +![](stac_mount_save_files/figure-gfm/DEM_plot-1.png) + +``` r + +b <- Sys.time() +difftime(b, a) +## Time difference of 52.49061 secs +``` + +Calculate Slope from that DEM + +``` r +SLOPE_continuous_CONUS_15s <- terra::terrain(DEM_continuous_CONUS_15s, "slope") + +SLOPE_continuous_CONUS_15s +## class : SpatRaster +## dimensions : 13920, 20640, 1 (nrow, ncol, nlyr) +## resolution : 0.004166667, 0.004166667 (x, y) +## extent : -138, -52, 5, 63 (xmin, xmax, ymin, ymax) +## coord. ref. : lon/lat WGS 84 (EPSG:4326) +## source(s) : memory +## name : slope +## min value : 0.00000 +## max value : 56.98691 +``` + +``` r +# Record start time +a <- Sys.time() + +ggplot() + + geom_spatraster(data=SLOPE_continuous_CONUS_15s) + + theme_tufte() +``` + +![](stac_mount_save_files/figure-gfm/SLOPE_plot-1.png) + +``` r + +b <- Sys.time() +difftime(b, a) +## Time difference of 3.859545 secs +``` + +Calculate aspect from DEM + +``` r +ASPECT_continuous_CONUS_15s <- terra::terrain(DEM_continuous_CONUS_15s, "aspect") +ASPECT_continuous_CONUS_15s +## class : SpatRaster +## dimensions : 13920, 20640, 1 (nrow, ncol, nlyr) +## resolution : 0.004166667, 0.004166667 (x, y) +## extent : -138, -52, 5, 63 (xmin, xmax, ymin, ymax) +## coord. ref. : lon/lat WGS 84 (EPSG:4326) +## source(s) : memory +## name : aspect +## min value : 0 +## max value : 360 +``` + +``` r +# Record start time +a <- Sys.time() + +ggplot() + + geom_spatraster(data=ASPECT_continuous_CONUS_15s) + + theme_tufte() +``` + +![](stac_mount_save_files/figure-gfm/ASPECT_plot-1.png) + +``` r + +b <- Sys.time() +difftime(b, a) +## Time difference of 3.650267 secs +``` + +Create a cube from those layers! + +``` r +mini_stack <- c(DEM_continuous_CONUS_15s, SLOPE_continuous_CONUS_15s,ASPECT_continuous_CONUS_15s) +mini_stack +## class : SpatRaster +## dimensions : 13920, 20640, 3 (nrow, ncol, nlyr) +## resolution : 0.004166667, 0.004166667 (x, y) +## extent : -138, -52, 5, 63 (xmin, xmax, ymin, ymax) +## coord. ref. : lon/lat WGS 84 (EPSG:4326) +## sources : hyd_na_dem_15s.tif +## memory +## memory +## names : Band_1, slope, aspect +## min values : ? , 0.00000, 0 +## max values : ? , 56.98691, 360 +``` + +Reproject and return the bounding box coordinates for our Area of +Interest + +``` r +# Transform the filtered geometry to EPSG:4326 and store its bounding box +# Record start time +a <- Sys.time() + +DEM_continuous_CONUS_15s |> +stars::st_as_stars() |> + st_transform("EPSG:4326") |> + st_bbox() -> bbox_4326 + + +DEM_continuous_CONUS_15s |> +stars::st_as_stars() |> + st_transform("EPSG:32618") |> + st_bbox() -> bbox_32618 + +b <- Sys.time() +difftime(b, a) +## Time difference of 3.7653 mins +``` + +Get a polygon for Boulder County, reproject, and return bounding box. +This is so I can make a smaller search in the stac catalog. + +``` r +boulder_county <- getbb("boulder, co", format_out="sf_polygon") + +boulder_county$multipolygon |> + st_transform(crs =4326 ) |> + st_bbox() -> bbox_4326_boulder + +boulder_county$multipolygon |> + st_transform(crs =32720 ) |> + st_bbox() -> bbox_32720_boulder +``` + +Get a polygon for the United States and crop it to be the same size as +the DEM above. 
+ +``` r +aoi <- getbb("United States", format_out="sf_polygon") + +conus <- aoi$multipolygon |> + st_crop(bbox_4326) + + +ggplot(data=conus) + + geom_sf() +``` + +![](stac_mount_save_files/figure-gfm/conus_bounding_box-1.png) + +Search the Stac catalog. + +STAC, or SpatioTemporal Asset Catalog, is an open-source specification +designed to standardize the way geospatial data is indexed and +discovered. Developed by Element 84 among others, it facilitates better +interoperability and sharing of geospatial assets by providing a common +language for describing them. STAC’s flexible design allows for easy +cataloging of data, making it simpler for individuals and systems to +search and retrieve geospatial information. By effectively organizing +data about the Earth’s spatial and temporal characteristics, STAC +enables users to harness the full power of the cloud and modern data +processing technologies, optimizing the way we access and analyze +environmental data on a global scale. + +``` r + stac("https://earth-search.aws.element84.com/v1") |> + get_request() +## ###STACCatalog +## - id: earth-search-aws +## - description: A STAC API of public datasets on AWS +## - field(s): stac_version, type, id, title, description, links, conformsTo +``` + +Element 84’s Earth Search is a STAC compliant search and discovery API +that offers users access to a vast collection of geospatial open +datasets hosted on AWS. It serves as a centralized search catalog +providing standardized metadata for these open datasets, designed to be +freely used and integrated into various applications. Alongside the API, +Element 84 also provides a web application named Earth Search Console, +which is map-centric and allows users to explore and visualize the data +contained within the Earth Search API’s catalog. This suite of tools is +part of Element 84’s initiative to make geospatial data more accessible +and actionable for a wide range of users and applications. + +``` r +collection_formats() +## CHIRPS_v2_0_daily_p05_tif | Image collection format for CHIRPS v 2.0 daily +## | global precipitation dataset (0.05 degrees +## | resolution) from GeoTIFFs, expects list of .tif +## | or .tif.gz files as input. [TAGS: CHIRPS, +## | precipitation] +## CHIRPS_v2_0_monthly_p05_tif | Image collection format for CHIRPS v 2.0 monthly +## | global precipitation dataset (0.05 degrees +## | resolution) from GeoTIFFs, expects list of .tif +## | or .tif.gz files as input. 
[TAGS: CHIRPS, +## | precipitation] +## ESA_CCI_SM_ACTIVE | Collection format for ESA CCI soil moisture +## | active product (version 4.7) [TAGS: Soil +## | Moisture, ESA, CCI] +## ESA_CCI_SM_PASSIVE | Collection format for ESA CCI soil moisture +## | passive product (version 4.7) [TAGS: Soil +## | Moisture, ESA, CCI] +## GPM_IMERG_3B_DAY_GIS_V06A | Collection format for daily +## | IMERG_3B_DAY_GIS_V06A data [TAGS: Precipitation, +## | GPM, IMERG] +## L8_L1TP | Collection format for Landsat 8 Level 1 TP +## | product [TAGS: Landsat, USGS, Level 1, NASA] +## L8_SR | Collection format for Landsat 8 surface +## | reflectance product [TAGS: Landsat, USGS, Level +## | 2, NASA, surface reflectance] +## MAXAR | Preliminary collection format for MAXAR open +## | data, visual only (under development) [TAGS: ] +## MxD09GA | Collection format for selected bands from the +## | MODIS MxD09GA (Aqua and Terra) product [TAGS: +## | MODIS, surface reflectance] +## MxD10A2 | Collection format for selected bands from the +## | MODIS MxD10A2 (Aqua and Terra) v006 Snow Cover +## | product [TAGS: MODIS, Snow Cover] +## MxD11A1 | Collection format for selected bands from the +## | MODIS MxD11A2 (Aqua and Terra) v006 Land Surface +## | Temperature product [TAGS: MODIS, LST] +## MxD11A2 | Collection format for selected bands from the +## | MODIS MxD11A2 (Aqua and Terra) v006 Land Surface +## | Temperature product [TAGS: MODIS, LST] +## MxD13A2 | Collection format for selected bands from the +## | MODIS MxD13A2 (Aqua and Terra) product [TAGS: +## | MODIS, VI, NDVI, EVI] +## MxD13A3 | Collection format for selected bands from the +## | MODIS MxD13A3 (Aqua and Terra) product [TAGS: +## | MODIS, VI, NDVI, EVI] +## MxD13Q1 | Collection format for selected bands from the +## | MODIS MxD13Q1 (Aqua and Terra) product [TAGS: +## | MODIS, VI, NDVI, EVI] +## MxD14A2 | Collection format for the MODIS MxD14A2 (Aqua +## | and Terra) product [TAGS: MODIS, Fire] +## PlanetScope_3B_AnalyticMS_SR | Image collection format for PlanetScope 4-band +## | scenes [TAGS: PlanetScope, BOA, Surface +## | Reflectance] +## Sentinel2_L1C | Image collection format for Sentinel 2 Level 1C +## | data as downloaded from the Copernicus Open +## | Access Hub, expects a list of file paths as +## | input. The format works on original ZIP +## | compressed as well as uncompressed imagery. +## | [TAGS: Sentinel, Copernicus, ESA, TOA] +## Sentinel2_L1C_AWS | Image collection format for Sentinel 2 Level 1C +## | data in AWS [TAGS: Sentinel, Copernicus, ESA, +## | TOA] +## Sentinel2_L2A | Image collection format for Sentinel 2 Level 2A +## | data as downloaded from the Copernicus Open +## | Access Hub, expects a list of file paths as +## | input. The format should work on original ZIP +## | compressed as well as uncompressed imagery. +## | [TAGS: Sentinel, Copernicus, ESA, BOA, Surface +## | Reflectance] +## Sentinel2_L2A_THEIA | Image collection format for Sentinel 2 Level 2A +## | data as downloaded from Theia. [TAGS: Sentinel, +## | ESA, Flat Reflectance, Theia] +``` + +Building a stac collection by aiming your camera at the landscape + +Creating a STAC collection is akin to a photographer framing a shot; the +landscape is rich with diverse data, mirroring a scene bustling with +potential subjects, colors, and light. Just as a photographer selects a +portion of the vista to capture, focusing on elements that will compose +a compelling image, a data scientist must similarly navigate the vast +data terrain. 
They must ‘point their camera’ judiciously, ensuring that +the ‘frame’ encapsulates the precise data needed. This careful selection +is crucial, as it determines the relevance and quality of the data +collection, much like the photographer’s choice dictates the story a +photograph will tell. + +![](../assets/esiil_content/stac_mount_save/Ansel_adams_Jackson_hole.png) + +``` r +# Record start time +a <- Sys.time() + +# Initialize STAC connection +s = stac("https://earth-search.aws.element84.com/v0") + + +# Search for Sentinel-2 images within specified bounding box and date range +#22 Million items +items = s |> + stac_search(collections = "sentinel-s2-l2a-cogs", + bbox = c(bbox_4326_boulder["xmin"], + bbox_4326_boulder["ymin"], + bbox_4326_boulder["xmax"], + bbox_4326_boulder["ymax"]), + datetime = "2021-05-15/2021-05-16") |> + post_request() |> + items_fetch(progress = FALSE) + +# Print number of found items +length(items$features) +## [1] 1 + +# Prepare the assets for analysis +library(gdalcubes) +assets = c("B01", "B02", "B03", "B04", "B05", "B06", + "B07", + "B08", "B8A", "B09", "B11", "B12", "SCL") +s2_collection = stac_image_collection(items$features, asset_names = assets, +property_filter = function(x) {x[["eo:cloud_cover"]] < 20}) #all images with less than 20% clouds + +b <- Sys.time() +difftime(b, a) +## Time difference of 0.4706092 secs + +# Display the image collection +s2_collection +## Image collection object, referencing 1 images with 13 bands +## Images: +## name left top bottom right +## 1 S2B_13TDE_20210516_0_L2A -106.1832 40.65079 39.65576 -104.8846 +## datetime srs +## 1 2021-05-16T18:02:54 EPSG:32613 +## +## Bands: +## name offset scale unit nodata image_count +## 1 B01 0 1 1 +## 2 B02 0 1 1 +## 3 B03 0 1 1 +## 4 B04 0 1 1 +## 5 B05 0 1 1 +## 6 B06 0 1 1 +## 7 B07 0 1 1 +## 8 B08 0 1 1 +## 9 B09 0 1 1 +## 10 B11 0 1 1 +## 11 B12 0 1 1 +## 12 B8A 0 1 1 +## 13 SCL 0 1 1 +``` + +Setting up your camera and film + +The camera through which the data scientist frames the shot is +multifaceted, akin to the tools and processes they employ. The camera’s +film, analogous to the data cube, defines the resolution and dimensions +of the captured data, shaping how the final dataset will be utilized. +The lens and its settings—focus, aperture, and exposure—determine the +clarity, depth, and breadth of the captured information, much like the +algorithms and parameters set by the data scientist dictate the +granularity and scope of the data cube. The flash, like data enhancement +techniques, can illuminate hidden details, ensuring that the data cube, +the final product, is as informative and accurate as the landscape it +represents. + +![](../assets/esiil_content/stac_mount_save/View_Ansel-Adams_Camera.png) + +``` r +# Record start time +a <- Sys.time() + +# Define a specific view on the satellite image collection +v = cube_view( + srs = "EPSG:32720", #this is harder than expected. 
+ dx = 100, + dy = 100, + dt = "P1M", + aggregation = "median", + resampling = "near", + extent = list( + t0 = "2021-05-15", + t1 = "2021-05-16", + left = bbox_32720_boulder[1], + right = bbox_32720_boulder[2], + top = bbox_32720_boulder[4], + bottom = bbox_32720_boulder[3] + ) +) + +b <- Sys.time() +difftime(b, a) +## Time difference of 0.002738953 secs + +# Display the defined view +v +## A data cube view object +## +## Dimensions: +## low high count pixel_size +## t 2021-05-01 2021-05-31 1 P1M +## y -3103099.52398788 15434400.4760121 185375 100 +## x -3178878.98542359 15369521.0145764 185484 100 +## +## SRS: "EPSG:32720" +## Temporal aggregation method: "median" +## Spatial resampling method: "near" +``` + +Take a picture! + +Raster style + +``` r +# Record start time +a <- Sys.time() + +s2_collection |> + raster_cube(v) |> + select_bands(c( "B04", "B05")) |> + apply_pixel(c("(B05-B04)/(B05+B04)"), names="NDVI") |> + write_tif() |> + raster::stack() -> x +x +## class : RasterStack +## dimensions : 185375, 185484, 34384096500, 1 (nrow, ncol, ncell, nlayers) +## resolution : 100, 100 (x, y) +## extent : -3178879, 15369521, -3103100, 15434400 (xmin, xmax, ymin, ymax) +## crs : +proj=utm +zone=20 +south +datum=WGS84 +units=m +no_defs +## names : NDVI + +b <- Sys.time() +difftime(b, a) +## Time difference of 4.132932 mins +``` + +STARS style + +``` r +# Record start time +a <- Sys.time() + +s2_collection |> + raster_cube(v) |> + select_bands(c("B04","B05")) |> + apply_pixel(c("(B05-B04)/(B05+B04)"), names="NDVI") |> + stars::st_as_stars() -> y + +b <- Sys.time() +difftime(b, a) +## Time difference of 1.459866 mins + +y +## stars_proxy object with 1 attribute in 1 file(s): +## $NDVI +## [1] "[...]/filec5982c38536c.nc:NDVI" +## +## dimension(s): +## from to offset delta refsys point +## x 1 185484 -3178879 100 WGS 84 / UTM zone 20S NA +## y 1 185375 15434400 -100 WGS 84 / UTM zone 20S NA +## time 1 1 NA NA POSIXct FALSE +## values x/y +## x NULL [x] +## y NULL [y] +## time [2021-05-01,2021-06-01) +``` + +Extract data + +``` r +# Record start time +a <- Sys.time() + + +x <- s2_collection |> + raster_cube(v) |> + select_bands(c("B01", "B02", "B03", "B04", + "B05", "B06", "B07", "B08", + "B8A", "B09", "B11", "B12")) |> + extract_geom(boulder_county$multipolygon) |> + rename( + "time" = "time", + "443" = "B01", + "490" = "B02", + "560" = "B03", + "665" = "B04", + "705" = "B05", + "740" = "B06", + "783" = "B07", + "842" = "B08", + "865" = "B8A", + "940" = "B09", + "1610" = "B11", + "2190" = "B12" + ) + +b <- Sys.time() +difftime(b, a) +## Time difference of 1.699016 mins + +head(x) +## FID time 443 490 560 665 705 740 783 842 865 940 1610 +## 1 1 2021-05-01 11096 10929 10224 9893 9956 9706 9715 9641 9511 8459 5682 +## 2 1 2021-05-01 11631 11282 10550 10234 10288 10031 10032 9988 9828 9153 5802 +## 3 1 2021-05-01 11900 11393 10666 10337 10398 10142 10138 10093 9927 9461 5754 +## 4 1 2021-05-01 11406 10597 9928 9626 9694 9481 9516 9338 9336 8959 5726 +## 5 1 2021-05-01 11399 10939 10237 9905 9978 9738 9746 9633 9555 8925 5831 +## 6 1 2021-05-01 11600 11174 10462 10147 10209 9952 9960 9890 9760 9153 5773 +## 2190 +## 1 3917 +## 2 3981 +## 3 3937 +## 4 4054 +## 5 4097 +## 6 3990 +``` + +Make a timeseries + +``` r + +# Record start time +a <- Sys.time() + +items <- s |> + stac_search(collections = "sentinel-s2-l2a-cogs", + bbox = c(-105.694362, 39.912886, -105.052774, 40.262785), + datetime = "2020-01-01/2022-12-31", + limit = 500) %>% + post_request() + +S2.mask = image_mask("SCL", values=c(3,8,9)) + 
+col = stac_image_collection(items$features, asset_names = assets, + property_filter = function(x) {x[["eo:cloud_cover"]] < 30}) + +v = cube_view(srs = "EPSG:4326", extent = list(t0 = "2020-01-01", t1 = "2022-12-31", + left = -105.694362, right = -105.052774, top = 40.262785, bottom = 39.912886), + dx = 0.001, dy = 0.001, dt = "P1M", aggregation = "median", resampling = "bilinear") + +library(colorspace) +ndvi.col = function(n) { + rev(sequential_hcl(n, "Green-Yellow")) +} +library(gdalcubes) +raster_cube(col, v, mask = S2.mask) |> + select_bands(c("B04", "B08")) |> + apply_pixel("(B08-B04)/(B08+B04)", "NDVI") |> + gdalcubes::animate(col = ndvi.col, zlim=c(-0.2,1), key.pos = 1, save_as = "anim.gif", fps = 4) +## [1] "/Users/ty/Documents/Github/hackathon2023_datacube/docs/code_for_building_cube/anim.gif" + +b <- Sys.time() +difftime(b, a) +## Time difference of 4.716672 mins + +y +## stars_proxy object with 1 attribute in 1 file(s): +## $NDVI +## [1] "[...]/filec5982c38536c.nc:NDVI" +## +## dimension(s): +## from to offset delta refsys point +## x 1 185484 -3178879 100 WGS 84 / UTM zone 20S NA +## y 1 185375 15434400 -100 WGS 84 / UTM zone 20S NA +## time 1 1 NA NA POSIXct FALSE +## values x/y +## x NULL [x] +## y NULL [y] +## time [2021-05-01,2021-06-01) +``` + +![](../assets/esiil_content/stac_mount_save/anim.gif) + +Saving Data Cubes to Local Storage There are occasions when we need to +manipulate data cubes using other software. For such purposes, we can +save data cubes to our local disk as individual netCDF files or as a +series of GeoTIFF files. In the case of the latter, each temporal +segment of the cube is saved as a separate (multiband) GeoTIFF file. + +Both netCDF and GeoTIFF formats allow for file size reduction through +compression and data packing. This process involves transforming double +precision numbers into smaller integer values using a scale and offset, +which can be particularly useful for managing disk space (for more +details, refer to the ?write_ncdf and ?write_tif documentation). + +``` r +gdalcubes_options(ncdf_compression_level = 1) +write_ncdf(cube, file.path("~/Desktop", basename(tempfile(fileext = ".nc")))) +gdalcubes_options(ncdf_compression_level = 0) +``` + +write_tif() and write_ncdf() both return the path(s) to created file(s) +as a character vector. 
+ +``` r +items_2020 <- s |> + stac_search(collections = "sentinel-s2-l2a-cogs", + bbox = c(-105.694362, 39.912886, -105.052774, 40.262785), + datetime = "2020-05-01/2020-06-30") |> + post_request() + +items_2021 <- s |> + stac_search(collections = "sentinel-s2-l2a-cogs", + bbox = c(-105.694362, 39.912886, -105.052774, 40.262785), + datetime = "2021-05-01/2021-06-30") |> + post_request() + + +col_2020 = stac_image_collection(items_2020$features, asset_names = assets) +col_2021 = stac_image_collection(items_2021$features, asset_names = assets) + +v_2020 = cube_view(srs = "EPSG:32720", extent = list(t0 = "2020-05-01", t1 = "2020-06-30", + left = bbox_32720_boulder["xmin"], right = bbox_32720_boulder["xmax"], top = bbox_32720_boulder["ymax"], bottom = bbox_32720_boulder["ymin"]), + dx = 100, dy = 100, dt = "P1D", aggregation = "median", resampling = "bilinear") + +v_2021 = cube_view(v_2020, extent = list(t0 = "2021-05-01", t1 = "2021-06-30")) + + +max_ndvi_mosaic <- function(col, v) { + raster_cube(col, v) |> + select_bands(c("B04", "B08")) |> + apply_pixel(c("(B08-B04)/(B08+B04)"), names="NDVI") |> + reduce_time("max(NDVI)") +} + +suppressPackageStartupMessages(library(stars)) +max_ndvi_mosaic(col_2020, v_2020) -> maxndvi_2020 + +max_ndvi_mosaic(col_2021, v_2021) -> maxndvi_2021 + +maxndvi_2021 +maxndvi_2020 + +difference = maxndvi_2021 - maxndvi_2020 +difference[difference > -0.15] = NA +names(difference) <- "Difference of max NDVI (2020 - 2019)" +``` + +``` r +flood_polygon_data3 <- glue("/vsizip/vsicurl/https://data.hydrosheds.org/file/hydrosheds-associated/gloric/GloRiC_v10_shapefile.zip/GloRiC_v10_shapefile/GloRiC_v10.shp") |> + st_read() |> + st_as_sf(coords = c("lon","lat")) + +flood_polygon_data3 +``` + +``` r +#st_read("/Users/ty/Downloads/GloRiC_v10_geodatabase/GloRiC_v10.gdb") + +flood_polygon_data3 <- glue("/vsizip/vsicurl/https://data.hydrosheds.org/file/hydrosheds-associated/gloric/GloRiC_v10_geodatabase.zip/GloRiC_v10_geodatabase/GloRiC_v10.gdb") |> + st_read() |> + st_as_sf(coords = c("lon","lat")) + +flood_polygon_data3 +``` diff --git a/data-library/stac_simple/index.html b/data-library/stac_simple/index.html new file mode 100644 index 0000000..71f0608 --- /dev/null +++ b/data-library/stac_simple/index.html @@ -0,0 +1,1576 @@ + + + + + + + + + + + + + + + + + + + + + + Stream data (light) - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    + + + + + + +
    + + + + + + + +
    + +
    + + + + +
    +
    + + + +
    +
    +
    + + + + + + + + +
    +
    +
    + + + + +
    +
    + + + + + + + + + +

    Accessing data via STAC

    +

    ESIIL, 2024 +Ty Tuff & Tyler McIntosh

    +

The SpatioTemporal Asset Catalog (STAC) is an open-source specification designed to standardize the way geospatial data is indexed and discovered. Developed by Element 84 among others, it facilitates better interoperability and sharing of geospatial assets by providing a common language for describing them. STAC’s flexible design allows for easy cataloging of data, making it simpler for individuals and systems to search and retrieve geospatial information. By effectively organizing data about the Earth’s spatial and temporal characteristics, STAC enables users to harness the full power of the cloud and modern data processing technologies, optimizing the way we access and analyze environmental data on a global scale.

    +

    Element 84’s Earth Search is a STAC compliant search and discovery API that offers users access to a vast collection of geospatial open datasets hosted on AWS. It serves as a centralized search catalog providing standardized metadata for these open datasets, designed to be freely used and integrated into various applications. Alongside the API, Element 84 also provides a web application named Earth Search Console, which is map-centric and allows users to explore and visualize the data contained within the Earth Search API’s catalog. This suite of tools is part of Element 84’s initiative to make geospatial data more accessible and actionable for a wide range of users and applications.

    +

    First, we need an area of interest

    +
    require(glue)
    +require(sf)
    +require(gdalcubes)
    +require(rstac)
    +
    +
+#Access ecoregions via VSI
    +epa_l3 <- glue::glue(
    +  "/vsizip/vsicurl/", #magic remote connection
    +  "https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip", #copied link to download location
    +  "/us_eco_l3.shp") |> #path inside zip file
    +  sf::st_read()
    +
    +#Get just S.Rockies and ensure that it is in EPSG:4326
    +southernRockies <- epa_l3 |>
    +  dplyr::filter(US_L3NAME == "Southern Rockies") |>
    +  dplyr::group_by(US_L3NAME) |>
    +  dplyr::summarize(geometry = sf::st_union(geometry)) |>
    +  sf::st_transform("EPSG:4326")
    +
    +bboxSR4326 <- sf::st_bbox(southernRockies)
    +
    +

The STAC search itself uses the EPSG:4326 bounding box, but to define the data cube extent later we will also need our area of interest in a projected CRS. +

    southernRockies <- southernRockies |> sf::st_transform("EPSG:32613")
    +
    +bboxSRproj <- sf::st_bbox(southernRockies)
    +

    +

    Search the STAC catalog

    +

    To get information about a STAC archive, you can use rstac::get_request(). You can also use gdalcubes::collection_formats() to see various collection formats that you may encounter.

    +

    To search a STAC catalog online, stacindex.org is a useful tool. For example, here is the page for the Earth Search catalog by Element84 that we will use.

    +
    stac("https://earth-search.aws.element84.com/v1") |>
    +  get_request()
    +## ###STACCatalog
    +## - id: earth-search-aws
    +## - description: A STAC API of public datasets on AWS
    +## - field(s): stac_version, type, id, title, description, links, conformsTo
    +
    +collection_formats()
    +
    +

    Initialize a STAC connection (rstac::stac()) and search for data that you are interested in (rstac::stac_search()). Note that you will request a spatial area of interest as well as a temporal window of interest. To get more information on the data and how it is structured, you can examine the 'items' object we create.

    +
    # Record start time
    +a <- Sys.time()
    +
    +# Initialize STAC connection
    +s = rstac::stac("https://earth-search.aws.element84.com/v0")
    +
    +
    +# Search for Sentinel-2 images within specified bounding box and date range
    +#22 Million items
    +items = s |>
    +  rstac::stac_search(collections = "sentinel-s2-l2a-cogs",
    +              bbox = c(bboxSR4326["xmin"], 
    +                       bboxSR4326["ymin"],
    +                       bboxSR4326["xmax"], 
    +                       bboxSR4326["ymax"]), 
    +              datetime = "2021-05-15/2021-05-16") |>
    +  post_request() |>
    +  items_fetch(progress = FALSE)
    +
    +# Print number of found items
    +length(items$features)
    +
    +items
    +
    +

    There is data we want! Now, we need to prepare the assets for us to access. We will list the assets we want, and set any property filters that we would like to apply.

    +
    # Prepare the assets for analysis
    +library(gdalcubes)
    +assets = c("B01", "B02", "B03", "B04", "B05", "B06", 
    +           "B07", 
    +           "B08", "B8A", "B09", "B11", "B12", "SCL")
    +s2_collection = gdalcubes::stac_image_collection(items$features, asset_names = assets,
    +                                      property_filter = function(x) {x[["eo:cloud_cover"]] < 20}) #all images with less than 20% clouds
    +
    +b <- Sys.time()
    +difftime(b, a)
    +
    +# Display the image collection
    +s2_collection
    +
    +

    Access the data

    +

    First, we need to set up our view on the collection. We will set our spatial and temporal resolution, as well as how we want the data temporally aggregated and spatially resampled. We then also set our spatial and temporal window. Note that the spatial extent here should be in a projected CRS!

    +
    # Record start time
    +a <- Sys.time()
    +
    +# Define a specific view on the satellite image collection
    +v = gdalcubes::cube_view(
    +  srs = "EPSG:32613",
    +  dx = 100, 
    +  dy = 100, 
    +  dt = "P1M", 
    +  aggregation = "median", 
    +  resampling = "near",
    +  extent = list(
    +    t0 = "2021-05-15", 
    +    t1 = "2021-05-16",
    +    left = bboxSRproj[1], 
    +    right = bboxSRproj[2],
    +    top = bboxSRproj[4], 
    +    bottom = bboxSRproj[3]
    +  )
    +)
    +
    +b <- Sys.time()
    +difftime(b, a)
    +
    +# Display the defined view
    +v
    +
    +

    Finally, let's take our snapshot of the data! Let's also calculate NDVI and then view the data.

    +

# Record start time
+a <- Sys.time()
+
+s2_collection |>
+  raster_cube(v) |>
+  select_bands(c("B04", "B05")) |>
+  apply_pixel(c("(B05-B04)/(B05+B04)"), names="NDVI") |>
+  write_tif() |>
+  raster::stack() -> x
+
+# View the product
+x
+
+b <- Sys.time()
+difftime(b, a)
+
+# Let's view the data
+mapview::mapview(x, layer.name = "NDVI") + mapview::mapview(southernRockies)

    + +
    +
    + + + Last update: + 2024-09-23 + + +
    + + + + + + +
    +
    + + +
    + + + +
    + + + +
    +
    +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/data-library/stac_simple/stac_simple.md b/data-library/stac_simple/stac_simple.md new file mode 100644 index 0000000..59a11f8 --- /dev/null +++ b/data-library/stac_simple/stac_simple.md @@ -0,0 +1,159 @@ +# Accessing data via STAC + +ESIIL, 2024 +Ty Tuff & Tyler McIntosh + +SpatioTemporal Asset Catalog, is an open-source specification designed to standardize the way geospatial data is indexed and discovered. Developed by Element 84 among others, it facilitates better interoperability and sharing of geospatial assets by providing a common language for describing them. STAC’s flexible design allows for easy cataloging of data, making it simpler for individuals and systems to search and retrieve geospatial information. By effectively organizing data about the Earth’s spatial and temporal characteristics, STAC enables users to harness the full power of the cloud and modern data processing technologies, optimizing the way we access and analyze environmental data on a global scale. + +Element 84’s Earth Search is a STAC compliant search and discovery API that offers users access to a vast collection of geospatial open datasets hosted on AWS. It serves as a centralized search catalog providing standardized metadata for these open datasets, designed to be freely used and integrated into various applications. Alongside the API, Element 84 also provides a web application named Earth Search Console, which is map-centric and allows users to explore and visualize the data contained within the Earth Search API’s catalog. This suite of tools is part of Element 84’s initiative to make geospatial data more accessible and actionable for a wide range of users and applications. + +## First, we need an area of interest + +``` +require(glue) +require(sf) +require(gdalcubes) +require(rstac) + + +#Access ecoregiosn via VSI +epa_l3 <- glue::glue( + "/vsizip/vsicurl/", #magic remote connection + "https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip", #copied link to download location + "/us_eco_l3.shp") |> #path inside zip file + sf::st_read() + +#Get just S.Rockies and ensure that it is in EPSG:4326 +southernRockies <- epa_l3 |> + dplyr::filter(US_L3NAME == "Southern Rockies") |> + dplyr::group_by(US_L3NAME) |> + dplyr::summarize(geometry = sf::st_union(geometry)) |> + sf::st_transform("EPSG:4326") + +bboxSR4326 <- sf::st_bbox(southernRockies) +``` + +To access data from STAC correctly, we need to request the data in a projected CRS. +``` +southernRockies <- southernRockies |> sf::st_transform("EPSG:32613") + +bboxSRproj <- sf::st_bbox(southernRockies) +``` + +## Search the STAC catalog + +To get information about a STAC archive, you can use rstac::get_request(). You can also use gdalcubes::collection_formats() to see various collection formats that you may encounter. + +To search a STAC catalog online, [stacindex.org](stacindex.org) is a useful tool. For example, [here is the page](https://stacindex.org/catalogs/earth-search#/) for the Earth Search catalog by Element84 that we will use. + +``` +stac("https://earth-search.aws.element84.com/v1") |> + get_request() +## ###STACCatalog +## - id: earth-search-aws +## - description: A STAC API of public datasets on AWS +## - field(s): stac_version, type, id, title, description, links, conformsTo + +collection_formats() +``` + +Initialize a STAC connection (rstac::stac()) and search for data that you are interested in (rstac::stac_search()). 
Note that you will request a spatial area of interest as well as a temporal window of interest. To get more information on the data and how it is structured, you can examine the 'items' object we create. + +``` +# Record start time +a <- Sys.time() + +# Initialize STAC connection +s = rstac::stac("https://earth-search.aws.element84.com/v0") + + +# Search for Sentinel-2 images within specified bounding box and date range +#22 Million items +items = s |> + rstac::stac_search(collections = "sentinel-s2-l2a-cogs", + bbox = c(bboxSR4326["xmin"], + bboxSR4326["ymin"], + bboxSR4326["xmax"], + bboxSR4326["ymax"]), + datetime = "2021-05-15/2021-05-16") |> + post_request() |> + items_fetch(progress = FALSE) + +# Print number of found items +length(items$features) + +items +``` + +There is data we want! Now, we need to prepare the assets for us to access. We will list the assets we want, and set any property filters that we would like to apply. + +``` +# Prepare the assets for analysis +library(gdalcubes) +assets = c("B01", "B02", "B03", "B04", "B05", "B06", + "B07", + "B08", "B8A", "B09", "B11", "B12", "SCL") +s2_collection = gdalcubes::stac_image_collection(items$features, asset_names = assets, + property_filter = function(x) {x[["eo:cloud_cover"]] < 20}) #all images with less than 20% clouds + +b <- Sys.time() +difftime(b, a) + +# Display the image collection +s2_collection + +``` +## Access the data + +First, we need to set up our view on the collection. We will set our spatial and temporal resolution, as well as how we want the data temporally aggregated and spatially resampled. We then also set our spatial and temporal window. Note that the spatial extent here should be in a projected CRS! + +``` +# Record start time +a <- Sys.time() + +# Define a specific view on the satellite image collection +v = gdalcubes::cube_view( + srs = "EPSG:32613", + dx = 100, + dy = 100, + dt = "P1M", + aggregation = "median", + resampling = "near", + extent = list( + t0 = "2021-05-15", + t1 = "2021-05-16", + left = bboxSRproj[1], + right = bboxSRproj[2], + top = bboxSRproj[4], + bottom = bboxSRproj[3] + ) +) + +b <- Sys.time() +difftime(b, a) + +# Display the defined view +v +``` + +Finally, let's take our snapshot of the data! Let's also calculate NDVI and then view the data. + +``` +# Record start time +a <- Sys.time() + +s2_collection |> + raster_cube(v) |> + select_bands(c( "B04", "B05")) |> + apply_pixel(c("(B05-B04)/(B05+B04)"), names="NDVI") |> + write_tif() |> + raster::stack() -> x + +#View the product +x + +b <- Sys.time() +difftime(b, a) + +#Let's view the dat +mapview::mapview(x, layer.name = "NDVI") + mapview::mapview(southernRockies) diff --git a/data-library/treemap/index.html b/data-library/treemap/index.html new file mode 100644 index 0000000..b3d0133 --- /dev/null +++ b/data-library/treemap/index.html @@ -0,0 +1,1430 @@ + + + + + + + + + + + + + + + + + + + + + + TreeMap - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    + + + + + + +
    + + + + + + + +
    + +
    + + + + +
    +
    + + + +
    +
    +
    + + + + + + + + +
    +
    +
    + + + + +
    +
    + + + + + + + + + +

    TreeMap

    +

    TreeMap 2016 is a USFS tree-level model of the forests of the conterminous United States created by using machine learning algorithms to match forest plot data from Forest Inventory and Analysis (FIA) to a 30x30 meter (m) grid.

    +

    The main output of this project is a raster map of imputed plot identifiers at 30×30 m spatial resolution for the conterminous U.S. for landscape conditions circa 2016. The plot identifiers can be associated with data from FIA plots held in the associated csv and SQL files.

    +

    An overview of the data product can be found here.

    +

    The TreeMap data dictionary PDF can be found here.

    +

A portion of the TreeMap dataset covering the Southern Rockies has been prepared and placed in the CyVerse data store at the directory below. The associated CSV and SQL DB files are in the same location. A script showing how to open it, as well as how the raster was originally accessed, is available in the code repository and copied below.

    +
    ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap
    +
    +
    # This script demonstrates how to open and access pre-downloaded TreeMap data from the data store
    +# It also, at the bottom, shows how the data was accessed via VSI.
    +# A similar approach could be used to access the SnagHazard data in the zip file via VSI if desired. (Path inside zip: Data/SnagHazard2016.tif) 
    +
    +# ESIIL, 2024
    +# Tyler L. McIntosh
    +
    +
    +require(terra)
    +
    +#Move data from data store to instance
    +system("cp -r ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap ~/TreeMap
    +")
    +
    +#Open the raster
    +treemap <- terra::rast("~/TreeMap/treemap2016_southernrockies.tif")
    +terra::plot(treemap)
    +
    +#Open the csv
    +treemapCsv <- readr::read_csv("~/TreeMap/TreeMap2016_tree_table.csv")
    +
    +head(treemapCsv)
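+
+# Sketch of how the raster and the tree table can be linked: the raster values are imputed
+# plot identifiers. The key column name used here ("tm_id") is an assumption -- check the
+# TreeMap data dictionary for the actual field name.
+plot_ids <- terra::unique(treemap)[[1]]   # unique imputed plot IDs in the Southern Rockies raster
+trees_in_aoi <- dplyr::filter(treemapCsv, tm_id %in% plot_ids)
+head(trees_in_aoi)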
    +
    +
    +
    +
    +
    +#######################################################
    +# DATA ACCESS SCRIPT
    +#######################################################
    +
    +# Access treemap data, crop to southern rockies, and save to data store
    +
    +require(glue)
    +require(terra)
    +require(sf)
    +
    +#Access EPA L3 data for cropping
    +epa_l3 <- glue::glue(
    +  "/vsizip/vsicurl/", #magic remote connection
    +  "https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip", #copied link to download location
    +  "/us_eco_l3.shp") |> #path inside zip file
    +  sf::st_read()
    +
    +#get just S.Rockies
    +southernRockies <- epa_l3 |>
    +  dplyr::filter(US_L3NAME == "Southern Rockies") |>
    +  dplyr::group_by(US_L3NAME) |>
    +  dplyr::summarize(geometry = sf::st_union(geometry))
    +
    +#Access treemap data
    +treemap <- glue::glue(
    +  "/vsizip/vsicurl/", #magic remote connection 
    +  "https://s3-us-west-2.amazonaws.com/fs.usda.rds/RDS-2021-0074/RDS-2021-0074_Data.zip", #copied link to download location
    +  "/Data/TreeMap2016.tif") |> #path inside zip file
    +  terra::rast() 
    +
    +#Crop to s.rockies
    +treemapSR <- treemap |> terra::crop(southernRockies, mask = FALSE)
    +
    +#check data
    +terra::plot(treemapSR)
    +
    +#Write to instance
    +terra::writeRaster(treemapSR,
    +                   filename = '~/treemap2016_southernrockies.tif',
    +                   overwrite = TRUE,
    +                   gdal=c("COMPRESS=DEFLATE"))
    +
    +#Move data to data store
    +system("cp ~/treemap2016_southernrockies.tif ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap/treemap2016_southernrockies_again.tif
    +")
    +
    + +
    +
    + + + Last update: + 2024-09-23 + + +
    + + + + + + +
    +
    + + +
    + + + +
    + + + +
    +
    +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/data-library/treemap/treemap.md b/data-library/treemap/treemap.md new file mode 100644 index 0000000..441b026 --- /dev/null +++ b/data-library/treemap/treemap.md @@ -0,0 +1,91 @@ +# TreeMap + +TreeMap 2016 is a USFS tree-level model of the forests of the conterminous United States created by using machine learning algorithms to match forest plot data from Forest Inventory and Analysis (FIA) to a 30x30 meter (m) grid. + +The main output of this project is a raster map of imputed plot identifiers at 30×30 m spatial resolution for the conterminous U.S. for landscape conditions circa 2016. The plot identifiers can be associated with data from FIA plots held in the associated csv and SQL files. + +[An overview of the data product can be found here.](https://www.fs.usda.gov/rds/archive/Catalog/RDS-2021-0074) + +[The TreeMap data dictionary PDF can be found here.](https://github.com/CU-ESIIL/forest-carbon-codefest/blob/main/docs/assets/TreeMap2016_Data_Dictionary.pdf) + +A portion of the TreeMap dataset covering the Southern Rockies has been prepared and placed in the CyVerse data store at the below directroy. The associated CSV and SQL DB files are in the same location. A script showing how to access it, as well as how the raster was accessed, is available in the code repository, as well as copied below. + +``` +~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap +``` + +``` r +# This script demonstrates how to open and access pre-downloaded TreeMap data from the data store +# It also, at the bottom, shows how the data was accessed via VSI. +# A similar approach could be used to access the SnagHazard data in the zip file via VSI if desired. (Path inside zip: Data/SnagHazard2016.tif) + +# ESIIL, 2024 +# Tyler L. 
McIntosh + + +require(terra) + +#Move data from data store to instance +system("cp -r ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap ~/TreeMap +") + +#Open the raster +treemap <- terra::rast("~/TreeMap/treemap2016_southernrockies.tif") +terra::plot(treemap) + +#Open the csv +treemapCsv <- readr::read_csv("~/TreeMap/TreeMap2016_tree_table.csv") + +head(treemapCsv) + + + + + +####################################################### +# DATA ACCESS SCRIPT +####################################################### + +# Access treemap data, crop to southern rockies, and save to data store + +require(glue) +require(terra) +require(sf) + +#Access EPA L3 data for cropping +epa_l3 <- glue::glue( + "/vsizip/vsicurl/", #magic remote connection + "https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip", #copied link to download location + "/us_eco_l3.shp") |> #path inside zip file + sf::st_read() + +#get just S.Rockies +southernRockies <- epa_l3 |> + dplyr::filter(US_L3NAME == "Southern Rockies") |> + dplyr::group_by(US_L3NAME) |> + dplyr::summarize(geometry = sf::st_union(geometry)) + +#Access treemap data +treemap <- glue::glue( + "/vsizip/vsicurl/", #magic remote connection + "https://s3-us-west-2.amazonaws.com/fs.usda.rds/RDS-2021-0074/RDS-2021-0074_Data.zip", #copied link to download location + "/Data/TreeMap2016.tif") |> #path inside zip file + terra::rast() + +#Crop to s.rockies +treemapSR <- treemap |> terra::crop(southernRockies, mask = FALSE) + +#check data +terra::plot(treemapSR) + +#Write to instance +terra::writeRaster(treemapSR, + filename = '~/treemap2016_southernrockies.tif', + overwrite = TRUE, + gdal=c("COMPRESS=DEFLATE")) + +#Move data to data store +system("cp ~/treemap2016_southernrockies.tif ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap/treemap2016_southernrockies_again.tif +") + +``` \ No newline at end of file diff --git a/docs.Rproj b/docs.Rproj new file mode 100644 index 0000000..8e3c2eb --- /dev/null +++ b/docs.Rproj @@ -0,0 +1,13 @@ +Version: 1.0 + +RestoreWorkspace: Default +SaveWorkspace: Default +AlwaysSaveHistory: Default + +EnableCodeIndexing: Yes +UseSpacesForTab: Yes +NumSpacesForTab: 2 +Encoding: UTF-8 + +RnwWeave: Sweave +LaTeX: pdfLaTeX diff --git a/gantt_chart.png b/gantt_chart.png new file mode 100644 index 0000000..dbaa1dc Binary files /dev/null and b/gantt_chart.png differ diff --git a/index.html b/index.html new file mode 100644 index 0000000..7859b99 --- /dev/null +++ b/index.html @@ -0,0 +1,1543 @@ + + + + + + + + + + + + + + + + + + + + + + ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    + + + + + + +
    + + + + + + + +
    + +
    + + + + +
    +
    + + + +
    +
    +
    + + + + + + + + +
    +
    +
    + + + + +
    +
    + + + + + + + + + +

    DOI

    +

    Pollinating Resilient Interactions (PRI)

    +

    Resilience of pollinator interactions in the face of climate change

    +

    Welcome to the Pollinating Resilient Interactions repository, an integral part of ESIIL and Earth Lab's Forest Carbon Codefest. This repository is the central hub for our team, encompassing our project overview, team member information, codebase, and more...

    +

    Google Drive Folder: https://drive.google.com/drive/folders/1LH9YAs8eZfam9B4oba-MAwjgD-MI3mQO

    +

    CyVerse share drive location: +/iplant/home/shared/iplant_esiil_summit/2024/group_04b

    +

    Our Project

    +

    Species interactions under climate change

    +

How will shifting climate conditions alter species interactions, and what implications will these changes have for ecosystem stability?

    +

    Documentation

    + +

    Group Members

    +
      +
    • Member 1: Brooke Lamonte Long-Fox
    • +
    • Member 2: Colleen Miller
    • +
    • Member 3: Elsa Culler
    • +
    • Member 4: Pablo Moreno
    • +
    • Member 5: Miguel C Leon
    • +
• Member 6: Yahn-Jauh Su
    • +
    • Member 7: Yu Liu
    • +
    • [Link to more detailed bios or profiles if available and desired.]
    • +
    +

    Code Repository Structure

    +
      +
    • Data Processing: Scripts for cleaning, merging, and managing datasets.
    • +
    • Analysis Code: Scripts for data analysis, statistical modeling, etc.
    • +
    • Visualization: Code for creating figures, charts, and interactive visualizations.
    • +
    +

    Meeting Notes and Agendas

    +
      +
    • Regular updates to keep all group members informed and engaged with the project's progress and direction.
    • +
    +

    Contributing to This Repository

    +
      +
    • Contributions from all group members are welcome.
    • +
    • Please adhere to these guidelines:
    • +
    • Ensure commits have clear and concise messages.
    • +
    • Document major changes in the meeting notes.
    • +
    • Review and merge changes through pull requests for oversight.
    • +
    +

    Getting Help

    +
      +
    • If you encounter any issues or have questions, please refer to the ESIIL Support Page or contact the repository maintainers directly.
    • +
    +

    Customize Your Repository

    +
      +
    • Edit This Readme: Update with information specific to your project.
    • +
    • Update Group Member Bios: Add detailed information about each group member's expertise and role.
    • +
    • Organize Your Code: Use logical structure and clear naming conventions.
    • +
    • Document Your Data: Include a data directory with README files for datasets.
    • +
    • Outline Your Methods: Create a METHODS.md file for methodologies and tools.
    • +
    • Set Up Project Management: Use 'Issues' and 'Projects' for task tracking.
    • +
    • Add a License: Include an appropriate open-source license.
    • +
    • Create Contribution Guidelines: Establish a CONTRIBUTING.md file.
    • +
    • Review and Merge Workflow: Document your process for reviewing and merging changes.
    • +
    • Establish Communication Channels: Set up channels like Slack or Discord for discussions.
    • +
    +

    Remember, the goal is to make your repository clear, accessible, and useful for all current and future researchers. Happy researching!

    + +
    +
    + + + Last update: + 2024-09-23 + + +
    + + + + + + +
    +
    + + +
    + + + +
    + + + +
    +
    +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/index.md b/index.md new file mode 100644 index 0000000..34b4061 --- /dev/null +++ b/index.md @@ -0,0 +1,62 @@ +[![DOI](https://zenodo.org/badge/771059390.svg)](https://zenodo.org/doi/10.5281/zenodo.11166866) +# Pollinating Resilient Interactions (PRI) + +## Resilience of pollinator interactions in the face of climate change + + +Welcome to the **Pollinating Resilient Interactions** repository, an integral part of ESIIL and Earth Lab's Forest Carbon Codefest. This repository is the central hub for our team, encompassing our project overview, team member information, codebase, and more... + +Google Drive Folder: https://drive.google.com/drive/folders/1LH9YAs8eZfam9B4oba-MAwjgD-MI3mQO + +CyVerse share drive location: +/iplant/home/shared/iplant_esiil_summit/2024/group_04b + +## Our Project +### Species interactions under climate change +How will shifting climate conditions alter species interactions, and what impications will these changes have for ecosystem stability? + +## Documentation +- Access detailed documentation on our [GitHub Pages site](https://your-gh-pages-url/). + + +## Group Members +- Member 1: Brooke Lamonte Long-Fox +- Member 2: Colleen Miller +- Member 3: Elsa Culler +- Member 4: Pablo Moreno +- Member 5: Miguel C Leon +- Member 6 Yahn-Jauh Su +- Member 7: Yu Liu +- [Link to more detailed bios or profiles if available and desired.] + +## Code Repository Structure +- **Data Processing**: Scripts for cleaning, merging, and managing datasets. +- **Analysis Code**: Scripts for data analysis, statistical modeling, etc. +- **Visualization**: Code for creating figures, charts, and interactive visualizations. + +## Meeting Notes and Agendas +- Regular updates to keep all group members informed and engaged with the project's progress and direction. + +## Contributing to This Repository +- Contributions from all group members are welcome. +- Please adhere to these guidelines: + - Ensure commits have clear and concise messages. + - Document major changes in the meeting notes. + - Review and merge changes through pull requests for oversight. + +## Getting Help +- If you encounter any issues or have questions, please refer to the [ESIIL Support Page](https://esiil-support-page-url/) or contact the repository maintainers directly. + +## Customize Your Repository +- **Edit This Readme**: Update with information specific to your project. +- **Update Group Member Bios**: Add detailed information about each group member's expertise and role. +- **Organize Your Code**: Use logical structure and clear naming conventions. +- **Document Your Data**: Include a data directory with README files for datasets. +- **Outline Your Methods**: Create a METHODS.md file for methodologies and tools. +- **Set Up Project Management**: Use 'Issues' and 'Projects' for task tracking. +- **Add a License**: Include an appropriate open-source license. +- **Create Contribution Guidelines**: Establish a CONTRIBUTING.md file. +- **Review and Merge Workflow**: Document your process for reviewing and merging changes. +- **Establish Communication Channels**: Set up channels like Slack or Discord for discussions. + +Remember, the goal is to make your repository clear, accessible, and useful for all current and future researchers. Happy researching! 
diff --git a/project-documentation/methods/index.html b/project-documentation/methods/index.html new file mode 100644 index 0000000..1f7d962 --- /dev/null +++ b/project-documentation/methods/index.html @@ -0,0 +1,1362 @@ + + + + + + + + + + + + + + + + + + + + + + Project methods overview - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    + + + + + + +
    + + + + + + + +
    + +
    + + + + +
    +
    + + + +
    +
    +
    + + + + + + + + +
    +
    +
    + + + + +
    +
    + + + + + + + + + +

    Project methods overview

    +

    Data Sources

    +

    List and describe data sources used, including links to cloud-optimized sources. Highlight permissions and compliance with data ownership guidelines.

    +

    Data Processing Steps

    +

    Describe data processing steps taken, the order of scripts, etc.

    +

    Data Analysis

    +

    Describe steps taken to analyze data and resulting files in team data store file structure.

    +

    Visualizations

    +

    Describe visualizations created and any specialized techniques or libraries that users should be aware of.

    +

    Conclusions

    +

    Summary of the full workflow and its outcomes. Reflect on the methods used.

    +

    References

    +

    Citations of tools, data sources, and other references used.

    + +
    +
    + + + Last update: + 2024-09-23 + + +
    + + + + + + +
    +
    + + +
    + + + +
    + + + +
    +
    +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/project-documentation/methods/methods.md b/project-documentation/methods/methods.md new file mode 100644 index 0000000..f52dc79 --- /dev/null +++ b/project-documentation/methods/methods.md @@ -0,0 +1,19 @@ +# Project methods overview + +## Data Sources +List and describe data sources used, including links to cloud-optimized sources. Highlight permissions and compliance with data ownership guidelines. + +## Data Processing Steps +Describe data processing steps taken, the order of scripts, etc. + +## Data Analysis +Describe steps taken to analyze data and resulting files in team data store file structure. + +## Visualizations +Describe visualizations created and any specialized techniques or libraries that users should be aware of. + +## Conclusions +Summary of the full workflow and its outcomes. Reflect on the methods used. + +## References +Citations of tools, data sources, and other references used. \ No newline at end of file diff --git a/project-documentation/project-notes/index.html b/project-documentation/project-notes/index.html new file mode 100644 index 0000000..f267b11 --- /dev/null +++ b/project-documentation/project-notes/index.html @@ -0,0 +1,1356 @@ + + + + + + + + + + + + + + + + + + + + + + Project discussion notes - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    + + + + + + +
    + + + + + + + +
    + +
    + + + + +
    +
    + + + +
    +
    +
    + + + + + + + + +
    +
    +
    + + + + +
    +
    + + + + + + + + + +

    Project discussion notes

    +

    Virtual meeting #3

    +

    Team theme, tentative area of interest, or question:

    +

    Day 1: March 12, 2024 - CU Boulder

    +

    Selected scientific question:

    +

    Day 2: March 13, 2024 - CU Boulder

    +

    Day 3: March 14, 2024 - CU Boulder

    + +
    +
    + + + Last update: + 2024-09-23 + + +
    + + + + + + +
    +
    + + +
    + + + +
    + + + +
    +
    +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/project-documentation/project-notes/project-notes.md b/project-documentation/project-notes/project-notes.md new file mode 100644 index 0000000..45dbb6c --- /dev/null +++ b/project-documentation/project-notes/project-notes.md @@ -0,0 +1,13 @@ +# Project discussion notes + +## Virtual meeting #3 + +### Team theme, tentative area of interest, or question: + +## Day 1: March 12, 2024 - CU Boulder + +### Selected scientific question: + +## Day 2: March 13, 2024 - CU Boulder + +## Day 3: March 14, 2024 - CU Boulder diff --git a/project-documentation/project-presentation/index.html b/project-documentation/project-presentation/index.html new file mode 100644 index 0000000..29b6fda --- /dev/null +++ b/project-documentation/project-presentation/index.html @@ -0,0 +1,1353 @@ + + + + + + + + + + + + + + + + + + + + + + Project presentation overview - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    + + + + + + +
    + + + + + + + +
    + +
    + + + + +
    +
    + + + +
    +
    +
    + + + + + + + + +
    +
    +
    + + + + +
    +
    + + + + + + + + + +

    Project presentation overview

    +

    All project presentation materials should be made available on this page.

    +

    Your team may present directly from this page if you would like to; alternatively, if you would prefer to use slides to present, please make sure to export your team's slides as a PDF, add them to your GitHub, and add the link to that PDF here below.

    +

    Presentation

    + +
    +
    + + + Last update: + 2024-09-23 + + +
    + + + + + + +
    +
    + + +
    + + + +
    + + + +
    +
    +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/project-documentation/project-presentation/project-presentation.md b/project-documentation/project-presentation/project-presentation.md new file mode 100644 index 0000000..bf792c0 --- /dev/null +++ b/project-documentation/project-presentation/project-presentation.md @@ -0,0 +1,8 @@ +# Project presentation overview + +All project presentation materials should be made available on this page. + +Your team may present directly from this page if you would like to; alternatively, if you would prefer to use slides to present, please make sure to export your team's slides as a PDF, add them to your GitHub, and add the link to that PDF here below. + +# Presentation + diff --git a/resources/art gallery.qmd b/resources/art gallery.qmd new file mode 100644 index 0000000..f7b60ca --- /dev/null +++ b/resources/art gallery.qmd @@ -0,0 +1,71 @@ +--- +title: "science art" +date: "2024-01-25" +format: gfm +editor_options: + markdown: + wrap: 72 +--- + +# Ty's art opinion + +In the context of the ongoing discussions for the redesign of our ESIIL office space, I would like to offer my personal perspective on the art and aesthetic that might enrich our environment: + +**Urban Realism with a Personal Touch**: I have a strong appreciation for artworks that reflect a realistic depiction of nature and urban life but with an imaginative twist. Art that integrates with and elevates our daily surroundings could offer a fresh perspective on the mundane. + +**Nature in the Workplace**: On a personal note, I find that art which brings elements of the outdoors inside can create a serene and motivating atmosphere, conducive to the values of sustainability that ESIIL embodies. + +**Interactive Art**: I believe that art installations which invite interaction or present a playful exaggeration of reality can energize our space. They have the potential to foster a creative dialogue among the team and with visitors. + +**Dimensionality and Engagement**: From my viewpoint, art that breaks out of the traditional two-dimensional space and engages with the viewer in three dimensions can transform the feel of an office. Such dynamic pieces could encourage innovative thinking and collaboration. + +**Art with a Message**: It's my opinion that the art we choose should subtly reflect our collective social and environmental commitments. Pieces that prompt introspection about our role in larger societal issues could resonate well with our team's ethos. + +**Community Connection**: Lastly, I feel that our office should not just be a place for work but also a space that invites community interaction. Art can be a bridge between ESIIL and the public, making our office a hub for inspiration and engagement. 
+ +![](../assets/esiil_art/antenna_girl.png) + +![](../assets/esiil_art/child_plant_interaction.png) + + + +![](../assets/esiil_art/hen_harrier.png) +![](../assets/esiil_art/tree_hair.png) +![](../assets/esiil_art/sculpture_toothpaste.png) +![](../assets/esiil_art/tree hands.png) +![](../assets/esiil_art/paperclip.png) +![](../assets/esiil_art/fancy_dandilion.png) +![](../assets/esiil_art/tree_palm.png) +![](../assets/esiil_art/swans.png) +![](../assets/esiil_art/diver.png) +![](../assets/esiil_art/looker.png) +![](../assets/esiil_art/veg_as_hair.png) +![](../assets/esiil_art/turtle.png) +![](../assets/esiil_art/peel_back.png) +![](../assets/esiil_art/tree_eyes.png) +![](../assets/esiil_art/gull_trash.png) +![](../assets/esiil_art/monolith-alpha.png) +![](../assets/esiil_art/veg_from_box.png) + + + + + + + + + + + + + + + + + + + + + + + diff --git a/resources/art gallery/art gallery.md b/resources/art gallery/art gallery.md new file mode 100644 index 0000000..3085227 --- /dev/null +++ b/resources/art gallery/art gallery.md @@ -0,0 +1,60 @@ +science art +================ +2024-01-25 + +# Ty’s art opinion + +In the context of the ongoing discussions for the redesign of our ESIIL +office space, I would like to offer my personal perspective on the art +and aesthetic that might enrich our environment: + +**Urban Realism with a Personal Touch**: I have a strong appreciation +for artworks that reflect a realistic depiction of nature and urban life +but with an imaginative twist. Art that integrates with and elevates our +daily surroundings could offer a fresh perspective on the mundane. + +**Nature in the Workplace**: On a personal note, I find that art which +brings elements of the outdoors inside can create a serene and +motivating atmosphere, conducive to the values of sustainability that +ESIIL embodies. + +**Interactive Art**: I believe that art installations which invite +interaction or present a playful exaggeration of reality can energize +our space. They have the potential to foster a creative dialogue among +the team and with visitors. + +**Dimensionality and Engagement**: From my viewpoint, art that breaks +out of the traditional two-dimensional space and engages with the viewer +in three dimensions can transform the feel of an office. Such dynamic +pieces could encourage innovative thinking and collaboration. + +**Art with a Message**: It’s my opinion that the art we choose should +subtly reflect our collective social and environmental commitments. +Pieces that prompt introspection about our role in larger societal +issues could resonate well with our team’s ethos. + +**Community Connection**: Lastly, I feel that our office should not just +be a place for work but also a space that invites community interaction. +Art can be a bridge between ESIIL and the public, making our office a +hub for inspiration and engagement. 
+ +![](../assets/esiil_art/antenna_girl.png) + +![](../assets/esiil_art/child_plant_interaction.png) + +![](../assets/esiil_art/hen_harrier.png) +![](../assets/esiil_art/tree_hair.png) +![](../assets/esiil_art/sculpture_toothpaste.png) +![](../assets/esiil_art/tree%20hands.png) +![](../assets/esiil_art/paperclip.png) +![](../assets/esiil_art/fancy_dandilion.png) +![](../assets/esiil_art/tree_palm.png) +![](../assets/esiil_art/swans.png) ![](../assets/esiil_art/diver.png) +![](../assets/esiil_art/looker.png) +![](../assets/esiil_art/veg_as_hair.png) +![](../assets/esiil_art/turtle.png) +![](../assets/esiil_art/peel_back.png) +![](../assets/esiil_art/tree_eyes.png) +![](../assets/esiil_art/gull_trash.png) +![](../assets/esiil_art/monolith-alpha.png) +![](../assets/esiil_art/veg_from_box.png) diff --git a/resources/art gallery/index.html b/resources/art gallery/index.html new file mode 100644 index 0000000..2b8f7ed --- /dev/null +++ b/resources/art gallery/index.html @@ -0,0 +1,1397 @@ + + + + + + + + + + + + + + + + + + + + + + science art - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    + + + + + + +
    + + + + + + + +
    + +
    + + + + +
    +
    + + + +
    +
    +
    + + + + + + + + +
    +
    +
    + + + + +
    +
    + + + + + + + + + +

    science art

    +

    2024-01-25

    +

    Ty’s art opinion

    +

    In the context of the ongoing discussions for the redesign of our ESIIL +office space, I would like to offer my personal perspective on the art +and aesthetic that might enrich our environment:

    +

    Urban Realism with a Personal Touch: I have a strong appreciation +for artworks that reflect a realistic depiction of nature and urban life +but with an imaginative twist. Art that integrates with and elevates our +daily surroundings could offer a fresh perspective on the mundane.

    +

    Nature in the Workplace: On a personal note, I find that art which +brings elements of the outdoors inside can create a serene and +motivating atmosphere, conducive to the values of sustainability that +ESIIL embodies.

    +

    Interactive Art: I believe that art installations which invite +interaction or present a playful exaggeration of reality can energize +our space. They have the potential to foster a creative dialogue among +the team and with visitors.

    +

    Dimensionality and Engagement: From my viewpoint, art that breaks +out of the traditional two-dimensional space and engages with the viewer +in three dimensions can transform the feel of an office. Such dynamic +pieces could encourage innovative thinking and collaboration.

    +

    Art with a Message: It’s my opinion that the art we choose should +subtly reflect our collective social and environmental commitments. +Pieces that prompt introspection about our role in larger societal +issues could resonate well with our team’s ethos.

    +

    Community Connection: Lastly, I feel that our office should not just +be a place for work but also a space that invites community interaction. +Art can be a bridge between ESIIL and the public, making our office a +hub for inspiration and engagement.

    +

    +

    +

    + + + + + + + + + + + + + + +

    + +
    +
    + + + Last update: + 2024-09-23 + + +
    + + + + + + +
    +
    + + +
    + + + +
    + + + +
    +
    +
    +

# Citation Management and Notes Collection in Markdown

## Introduction

This document serves as a guide for managing citations and collecting research notes for our project. We'll use a combination of a `.bib` file for bibliographic references and Markdown for note-taking.

## Part 1: Setting Up Your .bib File for Citations

### Creating a .bib File

1. **Create a new file** with a `.bib` extension, for example, `project_references.bib`.
2. **Add bibliographic entries** to this file. Each entry should follow the BibTeX format.

### Example of a .bib Entry

```bibtex
@article{Doe2021,
  author  = {Jane Doe and John Smith},
  title   = {Insights into Environmental Data Science},
  journal = {Journal of Data Science},
  year    = {2021},
  volume  = {15},
  number  = {4},
  pages   = {123-145},
  doi     = {10.1000/jds.2021.15.4}
}
```

## Part 2: Using Citations in Markdown

### Citing in Your Markdown Document

- Refer to works in your `.bib` file using citation keys, like `[@Doe2021]`.

### Converting Markdown to PDF with Citations

- Use Pandoc (see the example below): `pandoc yourdoc.md --bibliography=project_references.bib --citeproc -o output.pdf`
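To see the whole flow end to end, here is a minimal sketch; the note file name and its contents are made up, and it assumes Pandoc (with a LaTeX engine for PDF output) and the `project_references.bib` file from Part 1 are available.

```bash
# Hypothetical example: write a small Markdown file that cites an entry
# from project_references.bib, then render it to PDF with citations resolved.
cat > demo_notes.md << 'EOF'
# Demo notes

Recent work points to new directions in environmental data science [@Doe2021].

# References
EOF

# --citeproc resolves [@Doe2021] against the .bib file and appends the
# formatted entry under the "References" heading.
pandoc demo_notes.md --bibliography=project_references.bib --citeproc -o demo_notes.pdf
```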

## Part 3: Collecting Citations and Research Notes

### Structuring Your Notes

#### Notes on Doe 2021 `[@Doe2021]`

- **Key Points:**
  - Summary of the article's main arguments.
  - Notable methodologies.

- **Relevance to Our Project:**
  - How this research informs our project.
  - Applicable methodologies or theories.

#### Notes on Another Article `[@Another2021]`

- **Key Points:**
  - ...

- **Relevance to Our Project:**
  - ...

## Conclusion

This document facilitates efficient management of references and collaborative knowledge building for our project.


# Connecting Cyverse to GitHub

## Log in to Cyverse

1. Go to the Cyverse user account website [https://user.cyverse.org/](https://user.cyverse.org/)

   image

2. Click `Sign up` (if you do not already have an account)

   image

3. Head over to the Cyverse Discovery Environment [https://de.cyverse.org](https://de.cyverse.org), and log in with your new account.

   image

   You should now see the Discovery Environment:

   image

4. We will give you permissions to access the Hackathon app. If you haven't already, let us know that you need access.

## Open up an analysis with the hackathon environment (Jupyter Lab)

1. From the Cyverse Discovery Environment, click on `Apps` in the left menu
   ![apps](../assets/cyverse_basics/apps.png)

2. Select `JupyterLab ESIIL`
   ![use_this_app](../assets/cyverse_basics/use_this_app.png)

3. Configure and launch your analysis - when choosing the disk size, make sure to choose 64GB or greater. The rest of the settings you can change to suit your computing needs:
   ![app_launch](../assets/cyverse_basics/app_launch.png)

   ![app_settings](../assets/cyverse_basics/app_settings.png)

   ![launch](../assets/cyverse_basics/launch.png)

4. Click `Go to analysis`:
   ![go_to_analysis](../assets/cyverse_basics/go_to_analysis.png)

5. Now you should see Jupyter Lab!
   ![jupyterlab](../assets/cyverse_basics/jupyterlab.png)

## Set up your GitHub credentials

### If you would prefer to follow a video instead of a written outline, we have prepared a video here:

(video)

1. From Jupyter Lab, click on the Git Extension icon on the left menu:
   ![jupyterlab](../assets/cyverse_basics/jupyterlab.png)

2. Click `Clone a Repository`, paste the link to the cyverse-utils repository [https://github.com/CU-ESIIL/cyverse-utils.git](https://github.com/CU-ESIIL/cyverse-utils.git), and click `Clone`:
   ![clone](../assets/cyverse_basics/clone.png)

3. You should now see the `cyverse-utils` folder in your directory tree (provided you haven't changed directories from the default `/home/jovyan/data-store`):
   ![cyverse-utils](../assets/cyverse_basics/cyverse-utils.png)

4. Go into the `cyverse-utils` folder:
   ![click_cyverse_utils](../assets/cyverse_basics/click_cyverse_utils.png)

5. Open the `create_github_keypair.ipynb` notebook if you prefer Python, or the `create_github_keypair.R` script if you prefer R, by double-clicking, and then select the default `macrosystems` kernel:
   ![open_cyverse_utils](../assets/cyverse_basics/open_cyverse_utils.png)

6. Now you should see the notebook open. Click the `play` button at the top. You will be prompted to enter your GitHub username and email:
   ![script_1](../assets/cyverse_basics/script_1.png)

   ![username](../assets/cyverse_basics/username.png)

   ![email](../assets/cyverse_basics/email.png)

7. You should now see your Public Key. Copy the WHOLE LINE, including `ssh-ed25519` at the beginning and the `jovyan@...` at the end:
   ![key](../assets/cyverse_basics/key.png)

8. Go to your GitHub settings page (you may need to log in to GitHub first):
   ![settings](../assets/cyverse_basics/settings.png)

9. Select `SSH and GPG keys`:
   ![ssh](../assets/cyverse_basics/ssh.png)

10. Select `New SSH key`:
    ![new_key](../assets/cyverse_basics/new_key.png)

11. Give your key a descriptive name, paste your ENTIRE public key in the `Key` input box, and click `Add SSH Key`. You may need to re-authenticate with your password or two-factor authentication:
    ![paste_key](../assets/cyverse_basics/paste_key.png)

12. You should now see your new SSH key in your `Authentication Keys` list! Now you will be able to clone private repositories and push changes to GitHub from your Cyverse analysis!
    ![final](../assets/cyverse_basics/final.png)

> NOTE! Your GitHub authentication is ONLY for the analysis you're working with right now. You will be able to use it as long as you want there, but once you start a new analysis you will need to go through this process again. Feel free to delete keys from old analyses that have been shut down.
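The notebook (or R script) generates the keypair for you; if you want to inspect or test it afterwards from the JupyterLab terminal, a rough sketch follows. It assumes the key was written to the default `~/.ssh/id_ed25519` location, which may differ in your setup.

```bash
# Show the public key again so you can re-copy it if needed
# (assumes the default key path; adjust if the script used another location).
cat ~/.ssh/id_ed25519.pub

# Test the connection; GitHub should reply that you have successfully authenticated.
ssh -T git@github.com

# Once the key is registered on GitHub, SSH remotes work for private repositories too, e.g.:
git clone git@github.com:CU-ESIIL/cyverse-utils.git
```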


# Cyverse fixes

## Earth Lab Data Storage

- **Path:** `/home/jovyan/data-store/iplant/home/shared/earthlab/`
- Ensure your project has a directory within the Earth Lab data storage (see the sketch below).
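For example, a project folder could be created from the JupyterLab terminal roughly as follows, assuming the shared Earth Lab store is mounted read-write in your analysis; `my_project` is a placeholder name.

```bash
# List the shared Earth Lab space, then add a folder for your project
# (placeholder name; requires write access to the shared collection).
ls /home/jovyan/data-store/iplant/home/shared/earthlab/
mkdir -p /home/jovyan/data-store/iplant/home/shared/earthlab/my_project
```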

## Setup

1. **CyVerse Account:**
   - Create an account if you do not already have one.
   - Contact Tyson for account upgrades after maximizing current limits.

## GitHub Connection

- Follow Elsa Culler's guide for connecting GitHub to CyVerse.
- Select “JupyterLab ESIIL” and choose “macrosystems” in the version dropdown.
- Clone into `/home/jovyan/data-store`.
- Clone `innovation-summit-utils` for SSH connection to GitHub.
- Run `conda install -c conda-forge openssh` in the terminal if you encounter errors.
- GitHub authentication is session-specific.

## RStudio in DE

1. Copy your instance ID. It can be found in your analysis URL, which has the form `https://<id>.cyverse.run/lab`.
2. Use your ID in these links (see the example below):
   - `https://<id>.cyverse.run/rstudio/auth-sign-in`
   - `https://<id>.cyverse.run/rstudio/`
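A small sketch of the substitution, using a made-up instance ID:

```bash
# Made-up instance ID; replace it with the ID from your own analysis URL
# (https://<id>.cyverse.run/lab).
ID=a1b2c3d4
echo "RStudio sign-in: https://${ID}.cyverse.run/rstudio/auth-sign-in"
echo "RStudio:         https://${ID}.cyverse.run/rstudio/"
```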

## Package Requests

- List desired packages here for future container updates.

## Data Transfer to CyVerse

- Use GoCommands for HPC/CyVerse transfers.
- **Installation:**
  - **Linux:** (Command)
  - **Windows Powershell:** (Command)
- **Usage** (see the sketch below):
  - Use `put` for upload and `get` for download.
  - Ensure correct CyVerse directory path.
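A rough usage sketch, assuming GoCommands (`gocmd`) is already installed and configured for your CyVerse account; the local and CyVerse paths are placeholders.

```bash
# Upload a local results folder to a CyVerse collection (placeholder paths).
gocmd put ./results /iplant/home/shared/earthlab/my_project/

# Download a collection from CyVerse back to the local working directory.
gocmd get /iplant/home/shared/earthlab/my_project/results ./
```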









# Data Analysis Documentation

## Overview
Brief overview of the data analysis goals and the analytical questions being addressed.

## Analysis Methodology
Description of the analytical approach, methods used, and justification for the chosen techniques.

## Code Overview
Explanation of the structure of the analysis code, including key functions and their roles.

## Running the Analysis
Instructions and example commands for executing the analysis scripts, for example `python analysis_script.py`.

## Analysis Results
Summary of key findings from the analysis, including interpretation and relevance.

## Challenges and Solutions
Discussion of challenges faced during the analysis and solutions or workarounds implemented.

## Conclusions
Concluding remarks on the analysis, insights gained, and their potential impact.

## Future Work
Suggestions for extending or refining the analysis and potential areas for further research.

## References
Citations or references to external sources or literature used.


# Data Processing Documentation

## Overview
Brief description of the data processing objectives and scope. Reminder to adhere to data ownership and usage guidelines.

## Data Sources
List and describe data sources used, including links to cloud-optimized sources. Highlight permissions and compliance with data ownership guidelines.

## CyVerse Discovery Environment
Instructions for setting up and using the CyVerse Discovery Environment for data processing. Tips for cloud-based data access and processing.

## Data Processing Steps

### Using GDAL VSI
Guidance on using GDAL VSI (Virtual System Interface) for data access and processing. Example command: `gdal_translate /vsicurl/http://example.com/data.tif output.tif`

## Cloud-Optimized Data
Advantages of using cloud-optimized data formats and processing data without downloading. Instructions for such processes.

    +

    Data Storage

    +

    Information on storing processed data, with guidelines for choosing between the repository and CyVerse Data Store.

    +

    Best Practices

    +

    Recommendations for efficient and responsible data processing in the cloud. Tips to ensure data integrity and reproducibility.

    +

    Challenges and Troubleshooting

    +

    Common challenges in data processing and potential solutions. Resources for troubleshooting in the CyVerse Discovery Environment.

    +

    Conclusions

    +

    Summary of the data processing phase and its outcomes. Reflect on the methods used.

    +

    References

    +

    Citations of tools, data sources, and other references used in the data processing phase.

    + +
    +
    + + + Last update: + 2024-09-23 + + +
    + + + + + + +
    +
    + + +
    + + + +
    + + + +
    +
    +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/resources/esiil_training/esiil_training.md b/resources/esiil_training/esiil_training.md new file mode 100644 index 0000000..a9f0897 --- /dev/null +++ b/resources/esiil_training/esiil_training.md @@ -0,0 +1,66 @@ +# ESIIL Working Groups training sessions + +## Introduction to ESIIL Training +- Brief overview of the training program. +- Objectives and expected outcomes for the working groups. + +## Session 1: The Science of Team Science (2 Hours) +### Part 1: Creating Ethical and Innovative Work Spaces +- Strategies for fostering ethical and inclusive environments. +- Techniques for encouraging innovation and creativity in team settings. + +### Part 2: Effective Communication and Collaboration +- Best practices for ensuring every team member's voice is heard. +- Approaches for maintaining productivity and positive team dynamics. +- Overview of the code of conduct and participant agreement. + +## Session 2: Foundations of Environmental Data Science (2 Hours) +### Part 1: Data Management, Ethics, and GitHub Usage +- Principles of data management in environmental science. +- Understanding data ethics and ownership guidelines. +- Tour of GitHub repositories and setup instructions for effective collaboration. + +### Part 2: Essential Tools and Technologies +- Introduction to key tools and technologies used in ESIIL. +- Basic training on software and platforms essential for data analysis. + +## Session 3: Practical Application and Project Execution (2 Hours) + +### Part 1: Travel Planning and Reimbursement +- Learn how to manage finances and submit paperwork to the University. + +### Part 2: Hands-on Data Analysis Workflow +- Interactive session on constructing a data analysis pipeline using ESIIL/CyVerse tools. +- Practical exercises on data processing, analysis, and visualization techniques. +- Troubleshooting common issues and optimizing workflow efficiency. + +### Part 3: Wrap-up and Project Planning +- Strategies for sustaining project momentum and managing long-term research goals. +- Planning for publication, data sharing, and broader impact. +- Final Q&A session to address any outstanding questions or concerns. + +### Conclusion and Feedback +- Summary of key learnings from all sessions. +- Encouragement for participants to apply these skills in their respective projects. +- Collection of feedback for future training improvements. + +## Additional Resources +- List of resources for further learning and exploration. +- Links to community forums or groups for ongoing support and collaboration. + +## Roundtable Event 1: PI/Team Leads Discussion (2 Hours) +- A roundtable discussion for Principal Investigators and team leads. +- Sharing experiences, challenges, and strategies among group leaders. +- Fostering a collaborative network and problem-solving atmosphere. + +## Roundtable Event 2: Technical Leads Office Hours (2 Hours) +- A roundtable and office hours session for technical leads. +- Ensuring a thorough understanding of the ESIIL/CyVerse cyberinfrastructure. +- Providing technical support and knowledge exchange. + +## Conclusion and Feedback +- Recap of key takeaways from the training sessions and roundtables. +- Collection of feedback for continuous improvement of the training program. + +## Additional Resources +- Supplementary materials, reading lists, and links to online tutorials and documentation. 
diff --git a/resources/esiil_training/index.html b/resources/esiil_training/index.html new file mode 100644 index 0000000..8878010 --- /dev/null +++ b/resources/esiil_training/index.html @@ -0,0 +1,1428 @@ + + + + + + + + + + + + + + + + + + + + + + ESIIL Working Groups training sessions - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + + + + + + + + + + \ No newline at end of file diff --git a/resources/first_meeting_notes/first_meeting_notes.md b/resources/first_meeting_notes/first_meeting_notes.md new file mode 100644 index 0000000..b4e16b0 --- /dev/null +++ b/resources/first_meeting_notes/first_meeting_notes.md @@ -0,0 +1,89 @@ +# Primary Meeting 1 + +# Day 1-5: Project Kickoff and Strategy + +## Meeting Details +- **Dates:** +- **Times:** +- **Location:** +- **Facilitator:** + +## Attendees +- List of attendees + +## Daily Agenda + +### Day 1: Setting the Stage + +#### Opening Remarks +- Welcoming speech and outline of the week's objectives. + +#### Project Overview +- Presentation of the project goals and significance. + +#### Theoretical Framework +- Discussion on the theoretical underpinnings of the project. + +#### Data Overview +- Review available data and any gaps that need addressing. + +### Day 2-4: Deep Dives + +#### Daily Goals +- Outline specific goals for each day. + +#### Task Assignments +- Assign tasks and areas of responsibility to team members. + +#### Theory and Data Synthesis +- Host focused discussions on how theory will inform data analysis. +- Explore different methodological approaches and data integration strategies. + +#### Evening Social and Soft Work Sessions +- Casual gatherings to further discuss ideas and foster team bonding. + +### Day 5: Roadmap and Closure + +#### Project Roadmap +- Draft a detailed plan of action for the project going forward. + +#### Responsibilities +- Confirm individual responsibilities and deadlines. + +#### Review and Feedback +- Reflect on the week's discussions and adjust the project plan as needed. + +#### Closing Remarks +- Summarize achievements and express appreciation for the team's efforts. + +## Detailed Notes + +### Day 1 Notes +- Summary of discussions, decisions, and key points. + +### Day 2 Notes +- ... + +### Day 3 Notes +- ... + +### Day 4 Notes +- ... + +### Day 5 Notes +- ... + +## Action Items +- [ ] Specific task: Assigned to - Deadline +- [ ] Specific task: Assigned to - Deadline +- ... + +## Reflections and Comments +- (Space for any additional thoughts, insights, or personal reflections on the meeting.) + +## Next Steps +- Schedule for follow-up meetings or checkpoints. +- Outline of expected progress before the next primary meeting. + +## Additional Documentation +- (Include or link to any additional documents, charts, or resources that were created or referenced during the meeting.) diff --git a/resources/first_meeting_notes/index.html b/resources/first_meeting_notes/index.html new file mode 100644 index 0000000..da0cf98 --- /dev/null +++ b/resources/first_meeting_notes/index.html @@ -0,0 +1,1455 @@ + + + + + + + + + + + + + + + + + + + + + + Primary Meeting 1 - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + + + + + + + + + + \ No newline at end of file diff --git a/resources/github_basics/github_basics.md b/resources/github_basics/github_basics.md new file mode 100644 index 0000000..30e9869 --- /dev/null +++ b/resources/github_basics/github_basics.md @@ -0,0 +1,424 @@ +Github essentials +================ + +## I. Introduction (2 minutes) + +### A. Brief overview of GitHub: + +GitHub is a web-based platform that provides version control and +collaboration features using Git, a distributed version control system. +It enables developers to work together on projects, track changes to +code, and efficiently manage different versions of the project. GitHub +is widely used in the software development industry and is an essential +tool for collaborative projects and maintaining code quality. + +![A basic git workflow represented as two islands, one with "local repo" and "working directory", and another with "remote repo." Bunnies move file boxes from the working directory to the staging area, then with Commit move them to the local repo. Bunnies in rowboats move changes from the local repo to the remote repo (labeled "PUSH") and from the remote repo to the working directory (labeled "PULL"). ](https://cdn.myportfolio.com/45214904-6a61-4e23-98d6-b140f8654a40/68739659-fb6f-41e8-9813-32e1de3d82c0_rw_3840.png?h=5c36d3c50c350a440567a1f8f72ac028) + +> Image source: [Artwork by @allison_horst](https://twitter.com/allison_horst) + + + +### B. Introduce GitHub Desktop and JupyterHub GitHub widget: + +GitHub Desktop is a graphical user interface (GUI) application that +simplifies working with Git and GitHub by providing a more visual and +intuitive way to manage repositories, branches, commits, and other Git +features. JupyterHub GitHub widget, on the other hand, is a built-in +widget that integrates Git and GitHub functionality directly into +Jupyter notebooks, allowing users to perform version control and +collaboration tasks within the Jupyter environment. Both tools help +streamline the process of working with GitHub and make it more +accessible to users with varying levels of experience with Git and +version control. + +#### 1. Download GitHub Desktop + +##### Step 1: Download GitHub Desktop + +Go to the GitHub Desktop download page: https://desktop.github.com/ + +Click on the “Download for Windows” or “Download for macOS” button, +depending on your operating system. The download should start +automatically. + +##### Step 2: Install GitHub Desktop + +For Windows: + +Locate the downloaded installer file (usually in the Downloads folder) +and double-click on it to run the installer. + +Follow the installation instructions that appear on the screen, +accepting the default settings or customizing them as desired. + +Once the installation is complete, GitHub Desktop will launch +automatically. For macOS: + +Locate the downloaded .zip file (usually in the Downloads folder) and +double-click on it to extract the GitHub Desktop application. + +Drag the extracted “GitHub Desktop” application into the “Applications” +folder. + +Open the “Applications” folder and double-click on “GitHub Desktop” to +launch the application. + +##### Step 3: Set up GitHub Desktop + +When GitHub Desktop launches for the first time, you will be prompted to +sign in with your GitHub account. If you don’t have one, you can create +one at https://github.com/join. + +Enter your GitHub username (or email) and password, and click on “Sign +in.” + +You will then be prompted to configure Git. 
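As a command-line alternative to the GitHub Desktop prompt described here, the same Git identity can be set directly with git config; a minimal sketch, with placeholder values:

```bash
# Set the commit identity Git will record (values are placeholders)
git config --global user.name "Your Name"
git config --global user.email "you@example.com"

# Confirm what was stored
git config --global --list
```

Completing the GitHub Desktop prompt described next sets the same name and email used for your commit messages.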
Enter your name and email +address, which will be used for your commit messages. Click “Continue” +when you’re done. Choose whether you want to submit usage data to help +improve GitHub Desktop. Click “Finish” to complete the setup. + +Now, you have successfully installed and set up GitHub Desktop. You can +start using it to clone repositories, make changes, commit, and sync +with the remote repositories on GitHub. + +#### 1. Download GitHub for JupyterHub cloud service + +### Step 1: Accessing JupyterHub on the cloud + +Visit the JupyterHub cloud service you want to use (e.g., Binder, Google +Colab, or a custom JupyterHub deployment provided by your organization). + +Sign in with your credentials or authenticate using a third-party +service if required. + +### Step 2: Launch a new Jupyter Notebook or open an existing one + +Click on the “New” button (usually located in the top right corner) and +select “Python” to create a new Jupyter Notebook or open an existing one +from the file browser. + +Once the notebook is open, you will see the Jupyter Notebook interface +with the familiar cells for writing and executing code. + +### Step 3: Install and enable the JupyterLab Git extension + +In your Jupyter Notebook, create a new code cell and run the following +command to install the JupyterLab Git extension: + +!pip install jupyterlab-git + +Restart the Jupyter Notebook server for the changes to take effect. + +### Step 4: Using the JupyterHub GitHub widget + +In the Jupyter Notebook interface, you should now see a Git icon on the +left sidebar. Click on it to open the GitHub widget. + +To clone a repository, click on the “+” icon in the GitHub widget and +enter the repository URL. This will clone the repository into your +JupyterHub workspace. You can now navigate through the cloned +repository, make changes, and use the GitHub widget to stage, commit, +and push your changes back to the remote repository. + +To create and manage branches, use the branch icon in the GitHub widget. +You can create new branches, switch between branches, and merge branches +using this interface. + +To sync your local repository with the remote repository, use the “Pull” +and “Push” buttons in the GitHub widget. + +Now, you know how to access and use the JupyterHub GitHub widget running +on the cloud. This allows you to work with Git and GitHub directly from +your Jupyter Notebook interface, streamlining your workflow and making +collaboration easier. + +### C. GitHub in Rstudio: + +Integrating GitHub with RStudio allows users to manage their Git +repositories and collaborate on projects directly within the RStudio +environment. It offers similar functionality to GitHub Desktop but +caters specifically to R users working within RStudio. By configuring +RStudio to work with Git, creating or opening RStudio projects, and +linking projects to GitHub repositories, users can enjoy a seamless +workflow for version control and collaboration. RStudio’s Git pane +enables users to stage, commit, and push changes to remote repositories, +as well as manage branches and sync local repositories with remote ones, +providing a comprehensive solution for R developers working with GitHub. + +#### Step 1: Install Git + +Before integrating GitHub with RStudio, you need to have Git installed +on your computer. Visit the official Git website (https://git-scm.com/) +to download and install the latest version of Git for your operating +system. + +#### Step 2: Configure RStudio to work with Git + +Open RStudio. 
+ +Go to “Tools” \> “Global Options” in the top menu. In the “Global +Options” window, click on the “Git/SVN” tab. + +Check that the “Git executable” field is pointing to the correct +location of the installed Git. If not, click “Browse” and navigate to +the location of the Git executable file (usually found in the “bin” +folder of the Git installation directory). + +Click “OK” to save the changes. + +#### Step 3: Create or open an RStudio project + +To create a new RStudio project, go to “File” \> “New Project” in the +top menu. You can either create a new directory or choose an existing +one for your project. + +To open an existing RStudio project, go to “File” \> “Open Project” and +navigate to the project’s “.Rproj” file. + +#### Step 4: Link your RStudio project to a GitHub repository + +In the RStudio project, go to the “Tools” menu and select “Version +Control” \> “Project Setup.” + +In the “Project Setup” window, select “Git” as the version control +system and click “OK.” + +A new “.git” folder will be created in your project directory, +initializing it as a Git repository. Commit any changes you have made so +far by clicking on the “Commit” button in the “Git” pane in RStudio. + +To link your local repository to a remote GitHub repository, go to your +GitHub account and create a new repository. + +Copy the remote repository’s URL (e.g., +“https://github.com/username/repository.git”). + +In RStudio, open the “Shell” by going to “Tools” \> “Shell.” + +In the shell, run the following command to add the remote repository: + +git remote add origin https://github.com/username/repository.git + +Replace the URL with the one you copied from your GitHub repository. + +Push your changes to the remote repository by running the following +command in the shell: + +git push -u origin master + +Now, your RStudio project is linked to a GitHub repository. You can use +the “Git” pane in RStudio to stage, commit, and push changes to the +remote repository, as well as manage branches and sync your local +repository with the remote one. + +By integrating GitHub with RStudio, you can streamline your workflow, +collaborate more effectively with your team, and manage your Git +repositories directly from the RStudio interface. + +## II. GitHub Basics (4 minutes) + +### A. Repository: + +A repository, often abbreviated as “repo,” is the fundamental building +block of GitHub. It is a storage space for your project files, including +the code, documentation, and other related resources. Each repository +also contains the complete history of all changes made to the project +files, which is crucial for effective version control. Repositories can +be public, allowing anyone to access and contribute, or private, +restricting access to specific collaborators. + +### B. Fork and Clone: + +Forking and cloning are two essential operations for working with +repositories on GitHub. Forking creates a personal copy of someone +else’s repository under your GitHub account, enabling you to make +changes to the project without affecting the original repo. Cloning, on +the other hand, is the process of downloading a remote repository to +your local machine for offline development. In GitHub Desktop, you can +clone a repository by selecting “Clone a repository from the Internet” +and entering the repository URL. In JupyterHub GitHub widget, you can +clone a repository by entering the repo URL in the “Clone Repository” +section of the widget. + +### C. 
Branches: + +Branches are a critical aspect of Git version control, as they allow you +to create multiple parallel versions of your project within a single +repository. This is particularly useful when working on new features or +bug fixes, as it prevents changes from interfering with the main (or +“master”) branch until they are ready to be merged. Creating a new +branch in GitHub Desktop can be done by clicking the “Current Branch” +dropdown and selecting “New Branch.” In JupyterHub GitHub widget, you +can create a new branch by clicking the “New Branch” button in the +“Branches” section of the widget. + +### D. Replace ‘master’ with ‘main’: + +In recent years, there has been a growing awareness of the importance of +inclusive language in technology. One such example is the use of the +term “master” in the context of the default branch in a GitHub +repository. The term “master” has historical connections to the +“master/slave” file structure, which evokes an unsavory colonial past +associated with slavery. In light of this, many developers and +organizations have begun to replace the term “master” with more neutral +terms, such as “main.” We encourage you to follow this practice and +change the default branch name in your repositories from “master” to +“main” or another suitable alternative. This small change can help +promote a more inclusive and welcoming environment within the technology +community. + +## III. Collaboration and Version Control (5 minutes) + +### A. Commits: + +Commits are snapshots of your project’s changes at a specific point in +time, serving as the fundamental building blocks of Git’s version +control system. Commits make it possible to track changes, revert to +previous versions, and collaborate with others. In GitHub Desktop, you +can make a commit by staging the changes you want to include, adding a +descriptive commit message, and clicking “Commit to \[branch_name\].” In +JupyterHub GitHub widget, you can create a commit by selecting the files +with changes, entering a commit message, and clicking the “Commit” +button. + +### B. Push: + +In GitHub, “push” is a fundamental operation in the version control +process that transfers commits from your local repository to a remote +repository, such as the one hosted on GitHub. When you push changes, you +synchronize the remote repository with the latest updates made to your +local repository, making those changes accessible to other collaborators +working on the same project. This operation ensures that the remote +repository reflects the most recent state of your work and allows your +team members to stay up to date with your changes. Pushing is an +essential step in distributed version control systems like Git, as it +promotes efficient collaboration among multiple contributors and +provides a centralized location for tracking the project’s history and +progress. + +In GitHub, the concepts of “commit” and “push” represent two distinct +steps in the version control process. A “commit” is the action of saving +changes to your local repository. When you commit changes, you create a +snapshot of your work, accompanied by a unique identifier and an +optional descriptive message. Commits allow you to track the progress of +your work over time and make it easy to revert to a previous state if +necessary. On the other hand, “push” is the action of transferring your +local commits to a remote repository, such as the one hosted on GitHub. 
+Pushing makes your changes accessible to others collaborating on the +same project and ensures that the remote repository stays up to date +with your local repository. In summary, committing saves changes +locally, while pushing synchronizes those changes with a remote +repository, allowing for seamless collaboration among multiple +contributors. + +### C. Pull Requests: + +Pull requests are a collaboration feature on GitHub that enables +developers to propose changes to a repository, discuss those changes, +and ultimately merge them into the main branch. To create a pull +request, you must first push your changes to a branch on your fork of +the repository. Then, using either GitHub Desktop or JupyterHub GitHub +widget, you can navigate to the original repository, click the “Pull +Request” tab, and create a new pull request. After the pull request is +reviewed and approved, it can be merged into the main branch. + +### D. Merging and Resolving Conflicts: + +Merging is the process of combining changes from one branch into +another. This is typically done when a feature or bugfix has been +completed and is ready to be integrated into the main branch. Conflicts +can arise during the merging process if the same lines of code have been +modified in both branches. To resolve conflicts, you must manually +review the changes and decide which version to keep. In GitHub Desktop, +you can merge branches by selecting the target branch and choosing +“Merge into Current Branch.” Conflicts will be highlighted, and you can +edit the files to resolve them before committing the changes. In +JupyterHub GitHub widget, you can merge branches by selecting the target +branch in the “Branches” section and clicking the “Merge” button. If +conflicts occur, the widget will prompt you to resolve them before +completing the merge. + +## IV. Additional Features (2 minutes) + +### A. Issues and Project Management: + +Issues are a powerful feature in GitHub that allows developers to track +and manage bugs, enhancements, and other tasks within a project. Issues +can be assigned to collaborators, labeled for easy organization, and +linked to specific commits or pull requests. They provide a centralized +location for discussing and addressing project-related concerns, +fostering collaboration and transparent communication among team +members. Using issues effectively can significantly improve the overall +management and organization of your projects. + +### B. GitHub Pages: + +GitHub Pages is a service offered by GitHub that allows you to host +static websites directly from a repository. By creating a new branch +named “gh-pages” in your repository and adding the necessary files +(HTML, CSS, JavaScript, etc.), GitHub will automatically build and +deploy your website to a publicly accessible URL. This is particularly +useful for showcasing project documentation, creating personal +portfolios, or hosting project demos. With GitHub Pages, you can take +advantage of the version control and collaboration features of GitHub +while easily sharing your work with others. + +## V. Conclusion (2 minutes) + +### A. Recap of the essentials of GitHub: + +In this brief introduction, we have covered the essentials of GitHub, +including the basics of repositories, forking, cloning, branching, +commits, pull requests, merging, and resolving conflicts. We have also +discussed additional features like issues for project management and +GitHub Pages for hosting websites directly from a repository. + +### B. 
Encourage further exploration and learning:
+
+While this introduction provides a solid foundation for understanding
+and using GitHub, there is still much more to learn and explore. As you
+continue to use GitHub in your projects, you will discover new features
+and workflows that can enhance your productivity and collaboration. We
+encourage you to dive deeper into the platform and experiment with
+different tools and techniques.
+
+### C. Share resources for learning more about GitHub:
+
+There are many resources available for learning more about GitHub and
+expanding your skills. Some popular resources include GitHub Guides
+(https://guides.github.com/), which offers a collection of tutorials and
+best practices, the official GitHub documentation
+(https://docs.github.com/), and various online tutorials and courses. By
+engaging with these resources and participating in the GitHub community,
+you can further develop your understanding of the platform and become a
+more proficient user.
diff --git a/resources/github_basics/index.html new file mode 100644 index 0000000..d6f87a8 --- /dev/null +++ b/resources/github_basics/index.html @@ -0,0 +1,2063 @@
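Tying the GitHub essentials guide above together, here is a minimal command-line sketch of the clone → branch → commit → push workflow it walks through in GitHub Desktop and the JupyterLab Git widget. The repository URL and branch names are hypothetical placeholders, and the last two commands show the "master" to "main" rename recommended in section II.D.

```bash
# Clone a repository (URL is a placeholder) and move into it
git clone https://github.com/username/repository.git
cd repository

# Create and switch to a feature branch for your changes
git checkout -b feature/my-analysis

# Stage and commit changes with a descriptive message
git add .
git commit -m "Describe the change here"

# Push the branch to GitHub, then open a pull request in the web interface
git push -u origin feature/my-analysis

# Rename the default branch from 'master' to 'main' (also update the
# default branch in the repository settings on GitHub afterwards)
git branch -m master main
git push -u origin main
```

After the pull request is reviewed and merged, running `git pull` on main brings the merged changes back to your local copy.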
    + + + + + + + + + + \ No newline at end of file diff --git a/resources/manuscript/index.html b/resources/manuscript/index.html new file mode 100644 index 0000000..0750826 --- /dev/null +++ b/resources/manuscript/index.html @@ -0,0 +1,1407 @@ + + + + + + + + + + + + + + + + + + + + + + Manuscript Title - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Manuscript Title

    +

    Authors

    +
      +
    • Author 1, Affiliation
    • +
    • Author 2, Affiliation
    • +
    • ...
    • +
    +

    Abstract

    +
      +
    • A brief summary of the research, its objectives, main findings, and conclusions.
    • +
    +

    Introduction

    +
      +
    • Background information and context setting for the research.
    • +
    • Statement of the problem and research objectives.
    • +
    • Overview of the methodology and approach.
    • +
    +

    Literature Review

    +
      +
    • Discussion of relevant previous work and how this research contributes to the field.
    • +
    +

    Methodology

    +
      +
    • Detailed description of the research methodology.
    • +
    • Explanation of data collection and analysis techniques.
    • +
    • Justification for methodological choices.
    • +
    +

    Results

    +
      +
    • Presentation of the research findings.
    • +
    • Use of tables, graphs, and figures to illustrate key points.
    • +
    • Analysis and interpretation of the results.
    • +
    +

    Discussion

    +
      +
    • Discussion of the implications of the findings.
    • +
    • Comparison with previous research in the field.
    • +
    • Consideration of the limitations of the study.
    • +
    +

    Conclusion

    +
      +
    • Summary of the main findings.
    • +
    • Reflection on the research's significance and potential impact.
    • +
    • Suggestions for future research directions.
    • +
    +

    Acknowledgements

    +
      +
    • Acknowledgement of any assistance, funding, or contributions from others.
    • +
    +

    References

    +
      +
    • Bibliographic details of the cited works.
    • +
    • Use a consistent citation style throughout.
    • +
    +

    Appendices

    +
      +
    • Additional material that supports the manuscript but is too detailed for the main sections.
    • +
    + + + + + + + + + + \ No newline at end of file diff --git a/resources/manuscript/manuscript.md b/resources/manuscript/manuscript.md new file mode 100644 index 0000000..400212e --- /dev/null +++ b/resources/manuscript/manuscript.md @@ -0,0 +1,58 @@ +# Manuscript Title + +## Authors + +- Author 1, Affiliation +- Author 2, Affiliation +- ... + +## Abstract + +- A brief summary of the research, its objectives, main findings, and conclusions. + +## Introduction + +- Background information and context setting for the research. +- Statement of the problem and research objectives. +- Overview of the methodology and approach. + +## Literature Review + +- Discussion of relevant previous work and how this research contributes to the field. + +## Methodology + +- Detailed description of the research methodology. +- Explanation of data collection and analysis techniques. +- Justification for methodological choices. + +## Results + +- Presentation of the research findings. +- Use of tables, graphs, and figures to illustrate key points. +- Analysis and interpretation of the results. + +## Discussion + +- Discussion of the implications of the findings. +- Comparison with previous research in the field. +- Consideration of the limitations of the study. + +## Conclusion + +- Summary of the main findings. +- Reflection on the research's significance and potential impact. +- Suggestions for future research directions. + +## Acknowledgements + +- Acknowledgement of any assistance, funding, or contributions from others. + +## References + +- Bibliographic details of the cited works. +- Use a consistent citation style throughout. + +## Appendices + +- Additional material that supports the manuscript but is too detailed for the main sections. diff --git a/resources/markdown_basics/index.html b/resources/markdown_basics/index.html new file mode 100644 index 0000000..22a232c --- /dev/null +++ b/resources/markdown_basics/index.html @@ -0,0 +1,2143 @@ + + + + + + + + + + + + + + + + + + + + + + Markdown - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Markdown for the Modern Researcher at ESIIL

    +

    Join us on a HackMD page to practice Markdown

    +

    Section 1: Mastering Markdown Syntax

    +

    1. Fundamentals of Text Formatting

    +
      +
    • Headings: Use # for different levels of headings, one # per level (a raw-syntax sketch for this section follows the list below).
    • +
    • +

      Heading Level 1

      +
    • +
    • +

      Heading Level 2

      +
    • +
    • +

      Heading Level 3

      +
    • +
    • +

      Lists: Use asterisks (or hyphens) for bulleted lists and numbers for ordered lists.

      +
    • +
    • Item 1
    • +
    • Item 2
        +
      • Subitem 2.1
      • +
      • Subitem 2.2
      • +
      +
    • +
    • +
        +
      1. First item
      2. +
      +
    • +
    • +
        +
      1. Second item
      2. +
      +
    • +
    • +

      Bold and Italics: Use asterisks or underscores.

      +
    • +
    • Bold Text
    • +
    • Italic Text
    • +
    +
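    For reference, here is the raw Markdown behind the formatting shown above; this is a minimal sketch, and the item text is placeholder only:

```markdown
# Heading Level 1
## Heading Level 2
### Heading Level 3

* Item 1
* Item 2
    * Subitem 2.1
    * Subitem 2.2

1. First item
2. Second item

**Bold Text** and *Italic Text* (underscores also work: __Bold__, _Italic_)
```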

    2. Advanced Structures

    +
      +
    • Tables: Create tables using dashes and pipes.
    • +
    • | Header 1 | Header 2 | Header 3 |
      |----------|----------|----------|
      | Row 1    | Data     | Data     |
      | Row 2    | Data     | Data     |
    • +
    • +

      Add a ":"" to change text justification. Here the : is added on the left for left justification. + | Header 1 | Header 2 | Header 3 | + |---------:|--------- |----------| + | Row 1 | Data | Data | + | Row 2 | Data | Data |

      +
    • +
    • A larger example, a letter grid built with the same pipe syntax:

      |   |   |   |   |   |   |   |   |   |   |   |   |
      |---|---|---|---|---|---|---|---|---|---|---|---|
      | A | N | A | L | Y | T | I | C | S | E | N | R |
      | E | I | N | V | I | R | O | N | M | E | N | T |
      | V | E | L | O | P | M | O | C | O | M | U | N |
      | E | G | A | G | E | L | L | A | H | C | N | E |
      | R | A | T | A | D | E | V | E | L | O | P | W |
      | E | I | T | S | I | T | N | E | I | C | S | R |
      | S | O | I | G | O | L | O | I | B | H | T | L |
      | A | H | T | L | A | E | W | E | G | N | E | L |
      | T | I | T | S | I | T | N | E | I | C | S | N |
      | I | E | E | S | R | E | H | T | O | E | N | I |
      | C | S | L | L | A | H | C | E | G | L | A | N |
      | E | G | A | L | L | E | H | C | N | E | I | C |
    • +
    • +

      If you hit the boundaries of Markdown's capabilities, you can start adding HTML directly; remember, this entire exercise ultimately translates to HTML anyway. (A short sketch of mixing raw HTML into Markdown follows this list.)

      +
    • +
    +
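    As a minimal sketch of mixing raw HTML into Markdown (the summary and paragraph text are placeholders), a collapsible section is a common case:

```markdown
Regular Markdown keeps working around the embedded HTML.

<details>
  <summary>Click to expand</summary>
  <p>Hidden content written in plain HTML, revealed on click.</p>
</details>
```

    Whether Markdown syntax is also processed inside such HTML blocks depends on the renderer, so keeping the embedded content in plain HTML is the most portable choice.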

    Sudoku Puzzle: Fill in the blank cells with numbers from 1 to 9, such that each row, column, and 3x3 subgrid contains all the numbers from 1 to 9 without repetition.

    |   |   |   |   |   |   |   |   |   |
    |---|---|---|---|---|---|---|---|---|
    | 5 | 3 |   |   | 7 |   |   |   |   |
    | 6 |   |   | 1 | 9 | 5 |   |   |   |
    |   | 9 | 8 |   |   |   |   | 6 |   |
    | 8 |   |   |   | 6 |   |   |   | 3 |
    | 4 |   |   | 8 |   | 3 |   |   | 1 |
    | 7 |   |   |   | 2 |   |   |   | 6 |
    |   | 6 |   |   |   |   | 2 | 8 |   |
    |   |   |   | 4 | 1 | 9 |   |   | 5 |
    |   |   |   |   | 8 |   |   | 7 | 9 |

    Solution:

    | 5 | 3 | 4 | 6 | 7 | 8 | 9 | 1 | 2 |
    | 6 | 7 | 2 | 1 | 9 | 5 | 3 | 4 | 8 |
    | 1 | 9 | 8 | 3 | 4 | 2 | 5 | 6 | 7 |
    | 8 | 5 | 9 | 7 | 6 | 1 | 4 | 2 | 3 |
    | 4 | 2 | 6 | 8 | 5 | 3 | 7 | 9 | 1 |
    | 7 | 1 | 3 | 9 | 2 | 4 | 8 | 5 | 6 |
    | 9 | 6 | 1 | 5 | 3 | 7 | 2 | 8 | 4 |
    | 2 | 8 | 7 | 4 | 1 | 9 | 6 | 3 | 5 |
    | 3 | 4 | 5 | 2 | 8 | 6 | 1 | 7 | 9 |
      +
    • Blockquotes: Use > for blockquotes.
    • +
    • +
      +

      This is a blockquote.

      +
      +
    • +
    • +
      +

      It can span multiple lines.

      +
      +
    • +
    +

    3. Integrating Multimedia

    +
      +
    • Images: Add images using the format ![alt text](image_url).
    • +
    • +

      Markdown Logo

      +
    • +
    • +

      Videos: Embed videos using HTML in Markdown.

      +
    • +
    • <iframe width="560" height="315" src="https://www.youtube.com/embed/dQw4w9WgXcQ" frameborder="0" allowfullscreen></iframe>
    • +
    +

    4. Diagrams with Mermaid

    +
      +
    • Flowcharts:
    • +
    +
        graph TD
    +    A[Start] --> B[Analyze Data]
    +    B --> C{Is Data Large?}
    +    C -->|Yes| D[Apply Big Data Solutions]
    +    C -->|No| E[Use Traditional Methods]
    +    D --> F[Machine Learning]
    +    E --> G[Statistical Analysis]
    +    F --> H{Model Accurate?}
    +    G --> I[Report Results]
    +    H -->|Yes| J[Deploy Model]
    +    H -->|No| K[Refine Model]
    +    J --> L[Monitor Performance]
    +    K --> F
    +    L --> M[End: Success]
    +    I --> N[End: Report Generated]
    +    style A fill:#f9f,stroke:#333,stroke-width:2px
    +    style M fill:#9f9,stroke:#333,stroke-width:2px
    +    style N fill:#9f9,stroke:#333,stroke-width:2px
    +
      +
    • +

      Mind Maps: +

          mindmap
      +  root((ESIIL))
      +    section Data Sources
      +      Satellite Imagery
      +        ::icon(fa fa-satellite)
      +      Remote Sensing Data
      +        Drones
      +        Aircraft
      +      On-ground Sensors
      +        Weather Stations
      +        IoT Devices
      +      Open Environmental Data
      +        Public Datasets
      +        ::icon(fa fa-database)
      +    section Research Focus
      +      Climate Change Analysis
      +        Ice Melt Patterns
      +        Sea Level Rise
      +      Biodiversity Monitoring
      +        Species Distribution
      +        Habitat Fragmentation
      +      Geospatial Analysis Techniques
      +        Machine Learning Models
      +        Predictive Analytics
      +    section Applications
      +      Conservation Strategies
      +        ::icon(fa fa-leaf)
      +      Urban Planning
      +        Green Spaces
      +      Disaster Response
      +        Flood Mapping
      +        Wildfire Tracking
      +    section Tools and Technologies
      +      GIS Software
      +        QGIS
      +        ArcGIS
      +      Programming Languages
      +        Python
      +        R
      +      Cloud Computing Platforms
      +        AWS
      +        Google Earth Engine
      +      Data Visualization
      +        D3.js
      +        Tableau

      +
    • +
    • +

      Timelines:

      +
    • +
    +
    gantt
    +    title ESIIL Year 2 Project Schedule
    +    dateFormat  YYYY-MM-DD
    +    section CI
    +    Sovereign OASIS via private jupiterhubs :2024-08-01, 2024-10-30
    +    OASIS documentation                    :2024-09-15, 70d
    +    Data cube OASIS via cyverse account    :2024-09-15, 100d
    +    Integrate with ESIIL User Management system :2024-08-01, 2024-11-30
    +    Build badges to deploy DE from mkdoc   :2024-09-01, 2024-12-15
    +    Streamline Github ssh key management   :2024-10-01, 2024-12-31
    +    Cyverse support (R proxy link)         :2024-11-01, 2024-12-31
    +    Cyverse use summary and statistics     :2024-08-01, 2024-12-15
    +
    +    section CI Consultation and Education
    +    Conferences/Invited talks              :2024-08-01, 2024-12-31
    +    Office hours                           :2024-08-15, 2024-12-15
    +    Proposals                              :2024-09-01, 2024-11-15
    +    Private lessons                        :2024-09-15, 2024-11-30
    +    Pre-event trainings                    :2024-10-01, 2024-12-15
    +    Textbook development w/ education team :2024-08-01, 2024-12-15
    +    Train the trainers / group lessons     :2024-08-15, 2024-11-30
    +    Tribal engagement                      :2024-09-01, 2024-12-15
    +    Ethical Space training                 :2024-09-15, 2024-12-31
    +
    +    section CI Design and Build
    +    Data library (repository)              :2024-08-01, 2024-10-30
    +    Analytics library (repository)         :2024-08-15, 2024-11-15
    +    Containers (repository)                :2024-09-01, 2024-11-30
    +    Cloud infrastructure templates (repository) :2024-09-15, 2024-12-15
    +    Tribal resilience Data Cube            :2024-10-01, 2024-12-31
    +
    
    +%%{init: { 'logLevel': 'debug', 'theme': 'base', 'gitGraph': {'rotateCommitLabel': true}} }%%
    +gitGraph
    +  commit id: "Start from template"
    +  branch c1
    +  commit id: "Set up SSH key pair"
    +  commit id: "Modify _config.yml for GitHub Pages"
    +  commit id: "Initial website structure"
    +  commit id: "Add new markdown pages"
    +  commit id: "Update navigation tree"
    +  commit id: "Edit existing pages"
    +  commit id: "Delete old markdown pages"
    +  commit id: "Finalize website updates"
    +  commit id: "Add new markdown pages"
    +  commit id: "Update navigation tree"
    +checkout c1
    +
    +  branch b1
    +
    +  commit
    +  commit
    +  checkout c1
    +  merge b1
    +
    %%{init: {"quadrantChart": {"chartWidth": 400, "chartHeight": 400}, "themeVariables": {"quadrant1TextFill": "#ff0000"} }}%%
    +quadrantChart
    +  x-axis Urgent --> Not Urgent
    +  y-axis Not Important --> "Important ❤"
    +  quadrant-1 Plan
    +  quadrant-2 Do
    +  quadrant-3 Delegate
    +  quadrant-4 Delete
    +
    timeline
    +    title Major Events in Environmental Science and Data Science
    +    section Environmental Science
    +        19th century : Foundations in Ecology and Conservation
    +        1962 : Publication of 'Silent Spring' by Rachel Carson
    +        1970 : First Earth Day
    +        1987 : Brundtland Report introduces Sustainable Development
    +        1992 : Rio Earth Summit
    +        2015 : Paris Agreement on Climate Change
    +    section Data Science
    +        1960s-1970s : Development of Database Management Systems
    +        1980s : Emergence of Data Warehousing
    +        1990s : Growth of the World Wide Web and Data Mining
    +        2000s : Big Data and Predictive Analytics
    +        2010s : AI and Machine Learning Revolution
    +        2020s : Integration of AI in Environmental Research
    +
    erDiagram
    +    CAR ||--o{ NAMED-DRIVER : allows
    +    CAR {
    +        string registrationNumber
    +        string make
    +        string model
    +    }
    +    PERSON ||--o{ NAMED-DRIVER : is
    +    PERSON {
    +        string firstName
    +        string lastName
    +        int age
    +    }
    +
    ---
    +config:
    +  sankey:
    +    showValues: false
    +---
    +sankey-beta
    +
    +NASA Data,Big Data Harmonization,100
    +    Satellite Imagery,Big Data Harmonization,80
    +    Open Environmental Data,Big Data Harmonization,70
    +    Remote Sensing Data,Big Data Harmonization,90
    +    Big Data Harmonization, Data Analysis and Integration,340
    +    Data Analysis and Integration,Climate Change Research,100
    +    Data Analysis and Integration,Biodiversity Monitoring,80
    +    Data Analysis and Integration,Geospatial Mapping,60
    +    Data Analysis and Integration,Urban Planning,50
    +    Data Analysis and Integration,Disaster Response,50
    +

    5. Interactive Elements

    +
      +
    • Hyperlinks: Use the format [link text](URL).
    • +
    • Google
    • +
    • +

      Play Tetris

      +
    • +
    • +

      Embedding Interactive Content: Use HTML tags or specific platform embed codes.

      +
    • +
    • <iframe src="https://example.com/interactive-content" width="600" height="400"></iframe>
    • +
    +

    6. Math Notation

    +

    Markdown can be combined with LaTeX for mathematical notation, useful in environmental data science for expressing statistical distributions, coordinate systems, and more. This requires a Markdown renderer with LaTeX support (like MathJax or KaTeX).

    +
      +
    • Inline Math: Use single dollar signs for inline math expressions, for example to represent the normal distribution.
    • +
    +

    Example: The probability density function of the normal distribution is given by \(f(x|\mu,\sigma) = \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}\).

    +
      +
    • Display Math: Use double dollar signs for standalone equations.
    • +
    +

    Example:

    $$
    f(x|\mu,\sigma) = \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}
    $$
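    For reference, in a raw Markdown file (with a MathJax- or KaTeX-enabled renderer, as noted above) the inline and display forms are written with single and double dollar signs:

```markdown
The PDF of the normal distribution is
$f(x|\mu,\sigma) = \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}$.

$$
f(x|\mu,\sigma) = \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}
$$
```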

    +
      +
    • Common LaTeX Elements for Environmental Data Science:
    • +
    • Statistical Distributions:
        +
      • Normal Distribution: \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2} for \(\frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}\)
      • +
      • Poisson Distribution: P(k; \lambda) = \frac{\lambda^k e^{-\lambda}}{k!} for \(P(k; \lambda) = \frac{\lambda^k e^{-\lambda}}{k!}\)
      • +
      +
    • +
    • Coordinate Systems:
        +
      • Spherical Coordinates: (r, \theta, \phi) for \((r, \theta, \phi)\)
      • +
      • Cartesian Coordinates: (x, y, z) for \((x, y, z)\)
      • +
      +
    • +
    • Geospatial Equations:
        +
      • Haversine Formula for Distance: a = \sin^2\left(\frac{\Delta\phi}{2}\right) + \cos(\phi_1)\cos(\phi_2)\sin^2\left(\frac{\Delta\lambda}{2}\right) for \(a = \sin^2\left(\frac{\Delta\phi}{2}\right) + \cos(\phi_1)\cos(\phi_2)\sin^2\left(\frac{\Delta\lambda}{2}\right)\)
      • +
      +
    • +
    +

    Note: The rendering of these equations as formatted math will depend on your Markdown viewer's LaTeX capabilities.

    +

    7. Effective Citations in Markdown

    +

    Inline Citations

    +
      +
    • Objective: Learn how to use inline citations in Markdown.
    • +
    • Example Usage (these citation keys are resolved against a bibliography file, as sketched after this list):
    • +
    • Inline citation of a single work:
        +
      • Some text with an inline citation. [@jones:envstudy:2020]
      • +
      +
    • +
    • Inline citation with specific page or section:
        +
      • More text with a specific section cited. [See @jones:envstudy:2020, §4.2]
      • +
      +
    • +
    • Contrasting views:
        +
      • Discussion of a topic with a contrasting view. [Contra @smith:climatechange:2019, p. 78]
      • +
      +
    • +
    +
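    These bracketed keys are Pandoc-style citations: they only become formatted references when the document is processed by a tool that supports them (Pandoc, Quarto, R Markdown, etc.) and is pointed at a bibliography file. A minimal sketch, assuming a hypothetical references.bib that contains the jones:envstudy:2020 entry:

```markdown
---
bibliography: references.bib
---

Some text with an inline citation [@jones:envstudy:2020].
More text with a specific section cited [see @jones:envstudy:2020, §4.2].
```

    Rendering with pandoc --citeproc (or through Quarto/R Markdown) replaces the keys with formatted citations and appends a reference list.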

    Footnote Citations

    +
      +
    • Objective: Understand how to use footnote citations in Markdown.
    • +
    • Example Usage:
    • +
    • Citing with a footnote:
        +
      • Some statement in the text.1
      • +
      +
    • +
    • Multiple references to the same footnote:
        +
      • Another statement referring to the same source.1
      • +
      +
    • +
    • A different citation:
        +
      • Additional comment with a new citation.2
      • +
      +
    • +
    +

    Creating Footnotes

    +
      +
    • Example Syntax: (shown in the raw-Markdown sketch at the end of this section)
    • +
    • +
    • +
    +
    +
    +
      +
    1. +

      First reference details. Example: Emma Jones, "Environmental Study," Nature Journal, May 2020, https://nature-journal.com/envstudy2020

      +
    2. +
    3. +

      Second reference details. Example: David Smith, "Climate Change Controversies," Science Daily, August 2019, https://sciencedaily.com/climatechange2019

      +
    4. +
    +
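    Because the rendered page above collapses the footnote markers, here is the raw Markdown for the footnotes used in this section, taken from the page source:

```markdown
Some statement in the text.[^1]
Another statement referring to the same source.[^1]
Additional comment with a new citation.[^2]

[^1]: First reference details. Example: Emma Jones, "Environmental Study," Nature Journal, May 2020, https://nature-journal.com/envstudy2020.
[^2]: Second reference details. Example: David Smith, "Climate Change Controversies," Science Daily, August 2019, https://sciencedaily.com/climatechange2019.
```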
    + + + + + + + + + + \ No newline at end of file diff --git a/resources/markdown_basics/markdown_basics.md b/resources/markdown_basics/markdown_basics.md new file mode 100644 index 0000000..49a0355 --- /dev/null +++ b/resources/markdown_basics/markdown_basics.md @@ -0,0 +1,381 @@ +# Markdown for the Modern Researcher at ESIIL + +[Join us on a HackMD page to practice Markdown](https://hackmd.io/Y6t8bRbdT2GUbFGxwyVz9Q?both) + + +## Section 1: Mastering Markdown Syntax + +### 1. Fundamentals of Text Formatting + +- **Headings**: Use `#` for different levels of headings. + - # Heading Level 1 + - ## Heading Level 2 + - ### Heading Level 3 + +- **Lists**: Bulleted lists use asterisks, numbers for ordered lists. + - Item 1 + - Item 2 + - Subitem 2.1 + - Subitem 2.2 + - 1. First item + - 2. Second item + +- **Bold and Italics**: Use asterisks or underscores. + - **Bold Text** + - *Italic Text* + +### 2. Advanced Structures + +- **Tables**: Create tables using dashes and pipes. + - | Header 1 | Header 2 | Header 3 | + |----------|----------|----------| + | Row 1 | Data | Data | + | Row 2 | Data | Data | + - Add a ":"" to change text justification. Here the : is added on the left for left justification. + | Header 1 | Header 2 | Header 3 | + |---------:|--------- |----------| + | Row 1 | Data | Data | + | Row 2 | Data | Data | + + - | | | | | | | | | | | | | + |---|---|---|---|---|---|---|---|---|---|---|---| + | A | N | A | L | Y | T | I | C | S | E | N | R | + | E | I | N | V | I | R | O | N | M | E | N | T | + | V | E | L | O | P | M | O | C | O | M | U | N | + | E | G | A | G | E | L | L | A | H | C | N | E | + | R | A | T | A | D | E | V | E | L | O | P | W | + | E | I | T | S | I | T | N | E | I | C | S | R | + | S | O | I | G | O | L | O | I | B | H | T | L | + | A | H | T | L | A | E | W | E | G | N | E | L | + | T | I | T | S | I | T | N | E | I | C | S | N | + | I | E | E | S | R | E | H | T | O | E | N | I | + | C | S | L | L | A | H | C | E | G | L | A | N | + | E | G | A | L | L | E | H | C | N | E | I | C | + + - If you hit the boundaries of Markdown's capabilities, you can start to add html directly. Remember, this entire exercisse is to translate to html. + +**Sudoku Puzzle** +Fill in the blank cells with numbers from 1 to 9, such that each row, column, and 3x3 subgrid contains all the numbers from 1 to 9 without repetition. + +| | | | | | | | | | +|---|---|---|---|---|---|---|---|---| +| 5 | 3 | | | 7 | | | | | +| 6 | | | 1 | 9 | 5 | | | | +| | 9 | 8 | | | | | 6 | | +| 8 | | | | 6 | | | | 3 | +| 4 | | | 8 | | 3 | | | 1 | +| 7 | | | | 2 | | | | 6 | +| | 6 | | | | | 2 | 8 | | +| | | | 4 | 1 | 9 | | | 5 | +| | | | | 8 | | | 7 | 9 | + + + + + + + + + + + + + + +
    534678912
    672195348
    198342567
    859761423
    426853791
    713924856
    961537284
    287419635
    345286179
    + + + + + +- **Blockquotes**: Use `>` for blockquotes. + - > This is a blockquote. + - > It can span multiple lines. + +### 3. Integrating Multimedia + +- **Images**: Add images using the format `![alt text](image_url)`. + - ![Markdown Logo](https://example.com/markdown-logo.png) + +- **Videos**: Embed videos using HTML in Markdown. + - `` + +### 4. Diagrams with Mermaid + +- **Flowcharts**: + +```mermaid + graph TD + A[Start] --> B[Analyze Data] + B --> C{Is Data Large?} + C -->|Yes| D[Apply Big Data Solutions] + C -->|No| E[Use Traditional Methods] + D --> F[Machine Learning] + E --> G[Statistical Analysis] + F --> H{Model Accurate?} + G --> I[Report Results] + H -->|Yes| J[Deploy Model] + H -->|No| K[Refine Model] + J --> L[Monitor Performance] + K --> F + L --> M[End: Success] + I --> N[End: Report Generated] + style A fill:#f9f,stroke:#333,stroke-width:2px + style M fill:#9f9,stroke:#333,stroke-width:2px + style N fill:#9f9,stroke:#333,stroke-width:2px +``` + +- **Mind Maps**: +```mermaid + mindmap + root((ESIIL)) + section Data Sources + Satellite Imagery + ::icon(fa fa-satellite) + Remote Sensing Data + Drones + Aircraft + On-ground Sensors + Weather Stations + IoT Devices + Open Environmental Data + Public Datasets + ::icon(fa fa-database) + section Research Focus + Climate Change Analysis + Ice Melt Patterns + Sea Level Rise + Biodiversity Monitoring + Species Distribution + Habitat Fragmentation + Geospatial Analysis Techniques + Machine Learning Models + Predictive Analytics + section Applications + Conservation Strategies + ::icon(fa fa-leaf) + Urban Planning + Green Spaces + Disaster Response + Flood Mapping + Wildfire Tracking + section Tools and Technologies + GIS Software + QGIS + ArcGIS + Programming Languages + Python + R + Cloud Computing Platforms + AWS + Google Earth Engine + Data Visualization + D3.js + Tableau +``` + +- **Timelines**: + +```mermaid +gantt + title ESIIL Year 2 Project Schedule + dateFormat YYYY-MM-DD + section CI + Sovereign OASIS via private jupiterhubs :2024-08-01, 2024-10-30 + OASIS documentation :2024-09-15, 70d + Data cube OASIS via cyverse account :2024-09-15, 100d + Integrate with ESIIL User Management system :2024-08-01, 2024-11-30 + Build badges to deploy DE from mkdoc :2024-09-01, 2024-12-15 + Streamline Github ssh key management :2024-10-01, 2024-12-31 + Cyverse support (R proxy link) :2024-11-01, 2024-12-31 + Cyverse use summary and statistics :2024-08-01, 2024-12-15 + + section CI Consultation and Education + Conferences/Invited talks :2024-08-01, 2024-12-31 + Office hours :2024-08-15, 2024-12-15 + Proposals :2024-09-01, 2024-11-15 + Private lessons :2024-09-15, 2024-11-30 + Pre-event trainings :2024-10-01, 2024-12-15 + Textbook development w/ education team :2024-08-01, 2024-12-15 + Train the trainers / group lessons :2024-08-15, 2024-11-30 + Tribal engagement :2024-09-01, 2024-12-15 + Ethical Space training :2024-09-15, 2024-12-31 + + section CI Design and Build + Data library (repository) :2024-08-01, 2024-10-30 + Analytics library (repository) :2024-08-15, 2024-11-15 + Containers (repository) :2024-09-01, 2024-11-30 + Cloud infrastructure templates (repository) :2024-09-15, 2024-12-15 + Tribal resilience Data Cube :2024-10-01, 2024-12-31 +``` + +```mermaid + +%%{init: { 'logLevel': 'debug', 'theme': 'base', 'gitGraph': {'rotateCommitLabel': true}} }%% +gitGraph + commit id: "Start from template" + branch c1 + commit id: "Set up SSH key pair" + commit id: "Modify _config.yml for GitHub Pages" + commit id: "Initial website 
structure" + commit id: "Add new markdown pages" + commit id: "Update navigation tree" + commit id: "Edit existing pages" + commit id: "Delete old markdown pages" + commit id: "Finalize website updates" + commit id: "Add new markdown pages" + commit id: "Update navigation tree" +checkout c1 + + branch b1 + + commit + commit + checkout c1 + merge b1 +``` + +```mermaid +%%{init: {"quadrantChart": {"chartWidth": 400, "chartHeight": 400}, "themeVariables": {"quadrant1TextFill": "#ff0000"} }}%% +quadrantChart + x-axis Urgent --> Not Urgent + y-axis Not Important --> "Important ❤" + quadrant-1 Plan + quadrant-2 Do + quadrant-3 Delegate + quadrant-4 Delete +``` + + +```mermaid +timeline + title Major Events in Environmental Science and Data Science + section Environmental Science + 19th century : Foundations in Ecology and Conservation + 1962 : Publication of 'Silent Spring' by Rachel Carson + 1970 : First Earth Day + 1987 : Brundtland Report introduces Sustainable Development + 1992 : Rio Earth Summit + 2015 : Paris Agreement on Climate Change + section Data Science + 1960s-1970s : Development of Database Management Systems + 1980s : Emergence of Data Warehousing + 1990s : Growth of the World Wide Web and Data Mining + 2000s : Big Data and Predictive Analytics + 2010s : AI and Machine Learning Revolution + 2020s : Integration of AI in Environmental Research +``` + + + + +```mermaid +erDiagram + CAR ||--o{ NAMED-DRIVER : allows + CAR { + string registrationNumber + string make + string model + } + PERSON ||--o{ NAMED-DRIVER : is + PERSON { + string firstName + string lastName + int age + } +``` + +```mermaid +--- +config: + sankey: + showValues: false +--- +sankey-beta + +NASA Data,Big Data Harmonization,100 + Satellite Imagery,Big Data Harmonization,80 + Open Environmental Data,Big Data Harmonization,70 + Remote Sensing Data,Big Data Harmonization,90 + Big Data Harmonization, Data Analysis and Integration,340 + Data Analysis and Integration,Climate Change Research,100 + Data Analysis and Integration,Biodiversity Monitoring,80 + Data Analysis and Integration,Geospatial Mapping,60 + Data Analysis and Integration,Urban Planning,50 + Data Analysis and Integration,Disaster Response,50 +``` + + +### 5. Interactive Elements + +- **Hyperlinks**: Use the format `[link text](URL)`. + - [Google](https://www.google.com) + - [Play Tetris](https://tetris.com/play-tetris) + +- **Embedding Interactive Content**: Use HTML tags or specific platform embed codes. + - `` + + + + +### 6. Math Notation + +Markdown can be combined with LaTeX for mathematical notation, useful in environmental data science for expressing statistical distributions, coordinate systems, and more. This requires a Markdown renderer with LaTeX support (like MathJax or KaTeX). + +- **Inline Math**: Use single dollar signs for inline math expressions. Representing the normal distribution. + + Example: The probability density function of the normal distribution is given by $f(x|\mu,\sigma) = \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}$.` + +- **Display Math**: Use double dollar signs for standalone equations. 
+ + Example: + $$ + f(x|\mu,\sigma) = \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2} + $$ + + +- **Common LaTeX Elements for Environmental Data Science**: + - **Statistical Distributions**: + - Normal Distribution: `\frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}` for $\frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}$ + - Poisson Distribution: `P(k; \lambda) = \frac{\lambda^k e^{-\lambda}}{k!}` for $P(k; \lambda) = \frac{\lambda^k e^{-\lambda}}{k!}$ + - **Coordinate Systems**: + - Spherical Coordinates: `(r, \theta, \phi)` for $(r, \theta, \phi)$ + - Cartesian Coordinates: `(x, y, z)` for $(x, y, z)$ + - **Geospatial Equations**: + - Haversine Formula for Distance: `a = \sin^2\left(\frac{\Delta\phi}{2}\right) + \cos(\phi_1)\cos(\phi_2)\sin^2\left(\frac{\Delta\lambda}{2}\right)` for $a = \sin^2\left(\frac{\Delta\phi}{2}\right) + \cos(\phi_1)\cos(\phi_2)\sin^2\left(\frac{\Delta\lambda}{2}\right)$ + +Note: The rendering of these equations as formatted math will depend on your Markdown viewer's LaTeX capabilities. + + + +### 7. Effective Citations in Markdown + +## Inline Citations + +- **Objective:** Learn how to use inline citations in Markdown. +- **Example Usage:** + - Inline citation of a single work: + - Some text with an inline citation. [@jones:envstudy:2020] + - Inline citation with specific page or section: + - More text with a specific section cited. [See @jones:envstudy:2020, §4.2] + - Contrasting views: + - Discussion of a topic with a contrasting view. [Contra @smith:climatechange:2019, p. 78] + +## Footnote Citations + +- **Objective:** Understand how to use footnote citations in Markdown. +- **Example Usage:** + - Citing with a footnote: + - Some statement in the text.[^1] + - Multiple references to the same footnote: + - Another statement referring to the same source.[^1] + - A different citation: + - Additional comment with a new citation.[^2] + +## Creating Footnotes + +- **Example Syntax:** + - [^1]: First reference details. Example: Emma Jones, "Environmental Study," Nature Journal, May 2020, https://nature-journal.com/envstudy2020. + - [^2]: Second reference details. Example: David Smith, "Climate Change Controversies," Science Daily, August 2019, https://sciencedaily.com/climatechange2019. + + diff --git a/resources/notes_from_readings/index.html b/resources/notes_from_readings/index.html new file mode 100644 index 0000000..441fb9e --- /dev/null +++ b/resources/notes_from_readings/index.html @@ -0,0 +1,1408 @@ + + + + + + + + + + + + + + + + + + + + + + Literature Reading Notes - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Literature Reading Notes

    +

    Reference Information

    +
      +
    • Title:
    • +
    • Authors:
    • +
    • Publication Year:
    • +
    • Journal/Source:
    • +
    • DOI/URL:
    • +
    +

    Summary

    +
      +
    • Brief summary of the main objective, research question, or thesis of the literature.
    • +
    +

    Key Findings

    +
      +
    • Major findings or conclusions:
    • +
    • Finding 1
    • +
    • Finding 2
    • +
    • ...
    • +
    +

    Methodology

    +
      +
    • Description of research methodology, techniques, or approaches.
    • +
    • Notable tools, datasets, or analytical methods used.
    • +
    +

    Theoretical Framework

    +
      +
    • Theoretical models or frameworks underpinning the research.
    • +
    • Positioning within the broader field.
    • +
    +

    Critical Analysis

    +
      +
    • Strengths: Well-executed aspects or convincing arguments.
    • +
    • Limitations: Weaknesses, gaps, or biases.
    • +
    • Insights: New understandings or perspectives gained.
    • +
    +

    Connections to Other Work

    +
      +
    • Similarities or differences with other readings.
    • +
    • Complementarity to other studies.
    • +
    +

    Quotations and Notes

    +
      +
    • Significant quotes:
    • +
    • "Quote here." - Author Name, page number
    • +
    • Additional notes or comments.
    • +
    +

    Personal Reflections

    +
      +
    • Influence on understanding or perspective.
    • +
    • Potential impact on future research or studies.
    • +
    +

    Action Items

    +
      +
    • Follow-up actions such as readings, discussions, or research activities:
    • +
    • Action item 1
    • +
    • Action item 2
    • +
    • ...
    • +
    + + + + + + + + + + \ No newline at end of file diff --git a/resources/notes_from_readings/notes_from_readings.md b/resources/notes_from_readings/notes_from_readings.md new file mode 100644 index 0000000..f0dcbb8 --- /dev/null +++ b/resources/notes_from_readings/notes_from_readings.md @@ -0,0 +1,59 @@ +# Literature Reading Notes + +## Reference Information + +- **Title:** +- **Authors:** +- **Publication Year:** +- **Journal/Source:** +- **DOI/URL:** + +## Summary + +- Brief summary of the main objective, research question, or thesis of the literature. + +## Key Findings + +- Major findings or conclusions: + - Finding 1 + - Finding 2 + - ... + +## Methodology + +- Description of research methodology, techniques, or approaches. +- Notable tools, datasets, or analytical methods used. + +## Theoretical Framework + +- Theoretical models or frameworks underpinning the research. +- Positioning within the broader field. + +## Critical Analysis + +- **Strengths:** Well-executed aspects or convincing arguments. +- **Limitations:** Weaknesses, gaps, or biases. +- **Insights:** New understandings or perspectives gained. + +## Connections to Other Work + +- Similarities or differences with other readings. +- Complementarity to other studies. + +## Quotations and Notes + +- Significant quotes: + - "Quote here." - Author Name, page number +- Additional notes or comments. + +## Personal Reflections + +- Influence on understanding or perspective. +- Potential impact on future research or studies. + +## Action Items + +- Follow-up actions such as readings, discussions, or research activities: + - [ ] Action item 1 + - [ ] Action item 2 + - ... diff --git a/resources/post_meeting_notes/index.html b/resources/post_meeting_notes/index.html new file mode 100644 index 0000000..98c77a8 --- /dev/null +++ b/resources/post_meeting_notes/index.html @@ -0,0 +1,1416 @@ + + + + + + + + + + + + + + + + + + + + + + Post-Meeting Notes Template - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Post-Meeting Notes Template

    +

    Meeting Details

    +
      +
    • Date:
    • +
    • Time:
    • +
    • Location:
    • +
    • Facilitator:
    • +
    +

    Attendees

    +
      +
    • List of attendees
    • +
    +

    Agenda

    +

    1. Review of Meeting Goals

    +
      +
    • Recap the primary objectives and if they were met.
    • +
    +

    2. Manuscript Development

    +
      +
    • Discuss the status of current manuscript drafts.
    • +
    • Assign writing and editing tasks for different sections of the manuscript.
    • +
    • Set deadlines for draft completion and review.
    • +
    +

    3. Research Highlights

    +
      +
    • Identify key findings and outcomes that should be emphasized in the publications.
    • +
    • Discuss any new research insights that emerged from the meeting.
    • +
    +

    4. Publication Strategy

    +
      +
    • Decide on target journals or conferences for publication submission.
    • +
    • Discuss authorship order and contributions.
    • +
    • Plan for any additional data or research needed to strengthen the manuscript.
    • +
    +

    5. Editing and Review Process

    +
      +
    • Establish a peer-review process within the group for initial feedback.
    • +
    • Assign members to focus on specific aspects of editing, such as clarity, grammar, and technical accuracy.
    • +
    • Agree on a schedule for review rounds to ensure timely submission.
    • +
    +

    6. Responsibilities and Expectations

    +
      +
    • Clearly define what is expected from each member before the next meeting.
    • +
    • Discuss communication methods for progress updates and questions.
    • +
    +

    7. Closing Remarks

    +
      +
    • Summarize the discussion and confirm the action plan.
    • +
    • Reiterate the importance of meeting the set deadlines and maintaining communication.
    • +
    +

    Action Items

    +
      +
    • Draft introduction section: Responsible person(s) - Deadline
    • +
    • Compile and analyze additional data: Responsible person(s) - Deadline
    • +
    • Draft methodology section: Responsible person(s) - Deadline
    • +
    • ...
    • +
    • Coordinate manuscript peer review: Responsible person(s) - Deadline
    • +
    +

    Next Steps

    +
      +
    • Define the timeline for the submission process.
    • +
    • Schedule follow-up meetings or check-ins to monitor progress.
    • +
    +

    Notes

    +
      +
    • (Additional notes, comments, or observations made during the meeting.)
    • +
    + + + + + + + + + + \ No newline at end of file diff --git a/resources/post_meeting_notes/post_meeting_notes.md b/resources/post_meeting_notes/post_meeting_notes.md new file mode 100644 index 0000000..41615dc --- /dev/null +++ b/resources/post_meeting_notes/post_meeting_notes.md @@ -0,0 +1,57 @@ +# Post-Meeting Notes Template + +## Meeting Details +- **Date:** +- **Time:** +- **Location:** +- **Facilitator:** + +## Attendees +- List of attendees + +## Agenda + +### 1. Review of Meeting Goals +- Recap the primary objectives and if they were met. + +### 2. Manuscript Development +- Discuss the status of current manuscript drafts. +- Assign writing and editing tasks for different sections of the manuscript. +- Set deadlines for draft completion and review. + +### 3. Research Highlights +- Identify key findings and outcomes that should be emphasized in the publications. +- Discuss any new research insights that emerged from the meeting. + +### 4. Publication Strategy +- Decide on target journals or conferences for publication submission. +- Discuss authorship order and contributions. +- Plan for any additional data or research needed to strengthen the manuscript. + +### 5. Editing and Review Process +- Establish a peer-review process within the group for initial feedback. +- Assign members to focus on specific aspects of editing, such as clarity, grammar, and technical accuracy. +- Agree on a schedule for review rounds to ensure timely submission. + +### 6. Responsibilities and Expectations +- Clearly define what is expected from each member before the next meeting. +- Discuss communication methods for progress updates and questions. + +### 7. Closing Remarks +- Summarize the discussion and confirm the action plan. +- Reiterate the importance of meeting the set deadlines and maintaining communication. + +## Action Items +- [ ] Draft introduction section: Responsible person(s) - Deadline +- [ ] Compile and analyze additional data: Responsible person(s) - Deadline +- [ ] Draft methodology section: Responsible person(s) - Deadline +- ... +- [ ] Coordinate manuscript peer review: Responsible person(s) - Deadline + +## Next Steps +- Define the timeline for the submission process. +- Schedule follow-up meetings or check-ins to monitor progress. + +## Notes +- (Additional notes, comments, or observations made during the meeting.) + diff --git a/resources/pre_meeting_notes/index.html b/resources/pre_meeting_notes/index.html new file mode 100644 index 0000000..ad4d931 --- /dev/null +++ b/resources/pre_meeting_notes/index.html @@ -0,0 +1,1409 @@ + + + + + + + + + + + + + + + + + + + + + + Pre-Meeting Notes - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Pre-Meeting Notes

    +

    Meeting Details

    +
      +
    • Date:
    • +
    • Time:
    • +
    • Location:
    • +
    • Facilitator:
    • +
    +

    Attendees

    +
      +
    • List of attendees
    • +
    +

    Agenda

    +

    1. Opening Remarks

    +
      +
    • Brief welcome and overview of the meeting's objectives.
    • +
    +

    2. Introductions

    +
      +
    • Roundtable introductions for all attendees.
    • +
    • Share a personal note or interesting fact to foster camaraderie.
    • +
    +

    3. Planning

    +
      +
    • Discuss the agenda for the primary meetings.
    • +
    • Outline the key topics and issues to address.
    • +
    • Assign roles for note-taking, timekeeping, and facilitation in primary meetings.
    • +
    +

    4. Goal Setting

    +
      +
    • Establish clear, actionable goals for the upcoming period.
    • +
    • Identify specific outcomes desired from the primary meetings.
    • +
    • Agree on metrics or indicators of success for these goals.
    • +
    +

    5. Camaraderie Building

    +
      +
    • Icebreaker activity or team-building exercise.
    • +
    • Share expectations and aspirations for the group's progress.
    • +
    • Highlight the importance of collaboration and mutual support.
    • +
    +

    6. Open Discussion

    +
      +
    • Allow for any additional topics, concerns, or ideas to be brought forward.
    • +
    +

    7. Closing Remarks

    +
      +
    • Summarize the discussions and confirm the next steps.
    • +
    • Confirm dates and times for primary meetings.
    • +
    • Express appreciation for participation.
    • +
    +

    Action Items

    +
      +
    • Action item 1: Responsible person(s) - Deadline
    • +
    • Action item 2: Responsible person(s) - Deadline
    • +
    • ...
    • +
    +

    Notes

    +
      +
    • (Any additional notes or comments about the meeting.)
    • +
    + + + + + + + + + + \ No newline at end of file diff --git a/resources/pre_meeting_notes/pre_meeting_notes.md b/resources/pre_meeting_notes/pre_meeting_notes.md new file mode 100644 index 0000000..4c2bad6 --- /dev/null +++ b/resources/pre_meeting_notes/pre_meeting_notes.md @@ -0,0 +1,51 @@ +# Pre-Meeting Notes + +## Meeting Details +- **Date:** +- **Time:** +- **Location:** +- **Facilitator:** + +## Attendees +- List of attendees + +## Agenda + +### 1. Opening Remarks +- Brief welcome and overview of the meeting's objectives. + +### 2. Introductions +- Roundtable introductions for all attendees. +- Share a personal note or interesting fact to foster camaraderie. + +### 3. Planning +- Discuss the agenda for the primary meetings. +- Outline the key topics and issues to address. +- Assign roles for note-taking, timekeeping, and facilitation in primary meetings. + +### 4. Goal Setting +- Establish clear, actionable goals for the upcoming period. +- Identify specific outcomes desired from the primary meetings. +- Agree on metrics or indicators of success for these goals. + +### 5. Camaraderie Building +- Icebreaker activity or team-building exercise. +- Share expectations and aspirations for the group's progress. +- Highlight the importance of collaboration and mutual support. + +### 6. Open Discussion +- Allow for any additional topics, concerns, or ideas to be brought forward. + +### 7. Closing Remarks +- Summarize the discussions and confirm the next steps. +- Confirm dates and times for primary meetings. +- Express appreciation for participation. + +## Action Items +- [ ] Action item 1: Responsible person(s) - Deadline +- [ ] Action item 2: Responsible person(s) - Deadline +- ... + +## Notes +- (Any additional notes or comments about the meeting.) + diff --git a/resources/second_meeting_notes/index.html b/resources/second_meeting_notes/index.html new file mode 100644 index 0000000..a7defdc --- /dev/null +++ b/resources/second_meeting_notes/index.html @@ -0,0 +1,1439 @@ + + + + + + + + + + + + + + + + + + + + + + Primary Meeting Day 6-10: Progress and Development - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Primary Meeting Day 6-10: Progress and Development

    +

    Meeting Details

    +
      +
    • Dates:
    • +
    • Times:
    • +
    • Location:
    • +
    • Facilitator:
    • +
    +

    Attendees

    +
      +
    • List of attendees
    • +
    +

    Daily Agenda

    +

    Day 6: Review and Refine

    +

    Recap of Previous Sessions

    +
      +
    • Summary of progress made since the last meeting.
    • +
    • Review of action items and milestones achieved.
    • +
    +

    Refinement of Goals and Tasks

    +
      +
    • Reassessment and adjustment of goals based on current progress.
    • +
    • Identification of any new challenges or opportunities.
    • +
    +

    Day 7-9: In-Depth Work Sessions

    +

    Daily Goals

    +
      +
    • Clear objectives for each day’s work sessions.
    • +
    +

    Task Progress Updates

    +
      +
    • Brief reports from team members on their assigned tasks.
    • +
    • Collaborative problem-solving for any issues encountered.
    • +
    +

    Theory and Data Integration

    +
      +
    • Continued discussions on aligning theoretical frameworks with data analysis.
    • +
    • Workshops or breakout sessions for detailed aspects of the project.
    • +
    +

    Evening Collaborative Activities

    +
      +
    • Informal sessions to encourage ongoing dialogue and collaboration.
    • +
    +

    Day 10: Mid-Point Review

    +

    Progress Evaluation

    +
      +
    • Assessment of the work done during the week.
    • +
    • Feedback sessions to ensure quality and consistency in outputs.
    • +
    +

    Documentation and Record-Keeping

    +
      +
    • Ensure thorough documentation of methods, results, and decisions.
    • +
    • Establish a system for organizing and sharing this documentation.
    • +
    +

    Planning Forward

    +
      +
    • Setting objectives for the next phase of the project.
    • +
    • Adjusting the roadmap as necessary based on insights from the week’s work.
    • +
    +

    Detailed Notes

    +

    Day 6 Notes

    +
      +
    • ...
    • +
    +

    Day 7 Notes

    +
      +
    • ...
    • +
    +

    Day 8 Notes

    +
      +
    • ...
    • +
    +

    Day 9 Notes

    +
      +
    • ...
    • +
    +

    Day 10 Notes

    +
      +
    • ...
    • +
    +

    Action Items

    +
      +
    • Specific task: Assigned to - Deadline
    • +
    • Specific task: Assigned to - Deadline
    • +
    • ...
    • +
    +

    Reflections and Comments

    +
      +
    • (Space for any additional thoughts, insights, or personal reflections on the meeting.)
    • +
    + + + + + + + + + + \ No newline at end of file diff --git a/resources/second_meeting_notes/second_meeting_notes.md b/resources/second_meeting_notes/second_meeting_notes.md new file mode 100644 index 0000000..1c9037e --- /dev/null +++ b/resources/second_meeting_notes/second_meeting_notes.md @@ -0,0 +1,78 @@ +# Primary Meeting Day 6-10: Progress and Development + +## Meeting Details +- **Dates:** +- **Times:** +- **Location:** +- **Facilitator:** + +## Attendees +- List of attendees + +## Daily Agenda + +### Day 6: Review and Refine + +#### Recap of Previous Sessions +- Summary of progress made since the last meeting. +- Review of action items and milestones achieved. + +#### Refinement of Goals and Tasks +- Reassessment and adjustment of goals based on current progress. +- Identification of any new challenges or opportunities. + +### Day 7-9: In-Depth Work Sessions + +#### Daily Goals +- Clear objectives for each day’s work sessions. + +#### Task Progress Updates +- Brief reports from team members on their assigned tasks. +- Collaborative problem-solving for any issues encountered. + +#### Theory and Data Integration +- Continued discussions on aligning theoretical frameworks with data analysis. +- Workshops or breakout sessions for detailed aspects of the project. + +#### Evening Collaborative Activities +- Informal sessions to encourage ongoing dialogue and collaboration. + +### Day 10: Mid-Point Review + +#### Progress Evaluation +- Assessment of the work done during the week. +- Feedback sessions to ensure quality and consistency in outputs. + +#### Documentation and Record-Keeping +- Ensure thorough documentation of methods, results, and decisions. +- Establish a system for organizing and sharing this documentation. + +#### Planning Forward +- Setting objectives for the next phase of the project. +- Adjusting the roadmap as necessary based on insights from the week’s work. + +## Detailed Notes + +### Day 6 Notes +- ... + +### Day 7 Notes +- ... + +### Day 8 Notes +- ... + +### Day 9 Notes +- ... + +### Day 10 Notes +- ... + +## Action Items +- [ ] Specific task: Assigned to - Deadline +- [ ] Specific task: Assigned to - Deadline +- ... + +## Reflections and Comments +- (Space for any additional thoughts, insights, or personal reflections on the meeting.) 
+ diff --git a/resources/stac_mount_save_files/figure-gfm/ASPECT_plot-1.png b/resources/stac_mount_save_files/figure-gfm/ASPECT_plot-1.png new file mode 100644 index 0000000..fe7c1c2 Binary files /dev/null and b/resources/stac_mount_save_files/figure-gfm/ASPECT_plot-1.png differ diff --git a/resources/stac_mount_save_files/figure-gfm/DEM_plot-1.png b/resources/stac_mount_save_files/figure-gfm/DEM_plot-1.png new file mode 100644 index 0000000..aabddb7 Binary files /dev/null and b/resources/stac_mount_save_files/figure-gfm/DEM_plot-1.png differ diff --git a/resources/stac_mount_save_files/figure-gfm/SLOPE_plot-1.png b/resources/stac_mount_save_files/figure-gfm/SLOPE_plot-1.png new file mode 100644 index 0000000..9e36aa8 Binary files /dev/null and b/resources/stac_mount_save_files/figure-gfm/SLOPE_plot-1.png differ diff --git a/resources/stac_mount_save_files/figure-gfm/conus_bounding_box-1.png b/resources/stac_mount_save_files/figure-gfm/conus_bounding_box-1.png new file mode 100644 index 0000000..fbdadb8 Binary files /dev/null and b/resources/stac_mount_save_files/figure-gfm/conus_bounding_box-1.png differ diff --git a/resources/stac_mount_save_files/figure-html/DEM_plot-1.png b/resources/stac_mount_save_files/figure-html/DEM_plot-1.png new file mode 100644 index 0000000..c3fdb28 Binary files /dev/null and b/resources/stac_mount_save_files/figure-html/DEM_plot-1.png differ diff --git a/resources/stac_mount_save_files/figure-html/unnamed-chunk-3-1.png b/resources/stac_mount_save_files/figure-html/unnamed-chunk-3-1.png new file mode 100644 index 0000000..68de4b4 Binary files /dev/null and b/resources/stac_mount_save_files/figure-html/unnamed-chunk-3-1.png differ diff --git a/resources/stac_mount_save_files/libs/bootstrap/bootstrap-icons.css b/resources/stac_mount_save_files/libs/bootstrap/bootstrap-icons.css new file mode 100644 index 0000000..f51d04b --- /dev/null +++ b/resources/stac_mount_save_files/libs/bootstrap/bootstrap-icons.css @@ -0,0 +1,1704 @@ +@font-face { + font-family: "bootstrap-icons"; + src: +url("./bootstrap-icons.woff?524846017b983fc8ded9325d94ed40f3") format("woff"); +} + +.bi::before, +[class^="bi-"]::before, +[class*=" bi-"]::before { + display: inline-block; + font-family: bootstrap-icons !important; + font-style: normal; + font-weight: normal !important; + font-variant: normal; + text-transform: none; + line-height: 1; + vertical-align: -.125em; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +.bi-123::before { content: "\f67f"; } +.bi-alarm-fill::before { content: "\f101"; } +.bi-alarm::before { content: "\f102"; } +.bi-align-bottom::before { content: "\f103"; } +.bi-align-center::before { content: "\f104"; } +.bi-align-end::before { content: "\f105"; } +.bi-align-middle::before { content: "\f106"; } +.bi-align-start::before { content: "\f107"; } +.bi-align-top::before { content: "\f108"; } +.bi-alt::before { content: "\f109"; } +.bi-app-indicator::before { content: "\f10a"; } +.bi-app::before { content: "\f10b"; } +.bi-archive-fill::before { content: "\f10c"; } +.bi-archive::before { content: "\f10d"; } +.bi-arrow-90deg-down::before { content: "\f10e"; } +.bi-arrow-90deg-left::before { content: "\f10f"; } +.bi-arrow-90deg-right::before { content: "\f110"; } +.bi-arrow-90deg-up::before { content: "\f111"; } +.bi-arrow-bar-down::before { content: "\f112"; } +.bi-arrow-bar-left::before { content: "\f113"; } +.bi-arrow-bar-right::before { content: "\f114"; } +.bi-arrow-bar-up::before { content: "\f115"; } +.bi-arrow-clockwise::before { 
content: "\f116"; } +.bi-arrow-counterclockwise::before { content: "\f117"; } +.bi-arrow-down-circle-fill::before { content: "\f118"; } +.bi-arrow-down-circle::before { content: "\f119"; } +.bi-arrow-down-left-circle-fill::before { content: "\f11a"; } +.bi-arrow-down-left-circle::before { content: "\f11b"; } +.bi-arrow-down-left-square-fill::before { content: "\f11c"; } +.bi-arrow-down-left-square::before { content: "\f11d"; } +.bi-arrow-down-left::before { content: "\f11e"; } +.bi-arrow-down-right-circle-fill::before { content: "\f11f"; } +.bi-arrow-down-right-circle::before { content: "\f120"; } +.bi-arrow-down-right-square-fill::before { content: "\f121"; } +.bi-arrow-down-right-square::before { content: "\f122"; } +.bi-arrow-down-right::before { content: "\f123"; } +.bi-arrow-down-short::before { content: "\f124"; } +.bi-arrow-down-square-fill::before { content: "\f125"; } +.bi-arrow-down-square::before { content: "\f126"; } +.bi-arrow-down-up::before { content: "\f127"; } +.bi-arrow-down::before { content: "\f128"; } +.bi-arrow-left-circle-fill::before { content: "\f129"; } +.bi-arrow-left-circle::before { content: "\f12a"; } +.bi-arrow-left-right::before { content: "\f12b"; } +.bi-arrow-left-short::before { content: "\f12c"; } +.bi-arrow-left-square-fill::before { content: "\f12d"; } +.bi-arrow-left-square::before { content: "\f12e"; } +.bi-arrow-left::before { content: "\f12f"; } +.bi-arrow-repeat::before { content: "\f130"; } +.bi-arrow-return-left::before { content: "\f131"; } +.bi-arrow-return-right::before { content: "\f132"; } +.bi-arrow-right-circle-fill::before { content: "\f133"; } +.bi-arrow-right-circle::before { content: "\f134"; } +.bi-arrow-right-short::before { content: "\f135"; } +.bi-arrow-right-square-fill::before { content: "\f136"; } +.bi-arrow-right-square::before { content: "\f137"; } +.bi-arrow-right::before { content: "\f138"; } +.bi-arrow-up-circle-fill::before { content: "\f139"; } +.bi-arrow-up-circle::before { content: "\f13a"; } +.bi-arrow-up-left-circle-fill::before { content: "\f13b"; } +.bi-arrow-up-left-circle::before { content: "\f13c"; } +.bi-arrow-up-left-square-fill::before { content: "\f13d"; } +.bi-arrow-up-left-square::before { content: "\f13e"; } +.bi-arrow-up-left::before { content: "\f13f"; } +.bi-arrow-up-right-circle-fill::before { content: "\f140"; } +.bi-arrow-up-right-circle::before { content: "\f141"; } +.bi-arrow-up-right-square-fill::before { content: "\f142"; } +.bi-arrow-up-right-square::before { content: "\f143"; } +.bi-arrow-up-right::before { content: "\f144"; } +.bi-arrow-up-short::before { content: "\f145"; } +.bi-arrow-up-square-fill::before { content: "\f146"; } +.bi-arrow-up-square::before { content: "\f147"; } +.bi-arrow-up::before { content: "\f148"; } +.bi-arrows-angle-contract::before { content: "\f149"; } +.bi-arrows-angle-expand::before { content: "\f14a"; } +.bi-arrows-collapse::before { content: "\f14b"; } +.bi-arrows-expand::before { content: "\f14c"; } +.bi-arrows-fullscreen::before { content: "\f14d"; } +.bi-arrows-move::before { content: "\f14e"; } +.bi-aspect-ratio-fill::before { content: "\f14f"; } +.bi-aspect-ratio::before { content: "\f150"; } +.bi-asterisk::before { content: "\f151"; } +.bi-at::before { content: "\f152"; } +.bi-award-fill::before { content: "\f153"; } +.bi-award::before { content: "\f154"; } +.bi-back::before { content: "\f155"; } +.bi-backspace-fill::before { content: "\f156"; } +.bi-backspace-reverse-fill::before { content: "\f157"; } +.bi-backspace-reverse::before { content: "\f158"; } 
+.bi-backspace::before { content: "\f159"; } +.bi-badge-3d-fill::before { content: "\f15a"; } +.bi-badge-3d::before { content: "\f15b"; } +.bi-badge-4k-fill::before { content: "\f15c"; } +.bi-badge-4k::before { content: "\f15d"; } +.bi-badge-8k-fill::before { content: "\f15e"; } +.bi-badge-8k::before { content: "\f15f"; } +.bi-badge-ad-fill::before { content: "\f160"; } +.bi-badge-ad::before { content: "\f161"; } +.bi-badge-ar-fill::before { content: "\f162"; } +.bi-badge-ar::before { content: "\f163"; } +.bi-badge-cc-fill::before { content: "\f164"; } +.bi-badge-cc::before { content: "\f165"; } +.bi-badge-hd-fill::before { content: "\f166"; } +.bi-badge-hd::before { content: "\f167"; } +.bi-badge-tm-fill::before { content: "\f168"; } +.bi-badge-tm::before { content: "\f169"; } +.bi-badge-vo-fill::before { content: "\f16a"; } +.bi-badge-vo::before { content: "\f16b"; } +.bi-badge-vr-fill::before { content: "\f16c"; } +.bi-badge-vr::before { content: "\f16d"; } +.bi-badge-wc-fill::before { content: "\f16e"; } +.bi-badge-wc::before { content: "\f16f"; } +.bi-bag-check-fill::before { content: "\f170"; } +.bi-bag-check::before { content: "\f171"; } +.bi-bag-dash-fill::before { content: "\f172"; } +.bi-bag-dash::before { content: "\f173"; } +.bi-bag-fill::before { content: "\f174"; } +.bi-bag-plus-fill::before { content: "\f175"; } +.bi-bag-plus::before { content: "\f176"; } +.bi-bag-x-fill::before { content: "\f177"; } +.bi-bag-x::before { content: "\f178"; } +.bi-bag::before { content: "\f179"; } +.bi-bar-chart-fill::before { content: "\f17a"; } +.bi-bar-chart-line-fill::before { content: "\f17b"; } +.bi-bar-chart-line::before { content: "\f17c"; } +.bi-bar-chart-steps::before { content: "\f17d"; } +.bi-bar-chart::before { content: "\f17e"; } +.bi-basket-fill::before { content: "\f17f"; } +.bi-basket::before { content: "\f180"; } +.bi-basket2-fill::before { content: "\f181"; } +.bi-basket2::before { content: "\f182"; } +.bi-basket3-fill::before { content: "\f183"; } +.bi-basket3::before { content: "\f184"; } +.bi-battery-charging::before { content: "\f185"; } +.bi-battery-full::before { content: "\f186"; } +.bi-battery-half::before { content: "\f187"; } +.bi-battery::before { content: "\f188"; } +.bi-bell-fill::before { content: "\f189"; } +.bi-bell::before { content: "\f18a"; } +.bi-bezier::before { content: "\f18b"; } +.bi-bezier2::before { content: "\f18c"; } +.bi-bicycle::before { content: "\f18d"; } +.bi-binoculars-fill::before { content: "\f18e"; } +.bi-binoculars::before { content: "\f18f"; } +.bi-blockquote-left::before { content: "\f190"; } +.bi-blockquote-right::before { content: "\f191"; } +.bi-book-fill::before { content: "\f192"; } +.bi-book-half::before { content: "\f193"; } +.bi-book::before { content: "\f194"; } +.bi-bookmark-check-fill::before { content: "\f195"; } +.bi-bookmark-check::before { content: "\f196"; } +.bi-bookmark-dash-fill::before { content: "\f197"; } +.bi-bookmark-dash::before { content: "\f198"; } +.bi-bookmark-fill::before { content: "\f199"; } +.bi-bookmark-heart-fill::before { content: "\f19a"; } +.bi-bookmark-heart::before { content: "\f19b"; } +.bi-bookmark-plus-fill::before { content: "\f19c"; } +.bi-bookmark-plus::before { content: "\f19d"; } +.bi-bookmark-star-fill::before { content: "\f19e"; } +.bi-bookmark-star::before { content: "\f19f"; } +.bi-bookmark-x-fill::before { content: "\f1a0"; } +.bi-bookmark-x::before { content: "\f1a1"; } +.bi-bookmark::before { content: "\f1a2"; } +.bi-bookmarks-fill::before { content: "\f1a3"; } 
+.bi-bookmarks::before { content: "\f1a4"; } +.bi-bookshelf::before { content: "\f1a5"; } +.bi-bootstrap-fill::before { content: "\f1a6"; } +.bi-bootstrap-reboot::before { content: "\f1a7"; } +.bi-bootstrap::before { content: "\f1a8"; } +.bi-border-all::before { content: "\f1a9"; } +.bi-border-bottom::before { content: "\f1aa"; } +.bi-border-center::before { content: "\f1ab"; } +.bi-border-inner::before { content: "\f1ac"; } +.bi-border-left::before { content: "\f1ad"; } +.bi-border-middle::before { content: "\f1ae"; } +.bi-border-outer::before { content: "\f1af"; } +.bi-border-right::before { content: "\f1b0"; } +.bi-border-style::before { content: "\f1b1"; } +.bi-border-top::before { content: "\f1b2"; } +.bi-border-width::before { content: "\f1b3"; } +.bi-border::before { content: "\f1b4"; } +.bi-bounding-box-circles::before { content: "\f1b5"; } +.bi-bounding-box::before { content: "\f1b6"; } +.bi-box-arrow-down-left::before { content: "\f1b7"; } +.bi-box-arrow-down-right::before { content: "\f1b8"; } +.bi-box-arrow-down::before { content: "\f1b9"; } +.bi-box-arrow-in-down-left::before { content: "\f1ba"; } +.bi-box-arrow-in-down-right::before { content: "\f1bb"; } +.bi-box-arrow-in-down::before { content: "\f1bc"; } +.bi-box-arrow-in-left::before { content: "\f1bd"; } +.bi-box-arrow-in-right::before { content: "\f1be"; } +.bi-box-arrow-in-up-left::before { content: "\f1bf"; } +.bi-box-arrow-in-up-right::before { content: "\f1c0"; } +.bi-box-arrow-in-up::before { content: "\f1c1"; } +.bi-box-arrow-left::before { content: "\f1c2"; } +.bi-box-arrow-right::before { content: "\f1c3"; } +.bi-box-arrow-up-left::before { content: "\f1c4"; } +.bi-box-arrow-up-right::before { content: "\f1c5"; } +.bi-box-arrow-up::before { content: "\f1c6"; } +.bi-box-seam::before { content: "\f1c7"; } +.bi-box::before { content: "\f1c8"; } +.bi-braces::before { content: "\f1c9"; } +.bi-bricks::before { content: "\f1ca"; } +.bi-briefcase-fill::before { content: "\f1cb"; } +.bi-briefcase::before { content: "\f1cc"; } +.bi-brightness-alt-high-fill::before { content: "\f1cd"; } +.bi-brightness-alt-high::before { content: "\f1ce"; } +.bi-brightness-alt-low-fill::before { content: "\f1cf"; } +.bi-brightness-alt-low::before { content: "\f1d0"; } +.bi-brightness-high-fill::before { content: "\f1d1"; } +.bi-brightness-high::before { content: "\f1d2"; } +.bi-brightness-low-fill::before { content: "\f1d3"; } +.bi-brightness-low::before { content: "\f1d4"; } +.bi-broadcast-pin::before { content: "\f1d5"; } +.bi-broadcast::before { content: "\f1d6"; } +.bi-brush-fill::before { content: "\f1d7"; } +.bi-brush::before { content: "\f1d8"; } +.bi-bucket-fill::before { content: "\f1d9"; } +.bi-bucket::before { content: "\f1da"; } +.bi-bug-fill::before { content: "\f1db"; } +.bi-bug::before { content: "\f1dc"; } +.bi-building::before { content: "\f1dd"; } +.bi-bullseye::before { content: "\f1de"; } +.bi-calculator-fill::before { content: "\f1df"; } +.bi-calculator::before { content: "\f1e0"; } +.bi-calendar-check-fill::before { content: "\f1e1"; } +.bi-calendar-check::before { content: "\f1e2"; } +.bi-calendar-date-fill::before { content: "\f1e3"; } +.bi-calendar-date::before { content: "\f1e4"; } +.bi-calendar-day-fill::before { content: "\f1e5"; } +.bi-calendar-day::before { content: "\f1e6"; } +.bi-calendar-event-fill::before { content: "\f1e7"; } +.bi-calendar-event::before { content: "\f1e8"; } +.bi-calendar-fill::before { content: "\f1e9"; } +.bi-calendar-minus-fill::before { content: "\f1ea"; } +.bi-calendar-minus::before { 
content: "\f1eb"; } +.bi-calendar-month-fill::before { content: "\f1ec"; } +.bi-calendar-month::before { content: "\f1ed"; } +.bi-calendar-plus-fill::before { content: "\f1ee"; } +.bi-calendar-plus::before { content: "\f1ef"; } +.bi-calendar-range-fill::before { content: "\f1f0"; } +.bi-calendar-range::before { content: "\f1f1"; } +.bi-calendar-week-fill::before { content: "\f1f2"; } +.bi-calendar-week::before { content: "\f1f3"; } +.bi-calendar-x-fill::before { content: "\f1f4"; } +.bi-calendar-x::before { content: "\f1f5"; } +.bi-calendar::before { content: "\f1f6"; } +.bi-calendar2-check-fill::before { content: "\f1f7"; } +.bi-calendar2-check::before { content: "\f1f8"; } +.bi-calendar2-date-fill::before { content: "\f1f9"; } +.bi-calendar2-date::before { content: "\f1fa"; } +.bi-calendar2-day-fill::before { content: "\f1fb"; } +.bi-calendar2-day::before { content: "\f1fc"; } +.bi-calendar2-event-fill::before { content: "\f1fd"; } +.bi-calendar2-event::before { content: "\f1fe"; } +.bi-calendar2-fill::before { content: "\f1ff"; } +.bi-calendar2-minus-fill::before { content: "\f200"; } +.bi-calendar2-minus::before { content: "\f201"; } +.bi-calendar2-month-fill::before { content: "\f202"; } +.bi-calendar2-month::before { content: "\f203"; } +.bi-calendar2-plus-fill::before { content: "\f204"; } +.bi-calendar2-plus::before { content: "\f205"; } +.bi-calendar2-range-fill::before { content: "\f206"; } +.bi-calendar2-range::before { content: "\f207"; } +.bi-calendar2-week-fill::before { content: "\f208"; } +.bi-calendar2-week::before { content: "\f209"; } +.bi-calendar2-x-fill::before { content: "\f20a"; } +.bi-calendar2-x::before { content: "\f20b"; } +.bi-calendar2::before { content: "\f20c"; } +.bi-calendar3-event-fill::before { content: "\f20d"; } +.bi-calendar3-event::before { content: "\f20e"; } +.bi-calendar3-fill::before { content: "\f20f"; } +.bi-calendar3-range-fill::before { content: "\f210"; } +.bi-calendar3-range::before { content: "\f211"; } +.bi-calendar3-week-fill::before { content: "\f212"; } +.bi-calendar3-week::before { content: "\f213"; } +.bi-calendar3::before { content: "\f214"; } +.bi-calendar4-event::before { content: "\f215"; } +.bi-calendar4-range::before { content: "\f216"; } +.bi-calendar4-week::before { content: "\f217"; } +.bi-calendar4::before { content: "\f218"; } +.bi-camera-fill::before { content: "\f219"; } +.bi-camera-reels-fill::before { content: "\f21a"; } +.bi-camera-reels::before { content: "\f21b"; } +.bi-camera-video-fill::before { content: "\f21c"; } +.bi-camera-video-off-fill::before { content: "\f21d"; } +.bi-camera-video-off::before { content: "\f21e"; } +.bi-camera-video::before { content: "\f21f"; } +.bi-camera::before { content: "\f220"; } +.bi-camera2::before { content: "\f221"; } +.bi-capslock-fill::before { content: "\f222"; } +.bi-capslock::before { content: "\f223"; } +.bi-card-checklist::before { content: "\f224"; } +.bi-card-heading::before { content: "\f225"; } +.bi-card-image::before { content: "\f226"; } +.bi-card-list::before { content: "\f227"; } +.bi-card-text::before { content: "\f228"; } +.bi-caret-down-fill::before { content: "\f229"; } +.bi-caret-down-square-fill::before { content: "\f22a"; } +.bi-caret-down-square::before { content: "\f22b"; } +.bi-caret-down::before { content: "\f22c"; } +.bi-caret-left-fill::before { content: "\f22d"; } +.bi-caret-left-square-fill::before { content: "\f22e"; } +.bi-caret-left-square::before { content: "\f22f"; } +.bi-caret-left::before { content: "\f230"; } +.bi-caret-right-fill::before { 
content: "\f231"; } +.bi-caret-right-square-fill::before { content: "\f232"; } +.bi-caret-right-square::before { content: "\f233"; } +.bi-caret-right::before { content: "\f234"; } +.bi-caret-up-fill::before { content: "\f235"; } +.bi-caret-up-square-fill::before { content: "\f236"; } +.bi-caret-up-square::before { content: "\f237"; } +.bi-caret-up::before { content: "\f238"; } +.bi-cart-check-fill::before { content: "\f239"; } +.bi-cart-check::before { content: "\f23a"; } +.bi-cart-dash-fill::before { content: "\f23b"; } +.bi-cart-dash::before { content: "\f23c"; } +.bi-cart-fill::before { content: "\f23d"; } +.bi-cart-plus-fill::before { content: "\f23e"; } +.bi-cart-plus::before { content: "\f23f"; } +.bi-cart-x-fill::before { content: "\f240"; } +.bi-cart-x::before { content: "\f241"; } +.bi-cart::before { content: "\f242"; } +.bi-cart2::before { content: "\f243"; } +.bi-cart3::before { content: "\f244"; } +.bi-cart4::before { content: "\f245"; } +.bi-cash-stack::before { content: "\f246"; } +.bi-cash::before { content: "\f247"; } +.bi-cast::before { content: "\f248"; } +.bi-chat-dots-fill::before { content: "\f249"; } +.bi-chat-dots::before { content: "\f24a"; } +.bi-chat-fill::before { content: "\f24b"; } +.bi-chat-left-dots-fill::before { content: "\f24c"; } +.bi-chat-left-dots::before { content: "\f24d"; } +.bi-chat-left-fill::before { content: "\f24e"; } +.bi-chat-left-quote-fill::before { content: "\f24f"; } +.bi-chat-left-quote::before { content: "\f250"; } +.bi-chat-left-text-fill::before { content: "\f251"; } +.bi-chat-left-text::before { content: "\f252"; } +.bi-chat-left::before { content: "\f253"; } +.bi-chat-quote-fill::before { content: "\f254"; } +.bi-chat-quote::before { content: "\f255"; } +.bi-chat-right-dots-fill::before { content: "\f256"; } +.bi-chat-right-dots::before { content: "\f257"; } +.bi-chat-right-fill::before { content: "\f258"; } +.bi-chat-right-quote-fill::before { content: "\f259"; } +.bi-chat-right-quote::before { content: "\f25a"; } +.bi-chat-right-text-fill::before { content: "\f25b"; } +.bi-chat-right-text::before { content: "\f25c"; } +.bi-chat-right::before { content: "\f25d"; } +.bi-chat-square-dots-fill::before { content: "\f25e"; } +.bi-chat-square-dots::before { content: "\f25f"; } +.bi-chat-square-fill::before { content: "\f260"; } +.bi-chat-square-quote-fill::before { content: "\f261"; } +.bi-chat-square-quote::before { content: "\f262"; } +.bi-chat-square-text-fill::before { content: "\f263"; } +.bi-chat-square-text::before { content: "\f264"; } +.bi-chat-square::before { content: "\f265"; } +.bi-chat-text-fill::before { content: "\f266"; } +.bi-chat-text::before { content: "\f267"; } +.bi-chat::before { content: "\f268"; } +.bi-check-all::before { content: "\f269"; } +.bi-check-circle-fill::before { content: "\f26a"; } +.bi-check-circle::before { content: "\f26b"; } +.bi-check-square-fill::before { content: "\f26c"; } +.bi-check-square::before { content: "\f26d"; } +.bi-check::before { content: "\f26e"; } +.bi-check2-all::before { content: "\f26f"; } +.bi-check2-circle::before { content: "\f270"; } +.bi-check2-square::before { content: "\f271"; } +.bi-check2::before { content: "\f272"; } +.bi-chevron-bar-contract::before { content: "\f273"; } +.bi-chevron-bar-down::before { content: "\f274"; } +.bi-chevron-bar-expand::before { content: "\f275"; } +.bi-chevron-bar-left::before { content: "\f276"; } +.bi-chevron-bar-right::before { content: "\f277"; } +.bi-chevron-bar-up::before { content: "\f278"; } +.bi-chevron-compact-down::before { 
content: "\f279"; } +.bi-chevron-compact-left::before { content: "\f27a"; } +.bi-chevron-compact-right::before { content: "\f27b"; } +.bi-chevron-compact-up::before { content: "\f27c"; } +.bi-chevron-contract::before { content: "\f27d"; } +.bi-chevron-double-down::before { content: "\f27e"; } +.bi-chevron-double-left::before { content: "\f27f"; } +.bi-chevron-double-right::before { content: "\f280"; } +.bi-chevron-double-up::before { content: "\f281"; } +.bi-chevron-down::before { content: "\f282"; } +.bi-chevron-expand::before { content: "\f283"; } +.bi-chevron-left::before { content: "\f284"; } +.bi-chevron-right::before { content: "\f285"; } +.bi-chevron-up::before { content: "\f286"; } +.bi-circle-fill::before { content: "\f287"; } +.bi-circle-half::before { content: "\f288"; } +.bi-circle-square::before { content: "\f289"; } +.bi-circle::before { content: "\f28a"; } +.bi-clipboard-check::before { content: "\f28b"; } +.bi-clipboard-data::before { content: "\f28c"; } +.bi-clipboard-minus::before { content: "\f28d"; } +.bi-clipboard-plus::before { content: "\f28e"; } +.bi-clipboard-x::before { content: "\f28f"; } +.bi-clipboard::before { content: "\f290"; } +.bi-clock-fill::before { content: "\f291"; } +.bi-clock-history::before { content: "\f292"; } +.bi-clock::before { content: "\f293"; } +.bi-cloud-arrow-down-fill::before { content: "\f294"; } +.bi-cloud-arrow-down::before { content: "\f295"; } +.bi-cloud-arrow-up-fill::before { content: "\f296"; } +.bi-cloud-arrow-up::before { content: "\f297"; } +.bi-cloud-check-fill::before { content: "\f298"; } +.bi-cloud-check::before { content: "\f299"; } +.bi-cloud-download-fill::before { content: "\f29a"; } +.bi-cloud-download::before { content: "\f29b"; } +.bi-cloud-drizzle-fill::before { content: "\f29c"; } +.bi-cloud-drizzle::before { content: "\f29d"; } +.bi-cloud-fill::before { content: "\f29e"; } +.bi-cloud-fog-fill::before { content: "\f29f"; } +.bi-cloud-fog::before { content: "\f2a0"; } +.bi-cloud-fog2-fill::before { content: "\f2a1"; } +.bi-cloud-fog2::before { content: "\f2a2"; } +.bi-cloud-hail-fill::before { content: "\f2a3"; } +.bi-cloud-hail::before { content: "\f2a4"; } +.bi-cloud-haze-1::before { content: "\f2a5"; } +.bi-cloud-haze-fill::before { content: "\f2a6"; } +.bi-cloud-haze::before { content: "\f2a7"; } +.bi-cloud-haze2-fill::before { content: "\f2a8"; } +.bi-cloud-lightning-fill::before { content: "\f2a9"; } +.bi-cloud-lightning-rain-fill::before { content: "\f2aa"; } +.bi-cloud-lightning-rain::before { content: "\f2ab"; } +.bi-cloud-lightning::before { content: "\f2ac"; } +.bi-cloud-minus-fill::before { content: "\f2ad"; } +.bi-cloud-minus::before { content: "\f2ae"; } +.bi-cloud-moon-fill::before { content: "\f2af"; } +.bi-cloud-moon::before { content: "\f2b0"; } +.bi-cloud-plus-fill::before { content: "\f2b1"; } +.bi-cloud-plus::before { content: "\f2b2"; } +.bi-cloud-rain-fill::before { content: "\f2b3"; } +.bi-cloud-rain-heavy-fill::before { content: "\f2b4"; } +.bi-cloud-rain-heavy::before { content: "\f2b5"; } +.bi-cloud-rain::before { content: "\f2b6"; } +.bi-cloud-slash-fill::before { content: "\f2b7"; } +.bi-cloud-slash::before { content: "\f2b8"; } +.bi-cloud-sleet-fill::before { content: "\f2b9"; } +.bi-cloud-sleet::before { content: "\f2ba"; } +.bi-cloud-snow-fill::before { content: "\f2bb"; } +.bi-cloud-snow::before { content: "\f2bc"; } +.bi-cloud-sun-fill::before { content: "\f2bd"; } +.bi-cloud-sun::before { content: "\f2be"; } +.bi-cloud-upload-fill::before { content: "\f2bf"; } 
+.bi-cloud-upload::before { content: "\f2c0"; } +.bi-cloud::before { content: "\f2c1"; } +.bi-clouds-fill::before { content: "\f2c2"; } +.bi-clouds::before { content: "\f2c3"; } +.bi-cloudy-fill::before { content: "\f2c4"; } +.bi-cloudy::before { content: "\f2c5"; } +.bi-code-slash::before { content: "\f2c6"; } +.bi-code-square::before { content: "\f2c7"; } +.bi-code::before { content: "\f2c8"; } +.bi-collection-fill::before { content: "\f2c9"; } +.bi-collection-play-fill::before { content: "\f2ca"; } +.bi-collection-play::before { content: "\f2cb"; } +.bi-collection::before { content: "\f2cc"; } +.bi-columns-gap::before { content: "\f2cd"; } +.bi-columns::before { content: "\f2ce"; } +.bi-command::before { content: "\f2cf"; } +.bi-compass-fill::before { content: "\f2d0"; } +.bi-compass::before { content: "\f2d1"; } +.bi-cone-striped::before { content: "\f2d2"; } +.bi-cone::before { content: "\f2d3"; } +.bi-controller::before { content: "\f2d4"; } +.bi-cpu-fill::before { content: "\f2d5"; } +.bi-cpu::before { content: "\f2d6"; } +.bi-credit-card-2-back-fill::before { content: "\f2d7"; } +.bi-credit-card-2-back::before { content: "\f2d8"; } +.bi-credit-card-2-front-fill::before { content: "\f2d9"; } +.bi-credit-card-2-front::before { content: "\f2da"; } +.bi-credit-card-fill::before { content: "\f2db"; } +.bi-credit-card::before { content: "\f2dc"; } +.bi-crop::before { content: "\f2dd"; } +.bi-cup-fill::before { content: "\f2de"; } +.bi-cup-straw::before { content: "\f2df"; } +.bi-cup::before { content: "\f2e0"; } +.bi-cursor-fill::before { content: "\f2e1"; } +.bi-cursor-text::before { content: "\f2e2"; } +.bi-cursor::before { content: "\f2e3"; } +.bi-dash-circle-dotted::before { content: "\f2e4"; } +.bi-dash-circle-fill::before { content: "\f2e5"; } +.bi-dash-circle::before { content: "\f2e6"; } +.bi-dash-square-dotted::before { content: "\f2e7"; } +.bi-dash-square-fill::before { content: "\f2e8"; } +.bi-dash-square::before { content: "\f2e9"; } +.bi-dash::before { content: "\f2ea"; } +.bi-diagram-2-fill::before { content: "\f2eb"; } +.bi-diagram-2::before { content: "\f2ec"; } +.bi-diagram-3-fill::before { content: "\f2ed"; } +.bi-diagram-3::before { content: "\f2ee"; } +.bi-diamond-fill::before { content: "\f2ef"; } +.bi-diamond-half::before { content: "\f2f0"; } +.bi-diamond::before { content: "\f2f1"; } +.bi-dice-1-fill::before { content: "\f2f2"; } +.bi-dice-1::before { content: "\f2f3"; } +.bi-dice-2-fill::before { content: "\f2f4"; } +.bi-dice-2::before { content: "\f2f5"; } +.bi-dice-3-fill::before { content: "\f2f6"; } +.bi-dice-3::before { content: "\f2f7"; } +.bi-dice-4-fill::before { content: "\f2f8"; } +.bi-dice-4::before { content: "\f2f9"; } +.bi-dice-5-fill::before { content: "\f2fa"; } +.bi-dice-5::before { content: "\f2fb"; } +.bi-dice-6-fill::before { content: "\f2fc"; } +.bi-dice-6::before { content: "\f2fd"; } +.bi-disc-fill::before { content: "\f2fe"; } +.bi-disc::before { content: "\f2ff"; } +.bi-discord::before { content: "\f300"; } +.bi-display-fill::before { content: "\f301"; } +.bi-display::before { content: "\f302"; } +.bi-distribute-horizontal::before { content: "\f303"; } +.bi-distribute-vertical::before { content: "\f304"; } +.bi-door-closed-fill::before { content: "\f305"; } +.bi-door-closed::before { content: "\f306"; } +.bi-door-open-fill::before { content: "\f307"; } +.bi-door-open::before { content: "\f308"; } +.bi-dot::before { content: "\f309"; } +.bi-download::before { content: "\f30a"; } +.bi-droplet-fill::before { content: "\f30b"; } 
+.bi-droplet-half::before { content: "\f30c"; } +.bi-droplet::before { content: "\f30d"; } +.bi-earbuds::before { content: "\f30e"; } +.bi-easel-fill::before { content: "\f30f"; } +.bi-easel::before { content: "\f310"; } +.bi-egg-fill::before { content: "\f311"; } +.bi-egg-fried::before { content: "\f312"; } +.bi-egg::before { content: "\f313"; } +.bi-eject-fill::before { content: "\f314"; } +.bi-eject::before { content: "\f315"; } +.bi-emoji-angry-fill::before { content: "\f316"; } +.bi-emoji-angry::before { content: "\f317"; } +.bi-emoji-dizzy-fill::before { content: "\f318"; } +.bi-emoji-dizzy::before { content: "\f319"; } +.bi-emoji-expressionless-fill::before { content: "\f31a"; } +.bi-emoji-expressionless::before { content: "\f31b"; } +.bi-emoji-frown-fill::before { content: "\f31c"; } +.bi-emoji-frown::before { content: "\f31d"; } +.bi-emoji-heart-eyes-fill::before { content: "\f31e"; } +.bi-emoji-heart-eyes::before { content: "\f31f"; } +.bi-emoji-laughing-fill::before { content: "\f320"; } +.bi-emoji-laughing::before { content: "\f321"; } +.bi-emoji-neutral-fill::before { content: "\f322"; } +.bi-emoji-neutral::before { content: "\f323"; } +.bi-emoji-smile-fill::before { content: "\f324"; } +.bi-emoji-smile-upside-down-fill::before { content: "\f325"; } +.bi-emoji-smile-upside-down::before { content: "\f326"; } +.bi-emoji-smile::before { content: "\f327"; } +.bi-emoji-sunglasses-fill::before { content: "\f328"; } +.bi-emoji-sunglasses::before { content: "\f329"; } +.bi-emoji-wink-fill::before { content: "\f32a"; } +.bi-emoji-wink::before { content: "\f32b"; } +.bi-envelope-fill::before { content: "\f32c"; } +.bi-envelope-open-fill::before { content: "\f32d"; } +.bi-envelope-open::before { content: "\f32e"; } +.bi-envelope::before { content: "\f32f"; } +.bi-eraser-fill::before { content: "\f330"; } +.bi-eraser::before { content: "\f331"; } +.bi-exclamation-circle-fill::before { content: "\f332"; } +.bi-exclamation-circle::before { content: "\f333"; } +.bi-exclamation-diamond-fill::before { content: "\f334"; } +.bi-exclamation-diamond::before { content: "\f335"; } +.bi-exclamation-octagon-fill::before { content: "\f336"; } +.bi-exclamation-octagon::before { content: "\f337"; } +.bi-exclamation-square-fill::before { content: "\f338"; } +.bi-exclamation-square::before { content: "\f339"; } +.bi-exclamation-triangle-fill::before { content: "\f33a"; } +.bi-exclamation-triangle::before { content: "\f33b"; } +.bi-exclamation::before { content: "\f33c"; } +.bi-exclude::before { content: "\f33d"; } +.bi-eye-fill::before { content: "\f33e"; } +.bi-eye-slash-fill::before { content: "\f33f"; } +.bi-eye-slash::before { content: "\f340"; } +.bi-eye::before { content: "\f341"; } +.bi-eyedropper::before { content: "\f342"; } +.bi-eyeglasses::before { content: "\f343"; } +.bi-facebook::before { content: "\f344"; } +.bi-file-arrow-down-fill::before { content: "\f345"; } +.bi-file-arrow-down::before { content: "\f346"; } +.bi-file-arrow-up-fill::before { content: "\f347"; } +.bi-file-arrow-up::before { content: "\f348"; } +.bi-file-bar-graph-fill::before { content: "\f349"; } +.bi-file-bar-graph::before { content: "\f34a"; } +.bi-file-binary-fill::before { content: "\f34b"; } +.bi-file-binary::before { content: "\f34c"; } +.bi-file-break-fill::before { content: "\f34d"; } +.bi-file-break::before { content: "\f34e"; } +.bi-file-check-fill::before { content: "\f34f"; } +.bi-file-check::before { content: "\f350"; } +.bi-file-code-fill::before { content: "\f351"; } +.bi-file-code::before { content: 
"\f352"; } +.bi-file-diff-fill::before { content: "\f353"; } +.bi-file-diff::before { content: "\f354"; } +.bi-file-earmark-arrow-down-fill::before { content: "\f355"; } +.bi-file-earmark-arrow-down::before { content: "\f356"; } +.bi-file-earmark-arrow-up-fill::before { content: "\f357"; } +.bi-file-earmark-arrow-up::before { content: "\f358"; } +.bi-file-earmark-bar-graph-fill::before { content: "\f359"; } +.bi-file-earmark-bar-graph::before { content: "\f35a"; } +.bi-file-earmark-binary-fill::before { content: "\f35b"; } +.bi-file-earmark-binary::before { content: "\f35c"; } +.bi-file-earmark-break-fill::before { content: "\f35d"; } +.bi-file-earmark-break::before { content: "\f35e"; } +.bi-file-earmark-check-fill::before { content: "\f35f"; } +.bi-file-earmark-check::before { content: "\f360"; } +.bi-file-earmark-code-fill::before { content: "\f361"; } +.bi-file-earmark-code::before { content: "\f362"; } +.bi-file-earmark-diff-fill::before { content: "\f363"; } +.bi-file-earmark-diff::before { content: "\f364"; } +.bi-file-earmark-easel-fill::before { content: "\f365"; } +.bi-file-earmark-easel::before { content: "\f366"; } +.bi-file-earmark-excel-fill::before { content: "\f367"; } +.bi-file-earmark-excel::before { content: "\f368"; } +.bi-file-earmark-fill::before { content: "\f369"; } +.bi-file-earmark-font-fill::before { content: "\f36a"; } +.bi-file-earmark-font::before { content: "\f36b"; } +.bi-file-earmark-image-fill::before { content: "\f36c"; } +.bi-file-earmark-image::before { content: "\f36d"; } +.bi-file-earmark-lock-fill::before { content: "\f36e"; } +.bi-file-earmark-lock::before { content: "\f36f"; } +.bi-file-earmark-lock2-fill::before { content: "\f370"; } +.bi-file-earmark-lock2::before { content: "\f371"; } +.bi-file-earmark-medical-fill::before { content: "\f372"; } +.bi-file-earmark-medical::before { content: "\f373"; } +.bi-file-earmark-minus-fill::before { content: "\f374"; } +.bi-file-earmark-minus::before { content: "\f375"; } +.bi-file-earmark-music-fill::before { content: "\f376"; } +.bi-file-earmark-music::before { content: "\f377"; } +.bi-file-earmark-person-fill::before { content: "\f378"; } +.bi-file-earmark-person::before { content: "\f379"; } +.bi-file-earmark-play-fill::before { content: "\f37a"; } +.bi-file-earmark-play::before { content: "\f37b"; } +.bi-file-earmark-plus-fill::before { content: "\f37c"; } +.bi-file-earmark-plus::before { content: "\f37d"; } +.bi-file-earmark-post-fill::before { content: "\f37e"; } +.bi-file-earmark-post::before { content: "\f37f"; } +.bi-file-earmark-ppt-fill::before { content: "\f380"; } +.bi-file-earmark-ppt::before { content: "\f381"; } +.bi-file-earmark-richtext-fill::before { content: "\f382"; } +.bi-file-earmark-richtext::before { content: "\f383"; } +.bi-file-earmark-ruled-fill::before { content: "\f384"; } +.bi-file-earmark-ruled::before { content: "\f385"; } +.bi-file-earmark-slides-fill::before { content: "\f386"; } +.bi-file-earmark-slides::before { content: "\f387"; } +.bi-file-earmark-spreadsheet-fill::before { content: "\f388"; } +.bi-file-earmark-spreadsheet::before { content: "\f389"; } +.bi-file-earmark-text-fill::before { content: "\f38a"; } +.bi-file-earmark-text::before { content: "\f38b"; } +.bi-file-earmark-word-fill::before { content: "\f38c"; } +.bi-file-earmark-word::before { content: "\f38d"; } +.bi-file-earmark-x-fill::before { content: "\f38e"; } +.bi-file-earmark-x::before { content: "\f38f"; } +.bi-file-earmark-zip-fill::before { content: "\f390"; } +.bi-file-earmark-zip::before { 
content: "\f391"; } +.bi-file-earmark::before { content: "\f392"; } +.bi-file-easel-fill::before { content: "\f393"; } +.bi-file-easel::before { content: "\f394"; } +.bi-file-excel-fill::before { content: "\f395"; } +.bi-file-excel::before { content: "\f396"; } +.bi-file-fill::before { content: "\f397"; } +.bi-file-font-fill::before { content: "\f398"; } +.bi-file-font::before { content: "\f399"; } +.bi-file-image-fill::before { content: "\f39a"; } +.bi-file-image::before { content: "\f39b"; } +.bi-file-lock-fill::before { content: "\f39c"; } +.bi-file-lock::before { content: "\f39d"; } +.bi-file-lock2-fill::before { content: "\f39e"; } +.bi-file-lock2::before { content: "\f39f"; } +.bi-file-medical-fill::before { content: "\f3a0"; } +.bi-file-medical::before { content: "\f3a1"; } +.bi-file-minus-fill::before { content: "\f3a2"; } +.bi-file-minus::before { content: "\f3a3"; } +.bi-file-music-fill::before { content: "\f3a4"; } +.bi-file-music::before { content: "\f3a5"; } +.bi-file-person-fill::before { content: "\f3a6"; } +.bi-file-person::before { content: "\f3a7"; } +.bi-file-play-fill::before { content: "\f3a8"; } +.bi-file-play::before { content: "\f3a9"; } +.bi-file-plus-fill::before { content: "\f3aa"; } +.bi-file-plus::before { content: "\f3ab"; } +.bi-file-post-fill::before { content: "\f3ac"; } +.bi-file-post::before { content: "\f3ad"; } +.bi-file-ppt-fill::before { content: "\f3ae"; } +.bi-file-ppt::before { content: "\f3af"; } +.bi-file-richtext-fill::before { content: "\f3b0"; } +.bi-file-richtext::before { content: "\f3b1"; } +.bi-file-ruled-fill::before { content: "\f3b2"; } +.bi-file-ruled::before { content: "\f3b3"; } +.bi-file-slides-fill::before { content: "\f3b4"; } +.bi-file-slides::before { content: "\f3b5"; } +.bi-file-spreadsheet-fill::before { content: "\f3b6"; } +.bi-file-spreadsheet::before { content: "\f3b7"; } +.bi-file-text-fill::before { content: "\f3b8"; } +.bi-file-text::before { content: "\f3b9"; } +.bi-file-word-fill::before { content: "\f3ba"; } +.bi-file-word::before { content: "\f3bb"; } +.bi-file-x-fill::before { content: "\f3bc"; } +.bi-file-x::before { content: "\f3bd"; } +.bi-file-zip-fill::before { content: "\f3be"; } +.bi-file-zip::before { content: "\f3bf"; } +.bi-file::before { content: "\f3c0"; } +.bi-files-alt::before { content: "\f3c1"; } +.bi-files::before { content: "\f3c2"; } +.bi-film::before { content: "\f3c3"; } +.bi-filter-circle-fill::before { content: "\f3c4"; } +.bi-filter-circle::before { content: "\f3c5"; } +.bi-filter-left::before { content: "\f3c6"; } +.bi-filter-right::before { content: "\f3c7"; } +.bi-filter-square-fill::before { content: "\f3c8"; } +.bi-filter-square::before { content: "\f3c9"; } +.bi-filter::before { content: "\f3ca"; } +.bi-flag-fill::before { content: "\f3cb"; } +.bi-flag::before { content: "\f3cc"; } +.bi-flower1::before { content: "\f3cd"; } +.bi-flower2::before { content: "\f3ce"; } +.bi-flower3::before { content: "\f3cf"; } +.bi-folder-check::before { content: "\f3d0"; } +.bi-folder-fill::before { content: "\f3d1"; } +.bi-folder-minus::before { content: "\f3d2"; } +.bi-folder-plus::before { content: "\f3d3"; } +.bi-folder-symlink-fill::before { content: "\f3d4"; } +.bi-folder-symlink::before { content: "\f3d5"; } +.bi-folder-x::before { content: "\f3d6"; } +.bi-folder::before { content: "\f3d7"; } +.bi-folder2-open::before { content: "\f3d8"; } +.bi-folder2::before { content: "\f3d9"; } +.bi-fonts::before { content: "\f3da"; } +.bi-forward-fill::before { content: "\f3db"; } +.bi-forward::before { 
content: "\f3dc"; } +.bi-front::before { content: "\f3dd"; } +.bi-fullscreen-exit::before { content: "\f3de"; } +.bi-fullscreen::before { content: "\f3df"; } +.bi-funnel-fill::before { content: "\f3e0"; } +.bi-funnel::before { content: "\f3e1"; } +.bi-gear-fill::before { content: "\f3e2"; } +.bi-gear-wide-connected::before { content: "\f3e3"; } +.bi-gear-wide::before { content: "\f3e4"; } +.bi-gear::before { content: "\f3e5"; } +.bi-gem::before { content: "\f3e6"; } +.bi-geo-alt-fill::before { content: "\f3e7"; } +.bi-geo-alt::before { content: "\f3e8"; } +.bi-geo-fill::before { content: "\f3e9"; } +.bi-geo::before { content: "\f3ea"; } +.bi-gift-fill::before { content: "\f3eb"; } +.bi-gift::before { content: "\f3ec"; } +.bi-github::before { content: "\f3ed"; } +.bi-globe::before { content: "\f3ee"; } +.bi-globe2::before { content: "\f3ef"; } +.bi-google::before { content: "\f3f0"; } +.bi-graph-down::before { content: "\f3f1"; } +.bi-graph-up::before { content: "\f3f2"; } +.bi-grid-1x2-fill::before { content: "\f3f3"; } +.bi-grid-1x2::before { content: "\f3f4"; } +.bi-grid-3x2-gap-fill::before { content: "\f3f5"; } +.bi-grid-3x2-gap::before { content: "\f3f6"; } +.bi-grid-3x2::before { content: "\f3f7"; } +.bi-grid-3x3-gap-fill::before { content: "\f3f8"; } +.bi-grid-3x3-gap::before { content: "\f3f9"; } +.bi-grid-3x3::before { content: "\f3fa"; } +.bi-grid-fill::before { content: "\f3fb"; } +.bi-grid::before { content: "\f3fc"; } +.bi-grip-horizontal::before { content: "\f3fd"; } +.bi-grip-vertical::before { content: "\f3fe"; } +.bi-hammer::before { content: "\f3ff"; } +.bi-hand-index-fill::before { content: "\f400"; } +.bi-hand-index-thumb-fill::before { content: "\f401"; } +.bi-hand-index-thumb::before { content: "\f402"; } +.bi-hand-index::before { content: "\f403"; } +.bi-hand-thumbs-down-fill::before { content: "\f404"; } +.bi-hand-thumbs-down::before { content: "\f405"; } +.bi-hand-thumbs-up-fill::before { content: "\f406"; } +.bi-hand-thumbs-up::before { content: "\f407"; } +.bi-handbag-fill::before { content: "\f408"; } +.bi-handbag::before { content: "\f409"; } +.bi-hash::before { content: "\f40a"; } +.bi-hdd-fill::before { content: "\f40b"; } +.bi-hdd-network-fill::before { content: "\f40c"; } +.bi-hdd-network::before { content: "\f40d"; } +.bi-hdd-rack-fill::before { content: "\f40e"; } +.bi-hdd-rack::before { content: "\f40f"; } +.bi-hdd-stack-fill::before { content: "\f410"; } +.bi-hdd-stack::before { content: "\f411"; } +.bi-hdd::before { content: "\f412"; } +.bi-headphones::before { content: "\f413"; } +.bi-headset::before { content: "\f414"; } +.bi-heart-fill::before { content: "\f415"; } +.bi-heart-half::before { content: "\f416"; } +.bi-heart::before { content: "\f417"; } +.bi-heptagon-fill::before { content: "\f418"; } +.bi-heptagon-half::before { content: "\f419"; } +.bi-heptagon::before { content: "\f41a"; } +.bi-hexagon-fill::before { content: "\f41b"; } +.bi-hexagon-half::before { content: "\f41c"; } +.bi-hexagon::before { content: "\f41d"; } +.bi-hourglass-bottom::before { content: "\f41e"; } +.bi-hourglass-split::before { content: "\f41f"; } +.bi-hourglass-top::before { content: "\f420"; } +.bi-hourglass::before { content: "\f421"; } +.bi-house-door-fill::before { content: "\f422"; } +.bi-house-door::before { content: "\f423"; } +.bi-house-fill::before { content: "\f424"; } +.bi-house::before { content: "\f425"; } +.bi-hr::before { content: "\f426"; } +.bi-hurricane::before { content: "\f427"; } +.bi-image-alt::before { content: "\f428"; } +.bi-image-fill::before 
{ content: "\f429"; } +.bi-image::before { content: "\f42a"; } +.bi-images::before { content: "\f42b"; } +.bi-inbox-fill::before { content: "\f42c"; } +.bi-inbox::before { content: "\f42d"; } +.bi-inboxes-fill::before { content: "\f42e"; } +.bi-inboxes::before { content: "\f42f"; } +.bi-info-circle-fill::before { content: "\f430"; } +.bi-info-circle::before { content: "\f431"; } +.bi-info-square-fill::before { content: "\f432"; } +.bi-info-square::before { content: "\f433"; } +.bi-info::before { content: "\f434"; } +.bi-input-cursor-text::before { content: "\f435"; } +.bi-input-cursor::before { content: "\f436"; } +.bi-instagram::before { content: "\f437"; } +.bi-intersect::before { content: "\f438"; } +.bi-journal-album::before { content: "\f439"; } +.bi-journal-arrow-down::before { content: "\f43a"; } +.bi-journal-arrow-up::before { content: "\f43b"; } +.bi-journal-bookmark-fill::before { content: "\f43c"; } +.bi-journal-bookmark::before { content: "\f43d"; } +.bi-journal-check::before { content: "\f43e"; } +.bi-journal-code::before { content: "\f43f"; } +.bi-journal-medical::before { content: "\f440"; } +.bi-journal-minus::before { content: "\f441"; } +.bi-journal-plus::before { content: "\f442"; } +.bi-journal-richtext::before { content: "\f443"; } +.bi-journal-text::before { content: "\f444"; } +.bi-journal-x::before { content: "\f445"; } +.bi-journal::before { content: "\f446"; } +.bi-journals::before { content: "\f447"; } +.bi-joystick::before { content: "\f448"; } +.bi-justify-left::before { content: "\f449"; } +.bi-justify-right::before { content: "\f44a"; } +.bi-justify::before { content: "\f44b"; } +.bi-kanban-fill::before { content: "\f44c"; } +.bi-kanban::before { content: "\f44d"; } +.bi-key-fill::before { content: "\f44e"; } +.bi-key::before { content: "\f44f"; } +.bi-keyboard-fill::before { content: "\f450"; } +.bi-keyboard::before { content: "\f451"; } +.bi-ladder::before { content: "\f452"; } +.bi-lamp-fill::before { content: "\f453"; } +.bi-lamp::before { content: "\f454"; } +.bi-laptop-fill::before { content: "\f455"; } +.bi-laptop::before { content: "\f456"; } +.bi-layer-backward::before { content: "\f457"; } +.bi-layer-forward::before { content: "\f458"; } +.bi-layers-fill::before { content: "\f459"; } +.bi-layers-half::before { content: "\f45a"; } +.bi-layers::before { content: "\f45b"; } +.bi-layout-sidebar-inset-reverse::before { content: "\f45c"; } +.bi-layout-sidebar-inset::before { content: "\f45d"; } +.bi-layout-sidebar-reverse::before { content: "\f45e"; } +.bi-layout-sidebar::before { content: "\f45f"; } +.bi-layout-split::before { content: "\f460"; } +.bi-layout-text-sidebar-reverse::before { content: "\f461"; } +.bi-layout-text-sidebar::before { content: "\f462"; } +.bi-layout-text-window-reverse::before { content: "\f463"; } +.bi-layout-text-window::before { content: "\f464"; } +.bi-layout-three-columns::before { content: "\f465"; } +.bi-layout-wtf::before { content: "\f466"; } +.bi-life-preserver::before { content: "\f467"; } +.bi-lightbulb-fill::before { content: "\f468"; } +.bi-lightbulb-off-fill::before { content: "\f469"; } +.bi-lightbulb-off::before { content: "\f46a"; } +.bi-lightbulb::before { content: "\f46b"; } +.bi-lightning-charge-fill::before { content: "\f46c"; } +.bi-lightning-charge::before { content: "\f46d"; } +.bi-lightning-fill::before { content: "\f46e"; } +.bi-lightning::before { content: "\f46f"; } +.bi-link-45deg::before { content: "\f470"; } +.bi-link::before { content: "\f471"; } +.bi-linkedin::before { content: "\f472"; } 
+.bi-list-check::before { content: "\f473"; } +.bi-list-nested::before { content: "\f474"; } +.bi-list-ol::before { content: "\f475"; } +.bi-list-stars::before { content: "\f476"; } +.bi-list-task::before { content: "\f477"; } +.bi-list-ul::before { content: "\f478"; } +.bi-list::before { content: "\f479"; } +.bi-lock-fill::before { content: "\f47a"; } +.bi-lock::before { content: "\f47b"; } +.bi-mailbox::before { content: "\f47c"; } +.bi-mailbox2::before { content: "\f47d"; } +.bi-map-fill::before { content: "\f47e"; } +.bi-map::before { content: "\f47f"; } +.bi-markdown-fill::before { content: "\f480"; } +.bi-markdown::before { content: "\f481"; } +.bi-mask::before { content: "\f482"; } +.bi-megaphone-fill::before { content: "\f483"; } +.bi-megaphone::before { content: "\f484"; } +.bi-menu-app-fill::before { content: "\f485"; } +.bi-menu-app::before { content: "\f486"; } +.bi-menu-button-fill::before { content: "\f487"; } +.bi-menu-button-wide-fill::before { content: "\f488"; } +.bi-menu-button-wide::before { content: "\f489"; } +.bi-menu-button::before { content: "\f48a"; } +.bi-menu-down::before { content: "\f48b"; } +.bi-menu-up::before { content: "\f48c"; } +.bi-mic-fill::before { content: "\f48d"; } +.bi-mic-mute-fill::before { content: "\f48e"; } +.bi-mic-mute::before { content: "\f48f"; } +.bi-mic::before { content: "\f490"; } +.bi-minecart-loaded::before { content: "\f491"; } +.bi-minecart::before { content: "\f492"; } +.bi-moisture::before { content: "\f493"; } +.bi-moon-fill::before { content: "\f494"; } +.bi-moon-stars-fill::before { content: "\f495"; } +.bi-moon-stars::before { content: "\f496"; } +.bi-moon::before { content: "\f497"; } +.bi-mouse-fill::before { content: "\f498"; } +.bi-mouse::before { content: "\f499"; } +.bi-mouse2-fill::before { content: "\f49a"; } +.bi-mouse2::before { content: "\f49b"; } +.bi-mouse3-fill::before { content: "\f49c"; } +.bi-mouse3::before { content: "\f49d"; } +.bi-music-note-beamed::before { content: "\f49e"; } +.bi-music-note-list::before { content: "\f49f"; } +.bi-music-note::before { content: "\f4a0"; } +.bi-music-player-fill::before { content: "\f4a1"; } +.bi-music-player::before { content: "\f4a2"; } +.bi-newspaper::before { content: "\f4a3"; } +.bi-node-minus-fill::before { content: "\f4a4"; } +.bi-node-minus::before { content: "\f4a5"; } +.bi-node-plus-fill::before { content: "\f4a6"; } +.bi-node-plus::before { content: "\f4a7"; } +.bi-nut-fill::before { content: "\f4a8"; } +.bi-nut::before { content: "\f4a9"; } +.bi-octagon-fill::before { content: "\f4aa"; } +.bi-octagon-half::before { content: "\f4ab"; } +.bi-octagon::before { content: "\f4ac"; } +.bi-option::before { content: "\f4ad"; } +.bi-outlet::before { content: "\f4ae"; } +.bi-paint-bucket::before { content: "\f4af"; } +.bi-palette-fill::before { content: "\f4b0"; } +.bi-palette::before { content: "\f4b1"; } +.bi-palette2::before { content: "\f4b2"; } +.bi-paperclip::before { content: "\f4b3"; } +.bi-paragraph::before { content: "\f4b4"; } +.bi-patch-check-fill::before { content: "\f4b5"; } +.bi-patch-check::before { content: "\f4b6"; } +.bi-patch-exclamation-fill::before { content: "\f4b7"; } +.bi-patch-exclamation::before { content: "\f4b8"; } +.bi-patch-minus-fill::before { content: "\f4b9"; } +.bi-patch-minus::before { content: "\f4ba"; } +.bi-patch-plus-fill::before { content: "\f4bb"; } +.bi-patch-plus::before { content: "\f4bc"; } +.bi-patch-question-fill::before { content: "\f4bd"; } +.bi-patch-question::before { content: "\f4be"; } +.bi-pause-btn-fill::before { 
content: "\f4bf"; } +.bi-pause-btn::before { content: "\f4c0"; } +.bi-pause-circle-fill::before { content: "\f4c1"; } +.bi-pause-circle::before { content: "\f4c2"; } +.bi-pause-fill::before { content: "\f4c3"; } +.bi-pause::before { content: "\f4c4"; } +.bi-peace-fill::before { content: "\f4c5"; } +.bi-peace::before { content: "\f4c6"; } +.bi-pen-fill::before { content: "\f4c7"; } +.bi-pen::before { content: "\f4c8"; } +.bi-pencil-fill::before { content: "\f4c9"; } +.bi-pencil-square::before { content: "\f4ca"; } +.bi-pencil::before { content: "\f4cb"; } +.bi-pentagon-fill::before { content: "\f4cc"; } +.bi-pentagon-half::before { content: "\f4cd"; } +.bi-pentagon::before { content: "\f4ce"; } +.bi-people-fill::before { content: "\f4cf"; } +.bi-people::before { content: "\f4d0"; } +.bi-percent::before { content: "\f4d1"; } +.bi-person-badge-fill::before { content: "\f4d2"; } +.bi-person-badge::before { content: "\f4d3"; } +.bi-person-bounding-box::before { content: "\f4d4"; } +.bi-person-check-fill::before { content: "\f4d5"; } +.bi-person-check::before { content: "\f4d6"; } +.bi-person-circle::before { content: "\f4d7"; } +.bi-person-dash-fill::before { content: "\f4d8"; } +.bi-person-dash::before { content: "\f4d9"; } +.bi-person-fill::before { content: "\f4da"; } +.bi-person-lines-fill::before { content: "\f4db"; } +.bi-person-plus-fill::before { content: "\f4dc"; } +.bi-person-plus::before { content: "\f4dd"; } +.bi-person-square::before { content: "\f4de"; } +.bi-person-x-fill::before { content: "\f4df"; } +.bi-person-x::before { content: "\f4e0"; } +.bi-person::before { content: "\f4e1"; } +.bi-phone-fill::before { content: "\f4e2"; } +.bi-phone-landscape-fill::before { content: "\f4e3"; } +.bi-phone-landscape::before { content: "\f4e4"; } +.bi-phone-vibrate-fill::before { content: "\f4e5"; } +.bi-phone-vibrate::before { content: "\f4e6"; } +.bi-phone::before { content: "\f4e7"; } +.bi-pie-chart-fill::before { content: "\f4e8"; } +.bi-pie-chart::before { content: "\f4e9"; } +.bi-pin-angle-fill::before { content: "\f4ea"; } +.bi-pin-angle::before { content: "\f4eb"; } +.bi-pin-fill::before { content: "\f4ec"; } +.bi-pin::before { content: "\f4ed"; } +.bi-pip-fill::before { content: "\f4ee"; } +.bi-pip::before { content: "\f4ef"; } +.bi-play-btn-fill::before { content: "\f4f0"; } +.bi-play-btn::before { content: "\f4f1"; } +.bi-play-circle-fill::before { content: "\f4f2"; } +.bi-play-circle::before { content: "\f4f3"; } +.bi-play-fill::before { content: "\f4f4"; } +.bi-play::before { content: "\f4f5"; } +.bi-plug-fill::before { content: "\f4f6"; } +.bi-plug::before { content: "\f4f7"; } +.bi-plus-circle-dotted::before { content: "\f4f8"; } +.bi-plus-circle-fill::before { content: "\f4f9"; } +.bi-plus-circle::before { content: "\f4fa"; } +.bi-plus-square-dotted::before { content: "\f4fb"; } +.bi-plus-square-fill::before { content: "\f4fc"; } +.bi-plus-square::before { content: "\f4fd"; } +.bi-plus::before { content: "\f4fe"; } +.bi-power::before { content: "\f4ff"; } +.bi-printer-fill::before { content: "\f500"; } +.bi-printer::before { content: "\f501"; } +.bi-puzzle-fill::before { content: "\f502"; } +.bi-puzzle::before { content: "\f503"; } +.bi-question-circle-fill::before { content: "\f504"; } +.bi-question-circle::before { content: "\f505"; } +.bi-question-diamond-fill::before { content: "\f506"; } +.bi-question-diamond::before { content: "\f507"; } +.bi-question-octagon-fill::before { content: "\f508"; } +.bi-question-octagon::before { content: "\f509"; } 
+.bi-question-square-fill::before { content: "\f50a"; } +.bi-question-square::before { content: "\f50b"; } +.bi-question::before { content: "\f50c"; } +.bi-rainbow::before { content: "\f50d"; } +.bi-receipt-cutoff::before { content: "\f50e"; } +.bi-receipt::before { content: "\f50f"; } +.bi-reception-0::before { content: "\f510"; } +.bi-reception-1::before { content: "\f511"; } +.bi-reception-2::before { content: "\f512"; } +.bi-reception-3::before { content: "\f513"; } +.bi-reception-4::before { content: "\f514"; } +.bi-record-btn-fill::before { content: "\f515"; } +.bi-record-btn::before { content: "\f516"; } +.bi-record-circle-fill::before { content: "\f517"; } +.bi-record-circle::before { content: "\f518"; } +.bi-record-fill::before { content: "\f519"; } +.bi-record::before { content: "\f51a"; } +.bi-record2-fill::before { content: "\f51b"; } +.bi-record2::before { content: "\f51c"; } +.bi-reply-all-fill::before { content: "\f51d"; } +.bi-reply-all::before { content: "\f51e"; } +.bi-reply-fill::before { content: "\f51f"; } +.bi-reply::before { content: "\f520"; } +.bi-rss-fill::before { content: "\f521"; } +.bi-rss::before { content: "\f522"; } +.bi-rulers::before { content: "\f523"; } +.bi-save-fill::before { content: "\f524"; } +.bi-save::before { content: "\f525"; } +.bi-save2-fill::before { content: "\f526"; } +.bi-save2::before { content: "\f527"; } +.bi-scissors::before { content: "\f528"; } +.bi-screwdriver::before { content: "\f529"; } +.bi-search::before { content: "\f52a"; } +.bi-segmented-nav::before { content: "\f52b"; } +.bi-server::before { content: "\f52c"; } +.bi-share-fill::before { content: "\f52d"; } +.bi-share::before { content: "\f52e"; } +.bi-shield-check::before { content: "\f52f"; } +.bi-shield-exclamation::before { content: "\f530"; } +.bi-shield-fill-check::before { content: "\f531"; } +.bi-shield-fill-exclamation::before { content: "\f532"; } +.bi-shield-fill-minus::before { content: "\f533"; } +.bi-shield-fill-plus::before { content: "\f534"; } +.bi-shield-fill-x::before { content: "\f535"; } +.bi-shield-fill::before { content: "\f536"; } +.bi-shield-lock-fill::before { content: "\f537"; } +.bi-shield-lock::before { content: "\f538"; } +.bi-shield-minus::before { content: "\f539"; } +.bi-shield-plus::before { content: "\f53a"; } +.bi-shield-shaded::before { content: "\f53b"; } +.bi-shield-slash-fill::before { content: "\f53c"; } +.bi-shield-slash::before { content: "\f53d"; } +.bi-shield-x::before { content: "\f53e"; } +.bi-shield::before { content: "\f53f"; } +.bi-shift-fill::before { content: "\f540"; } +.bi-shift::before { content: "\f541"; } +.bi-shop-window::before { content: "\f542"; } +.bi-shop::before { content: "\f543"; } +.bi-shuffle::before { content: "\f544"; } +.bi-signpost-2-fill::before { content: "\f545"; } +.bi-signpost-2::before { content: "\f546"; } +.bi-signpost-fill::before { content: "\f547"; } +.bi-signpost-split-fill::before { content: "\f548"; } +.bi-signpost-split::before { content: "\f549"; } +.bi-signpost::before { content: "\f54a"; } +.bi-sim-fill::before { content: "\f54b"; } +.bi-sim::before { content: "\f54c"; } +.bi-skip-backward-btn-fill::before { content: "\f54d"; } +.bi-skip-backward-btn::before { content: "\f54e"; } +.bi-skip-backward-circle-fill::before { content: "\f54f"; } +.bi-skip-backward-circle::before { content: "\f550"; } +.bi-skip-backward-fill::before { content: "\f551"; } +.bi-skip-backward::before { content: "\f552"; } +.bi-skip-end-btn-fill::before { content: "\f553"; } +.bi-skip-end-btn::before { content: 
"\f554"; } +.bi-skip-end-circle-fill::before { content: "\f555"; } +.bi-skip-end-circle::before { content: "\f556"; } +.bi-skip-end-fill::before { content: "\f557"; } +.bi-skip-end::before { content: "\f558"; } +.bi-skip-forward-btn-fill::before { content: "\f559"; } +.bi-skip-forward-btn::before { content: "\f55a"; } +.bi-skip-forward-circle-fill::before { content: "\f55b"; } +.bi-skip-forward-circle::before { content: "\f55c"; } +.bi-skip-forward-fill::before { content: "\f55d"; } +.bi-skip-forward::before { content: "\f55e"; } +.bi-skip-start-btn-fill::before { content: "\f55f"; } +.bi-skip-start-btn::before { content: "\f560"; } +.bi-skip-start-circle-fill::before { content: "\f561"; } +.bi-skip-start-circle::before { content: "\f562"; } +.bi-skip-start-fill::before { content: "\f563"; } +.bi-skip-start::before { content: "\f564"; } +.bi-slack::before { content: "\f565"; } +.bi-slash-circle-fill::before { content: "\f566"; } +.bi-slash-circle::before { content: "\f567"; } +.bi-slash-square-fill::before { content: "\f568"; } +.bi-slash-square::before { content: "\f569"; } +.bi-slash::before { content: "\f56a"; } +.bi-sliders::before { content: "\f56b"; } +.bi-smartwatch::before { content: "\f56c"; } +.bi-snow::before { content: "\f56d"; } +.bi-snow2::before { content: "\f56e"; } +.bi-snow3::before { content: "\f56f"; } +.bi-sort-alpha-down-alt::before { content: "\f570"; } +.bi-sort-alpha-down::before { content: "\f571"; } +.bi-sort-alpha-up-alt::before { content: "\f572"; } +.bi-sort-alpha-up::before { content: "\f573"; } +.bi-sort-down-alt::before { content: "\f574"; } +.bi-sort-down::before { content: "\f575"; } +.bi-sort-numeric-down-alt::before { content: "\f576"; } +.bi-sort-numeric-down::before { content: "\f577"; } +.bi-sort-numeric-up-alt::before { content: "\f578"; } +.bi-sort-numeric-up::before { content: "\f579"; } +.bi-sort-up-alt::before { content: "\f57a"; } +.bi-sort-up::before { content: "\f57b"; } +.bi-soundwave::before { content: "\f57c"; } +.bi-speaker-fill::before { content: "\f57d"; } +.bi-speaker::before { content: "\f57e"; } +.bi-speedometer::before { content: "\f57f"; } +.bi-speedometer2::before { content: "\f580"; } +.bi-spellcheck::before { content: "\f581"; } +.bi-square-fill::before { content: "\f582"; } +.bi-square-half::before { content: "\f583"; } +.bi-square::before { content: "\f584"; } +.bi-stack::before { content: "\f585"; } +.bi-star-fill::before { content: "\f586"; } +.bi-star-half::before { content: "\f587"; } +.bi-star::before { content: "\f588"; } +.bi-stars::before { content: "\f589"; } +.bi-stickies-fill::before { content: "\f58a"; } +.bi-stickies::before { content: "\f58b"; } +.bi-sticky-fill::before { content: "\f58c"; } +.bi-sticky::before { content: "\f58d"; } +.bi-stop-btn-fill::before { content: "\f58e"; } +.bi-stop-btn::before { content: "\f58f"; } +.bi-stop-circle-fill::before { content: "\f590"; } +.bi-stop-circle::before { content: "\f591"; } +.bi-stop-fill::before { content: "\f592"; } +.bi-stop::before { content: "\f593"; } +.bi-stoplights-fill::before { content: "\f594"; } +.bi-stoplights::before { content: "\f595"; } +.bi-stopwatch-fill::before { content: "\f596"; } +.bi-stopwatch::before { content: "\f597"; } +.bi-subtract::before { content: "\f598"; } +.bi-suit-club-fill::before { content: "\f599"; } +.bi-suit-club::before { content: "\f59a"; } +.bi-suit-diamond-fill::before { content: "\f59b"; } +.bi-suit-diamond::before { content: "\f59c"; } +.bi-suit-heart-fill::before { content: "\f59d"; } +.bi-suit-heart::before { content: 
"\f59e"; } +.bi-suit-spade-fill::before { content: "\f59f"; } +.bi-suit-spade::before { content: "\f5a0"; } +.bi-sun-fill::before { content: "\f5a1"; } +.bi-sun::before { content: "\f5a2"; } +.bi-sunglasses::before { content: "\f5a3"; } +.bi-sunrise-fill::before { content: "\f5a4"; } +.bi-sunrise::before { content: "\f5a5"; } +.bi-sunset-fill::before { content: "\f5a6"; } +.bi-sunset::before { content: "\f5a7"; } +.bi-symmetry-horizontal::before { content: "\f5a8"; } +.bi-symmetry-vertical::before { content: "\f5a9"; } +.bi-table::before { content: "\f5aa"; } +.bi-tablet-fill::before { content: "\f5ab"; } +.bi-tablet-landscape-fill::before { content: "\f5ac"; } +.bi-tablet-landscape::before { content: "\f5ad"; } +.bi-tablet::before { content: "\f5ae"; } +.bi-tag-fill::before { content: "\f5af"; } +.bi-tag::before { content: "\f5b0"; } +.bi-tags-fill::before { content: "\f5b1"; } +.bi-tags::before { content: "\f5b2"; } +.bi-telegram::before { content: "\f5b3"; } +.bi-telephone-fill::before { content: "\f5b4"; } +.bi-telephone-forward-fill::before { content: "\f5b5"; } +.bi-telephone-forward::before { content: "\f5b6"; } +.bi-telephone-inbound-fill::before { content: "\f5b7"; } +.bi-telephone-inbound::before { content: "\f5b8"; } +.bi-telephone-minus-fill::before { content: "\f5b9"; } +.bi-telephone-minus::before { content: "\f5ba"; } +.bi-telephone-outbound-fill::before { content: "\f5bb"; } +.bi-telephone-outbound::before { content: "\f5bc"; } +.bi-telephone-plus-fill::before { content: "\f5bd"; } +.bi-telephone-plus::before { content: "\f5be"; } +.bi-telephone-x-fill::before { content: "\f5bf"; } +.bi-telephone-x::before { content: "\f5c0"; } +.bi-telephone::before { content: "\f5c1"; } +.bi-terminal-fill::before { content: "\f5c2"; } +.bi-terminal::before { content: "\f5c3"; } +.bi-text-center::before { content: "\f5c4"; } +.bi-text-indent-left::before { content: "\f5c5"; } +.bi-text-indent-right::before { content: "\f5c6"; } +.bi-text-left::before { content: "\f5c7"; } +.bi-text-paragraph::before { content: "\f5c8"; } +.bi-text-right::before { content: "\f5c9"; } +.bi-textarea-resize::before { content: "\f5ca"; } +.bi-textarea-t::before { content: "\f5cb"; } +.bi-textarea::before { content: "\f5cc"; } +.bi-thermometer-half::before { content: "\f5cd"; } +.bi-thermometer-high::before { content: "\f5ce"; } +.bi-thermometer-low::before { content: "\f5cf"; } +.bi-thermometer-snow::before { content: "\f5d0"; } +.bi-thermometer-sun::before { content: "\f5d1"; } +.bi-thermometer::before { content: "\f5d2"; } +.bi-three-dots-vertical::before { content: "\f5d3"; } +.bi-three-dots::before { content: "\f5d4"; } +.bi-toggle-off::before { content: "\f5d5"; } +.bi-toggle-on::before { content: "\f5d6"; } +.bi-toggle2-off::before { content: "\f5d7"; } +.bi-toggle2-on::before { content: "\f5d8"; } +.bi-toggles::before { content: "\f5d9"; } +.bi-toggles2::before { content: "\f5da"; } +.bi-tools::before { content: "\f5db"; } +.bi-tornado::before { content: "\f5dc"; } +.bi-trash-fill::before { content: "\f5dd"; } +.bi-trash::before { content: "\f5de"; } +.bi-trash2-fill::before { content: "\f5df"; } +.bi-trash2::before { content: "\f5e0"; } +.bi-tree-fill::before { content: "\f5e1"; } +.bi-tree::before { content: "\f5e2"; } +.bi-triangle-fill::before { content: "\f5e3"; } +.bi-triangle-half::before { content: "\f5e4"; } +.bi-triangle::before { content: "\f5e5"; } +.bi-trophy-fill::before { content: "\f5e6"; } +.bi-trophy::before { content: "\f5e7"; } +.bi-tropical-storm::before { content: "\f5e8"; } 
+.bi-truck-flatbed::before { content: "\f5e9"; } +.bi-truck::before { content: "\f5ea"; } +.bi-tsunami::before { content: "\f5eb"; } +.bi-tv-fill::before { content: "\f5ec"; } +.bi-tv::before { content: "\f5ed"; } +.bi-twitch::before { content: "\f5ee"; } +.bi-twitter::before { content: "\f5ef"; } +.bi-type-bold::before { content: "\f5f0"; } +.bi-type-h1::before { content: "\f5f1"; } +.bi-type-h2::before { content: "\f5f2"; } +.bi-type-h3::before { content: "\f5f3"; } +.bi-type-italic::before { content: "\f5f4"; } +.bi-type-strikethrough::before { content: "\f5f5"; } +.bi-type-underline::before { content: "\f5f6"; } +.bi-type::before { content: "\f5f7"; } +.bi-ui-checks-grid::before { content: "\f5f8"; } +.bi-ui-checks::before { content: "\f5f9"; } +.bi-ui-radios-grid::before { content: "\f5fa"; } +.bi-ui-radios::before { content: "\f5fb"; } +.bi-umbrella-fill::before { content: "\f5fc"; } +.bi-umbrella::before { content: "\f5fd"; } +.bi-union::before { content: "\f5fe"; } +.bi-unlock-fill::before { content: "\f5ff"; } +.bi-unlock::before { content: "\f600"; } +.bi-upc-scan::before { content: "\f601"; } +.bi-upc::before { content: "\f602"; } +.bi-upload::before { content: "\f603"; } +.bi-vector-pen::before { content: "\f604"; } +.bi-view-list::before { content: "\f605"; } +.bi-view-stacked::before { content: "\f606"; } +.bi-vinyl-fill::before { content: "\f607"; } +.bi-vinyl::before { content: "\f608"; } +.bi-voicemail::before { content: "\f609"; } +.bi-volume-down-fill::before { content: "\f60a"; } +.bi-volume-down::before { content: "\f60b"; } +.bi-volume-mute-fill::before { content: "\f60c"; } +.bi-volume-mute::before { content: "\f60d"; } +.bi-volume-off-fill::before { content: "\f60e"; } +.bi-volume-off::before { content: "\f60f"; } +.bi-volume-up-fill::before { content: "\f610"; } +.bi-volume-up::before { content: "\f611"; } +.bi-vr::before { content: "\f612"; } +.bi-wallet-fill::before { content: "\f613"; } +.bi-wallet::before { content: "\f614"; } +.bi-wallet2::before { content: "\f615"; } +.bi-watch::before { content: "\f616"; } +.bi-water::before { content: "\f617"; } +.bi-whatsapp::before { content: "\f618"; } +.bi-wifi-1::before { content: "\f619"; } +.bi-wifi-2::before { content: "\f61a"; } +.bi-wifi-off::before { content: "\f61b"; } +.bi-wifi::before { content: "\f61c"; } +.bi-wind::before { content: "\f61d"; } +.bi-window-dock::before { content: "\f61e"; } +.bi-window-sidebar::before { content: "\f61f"; } +.bi-window::before { content: "\f620"; } +.bi-wrench::before { content: "\f621"; } +.bi-x-circle-fill::before { content: "\f622"; } +.bi-x-circle::before { content: "\f623"; } +.bi-x-diamond-fill::before { content: "\f624"; } +.bi-x-diamond::before { content: "\f625"; } +.bi-x-octagon-fill::before { content: "\f626"; } +.bi-x-octagon::before { content: "\f627"; } +.bi-x-square-fill::before { content: "\f628"; } +.bi-x-square::before { content: "\f629"; } +.bi-x::before { content: "\f62a"; } +.bi-youtube::before { content: "\f62b"; } +.bi-zoom-in::before { content: "\f62c"; } +.bi-zoom-out::before { content: "\f62d"; } +.bi-bank::before { content: "\f62e"; } +.bi-bank2::before { content: "\f62f"; } +.bi-bell-slash-fill::before { content: "\f630"; } +.bi-bell-slash::before { content: "\f631"; } +.bi-cash-coin::before { content: "\f632"; } +.bi-check-lg::before { content: "\f633"; } +.bi-coin::before { content: "\f634"; } +.bi-currency-bitcoin::before { content: "\f635"; } +.bi-currency-dollar::before { content: "\f636"; } +.bi-currency-euro::before { content: "\f637"; } 
+.bi-currency-exchange::before { content: "\f638"; } +.bi-currency-pound::before { content: "\f639"; } +.bi-currency-yen::before { content: "\f63a"; } +.bi-dash-lg::before { content: "\f63b"; } +.bi-exclamation-lg::before { content: "\f63c"; } +.bi-file-earmark-pdf-fill::before { content: "\f63d"; } +.bi-file-earmark-pdf::before { content: "\f63e"; } +.bi-file-pdf-fill::before { content: "\f63f"; } +.bi-file-pdf::before { content: "\f640"; } +.bi-gender-ambiguous::before { content: "\f641"; } +.bi-gender-female::before { content: "\f642"; } +.bi-gender-male::before { content: "\f643"; } +.bi-gender-trans::before { content: "\f644"; } +.bi-headset-vr::before { content: "\f645"; } +.bi-info-lg::before { content: "\f646"; } +.bi-mastodon::before { content: "\f647"; } +.bi-messenger::before { content: "\f648"; } +.bi-piggy-bank-fill::before { content: "\f649"; } +.bi-piggy-bank::before { content: "\f64a"; } +.bi-pin-map-fill::before { content: "\f64b"; } +.bi-pin-map::before { content: "\f64c"; } +.bi-plus-lg::before { content: "\f64d"; } +.bi-question-lg::before { content: "\f64e"; } +.bi-recycle::before { content: "\f64f"; } +.bi-reddit::before { content: "\f650"; } +.bi-safe-fill::before { content: "\f651"; } +.bi-safe2-fill::before { content: "\f652"; } +.bi-safe2::before { content: "\f653"; } +.bi-sd-card-fill::before { content: "\f654"; } +.bi-sd-card::before { content: "\f655"; } +.bi-skype::before { content: "\f656"; } +.bi-slash-lg::before { content: "\f657"; } +.bi-translate::before { content: "\f658"; } +.bi-x-lg::before { content: "\f659"; } +.bi-safe::before { content: "\f65a"; } +.bi-apple::before { content: "\f65b"; } +.bi-microsoft::before { content: "\f65d"; } +.bi-windows::before { content: "\f65e"; } +.bi-behance::before { content: "\f65c"; } +.bi-dribbble::before { content: "\f65f"; } +.bi-line::before { content: "\f660"; } +.bi-medium::before { content: "\f661"; } +.bi-paypal::before { content: "\f662"; } +.bi-pinterest::before { content: "\f663"; } +.bi-signal::before { content: "\f664"; } +.bi-snapchat::before { content: "\f665"; } +.bi-spotify::before { content: "\f666"; } +.bi-stack-overflow::before { content: "\f667"; } +.bi-strava::before { content: "\f668"; } +.bi-wordpress::before { content: "\f669"; } +.bi-vimeo::before { content: "\f66a"; } +.bi-activity::before { content: "\f66b"; } +.bi-easel2-fill::before { content: "\f66c"; } +.bi-easel2::before { content: "\f66d"; } +.bi-easel3-fill::before { content: "\f66e"; } +.bi-easel3::before { content: "\f66f"; } +.bi-fan::before { content: "\f670"; } +.bi-fingerprint::before { content: "\f671"; } +.bi-graph-down-arrow::before { content: "\f672"; } +.bi-graph-up-arrow::before { content: "\f673"; } +.bi-hypnotize::before { content: "\f674"; } +.bi-magic::before { content: "\f675"; } +.bi-person-rolodex::before { content: "\f676"; } +.bi-person-video::before { content: "\f677"; } +.bi-person-video2::before { content: "\f678"; } +.bi-person-video3::before { content: "\f679"; } +.bi-person-workspace::before { content: "\f67a"; } +.bi-radioactive::before { content: "\f67b"; } +.bi-webcam-fill::before { content: "\f67c"; } +.bi-webcam::before { content: "\f67d"; } +.bi-yin-yang::before { content: "\f67e"; } +.bi-bandaid-fill::before { content: "\f680"; } +.bi-bandaid::before { content: "\f681"; } +.bi-bluetooth::before { content: "\f682"; } +.bi-body-text::before { content: "\f683"; } +.bi-boombox::before { content: "\f684"; } +.bi-boxes::before { content: "\f685"; } +.bi-dpad-fill::before { content: "\f686"; } 
+.bi-dpad::before { content: "\f687"; } +.bi-ear-fill::before { content: "\f688"; } +.bi-ear::before { content: "\f689"; } +.bi-envelope-check-1::before { content: "\f68a"; } +.bi-envelope-check-fill::before { content: "\f68b"; } +.bi-envelope-check::before { content: "\f68c"; } +.bi-envelope-dash-1::before { content: "\f68d"; } +.bi-envelope-dash-fill::before { content: "\f68e"; } +.bi-envelope-dash::before { content: "\f68f"; } +.bi-envelope-exclamation-1::before { content: "\f690"; } +.bi-envelope-exclamation-fill::before { content: "\f691"; } +.bi-envelope-exclamation::before { content: "\f692"; } +.bi-envelope-plus-fill::before { content: "\f693"; } +.bi-envelope-plus::before { content: "\f694"; } +.bi-envelope-slash-1::before { content: "\f695"; } +.bi-envelope-slash-fill::before { content: "\f696"; } +.bi-envelope-slash::before { content: "\f697"; } +.bi-envelope-x-1::before { content: "\f698"; } +.bi-envelope-x-fill::before { content: "\f699"; } +.bi-envelope-x::before { content: "\f69a"; } +.bi-explicit-fill::before { content: "\f69b"; } +.bi-explicit::before { content: "\f69c"; } +.bi-git::before { content: "\f69d"; } +.bi-infinity::before { content: "\f69e"; } +.bi-list-columns-reverse::before { content: "\f69f"; } +.bi-list-columns::before { content: "\f6a0"; } +.bi-meta::before { content: "\f6a1"; } +.bi-mortorboard-fill::before { content: "\f6a2"; } +.bi-mortorboard::before { content: "\f6a3"; } +.bi-nintendo-switch::before { content: "\f6a4"; } +.bi-pc-display-horizontal::before { content: "\f6a5"; } +.bi-pc-display::before { content: "\f6a6"; } +.bi-pc-horizontal::before { content: "\f6a7"; } +.bi-pc::before { content: "\f6a8"; } +.bi-playstation::before { content: "\f6a9"; } +.bi-plus-slash-minus::before { content: "\f6aa"; } +.bi-projector-fill::before { content: "\f6ab"; } +.bi-projector::before { content: "\f6ac"; } +.bi-qr-code-scan::before { content: "\f6ad"; } +.bi-qr-code::before { content: "\f6ae"; } +.bi-quora::before { content: "\f6af"; } +.bi-quote::before { content: "\f6b0"; } +.bi-robot::before { content: "\f6b1"; } +.bi-send-check-fill::before { content: "\f6b2"; } +.bi-send-check::before { content: "\f6b3"; } +.bi-send-dash-fill::before { content: "\f6b4"; } +.bi-send-dash::before { content: "\f6b5"; } +.bi-send-exclamation-1::before { content: "\f6b6"; } +.bi-send-exclamation-fill::before { content: "\f6b7"; } +.bi-send-exclamation::before { content: "\f6b8"; } +.bi-send-fill::before { content: "\f6b9"; } +.bi-send-plus-fill::before { content: "\f6ba"; } +.bi-send-plus::before { content: "\f6bb"; } +.bi-send-slash-fill::before { content: "\f6bc"; } +.bi-send-slash::before { content: "\f6bd"; } +.bi-send-x-fill::before { content: "\f6be"; } +.bi-send-x::before { content: "\f6bf"; } +.bi-send::before { content: "\f6c0"; } +.bi-steam::before { content: "\f6c1"; } +.bi-terminal-dash-1::before { content: "\f6c2"; } +.bi-terminal-dash::before { content: "\f6c3"; } +.bi-terminal-plus::before { content: "\f6c4"; } +.bi-terminal-split::before { content: "\f6c5"; } +.bi-ticket-detailed-fill::before { content: "\f6c6"; } +.bi-ticket-detailed::before { content: "\f6c7"; } +.bi-ticket-fill::before { content: "\f6c8"; } +.bi-ticket-perforated-fill::before { content: "\f6c9"; } +.bi-ticket-perforated::before { content: "\f6ca"; } +.bi-ticket::before { content: "\f6cb"; } +.bi-tiktok::before { content: "\f6cc"; } +.bi-window-dash::before { content: "\f6cd"; } +.bi-window-desktop::before { content: "\f6ce"; } +.bi-window-fullscreen::before { content: "\f6cf"; } 
+.bi-window-plus::before { content: "\f6d0"; } +.bi-window-split::before { content: "\f6d1"; } +.bi-window-stack::before { content: "\f6d2"; } +.bi-window-x::before { content: "\f6d3"; } +.bi-xbox::before { content: "\f6d4"; } +.bi-ethernet::before { content: "\f6d5"; } +.bi-hdmi-fill::before { content: "\f6d6"; } +.bi-hdmi::before { content: "\f6d7"; } +.bi-usb-c-fill::before { content: "\f6d8"; } +.bi-usb-c::before { content: "\f6d9"; } +.bi-usb-fill::before { content: "\f6da"; } +.bi-usb-plug-fill::before { content: "\f6db"; } +.bi-usb-plug::before { content: "\f6dc"; } +.bi-usb-symbol::before { content: "\f6dd"; } +.bi-usb::before { content: "\f6de"; } +.bi-boombox-fill::before { content: "\f6df"; } +.bi-displayport-1::before { content: "\f6e0"; } +.bi-displayport::before { content: "\f6e1"; } +.bi-gpu-card::before { content: "\f6e2"; } +.bi-memory::before { content: "\f6e3"; } +.bi-modem-fill::before { content: "\f6e4"; } +.bi-modem::before { content: "\f6e5"; } +.bi-motherboard-fill::before { content: "\f6e6"; } +.bi-motherboard::before { content: "\f6e7"; } +.bi-optical-audio-fill::before { content: "\f6e8"; } +.bi-optical-audio::before { content: "\f6e9"; } +.bi-pci-card::before { content: "\f6ea"; } +.bi-router-fill::before { content: "\f6eb"; } +.bi-router::before { content: "\f6ec"; } +.bi-ssd-fill::before { content: "\f6ed"; } +.bi-ssd::before { content: "\f6ee"; } +.bi-thunderbolt-fill::before { content: "\f6ef"; } +.bi-thunderbolt::before { content: "\f6f0"; } +.bi-usb-drive-fill::before { content: "\f6f1"; } +.bi-usb-drive::before { content: "\f6f2"; } +.bi-usb-micro-fill::before { content: "\f6f3"; } +.bi-usb-micro::before { content: "\f6f4"; } +.bi-usb-mini-fill::before { content: "\f6f5"; } +.bi-usb-mini::before { content: "\f6f6"; } +.bi-cloud-haze2::before { content: "\f6f7"; } +.bi-device-hdd-fill::before { content: "\f6f8"; } +.bi-device-hdd::before { content: "\f6f9"; } +.bi-device-ssd-fill::before { content: "\f6fa"; } +.bi-device-ssd::before { content: "\f6fb"; } +.bi-displayport-fill::before { content: "\f6fc"; } +.bi-mortarboard-fill::before { content: "\f6fd"; } +.bi-mortarboard::before { content: "\f6fe"; } +.bi-terminal-x::before { content: "\f6ff"; } +.bi-arrow-through-heart-fill::before { content: "\f700"; } +.bi-arrow-through-heart::before { content: "\f701"; } +.bi-badge-sd-fill::before { content: "\f702"; } +.bi-badge-sd::before { content: "\f703"; } +.bi-bag-heart-fill::before { content: "\f704"; } +.bi-bag-heart::before { content: "\f705"; } +.bi-balloon-fill::before { content: "\f706"; } +.bi-balloon-heart-fill::before { content: "\f707"; } +.bi-balloon-heart::before { content: "\f708"; } +.bi-balloon::before { content: "\f709"; } +.bi-box2-fill::before { content: "\f70a"; } +.bi-box2-heart-fill::before { content: "\f70b"; } +.bi-box2-heart::before { content: "\f70c"; } +.bi-box2::before { content: "\f70d"; } +.bi-braces-asterisk::before { content: "\f70e"; } +.bi-calendar-heart-fill::before { content: "\f70f"; } +.bi-calendar-heart::before { content: "\f710"; } +.bi-calendar2-heart-fill::before { content: "\f711"; } +.bi-calendar2-heart::before { content: "\f712"; } +.bi-chat-heart-fill::before { content: "\f713"; } +.bi-chat-heart::before { content: "\f714"; } +.bi-chat-left-heart-fill::before { content: "\f715"; } +.bi-chat-left-heart::before { content: "\f716"; } +.bi-chat-right-heart-fill::before { content: "\f717"; } +.bi-chat-right-heart::before { content: "\f718"; } +.bi-chat-square-heart-fill::before { content: "\f719"; } 
+.bi-chat-square-heart::before { content: "\f71a"; } +.bi-clipboard-check-fill::before { content: "\f71b"; } +.bi-clipboard-data-fill::before { content: "\f71c"; } +.bi-clipboard-fill::before { content: "\f71d"; } +.bi-clipboard-heart-fill::before { content: "\f71e"; } +.bi-clipboard-heart::before { content: "\f71f"; } +.bi-clipboard-minus-fill::before { content: "\f720"; } +.bi-clipboard-plus-fill::before { content: "\f721"; } +.bi-clipboard-pulse::before { content: "\f722"; } +.bi-clipboard-x-fill::before { content: "\f723"; } +.bi-clipboard2-check-fill::before { content: "\f724"; } +.bi-clipboard2-check::before { content: "\f725"; } +.bi-clipboard2-data-fill::before { content: "\f726"; } +.bi-clipboard2-data::before { content: "\f727"; } +.bi-clipboard2-fill::before { content: "\f728"; } +.bi-clipboard2-heart-fill::before { content: "\f729"; } +.bi-clipboard2-heart::before { content: "\f72a"; } +.bi-clipboard2-minus-fill::before { content: "\f72b"; } +.bi-clipboard2-minus::before { content: "\f72c"; } +.bi-clipboard2-plus-fill::before { content: "\f72d"; } +.bi-clipboard2-plus::before { content: "\f72e"; } +.bi-clipboard2-pulse-fill::before { content: "\f72f"; } +.bi-clipboard2-pulse::before { content: "\f730"; } +.bi-clipboard2-x-fill::before { content: "\f731"; } +.bi-clipboard2-x::before { content: "\f732"; } +.bi-clipboard2::before { content: "\f733"; } +.bi-emoji-kiss-fill::before { content: "\f734"; } +.bi-emoji-kiss::before { content: "\f735"; } +.bi-envelope-heart-fill::before { content: "\f736"; } +.bi-envelope-heart::before { content: "\f737"; } +.bi-envelope-open-heart-fill::before { content: "\f738"; } +.bi-envelope-open-heart::before { content: "\f739"; } +.bi-envelope-paper-fill::before { content: "\f73a"; } +.bi-envelope-paper-heart-fill::before { content: "\f73b"; } +.bi-envelope-paper-heart::before { content: "\f73c"; } +.bi-envelope-paper::before { content: "\f73d"; } +.bi-filetype-aac::before { content: "\f73e"; } +.bi-filetype-ai::before { content: "\f73f"; } +.bi-filetype-bmp::before { content: "\f740"; } +.bi-filetype-cs::before { content: "\f741"; } +.bi-filetype-css::before { content: "\f742"; } +.bi-filetype-csv::before { content: "\f743"; } +.bi-filetype-doc::before { content: "\f744"; } +.bi-filetype-docx::before { content: "\f745"; } +.bi-filetype-exe::before { content: "\f746"; } +.bi-filetype-gif::before { content: "\f747"; } +.bi-filetype-heic::before { content: "\f748"; } +.bi-filetype-html::before { content: "\f749"; } +.bi-filetype-java::before { content: "\f74a"; } +.bi-filetype-jpg::before { content: "\f74b"; } +.bi-filetype-js::before { content: "\f74c"; } +.bi-filetype-jsx::before { content: "\f74d"; } +.bi-filetype-key::before { content: "\f74e"; } +.bi-filetype-m4p::before { content: "\f74f"; } +.bi-filetype-md::before { content: "\f750"; } +.bi-filetype-mdx::before { content: "\f751"; } +.bi-filetype-mov::before { content: "\f752"; } +.bi-filetype-mp3::before { content: "\f753"; } +.bi-filetype-mp4::before { content: "\f754"; } +.bi-filetype-otf::before { content: "\f755"; } +.bi-filetype-pdf::before { content: "\f756"; } +.bi-filetype-php::before { content: "\f757"; } +.bi-filetype-png::before { content: "\f758"; } +.bi-filetype-ppt-1::before { content: "\f759"; } +.bi-filetype-ppt::before { content: "\f75a"; } +.bi-filetype-psd::before { content: "\f75b"; } +.bi-filetype-py::before { content: "\f75c"; } +.bi-filetype-raw::before { content: "\f75d"; } +.bi-filetype-rb::before { content: "\f75e"; } +.bi-filetype-sass::before { content: "\f75f"; } 
+.bi-filetype-scss::before { content: "\f760"; } +.bi-filetype-sh::before { content: "\f761"; } +.bi-filetype-svg::before { content: "\f762"; } +.bi-filetype-tiff::before { content: "\f763"; } +.bi-filetype-tsx::before { content: "\f764"; } +.bi-filetype-ttf::before { content: "\f765"; } +.bi-filetype-txt::before { content: "\f766"; } +.bi-filetype-wav::before { content: "\f767"; } +.bi-filetype-woff::before { content: "\f768"; } +.bi-filetype-xls-1::before { content: "\f769"; } +.bi-filetype-xls::before { content: "\f76a"; } +.bi-filetype-xml::before { content: "\f76b"; } +.bi-filetype-yml::before { content: "\f76c"; } +.bi-heart-arrow::before { content: "\f76d"; } +.bi-heart-pulse-fill::before { content: "\f76e"; } +.bi-heart-pulse::before { content: "\f76f"; } +.bi-heartbreak-fill::before { content: "\f770"; } +.bi-heartbreak::before { content: "\f771"; } +.bi-hearts::before { content: "\f772"; } +.bi-hospital-fill::before { content: "\f773"; } +.bi-hospital::before { content: "\f774"; } +.bi-house-heart-fill::before { content: "\f775"; } +.bi-house-heart::before { content: "\f776"; } +.bi-incognito::before { content: "\f777"; } +.bi-magnet-fill::before { content: "\f778"; } +.bi-magnet::before { content: "\f779"; } +.bi-person-heart::before { content: "\f77a"; } +.bi-person-hearts::before { content: "\f77b"; } +.bi-phone-flip::before { content: "\f77c"; } +.bi-plugin::before { content: "\f77d"; } +.bi-postage-fill::before { content: "\f77e"; } +.bi-postage-heart-fill::before { content: "\f77f"; } +.bi-postage-heart::before { content: "\f780"; } +.bi-postage::before { content: "\f781"; } +.bi-postcard-fill::before { content: "\f782"; } +.bi-postcard-heart-fill::before { content: "\f783"; } +.bi-postcard-heart::before { content: "\f784"; } +.bi-postcard::before { content: "\f785"; } +.bi-search-heart-fill::before { content: "\f786"; } +.bi-search-heart::before { content: "\f787"; } +.bi-sliders2-vertical::before { content: "\f788"; } +.bi-sliders2::before { content: "\f789"; } +.bi-trash3-fill::before { content: "\f78a"; } +.bi-trash3::before { content: "\f78b"; } +.bi-valentine::before { content: "\f78c"; } +.bi-valentine2::before { content: "\f78d"; } +.bi-wrench-adjustable-circle-fill::before { content: "\f78e"; } +.bi-wrench-adjustable-circle::before { content: "\f78f"; } +.bi-wrench-adjustable::before { content: "\f790"; } +.bi-filetype-json::before { content: "\f791"; } +.bi-filetype-pptx::before { content: "\f792"; } +.bi-filetype-xlsx::before { content: "\f793"; } diff --git a/resources/stac_mount_save_files/libs/bootstrap/bootstrap-icons.woff b/resources/stac_mount_save_files/libs/bootstrap/bootstrap-icons.woff new file mode 100644 index 0000000..b26ccd1 Binary files /dev/null and b/resources/stac_mount_save_files/libs/bootstrap/bootstrap-icons.woff differ diff --git a/resources/stac_mount_save_files/libs/bootstrap/bootstrap.min.css b/resources/stac_mount_save_files/libs/bootstrap/bootstrap.min.css new file mode 100644 index 0000000..2b6003a --- /dev/null +++ b/resources/stac_mount_save_files/libs/bootstrap/bootstrap.min.css @@ -0,0 +1,10 @@ +/*! + * Bootstrap v5.1.3 (https://getbootstrap.com/) + * Copyright 2011-2021 The Bootstrap Authors + * Copyright 2011-2021 Twitter, Inc. 
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE) + */:root{--bs-blue: #0d6efd;--bs-indigo: #6610f2;--bs-purple: #6f42c1;--bs-pink: #d63384;--bs-red: #dc3545;--bs-orange: #fd7e14;--bs-yellow: #ffc107;--bs-green: #198754;--bs-teal: #20c997;--bs-cyan: #0dcaf0;--bs-white: #ffffff;--bs-gray: #6c757d;--bs-gray-dark: #343a40;--bs-gray-100: #f8f9fa;--bs-gray-200: #e9ecef;--bs-gray-300: #dee2e6;--bs-gray-400: #ced4da;--bs-gray-500: #adb5bd;--bs-gray-600: #6c757d;--bs-gray-700: #495057;--bs-gray-800: #343a40;--bs-gray-900: #212529;--bs-default: #dee2e6;--bs-primary: #0d6efd;--bs-secondary: #6c757d;--bs-success: #198754;--bs-info: #0dcaf0;--bs-warning: #ffc107;--bs-danger: #dc3545;--bs-light: #f8f9fa;--bs-dark: #212529;--bs-default-rgb: 222, 226, 230;--bs-primary-rgb: 13, 110, 253;--bs-secondary-rgb: 108, 117, 125;--bs-success-rgb: 25, 135, 84;--bs-info-rgb: 13, 202, 240;--bs-warning-rgb: 255, 193, 7;--bs-danger-rgb: 220, 53, 69;--bs-light-rgb: 248, 249, 250;--bs-dark-rgb: 33, 37, 41;--bs-white-rgb: 255, 255, 255;--bs-black-rgb: 0, 0, 0;--bs-body-color-rgb: 33, 37, 41;--bs-body-bg-rgb: 255, 255, 255;--bs-font-sans-serif: system-ui, -apple-system, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", "Liberation Sans", sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji";--bs-font-monospace: SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;--bs-gradient: linear-gradient(180deg, rgba(255, 255, 255, 0.15), rgba(255, 255, 255, 0));--bs-root-font-size: 18px;--bs-body-font-family: var(--bs-font-sans-serif);--bs-body-font-size: 1rem;--bs-body-font-weight: 400;--bs-body-line-height: 1.5;--bs-body-color: #212529;--bs-body-bg: #ffffff}*,*::before,*::after{box-sizing:border-box}:root{font-size:var(--bs-root-font-size)}body{margin:0;font-family:var(--bs-body-font-family);font-size:var(--bs-body-font-size);font-weight:var(--bs-body-font-weight);line-height:var(--bs-body-line-height);color:var(--bs-body-color);text-align:var(--bs-body-text-align);background-color:var(--bs-body-bg);-webkit-text-size-adjust:100%;-webkit-tap-highlight-color:rgba(0,0,0,0)}hr{margin:1rem 0;color:inherit;background-color:currentColor;border:0;opacity:.25}hr:not([size]){height:1px}h6,.h6,h5,.h5,h4,.h4,h3,.h3,h2,.h2,h1,.h1{margin-top:0;margin-bottom:.5rem;font-weight:500;line-height:1.2}h1,.h1{font-size:calc(1.345rem + 1.14vw)}@media(min-width: 1200px){h1,.h1{font-size:2.2rem}}h2,.h2{font-size:calc(1.3rem + 0.6vw)}@media(min-width: 1200px){h2,.h2{font-size:1.75rem}}h3,.h3{font-size:calc(1.275rem + 0.3vw)}@media(min-width: 1200px){h3,.h3{font-size:1.5rem}}h4,.h4{font-size:1.25rem}h5,.h5{font-size:1.1rem}h6,.h6{font-size:1rem}p{margin-top:0;margin-bottom:1rem}abbr[title],abbr[data-bs-original-title]{text-decoration:underline dotted;-webkit-text-decoration:underline dotted;-moz-text-decoration:underline dotted;-ms-text-decoration:underline dotted;-o-text-decoration:underline dotted;cursor:help;text-decoration-skip-ink:none}address{margin-bottom:1rem;font-style:normal;line-height:inherit}ol,ul{padding-left:2rem}ol,ul,dl{margin-top:0;margin-bottom:1rem}ol ol,ul ul,ol ul,ul ol{margin-bottom:0}dt{font-weight:700}dd{margin-bottom:.5rem;margin-left:0}blockquote{margin:0 0 1rem;padding:.625rem 1.25rem;border-left:.25rem solid #e9ecef}blockquote p:last-child,blockquote ul:last-child,blockquote 
ol:last-child{margin-bottom:0}b,strong{font-weight:bolder}small,.small{font-size:0.875em}mark,.mark{padding:.2em;background-color:#fcf8e3}sub,sup{position:relative;font-size:0.75em;line-height:0;vertical-align:baseline}sub{bottom:-0.25em}sup{top:-0.5em}a{color:#0d6efd;text-decoration:underline;-webkit-text-decoration:underline;-moz-text-decoration:underline;-ms-text-decoration:underline;-o-text-decoration:underline}a:hover{color:#0a58ca}a:not([href]):not([class]),a:not([href]):not([class]):hover{color:inherit;text-decoration:none}pre,code,kbd,samp{font-family:var(--bs-font-monospace);font-size:1em;direction:ltr /* rtl:ignore */;unicode-bidi:bidi-override}pre{display:block;margin-top:0;margin-bottom:1rem;overflow:auto;font-size:0.875em;color:#000;background-color:#f6f6f6;padding:.5rem;border:1px solid #dee2e6;border-radius:.25rem}pre code{background-color:transparent;font-size:inherit;color:inherit;word-break:normal}code{font-size:0.875em;color:#9954bb;background-color:#f6f6f6;border-radius:.25rem;padding:.125rem .25rem;word-wrap:break-word}a>code{color:inherit}kbd{padding:.4rem .4rem;font-size:0.875em;color:#fff;background-color:#212529;border-radius:.2rem}kbd kbd{padding:0;font-size:1em;font-weight:700}figure{margin:0 0 1rem}img,svg{vertical-align:middle}table{caption-side:bottom;border-collapse:collapse}caption{padding-top:.5rem;padding-bottom:.5rem;color:#6c757d;text-align:left}th{text-align:inherit;text-align:-webkit-match-parent}thead,tbody,tfoot,tr,td,th{border-color:inherit;border-style:solid;border-width:0}label{display:inline-block}button{border-radius:0}button:focus:not(:focus-visible){outline:0}input,button,select,optgroup,textarea{margin:0;font-family:inherit;font-size:inherit;line-height:inherit}button,select{text-transform:none}[role=button]{cursor:pointer}select{word-wrap:normal}select:disabled{opacity:1}[list]::-webkit-calendar-picker-indicator{display:none}button,[type=button],[type=reset],[type=submit]{-webkit-appearance:button}button:not(:disabled),[type=button]:not(:disabled),[type=reset]:not(:disabled),[type=submit]:not(:disabled){cursor:pointer}::-moz-focus-inner{padding:0;border-style:none}textarea{resize:vertical}fieldset{min-width:0;padding:0;margin:0;border:0}legend{float:left;width:100%;padding:0;margin-bottom:.5rem;font-size:calc(1.275rem + 0.3vw);line-height:inherit}@media(min-width: 1200px){legend{font-size:1.5rem}}legend+*{clear:left}::-webkit-datetime-edit-fields-wrapper,::-webkit-datetime-edit-text,::-webkit-datetime-edit-minute,::-webkit-datetime-edit-hour-field,::-webkit-datetime-edit-day-field,::-webkit-datetime-edit-month-field,::-webkit-datetime-edit-year-field{padding:0}::-webkit-inner-spin-button{height:auto}[type=search]{outline-offset:-2px;-webkit-appearance:textfield}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-color-swatch-wrapper{padding:0}::file-selector-button{font:inherit}::-webkit-file-upload-button{font:inherit;-webkit-appearance:button}output{display:inline-block}iframe{border:0}summary{display:list-item;cursor:pointer}progress{vertical-align:baseline}[hidden]{display:none !important}.lead{font-size:1.25rem;font-weight:300}.display-1{font-size:calc(1.625rem + 4.5vw);font-weight:300;line-height:1.2}@media(min-width: 1200px){.display-1{font-size:5rem}}.display-2{font-size:calc(1.575rem + 3.9vw);font-weight:300;line-height:1.2}@media(min-width: 1200px){.display-2{font-size:4.5rem}}.display-3{font-size:calc(1.525rem + 3.3vw);font-weight:300;line-height:1.2}@media(min-width: 
1200px){.display-3{font-size:4rem}}.display-4{font-size:calc(1.475rem + 2.7vw);font-weight:300;line-height:1.2}@media(min-width: 1200px){.display-4{font-size:3.5rem}}.display-5{font-size:calc(1.425rem + 2.1vw);font-weight:300;line-height:1.2}@media(min-width: 1200px){.display-5{font-size:3rem}}.display-6{font-size:calc(1.375rem + 1.5vw);font-weight:300;line-height:1.2}@media(min-width: 1200px){.display-6{font-size:2.5rem}}.list-unstyled{padding-left:0;list-style:none}.list-inline{padding-left:0;list-style:none}.list-inline-item{display:inline-block}.list-inline-item:not(:last-child){margin-right:.5rem}.initialism{font-size:0.875em;text-transform:uppercase}.blockquote{margin-bottom:1rem;font-size:1.25rem}.blockquote>:last-child{margin-bottom:0}.blockquote-footer{margin-top:-1rem;margin-bottom:1rem;font-size:0.875em;color:#6c757d}.blockquote-footer::before{content:"— "}.img-fluid{max-width:100%;height:auto}.img-thumbnail{padding:.25rem;background-color:#fff;border:1px solid #dee2e6;border-radius:.25rem;max-width:100%;height:auto}.figure{display:inline-block}.figure-img{margin-bottom:.5rem;line-height:1}.figure-caption{font-size:0.875em;color:#6c757d}.grid{display:grid;grid-template-rows:repeat(var(--bs-rows, 1), 1fr);grid-template-columns:repeat(var(--bs-columns, 12), 1fr);gap:var(--bs-gap, 1.5rem)}.grid .g-col-1{grid-column:auto/span 1}.grid .g-col-2{grid-column:auto/span 2}.grid .g-col-3{grid-column:auto/span 3}.grid .g-col-4{grid-column:auto/span 4}.grid .g-col-5{grid-column:auto/span 5}.grid .g-col-6{grid-column:auto/span 6}.grid .g-col-7{grid-column:auto/span 7}.grid .g-col-8{grid-column:auto/span 8}.grid .g-col-9{grid-column:auto/span 9}.grid .g-col-10{grid-column:auto/span 10}.grid .g-col-11{grid-column:auto/span 11}.grid .g-col-12{grid-column:auto/span 12}.grid .g-start-1{grid-column-start:1}.grid .g-start-2{grid-column-start:2}.grid .g-start-3{grid-column-start:3}.grid .g-start-4{grid-column-start:4}.grid .g-start-5{grid-column-start:5}.grid .g-start-6{grid-column-start:6}.grid .g-start-7{grid-column-start:7}.grid .g-start-8{grid-column-start:8}.grid .g-start-9{grid-column-start:9}.grid .g-start-10{grid-column-start:10}.grid .g-start-11{grid-column-start:11}@media(min-width: 576px){.grid .g-col-sm-1{grid-column:auto/span 1}.grid .g-col-sm-2{grid-column:auto/span 2}.grid .g-col-sm-3{grid-column:auto/span 3}.grid .g-col-sm-4{grid-column:auto/span 4}.grid .g-col-sm-5{grid-column:auto/span 5}.grid .g-col-sm-6{grid-column:auto/span 6}.grid .g-col-sm-7{grid-column:auto/span 7}.grid .g-col-sm-8{grid-column:auto/span 8}.grid .g-col-sm-9{grid-column:auto/span 9}.grid .g-col-sm-10{grid-column:auto/span 10}.grid .g-col-sm-11{grid-column:auto/span 11}.grid .g-col-sm-12{grid-column:auto/span 12}.grid .g-start-sm-1{grid-column-start:1}.grid .g-start-sm-2{grid-column-start:2}.grid .g-start-sm-3{grid-column-start:3}.grid .g-start-sm-4{grid-column-start:4}.grid .g-start-sm-5{grid-column-start:5}.grid .g-start-sm-6{grid-column-start:6}.grid .g-start-sm-7{grid-column-start:7}.grid .g-start-sm-8{grid-column-start:8}.grid .g-start-sm-9{grid-column-start:9}.grid .g-start-sm-10{grid-column-start:10}.grid .g-start-sm-11{grid-column-start:11}}@media(min-width: 768px){.grid .g-col-md-1{grid-column:auto/span 1}.grid .g-col-md-2{grid-column:auto/span 2}.grid .g-col-md-3{grid-column:auto/span 3}.grid .g-col-md-4{grid-column:auto/span 4}.grid .g-col-md-5{grid-column:auto/span 5}.grid .g-col-md-6{grid-column:auto/span 6}.grid .g-col-md-7{grid-column:auto/span 7}.grid .g-col-md-8{grid-column:auto/span 8}.grid 
.g-col-md-9{grid-column:auto/span 9}.grid .g-col-md-10{grid-column:auto/span 10}.grid .g-col-md-11{grid-column:auto/span 11}.grid .g-col-md-12{grid-column:auto/span 12}.grid .g-start-md-1{grid-column-start:1}.grid .g-start-md-2{grid-column-start:2}.grid .g-start-md-3{grid-column-start:3}.grid .g-start-md-4{grid-column-start:4}.grid .g-start-md-5{grid-column-start:5}.grid .g-start-md-6{grid-column-start:6}.grid .g-start-md-7{grid-column-start:7}.grid .g-start-md-8{grid-column-start:8}.grid .g-start-md-9{grid-column-start:9}.grid .g-start-md-10{grid-column-start:10}.grid .g-start-md-11{grid-column-start:11}}@media(min-width: 992px){.grid .g-col-lg-1{grid-column:auto/span 1}.grid .g-col-lg-2{grid-column:auto/span 2}.grid .g-col-lg-3{grid-column:auto/span 3}.grid .g-col-lg-4{grid-column:auto/span 4}.grid .g-col-lg-5{grid-column:auto/span 5}.grid .g-col-lg-6{grid-column:auto/span 6}.grid .g-col-lg-7{grid-column:auto/span 7}.grid .g-col-lg-8{grid-column:auto/span 8}.grid .g-col-lg-9{grid-column:auto/span 9}.grid .g-col-lg-10{grid-column:auto/span 10}.grid .g-col-lg-11{grid-column:auto/span 11}.grid .g-col-lg-12{grid-column:auto/span 12}.grid .g-start-lg-1{grid-column-start:1}.grid .g-start-lg-2{grid-column-start:2}.grid .g-start-lg-3{grid-column-start:3}.grid .g-start-lg-4{grid-column-start:4}.grid .g-start-lg-5{grid-column-start:5}.grid .g-start-lg-6{grid-column-start:6}.grid .g-start-lg-7{grid-column-start:7}.grid .g-start-lg-8{grid-column-start:8}.grid .g-start-lg-9{grid-column-start:9}.grid .g-start-lg-10{grid-column-start:10}.grid .g-start-lg-11{grid-column-start:11}}@media(min-width: 1200px){.grid .g-col-xl-1{grid-column:auto/span 1}.grid .g-col-xl-2{grid-column:auto/span 2}.grid .g-col-xl-3{grid-column:auto/span 3}.grid .g-col-xl-4{grid-column:auto/span 4}.grid .g-col-xl-5{grid-column:auto/span 5}.grid .g-col-xl-6{grid-column:auto/span 6}.grid .g-col-xl-7{grid-column:auto/span 7}.grid .g-col-xl-8{grid-column:auto/span 8}.grid .g-col-xl-9{grid-column:auto/span 9}.grid .g-col-xl-10{grid-column:auto/span 10}.grid .g-col-xl-11{grid-column:auto/span 11}.grid .g-col-xl-12{grid-column:auto/span 12}.grid .g-start-xl-1{grid-column-start:1}.grid .g-start-xl-2{grid-column-start:2}.grid .g-start-xl-3{grid-column-start:3}.grid .g-start-xl-4{grid-column-start:4}.grid .g-start-xl-5{grid-column-start:5}.grid .g-start-xl-6{grid-column-start:6}.grid .g-start-xl-7{grid-column-start:7}.grid .g-start-xl-8{grid-column-start:8}.grid .g-start-xl-9{grid-column-start:9}.grid .g-start-xl-10{grid-column-start:10}.grid .g-start-xl-11{grid-column-start:11}}@media(min-width: 1400px){.grid .g-col-xxl-1{grid-column:auto/span 1}.grid .g-col-xxl-2{grid-column:auto/span 2}.grid .g-col-xxl-3{grid-column:auto/span 3}.grid .g-col-xxl-4{grid-column:auto/span 4}.grid .g-col-xxl-5{grid-column:auto/span 5}.grid .g-col-xxl-6{grid-column:auto/span 6}.grid .g-col-xxl-7{grid-column:auto/span 7}.grid .g-col-xxl-8{grid-column:auto/span 8}.grid .g-col-xxl-9{grid-column:auto/span 9}.grid .g-col-xxl-10{grid-column:auto/span 10}.grid .g-col-xxl-11{grid-column:auto/span 11}.grid .g-col-xxl-12{grid-column:auto/span 12}.grid .g-start-xxl-1{grid-column-start:1}.grid .g-start-xxl-2{grid-column-start:2}.grid .g-start-xxl-3{grid-column-start:3}.grid .g-start-xxl-4{grid-column-start:4}.grid .g-start-xxl-5{grid-column-start:5}.grid .g-start-xxl-6{grid-column-start:6}.grid .g-start-xxl-7{grid-column-start:7}.grid .g-start-xxl-8{grid-column-start:8}.grid .g-start-xxl-9{grid-column-start:9}.grid .g-start-xxl-10{grid-column-start:10}.grid 
.g-start-xxl-11{grid-column-start:11}}.table{--bs-table-bg: transparent;--bs-table-accent-bg: transparent;--bs-table-striped-color: #212529;--bs-table-striped-bg: rgba(0, 0, 0, 0.05);--bs-table-active-color: #212529;--bs-table-active-bg: rgba(0, 0, 0, 0.1);--bs-table-hover-color: #212529;--bs-table-hover-bg: rgba(0, 0, 0, 0.075);width:100%;margin-bottom:1rem;color:#212529;vertical-align:top;border-color:#dee2e6}.table>:not(caption)>*>*{padding:.5rem .5rem;background-color:var(--bs-table-bg);border-bottom-width:1px;box-shadow:inset 0 0 0 9999px var(--bs-table-accent-bg)}.table>tbody{vertical-align:inherit}.table>thead{vertical-align:bottom}.table>:not(:first-child){border-top:2px solid currentColor}.caption-top{caption-side:top}.table-sm>:not(caption)>*>*{padding:.25rem .25rem}.table-bordered>:not(caption)>*{border-width:1px 0}.table-bordered>:not(caption)>*>*{border-width:0 1px}.table-borderless>:not(caption)>*>*{border-bottom-width:0}.table-borderless>:not(:first-child){border-top-width:0}.table-striped>tbody>tr:nth-of-type(odd)>*{--bs-table-accent-bg: var(--bs-table-striped-bg);color:var(--bs-table-striped-color)}.table-active{--bs-table-accent-bg: var(--bs-table-active-bg);color:var(--bs-table-active-color)}.table-hover>tbody>tr:hover>*{--bs-table-accent-bg: var(--bs-table-hover-bg);color:var(--bs-table-hover-color)}.table-primary{--bs-table-bg: #cfe2ff;--bs-table-striped-bg: #c5d7f2;--bs-table-striped-color: #000;--bs-table-active-bg: #bacbe6;--bs-table-active-color: #000;--bs-table-hover-bg: #bfd1ec;--bs-table-hover-color: #000;color:#000;border-color:#bacbe6}.table-secondary{--bs-table-bg: #e2e3e5;--bs-table-striped-bg: #d7d8da;--bs-table-striped-color: #000;--bs-table-active-bg: #cbccce;--bs-table-active-color: #000;--bs-table-hover-bg: #d1d2d4;--bs-table-hover-color: #000;color:#000;border-color:#cbccce}.table-success{--bs-table-bg: #d1e7dd;--bs-table-striped-bg: #c7dbd2;--bs-table-striped-color: #000;--bs-table-active-bg: #bcd0c7;--bs-table-active-color: #000;--bs-table-hover-bg: #c1d6cc;--bs-table-hover-color: #000;color:#000;border-color:#bcd0c7}.table-info{--bs-table-bg: #cff4fc;--bs-table-striped-bg: #c5e8ef;--bs-table-striped-color: #000;--bs-table-active-bg: #badce3;--bs-table-active-color: #000;--bs-table-hover-bg: #bfe2e9;--bs-table-hover-color: #000;color:#000;border-color:#badce3}.table-warning{--bs-table-bg: #fff3cd;--bs-table-striped-bg: #f2e7c3;--bs-table-striped-color: #000;--bs-table-active-bg: #e6dbb9;--bs-table-active-color: #000;--bs-table-hover-bg: #ece1be;--bs-table-hover-color: #000;color:#000;border-color:#e6dbb9}.table-danger{--bs-table-bg: #f8d7da;--bs-table-striped-bg: #eccccf;--bs-table-striped-color: #000;--bs-table-active-bg: #dfc2c4;--bs-table-active-color: #000;--bs-table-hover-bg: #e5c7ca;--bs-table-hover-color: #000;color:#000;border-color:#dfc2c4}.table-light{--bs-table-bg: #f8f9fa;--bs-table-striped-bg: #ecedee;--bs-table-striped-color: #000;--bs-table-active-bg: #dfe0e1;--bs-table-active-color: #000;--bs-table-hover-bg: #e5e6e7;--bs-table-hover-color: #000;color:#000;border-color:#dfe0e1}.table-dark{--bs-table-bg: #212529;--bs-table-striped-bg: #2c3034;--bs-table-striped-color: #ffffff;--bs-table-active-bg: #373b3e;--bs-table-active-color: #ffffff;--bs-table-hover-bg: #323539;--bs-table-hover-color: #ffffff;color:#fff;border-color:#373b3e}.table-responsive{overflow-x:auto;-webkit-overflow-scrolling:touch}@media(max-width: 575.98px){.table-responsive-sm{overflow-x:auto;-webkit-overflow-scrolling:touch}}@media(max-width: 
767.98px){.table-responsive-md{overflow-x:auto;-webkit-overflow-scrolling:touch}}@media(max-width: 991.98px){.table-responsive-lg{overflow-x:auto;-webkit-overflow-scrolling:touch}}@media(max-width: 1199.98px){.table-responsive-xl{overflow-x:auto;-webkit-overflow-scrolling:touch}}@media(max-width: 1399.98px){.table-responsive-xxl{overflow-x:auto;-webkit-overflow-scrolling:touch}}.form-label,.shiny-input-container .control-label{margin-bottom:.5rem}.col-form-label{padding-top:calc(0.375rem + 1px);padding-bottom:calc(0.375rem + 1px);margin-bottom:0;font-size:inherit;line-height:1.5}.col-form-label-lg{padding-top:calc(0.5rem + 1px);padding-bottom:calc(0.5rem + 1px);font-size:1.25rem}.col-form-label-sm{padding-top:calc(0.25rem + 1px);padding-bottom:calc(0.25rem + 1px);font-size:0.875rem}.form-text{margin-top:.25rem;font-size:0.875em;color:#6c757d}.form-control{display:block;width:100%;padding:.375rem .75rem;font-size:1rem;font-weight:400;line-height:1.5;color:#212529;background-color:#fff;background-clip:padding-box;border:1px solid #ced4da;appearance:none;-webkit-appearance:none;-moz-appearance:none;-ms-appearance:none;-o-appearance:none;border-radius:.25rem;transition:border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media(prefers-reduced-motion: reduce){.form-control{transition:none}}.form-control[type=file]{overflow:hidden}.form-control[type=file]:not(:disabled):not([readonly]){cursor:pointer}.form-control:focus{color:#212529;background-color:#fff;border-color:#86b7fe;outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25)}.form-control::-webkit-date-and-time-value{height:1.5em}.form-control::placeholder{color:#6c757d;opacity:1}.form-control:disabled,.form-control[readonly]{background-color:#e9ecef;opacity:1}.form-control::file-selector-button{padding:.375rem .75rem;margin:-0.375rem -0.75rem;margin-inline-end:.75rem;color:#212529;background-color:#e9ecef;pointer-events:none;border-color:inherit;border-style:solid;border-width:0;border-inline-end-width:1px;border-radius:0;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media(prefers-reduced-motion: reduce){.form-control::file-selector-button{transition:none}}.form-control:hover:not(:disabled):not([readonly])::file-selector-button{background-color:#dde0e3}.form-control::-webkit-file-upload-button{padding:.375rem .75rem;margin:-0.375rem -0.75rem;margin-inline-end:.75rem;color:#212529;background-color:#e9ecef;pointer-events:none;border-color:inherit;border-style:solid;border-width:0;border-inline-end-width:1px;border-radius:0;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media(prefers-reduced-motion: reduce){.form-control::-webkit-file-upload-button{transition:none}}.form-control:hover:not(:disabled):not([readonly])::-webkit-file-upload-button{background-color:#dde0e3}.form-control-plaintext{display:block;width:100%;padding:.375rem 0;margin-bottom:0;line-height:1.5;color:#212529;background-color:transparent;border:solid transparent;border-width:1px 0}.form-control-plaintext.form-control-sm,.form-control-plaintext.form-control-lg{padding-right:0;padding-left:0}.form-control-sm{min-height:calc(1.5em + 0.5rem + 2px);padding:.25rem .5rem;font-size:0.875rem;border-radius:.2rem}.form-control-sm::file-selector-button{padding:.25rem .5rem;margin:-0.25rem -0.5rem;margin-inline-end:.5rem}.form-control-sm::-webkit-file-upload-button{padding:.25rem .5rem;margin:-0.25rem 
-0.5rem;margin-inline-end:.5rem}.form-control-lg{min-height:calc(1.5em + 1rem + 2px);padding:.5rem 1rem;font-size:1.25rem;border-radius:.3rem}.form-control-lg::file-selector-button{padding:.5rem 1rem;margin:-0.5rem -1rem;margin-inline-end:1rem}.form-control-lg::-webkit-file-upload-button{padding:.5rem 1rem;margin:-0.5rem -1rem;margin-inline-end:1rem}textarea.form-control{min-height:calc(1.5em + 0.75rem + 2px)}textarea.form-control-sm{min-height:calc(1.5em + 0.5rem + 2px)}textarea.form-control-lg{min-height:calc(1.5em + 1rem + 2px)}.form-control-color{width:3rem;height:auto;padding:.375rem}.form-control-color:not(:disabled):not([readonly]){cursor:pointer}.form-control-color::-moz-color-swatch{height:1.5em;border-radius:.25rem}.form-control-color::-webkit-color-swatch{height:1.5em;border-radius:.25rem}.form-select{display:block;width:100%;padding:.375rem 2.25rem .375rem .75rem;-moz-padding-start:calc(0.75rem - 3px);font-size:1rem;font-weight:400;line-height:1.5;color:#212529;background-color:#fff;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16'%3e%3cpath fill='none' stroke='%23343a40' stroke-linecap='round' stroke-linejoin='round' stroke-width='2' d='M2 5l6 6 6-6'/%3e%3c/svg%3e");background-repeat:no-repeat;background-position:right .75rem center;background-size:16px 12px;border:1px solid #ced4da;border-radius:.25rem;transition:border-color .15s ease-in-out,box-shadow .15s ease-in-out;appearance:none;-webkit-appearance:none;-moz-appearance:none;-ms-appearance:none;-o-appearance:none}@media(prefers-reduced-motion: reduce){.form-select{transition:none}}.form-select:focus{border-color:#86b7fe;outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25)}.form-select[multiple],.form-select[size]:not([size="1"]){padding-right:.75rem;background-image:none}.form-select:disabled{background-color:#e9ecef}.form-select:-moz-focusring{color:transparent;text-shadow:0 0 0 #212529}.form-select-sm{padding-top:.25rem;padding-bottom:.25rem;padding-left:.5rem;font-size:0.875rem;border-radius:.2rem}.form-select-lg{padding-top:.5rem;padding-bottom:.5rem;padding-left:1rem;font-size:1.25rem;border-radius:.3rem}.form-check,.shiny-input-container .checkbox,.shiny-input-container .radio{display:block;min-height:1.5rem;padding-left:0;margin-bottom:.125rem}.form-check .form-check-input,.form-check .shiny-input-container .checkbox input,.form-check .shiny-input-container .radio input,.shiny-input-container .checkbox .form-check-input,.shiny-input-container .checkbox .shiny-input-container .checkbox input,.shiny-input-container .checkbox .shiny-input-container .radio input,.shiny-input-container .radio .form-check-input,.shiny-input-container .radio .shiny-input-container .checkbox input,.shiny-input-container .radio .shiny-input-container .radio input{float:left;margin-left:0}.form-check-input,.shiny-input-container .checkbox input,.shiny-input-container .checkbox-inline input,.shiny-input-container .radio input,.shiny-input-container .radio-inline input{width:1em;height:1em;margin-top:.25em;vertical-align:top;background-color:#fff;background-repeat:no-repeat;background-position:center;background-size:contain;border:1px solid rgba(0,0,0,.25);appearance:none;-webkit-appearance:none;-moz-appearance:none;-ms-appearance:none;-o-appearance:none;color-adjust:exact;-webkit-print-color-adjust:exact}.form-check-input[type=checkbox],.shiny-input-container .checkbox input[type=checkbox],.shiny-input-container .checkbox-inline input[type=checkbox],.shiny-input-container .radio 
input[type=checkbox],.shiny-input-container .radio-inline input[type=checkbox]{border-radius:.25em}.form-check-input[type=radio],.shiny-input-container .checkbox input[type=radio],.shiny-input-container .checkbox-inline input[type=radio],.shiny-input-container .radio input[type=radio],.shiny-input-container .radio-inline input[type=radio]{border-radius:50%}.form-check-input:active,.shiny-input-container .checkbox input:active,.shiny-input-container .checkbox-inline input:active,.shiny-input-container .radio input:active,.shiny-input-container .radio-inline input:active{filter:brightness(90%)}.form-check-input:focus,.shiny-input-container .checkbox input:focus,.shiny-input-container .checkbox-inline input:focus,.shiny-input-container .radio input:focus,.shiny-input-container .radio-inline input:focus{border-color:#86b7fe;outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25)}.form-check-input:checked,.shiny-input-container .checkbox input:checked,.shiny-input-container .checkbox-inline input:checked,.shiny-input-container .radio input:checked,.shiny-input-container .radio-inline input:checked{background-color:#0d6efd;border-color:#0d6efd}.form-check-input:checked[type=checkbox],.shiny-input-container .checkbox input:checked[type=checkbox],.shiny-input-container .checkbox-inline input:checked[type=checkbox],.shiny-input-container .radio input:checked[type=checkbox],.shiny-input-container .radio-inline input:checked[type=checkbox]{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 20 20'%3e%3cpath fill='none' stroke='%23ffffff' stroke-linecap='round' stroke-linejoin='round' stroke-width='3' d='M6 10l3 3l6-6'/%3e%3c/svg%3e")}.form-check-input:checked[type=radio],.shiny-input-container .checkbox input:checked[type=radio],.shiny-input-container .checkbox-inline input:checked[type=radio],.shiny-input-container .radio input:checked[type=radio],.shiny-input-container .radio-inline input:checked[type=radio]{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='2' fill='%23ffffff'/%3e%3c/svg%3e")}.form-check-input[type=checkbox]:indeterminate,.shiny-input-container .checkbox input[type=checkbox]:indeterminate,.shiny-input-container .checkbox-inline input[type=checkbox]:indeterminate,.shiny-input-container .radio input[type=checkbox]:indeterminate,.shiny-input-container .radio-inline input[type=checkbox]:indeterminate{background-color:#0d6efd;border-color:#0d6efd;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 20 20'%3e%3cpath fill='none' stroke='%23ffffff' stroke-linecap='round' stroke-linejoin='round' stroke-width='3' d='M6 10h8'/%3e%3c/svg%3e")}.form-check-input:disabled,.shiny-input-container .checkbox input:disabled,.shiny-input-container .checkbox-inline input:disabled,.shiny-input-container .radio input:disabled,.shiny-input-container .radio-inline input:disabled{pointer-events:none;filter:none;opacity:.5}.form-check-input[disabled]~.form-check-label,.form-check-input[disabled]~span,.form-check-input:disabled~.form-check-label,.form-check-input:disabled~span,.shiny-input-container .checkbox input[disabled]~.form-check-label,.shiny-input-container .checkbox input[disabled]~span,.shiny-input-container .checkbox input:disabled~.form-check-label,.shiny-input-container .checkbox input:disabled~span,.shiny-input-container .checkbox-inline input[disabled]~.form-check-label,.shiny-input-container .checkbox-inline 
input[disabled]~span,.shiny-input-container .checkbox-inline input:disabled~.form-check-label,.shiny-input-container .checkbox-inline input:disabled~span,.shiny-input-container .radio input[disabled]~.form-check-label,.shiny-input-container .radio input[disabled]~span,.shiny-input-container .radio input:disabled~.form-check-label,.shiny-input-container .radio input:disabled~span,.shiny-input-container .radio-inline input[disabled]~.form-check-label,.shiny-input-container .radio-inline input[disabled]~span,.shiny-input-container .radio-inline input:disabled~.form-check-label,.shiny-input-container .radio-inline input:disabled~span{opacity:.5}.form-check-label,.shiny-input-container .checkbox label,.shiny-input-container .checkbox-inline label,.shiny-input-container .radio label,.shiny-input-container .radio-inline label{cursor:pointer}.form-switch{padding-left:2.5em}.form-switch .form-check-input{width:2em;margin-left:-2.5em;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' fill='rgba%280, 0, 0, 0.25%29'/%3e%3c/svg%3e");background-position:left center;border-radius:2em;transition:background-position .15s ease-in-out}@media(prefers-reduced-motion: reduce){.form-switch .form-check-input{transition:none}}.form-switch .form-check-input:focus{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' fill='%2386b7fe'/%3e%3c/svg%3e")}.form-switch .form-check-input:checked{background-position:right center;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' fill='%23ffffff'/%3e%3c/svg%3e")}.form-check-inline,.shiny-input-container .checkbox-inline,.shiny-input-container .radio-inline{display:inline-block;margin-right:1rem}.btn-check{position:absolute;clip:rect(0, 0, 0, 0);pointer-events:none}.btn-check[disabled]+.btn,.btn-check:disabled+.btn{pointer-events:none;filter:none;opacity:.65}.form-range{width:100%;height:1.5rem;padding:0;background-color:transparent;appearance:none;-webkit-appearance:none;-moz-appearance:none;-ms-appearance:none;-o-appearance:none}.form-range:focus{outline:0}.form-range:focus::-webkit-slider-thumb{box-shadow:0 0 0 1px #fff,0 0 0 .25rem rgba(13,110,253,.25)}.form-range:focus::-moz-range-thumb{box-shadow:0 0 0 1px #fff,0 0 0 .25rem rgba(13,110,253,.25)}.form-range::-moz-focus-outer{border:0}.form-range::-webkit-slider-thumb{width:1rem;height:1rem;margin-top:-0.25rem;background-color:#0d6efd;border:0;border-radius:1rem;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;appearance:none;-webkit-appearance:none;-moz-appearance:none;-ms-appearance:none;-o-appearance:none}@media(prefers-reduced-motion: reduce){.form-range::-webkit-slider-thumb{transition:none}}.form-range::-webkit-slider-thumb:active{background-color:#b6d4fe}.form-range::-webkit-slider-runnable-track{width:100%;height:.5rem;color:transparent;cursor:pointer;background-color:#dee2e6;border-color:transparent;border-radius:1rem}.form-range::-moz-range-thumb{width:1rem;height:1rem;background-color:#0d6efd;border:0;border-radius:1rem;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;appearance:none;-webkit-appearance:none;-moz-appearance:none;-ms-appearance:none;-o-appearance:none}@media(prefers-reduced-motion: 
reduce){.form-range::-moz-range-thumb{transition:none}}.form-range::-moz-range-thumb:active{background-color:#b6d4fe}.form-range::-moz-range-track{width:100%;height:.5rem;color:transparent;cursor:pointer;background-color:#dee2e6;border-color:transparent;border-radius:1rem}.form-range:disabled{pointer-events:none}.form-range:disabled::-webkit-slider-thumb{background-color:#adb5bd}.form-range:disabled::-moz-range-thumb{background-color:#adb5bd}.form-floating{position:relative}.form-floating>.form-control,.form-floating>.form-select{height:calc(3.5rem + 2px);line-height:1.25}.form-floating>label{position:absolute;top:0;left:0;height:100%;padding:1rem .75rem;pointer-events:none;border:1px solid transparent;transform-origin:0 0;transition:opacity .1s ease-in-out,transform .1s ease-in-out}@media(prefers-reduced-motion: reduce){.form-floating>label{transition:none}}.form-floating>.form-control{padding:1rem .75rem}.form-floating>.form-control::placeholder{color:transparent}.form-floating>.form-control:focus,.form-floating>.form-control:not(:placeholder-shown){padding-top:1.625rem;padding-bottom:.625rem}.form-floating>.form-control:-webkit-autofill{padding-top:1.625rem;padding-bottom:.625rem}.form-floating>.form-select{padding-top:1.625rem;padding-bottom:.625rem}.form-floating>.form-control:focus~label,.form-floating>.form-control:not(:placeholder-shown)~label,.form-floating>.form-select~label{opacity:.65;transform:scale(0.85) translateY(-0.5rem) translateX(0.15rem)}.form-floating>.form-control:-webkit-autofill~label{opacity:.65;transform:scale(0.85) translateY(-0.5rem) translateX(0.15rem)}.input-group{position:relative;display:flex;display:-webkit-flex;flex-wrap:wrap;-webkit-flex-wrap:wrap;align-items:stretch;-webkit-align-items:stretch;width:100%}.input-group>.form-control,.input-group>.form-select{position:relative;flex:1 1 auto;-webkit-flex:1 1 auto;width:1%;min-width:0}.input-group>.form-control:focus,.input-group>.form-select:focus{z-index:3}.input-group .btn{position:relative;z-index:2}.input-group .btn:focus{z-index:3}.input-group-text{display:flex;display:-webkit-flex;align-items:center;-webkit-align-items:center;padding:.375rem .75rem;font-size:1rem;font-weight:400;line-height:1.5;color:#212529;text-align:center;white-space:nowrap;background-color:#e9ecef;border:1px solid #ced4da;border-radius:.25rem}.input-group-lg>.form-control,.input-group-lg>.form-select,.input-group-lg>.input-group-text,.input-group-lg>.btn{padding:.5rem 1rem;font-size:1.25rem;border-radius:.3rem}.input-group-sm>.form-control,.input-group-sm>.form-select,.input-group-sm>.input-group-text,.input-group-sm>.btn{padding:.25rem 
.5rem;font-size:0.875rem;border-radius:.2rem}.input-group-lg>.form-select,.input-group-sm>.form-select{padding-right:3rem}.input-group:not(.has-validation)>:not(:last-child):not(.dropdown-toggle):not(.dropdown-menu),.input-group:not(.has-validation)>.dropdown-toggle:nth-last-child(n+3){border-top-right-radius:0;border-bottom-right-radius:0}.input-group.has-validation>:nth-last-child(n+3):not(.dropdown-toggle):not(.dropdown-menu),.input-group.has-validation>.dropdown-toggle:nth-last-child(n+4){border-top-right-radius:0;border-bottom-right-radius:0}.input-group>:not(:first-child):not(.dropdown-menu):not(.valid-tooltip):not(.valid-feedback):not(.invalid-tooltip):not(.invalid-feedback){margin-left:-1px;border-top-left-radius:0;border-bottom-left-radius:0}.valid-feedback{display:none;width:100%;margin-top:.25rem;font-size:0.875em;color:#198754}.valid-tooltip{position:absolute;top:100%;z-index:5;display:none;max-width:100%;padding:.25rem .5rem;margin-top:.1rem;font-size:0.875rem;color:#fff;background-color:rgba(25,135,84,.9);border-radius:.25rem}.was-validated :valid~.valid-feedback,.was-validated :valid~.valid-tooltip,.is-valid~.valid-feedback,.is-valid~.valid-tooltip{display:block}.was-validated .form-control:valid,.form-control.is-valid{border-color:#198754;padding-right:calc(1.5em + 0.75rem);background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 8 8'%3e%3cpath fill='%23198754' d='M2.3 6.73L.6 4.53c-.4-1.04.46-1.4 1.1-.8l1.1 1.4 3.4-3.8c.6-.63 1.6-.27 1.2.7l-4 4.6c-.43.5-.8.4-1.1.1z'/%3e%3c/svg%3e");background-repeat:no-repeat;background-position:right calc(0.375em + 0.1875rem) center;background-size:calc(0.75em + 0.375rem) calc(0.75em + 0.375rem)}.was-validated .form-control:valid:focus,.form-control.is-valid:focus{border-color:#198754;box-shadow:0 0 0 .25rem rgba(25,135,84,.25)}.was-validated textarea.form-control:valid,textarea.form-control.is-valid{padding-right:calc(1.5em + 0.75rem);background-position:top calc(0.375em + 0.1875rem) right calc(0.375em + 0.1875rem)}.was-validated .form-select:valid,.form-select.is-valid{border-color:#198754}.was-validated .form-select:valid:not([multiple]):not([size]),.was-validated .form-select:valid:not([multiple])[size="1"],.form-select.is-valid:not([multiple]):not([size]),.form-select.is-valid:not([multiple])[size="1"]{padding-right:4.125rem;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16'%3e%3cpath fill='none' stroke='%23343a40' stroke-linecap='round' stroke-linejoin='round' stroke-width='2' d='M2 5l6 6 6-6'/%3e%3c/svg%3e"),url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 8 8'%3e%3cpath fill='%23198754' d='M2.3 6.73L.6 4.53c-.4-1.04.46-1.4 1.1-.8l1.1 1.4 3.4-3.8c.6-.63 1.6-.27 1.2.7l-4 4.6c-.43.5-.8.4-1.1.1z'/%3e%3c/svg%3e");background-position:right .75rem center,center right 2.25rem;background-size:16px 12px,calc(0.75em + 0.375rem) calc(0.75em + 0.375rem)}.was-validated .form-select:valid:focus,.form-select.is-valid:focus{border-color:#198754;box-shadow:0 0 0 .25rem rgba(25,135,84,.25)}.was-validated .form-check-input:valid,.form-check-input.is-valid{border-color:#198754}.was-validated .form-check-input:valid:checked,.form-check-input.is-valid:checked{background-color:#198754}.was-validated .form-check-input:valid:focus,.form-check-input.is-valid:focus{box-shadow:0 0 0 .25rem rgba(25,135,84,.25)}.was-validated 
.form-check-input:valid~.form-check-label,.form-check-input.is-valid~.form-check-label{color:#198754}.form-check-inline .form-check-input~.valid-feedback{margin-left:.5em}.was-validated .input-group .form-control:valid,.input-group .form-control.is-valid,.was-validated .input-group .form-select:valid,.input-group .form-select.is-valid{z-index:1}.was-validated .input-group .form-control:valid:focus,.input-group .form-control.is-valid:focus,.was-validated .input-group .form-select:valid:focus,.input-group .form-select.is-valid:focus{z-index:3}.invalid-feedback{display:none;width:100%;margin-top:.25rem;font-size:0.875em;color:#dc3545}.invalid-tooltip{position:absolute;top:100%;z-index:5;display:none;max-width:100%;padding:.25rem .5rem;margin-top:.1rem;font-size:0.875rem;color:#fff;background-color:rgba(220,53,69,.9);border-radius:.25rem}.was-validated :invalid~.invalid-feedback,.was-validated :invalid~.invalid-tooltip,.is-invalid~.invalid-feedback,.is-invalid~.invalid-tooltip{display:block}.was-validated .form-control:invalid,.form-control.is-invalid{border-color:#dc3545;padding-right:calc(1.5em + 0.75rem);background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 12 12' width='12' height='12' fill='none' stroke='%23dc3545'%3e%3ccircle cx='6' cy='6' r='4.5'/%3e%3cpath stroke-linejoin='round' d='M5.8 3.6h.4L6 6.5z'/%3e%3ccircle cx='6' cy='8.2' r='.6' fill='%23dc3545' stroke='none'/%3e%3c/svg%3e");background-repeat:no-repeat;background-position:right calc(0.375em + 0.1875rem) center;background-size:calc(0.75em + 0.375rem) calc(0.75em + 0.375rem)}.was-validated .form-control:invalid:focus,.form-control.is-invalid:focus{border-color:#dc3545;box-shadow:0 0 0 .25rem rgba(220,53,69,.25)}.was-validated textarea.form-control:invalid,textarea.form-control.is-invalid{padding-right:calc(1.5em + 0.75rem);background-position:top calc(0.375em + 0.1875rem) right calc(0.375em + 0.1875rem)}.was-validated .form-select:invalid,.form-select.is-invalid{border-color:#dc3545}.was-validated .form-select:invalid:not([multiple]):not([size]),.was-validated .form-select:invalid:not([multiple])[size="1"],.form-select.is-invalid:not([multiple]):not([size]),.form-select.is-invalid:not([multiple])[size="1"]{padding-right:4.125rem;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16'%3e%3cpath fill='none' stroke='%23343a40' stroke-linecap='round' stroke-linejoin='round' stroke-width='2' d='M2 5l6 6 6-6'/%3e%3c/svg%3e"),url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 12 12' width='12' height='12' fill='none' stroke='%23dc3545'%3e%3ccircle cx='6' cy='6' r='4.5'/%3e%3cpath stroke-linejoin='round' d='M5.8 3.6h.4L6 6.5z'/%3e%3ccircle cx='6' cy='8.2' r='.6' fill='%23dc3545' stroke='none'/%3e%3c/svg%3e");background-position:right .75rem center,center right 2.25rem;background-size:16px 12px,calc(0.75em + 0.375rem) calc(0.75em + 0.375rem)}.was-validated .form-select:invalid:focus,.form-select.is-invalid:focus{border-color:#dc3545;box-shadow:0 0 0 .25rem rgba(220,53,69,.25)}.was-validated .form-check-input:invalid,.form-check-input.is-invalid{border-color:#dc3545}.was-validated .form-check-input:invalid:checked,.form-check-input.is-invalid:checked{background-color:#dc3545}.was-validated .form-check-input:invalid:focus,.form-check-input.is-invalid:focus{box-shadow:0 0 0 .25rem rgba(220,53,69,.25)}.was-validated 
.form-check-input:invalid~.form-check-label,.form-check-input.is-invalid~.form-check-label{color:#dc3545}.form-check-inline .form-check-input~.invalid-feedback{margin-left:.5em}.was-validated .input-group .form-control:invalid,.input-group .form-control.is-invalid,.was-validated .input-group .form-select:invalid,.input-group .form-select.is-invalid{z-index:2}.was-validated .input-group .form-control:invalid:focus,.input-group .form-control.is-invalid:focus,.was-validated .input-group .form-select:invalid:focus,.input-group .form-select.is-invalid:focus{z-index:3}.btn{display:inline-block;font-weight:400;line-height:1.5;color:#212529;text-align:center;text-decoration:none;-webkit-text-decoration:none;-moz-text-decoration:none;-ms-text-decoration:none;-o-text-decoration:none;vertical-align:middle;cursor:pointer;user-select:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;-o-user-select:none;background-color:transparent;border:1px solid transparent;padding:.375rem .75rem;font-size:1rem;border-radius:.25rem;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media(prefers-reduced-motion: reduce){.btn{transition:none}}.btn:hover{color:#212529}.btn-check:focus+.btn,.btn:focus{outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25)}.btn:disabled,.btn.disabled,fieldset:disabled .btn{pointer-events:none;opacity:.65}.btn-default{color:#000;background-color:#dee2e6;border-color:#dee2e6}.btn-default:hover{color:#000;background-color:#e3e6ea;border-color:#e1e5e9}.btn-check:focus+.btn-default,.btn-default:focus{color:#000;background-color:#e3e6ea;border-color:#e1e5e9;box-shadow:0 0 0 .25rem rgba(189,192,196,.5)}.btn-check:checked+.btn-default,.btn-check:active+.btn-default,.btn-default:active,.btn-default.active,.show>.btn-default.dropdown-toggle{color:#000;background-color:#e5e8eb;border-color:#e1e5e9}.btn-check:checked+.btn-default:focus,.btn-check:active+.btn-default:focus,.btn-default:active:focus,.btn-default.active:focus,.show>.btn-default.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(189,192,196,.5)}.btn-default:disabled,.btn-default.disabled{color:#000;background-color:#dee2e6;border-color:#dee2e6}.btn-primary{color:#fff;background-color:#0d6efd;border-color:#0d6efd}.btn-primary:hover{color:#fff;background-color:#0b5ed7;border-color:#0a58ca}.btn-check:focus+.btn-primary,.btn-primary:focus{color:#fff;background-color:#0b5ed7;border-color:#0a58ca;box-shadow:0 0 0 .25rem rgba(49,132,253,.5)}.btn-check:checked+.btn-primary,.btn-check:active+.btn-primary,.btn-primary:active,.btn-primary.active,.show>.btn-primary.dropdown-toggle{color:#fff;background-color:#0a58ca;border-color:#0a53be}.btn-check:checked+.btn-primary:focus,.btn-check:active+.btn-primary:focus,.btn-primary:active:focus,.btn-primary.active:focus,.show>.btn-primary.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(49,132,253,.5)}.btn-primary:disabled,.btn-primary.disabled{color:#fff;background-color:#0d6efd;border-color:#0d6efd}.btn-secondary{color:#fff;background-color:#6c757d;border-color:#6c757d}.btn-secondary:hover{color:#fff;background-color:#5c636a;border-color:#565e64}.btn-check:focus+.btn-secondary,.btn-secondary:focus{color:#fff;background-color:#5c636a;border-color:#565e64;box-shadow:0 0 0 .25rem 
rgba(130,138,145,.5)}.btn-check:checked+.btn-secondary,.btn-check:active+.btn-secondary,.btn-secondary:active,.btn-secondary.active,.show>.btn-secondary.dropdown-toggle{color:#fff;background-color:#565e64;border-color:#51585e}.btn-check:checked+.btn-secondary:focus,.btn-check:active+.btn-secondary:focus,.btn-secondary:active:focus,.btn-secondary.active:focus,.show>.btn-secondary.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(130,138,145,.5)}.btn-secondary:disabled,.btn-secondary.disabled{color:#fff;background-color:#6c757d;border-color:#6c757d}.btn-success{color:#fff;background-color:#198754;border-color:#198754}.btn-success:hover{color:#fff;background-color:#157347;border-color:#146c43}.btn-check:focus+.btn-success,.btn-success:focus{color:#fff;background-color:#157347;border-color:#146c43;box-shadow:0 0 0 .25rem rgba(60,153,110,.5)}.btn-check:checked+.btn-success,.btn-check:active+.btn-success,.btn-success:active,.btn-success.active,.show>.btn-success.dropdown-toggle{color:#fff;background-color:#146c43;border-color:#13653f}.btn-check:checked+.btn-success:focus,.btn-check:active+.btn-success:focus,.btn-success:active:focus,.btn-success.active:focus,.show>.btn-success.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(60,153,110,.5)}.btn-success:disabled,.btn-success.disabled{color:#fff;background-color:#198754;border-color:#198754}.btn-info{color:#000;background-color:#0dcaf0;border-color:#0dcaf0}.btn-info:hover{color:#000;background-color:#31d2f2;border-color:#25cff2}.btn-check:focus+.btn-info,.btn-info:focus{color:#000;background-color:#31d2f2;border-color:#25cff2;box-shadow:0 0 0 .25rem rgba(11,172,204,.5)}.btn-check:checked+.btn-info,.btn-check:active+.btn-info,.btn-info:active,.btn-info.active,.show>.btn-info.dropdown-toggle{color:#000;background-color:#3dd5f3;border-color:#25cff2}.btn-check:checked+.btn-info:focus,.btn-check:active+.btn-info:focus,.btn-info:active:focus,.btn-info.active:focus,.show>.btn-info.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(11,172,204,.5)}.btn-info:disabled,.btn-info.disabled{color:#000;background-color:#0dcaf0;border-color:#0dcaf0}.btn-warning{color:#000;background-color:#ffc107;border-color:#ffc107}.btn-warning:hover{color:#000;background-color:#ffca2c;border-color:#ffc720}.btn-check:focus+.btn-warning,.btn-warning:focus{color:#000;background-color:#ffca2c;border-color:#ffc720;box-shadow:0 0 0 .25rem rgba(217,164,6,.5)}.btn-check:checked+.btn-warning,.btn-check:active+.btn-warning,.btn-warning:active,.btn-warning.active,.show>.btn-warning.dropdown-toggle{color:#000;background-color:#ffcd39;border-color:#ffc720}.btn-check:checked+.btn-warning:focus,.btn-check:active+.btn-warning:focus,.btn-warning:active:focus,.btn-warning.active:focus,.show>.btn-warning.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(217,164,6,.5)}.btn-warning:disabled,.btn-warning.disabled{color:#000;background-color:#ffc107;border-color:#ffc107}.btn-danger{color:#fff;background-color:#dc3545;border-color:#dc3545}.btn-danger:hover{color:#fff;background-color:#bb2d3b;border-color:#b02a37}.btn-check:focus+.btn-danger,.btn-danger:focus{color:#fff;background-color:#bb2d3b;border-color:#b02a37;box-shadow:0 0 0 .25rem 
rgba(225,83,97,.5)}.btn-check:checked+.btn-danger,.btn-check:active+.btn-danger,.btn-danger:active,.btn-danger.active,.show>.btn-danger.dropdown-toggle{color:#fff;background-color:#b02a37;border-color:#a52834}.btn-check:checked+.btn-danger:focus,.btn-check:active+.btn-danger:focus,.btn-danger:active:focus,.btn-danger.active:focus,.show>.btn-danger.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(225,83,97,.5)}.btn-danger:disabled,.btn-danger.disabled{color:#fff;background-color:#dc3545;border-color:#dc3545}.btn-light{color:#000;background-color:#f8f9fa;border-color:#f8f9fa}.btn-light:hover{color:#000;background-color:#f9fafb;border-color:#f9fafb}.btn-check:focus+.btn-light,.btn-light:focus{color:#000;background-color:#f9fafb;border-color:#f9fafb;box-shadow:0 0 0 .25rem rgba(211,212,213,.5)}.btn-check:checked+.btn-light,.btn-check:active+.btn-light,.btn-light:active,.btn-light.active,.show>.btn-light.dropdown-toggle{color:#000;background-color:#f9fafb;border-color:#f9fafb}.btn-check:checked+.btn-light:focus,.btn-check:active+.btn-light:focus,.btn-light:active:focus,.btn-light.active:focus,.show>.btn-light.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(211,212,213,.5)}.btn-light:disabled,.btn-light.disabled{color:#000;background-color:#f8f9fa;border-color:#f8f9fa}.btn-dark{color:#fff;background-color:#212529;border-color:#212529}.btn-dark:hover{color:#fff;background-color:#1c1f23;border-color:#1a1e21}.btn-check:focus+.btn-dark,.btn-dark:focus{color:#fff;background-color:#1c1f23;border-color:#1a1e21;box-shadow:0 0 0 .25rem rgba(66,70,73,.5)}.btn-check:checked+.btn-dark,.btn-check:active+.btn-dark,.btn-dark:active,.btn-dark.active,.show>.btn-dark.dropdown-toggle{color:#fff;background-color:#1a1e21;border-color:#191c1f}.btn-check:checked+.btn-dark:focus,.btn-check:active+.btn-dark:focus,.btn-dark:active:focus,.btn-dark.active:focus,.show>.btn-dark.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(66,70,73,.5)}.btn-dark:disabled,.btn-dark.disabled{color:#fff;background-color:#212529;border-color:#212529}.btn-outline-default{color:#dee2e6;border-color:#dee2e6;background-color:transparent}.btn-outline-default:hover{color:#000;background-color:#dee2e6;border-color:#dee2e6}.btn-check:focus+.btn-outline-default,.btn-outline-default:focus{box-shadow:0 0 0 .25rem rgba(222,226,230,.5)}.btn-check:checked+.btn-outline-default,.btn-check:active+.btn-outline-default,.btn-outline-default:active,.btn-outline-default.active,.btn-outline-default.dropdown-toggle.show{color:#000;background-color:#dee2e6;border-color:#dee2e6}.btn-check:checked+.btn-outline-default:focus,.btn-check:active+.btn-outline-default:focus,.btn-outline-default:active:focus,.btn-outline-default.active:focus,.btn-outline-default.dropdown-toggle.show:focus{box-shadow:0 0 0 .25rem rgba(222,226,230,.5)}.btn-outline-default:disabled,.btn-outline-default.disabled{color:#dee2e6;background-color:transparent}.btn-outline-primary{color:#0d6efd;border-color:#0d6efd;background-color:transparent}.btn-outline-primary:hover{color:#fff;background-color:#0d6efd;border-color:#0d6efd}.btn-check:focus+.btn-outline-primary,.btn-outline-primary:focus{box-shadow:0 0 0 .25rem 
rgba(13,110,253,.5)}.btn-check:checked+.btn-outline-primary,.btn-check:active+.btn-outline-primary,.btn-outline-primary:active,.btn-outline-primary.active,.btn-outline-primary.dropdown-toggle.show{color:#fff;background-color:#0d6efd;border-color:#0d6efd}.btn-check:checked+.btn-outline-primary:focus,.btn-check:active+.btn-outline-primary:focus,.btn-outline-primary:active:focus,.btn-outline-primary.active:focus,.btn-outline-primary.dropdown-toggle.show:focus{box-shadow:0 0 0 .25rem rgba(13,110,253,.5)}.btn-outline-primary:disabled,.btn-outline-primary.disabled{color:#0d6efd;background-color:transparent}.btn-outline-secondary{color:#6c757d;border-color:#6c757d;background-color:transparent}.btn-outline-secondary:hover{color:#fff;background-color:#6c757d;border-color:#6c757d}.btn-check:focus+.btn-outline-secondary,.btn-outline-secondary:focus{box-shadow:0 0 0 .25rem rgba(108,117,125,.5)}.btn-check:checked+.btn-outline-secondary,.btn-check:active+.btn-outline-secondary,.btn-outline-secondary:active,.btn-outline-secondary.active,.btn-outline-secondary.dropdown-toggle.show{color:#fff;background-color:#6c757d;border-color:#6c757d}.btn-check:checked+.btn-outline-secondary:focus,.btn-check:active+.btn-outline-secondary:focus,.btn-outline-secondary:active:focus,.btn-outline-secondary.active:focus,.btn-outline-secondary.dropdown-toggle.show:focus{box-shadow:0 0 0 .25rem rgba(108,117,125,.5)}.btn-outline-secondary:disabled,.btn-outline-secondary.disabled{color:#6c757d;background-color:transparent}.btn-outline-success{color:#198754;border-color:#198754;background-color:transparent}.btn-outline-success:hover{color:#fff;background-color:#198754;border-color:#198754}.btn-check:focus+.btn-outline-success,.btn-outline-success:focus{box-shadow:0 0 0 .25rem rgba(25,135,84,.5)}.btn-check:checked+.btn-outline-success,.btn-check:active+.btn-outline-success,.btn-outline-success:active,.btn-outline-success.active,.btn-outline-success.dropdown-toggle.show{color:#fff;background-color:#198754;border-color:#198754}.btn-check:checked+.btn-outline-success:focus,.btn-check:active+.btn-outline-success:focus,.btn-outline-success:active:focus,.btn-outline-success.active:focus,.btn-outline-success.dropdown-toggle.show:focus{box-shadow:0 0 0 .25rem rgba(25,135,84,.5)}.btn-outline-success:disabled,.btn-outline-success.disabled{color:#198754;background-color:transparent}.btn-outline-info{color:#0dcaf0;border-color:#0dcaf0;background-color:transparent}.btn-outline-info:hover{color:#000;background-color:#0dcaf0;border-color:#0dcaf0}.btn-check:focus+.btn-outline-info,.btn-outline-info:focus{box-shadow:0 0 0 .25rem rgba(13,202,240,.5)}.btn-check:checked+.btn-outline-info,.btn-check:active+.btn-outline-info,.btn-outline-info:active,.btn-outline-info.active,.btn-outline-info.dropdown-toggle.show{color:#000;background-color:#0dcaf0;border-color:#0dcaf0}.btn-check:checked+.btn-outline-info:focus,.btn-check:active+.btn-outline-info:focus,.btn-outline-info:active:focus,.btn-outline-info.active:focus,.btn-outline-info.dropdown-toggle.show:focus{box-shadow:0 0 0 .25rem rgba(13,202,240,.5)}.btn-outline-info:disabled,.btn-outline-info.disabled{color:#0dcaf0;background-color:transparent}.btn-outline-warning{color:#ffc107;border-color:#ffc107;background-color:transparent}.btn-outline-warning:hover{color:#000;background-color:#ffc107;border-color:#ffc107}.btn-check:focus+.btn-outline-warning,.btn-outline-warning:focus{box-shadow:0 0 0 .25rem 
rgba(255,193,7,.5)}.btn-check:checked+.btn-outline-warning,.btn-check:active+.btn-outline-warning,.btn-outline-warning:active,.btn-outline-warning.active,.btn-outline-warning.dropdown-toggle.show{color:#000;background-color:#ffc107;border-color:#ffc107}.btn-check:checked+.btn-outline-warning:focus,.btn-check:active+.btn-outline-warning:focus,.btn-outline-warning:active:focus,.btn-outline-warning.active:focus,.btn-outline-warning.dropdown-toggle.show:focus{box-shadow:0 0 0 .25rem rgba(255,193,7,.5)}.btn-outline-warning:disabled,.btn-outline-warning.disabled{color:#ffc107;background-color:transparent}.btn-outline-danger{color:#dc3545;border-color:#dc3545;background-color:transparent}.btn-outline-danger:hover{color:#fff;background-color:#dc3545;border-color:#dc3545}.btn-check:focus+.btn-outline-danger,.btn-outline-danger:focus{box-shadow:0 0 0 .25rem rgba(220,53,69,.5)}.btn-check:checked+.btn-outline-danger,.btn-check:active+.btn-outline-danger,.btn-outline-danger:active,.btn-outline-danger.active,.btn-outline-danger.dropdown-toggle.show{color:#fff;background-color:#dc3545;border-color:#dc3545}.btn-check:checked+.btn-outline-danger:focus,.btn-check:active+.btn-outline-danger:focus,.btn-outline-danger:active:focus,.btn-outline-danger.active:focus,.btn-outline-danger.dropdown-toggle.show:focus{box-shadow:0 0 0 .25rem rgba(220,53,69,.5)}.btn-outline-danger:disabled,.btn-outline-danger.disabled{color:#dc3545;background-color:transparent}.btn-outline-light{color:#f8f9fa;border-color:#f8f9fa;background-color:transparent}.btn-outline-light:hover{color:#000;background-color:#f8f9fa;border-color:#f8f9fa}.btn-check:focus+.btn-outline-light,.btn-outline-light:focus{box-shadow:0 0 0 .25rem rgba(248,249,250,.5)}.btn-check:checked+.btn-outline-light,.btn-check:active+.btn-outline-light,.btn-outline-light:active,.btn-outline-light.active,.btn-outline-light.dropdown-toggle.show{color:#000;background-color:#f8f9fa;border-color:#f8f9fa}.btn-check:checked+.btn-outline-light:focus,.btn-check:active+.btn-outline-light:focus,.btn-outline-light:active:focus,.btn-outline-light.active:focus,.btn-outline-light.dropdown-toggle.show:focus{box-shadow:0 0 0 .25rem rgba(248,249,250,.5)}.btn-outline-light:disabled,.btn-outline-light.disabled{color:#f8f9fa;background-color:transparent}.btn-outline-dark{color:#212529;border-color:#212529;background-color:transparent}.btn-outline-dark:hover{color:#fff;background-color:#212529;border-color:#212529}.btn-check:focus+.btn-outline-dark,.btn-outline-dark:focus{box-shadow:0 0 0 .25rem rgba(33,37,41,.5)}.btn-check:checked+.btn-outline-dark,.btn-check:active+.btn-outline-dark,.btn-outline-dark:active,.btn-outline-dark.active,.btn-outline-dark.dropdown-toggle.show{color:#fff;background-color:#212529;border-color:#212529}.btn-check:checked+.btn-outline-dark:focus,.btn-check:active+.btn-outline-dark:focus,.btn-outline-dark:active:focus,.btn-outline-dark.active:focus,.btn-outline-dark.dropdown-toggle.show:focus{box-shadow:0 0 0 .25rem rgba(33,37,41,.5)}.btn-outline-dark:disabled,.btn-outline-dark.disabled{color:#212529;background-color:transparent}.btn-link{font-weight:400;color:#0d6efd;text-decoration:underline;-webkit-text-decoration:underline;-moz-text-decoration:underline;-ms-text-decoration:underline;-o-text-decoration:underline}.btn-link:hover{color:#0a58ca}.btn-link:disabled,.btn-link.disabled{color:#6c757d}.btn-lg,.btn-group-lg>.btn{padding:.5rem 1rem;font-size:1.25rem;border-radius:.3rem}.btn-sm,.btn-group-sm>.btn{padding:.25rem 
.5rem;font-size:0.875rem;border-radius:.2rem}.fade{transition:opacity .15s linear}@media(prefers-reduced-motion: reduce){.fade{transition:none}}.fade:not(.show){opacity:0}.collapse:not(.show){display:none}.collapsing{height:0;overflow:hidden;transition:height .2s ease}@media(prefers-reduced-motion: reduce){.collapsing{transition:none}}.collapsing.collapse-horizontal{width:0;height:auto;transition:width .35s ease}@media(prefers-reduced-motion: reduce){.collapsing.collapse-horizontal{transition:none}}.dropup,.dropend,.dropdown,.dropstart{position:relative}.dropdown-toggle{white-space:nowrap}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.dropdown-toggle:empty::after{margin-left:0}.dropdown-menu{position:absolute;z-index:1000;display:none;min-width:10rem;padding:.5rem 0;margin:0;font-size:1rem;color:#212529;text-align:left;list-style:none;background-color:#fff;background-clip:padding-box;border:1px solid rgba(0,0,0,.15);border-radius:.25rem}.dropdown-menu[data-bs-popper]{top:100%;left:0;margin-top:.125rem}.dropdown-menu-start{--bs-position: start}.dropdown-menu-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-end{--bs-position: end}.dropdown-menu-end[data-bs-popper]{right:0;left:auto}@media(min-width: 576px){.dropdown-menu-sm-start{--bs-position: start}.dropdown-menu-sm-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-sm-end{--bs-position: end}.dropdown-menu-sm-end[data-bs-popper]{right:0;left:auto}}@media(min-width: 768px){.dropdown-menu-md-start{--bs-position: start}.dropdown-menu-md-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-md-end{--bs-position: end}.dropdown-menu-md-end[data-bs-popper]{right:0;left:auto}}@media(min-width: 992px){.dropdown-menu-lg-start{--bs-position: start}.dropdown-menu-lg-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-lg-end{--bs-position: end}.dropdown-menu-lg-end[data-bs-popper]{right:0;left:auto}}@media(min-width: 1200px){.dropdown-menu-xl-start{--bs-position: start}.dropdown-menu-xl-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-xl-end{--bs-position: end}.dropdown-menu-xl-end[data-bs-popper]{right:0;left:auto}}@media(min-width: 1400px){.dropdown-menu-xxl-start{--bs-position: start}.dropdown-menu-xxl-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-xxl-end{--bs-position: end}.dropdown-menu-xxl-end[data-bs-popper]{right:0;left:auto}}.dropup .dropdown-menu[data-bs-popper]{top:auto;bottom:100%;margin-top:0;margin-bottom:.125rem}.dropup .dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:0;border-right:.3em solid transparent;border-bottom:.3em solid;border-left:.3em solid transparent}.dropup .dropdown-toggle:empty::after{margin-left:0}.dropend .dropdown-menu[data-bs-popper]{top:0;right:auto;left:100%;margin-top:0;margin-left:.125rem}.dropend .dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid transparent;border-right:0;border-bottom:.3em solid transparent;border-left:.3em solid}.dropend .dropdown-toggle:empty::after{margin-left:0}.dropend .dropdown-toggle::after{vertical-align:0}.dropstart .dropdown-menu[data-bs-popper]{top:0;right:100%;left:auto;margin-top:0;margin-right:.125rem}.dropstart .dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:""}.dropstart .dropdown-toggle::after{display:none}.dropstart 
.dropdown-toggle::before{display:inline-block;margin-right:.255em;vertical-align:.255em;content:"";border-top:.3em solid transparent;border-right:.3em solid;border-bottom:.3em solid transparent}.dropstart .dropdown-toggle:empty::after{margin-left:0}.dropstart .dropdown-toggle::before{vertical-align:0}.dropdown-divider{height:0;margin:.5rem 0;overflow:hidden;border-top:1px solid rgba(0,0,0,.15)}.dropdown-item{display:block;width:100%;padding:.25rem 1rem;clear:both;font-weight:400;color:#212529;text-align:inherit;text-decoration:none;-webkit-text-decoration:none;-moz-text-decoration:none;-ms-text-decoration:none;-o-text-decoration:none;white-space:nowrap;background-color:transparent;border:0}.dropdown-item:hover,.dropdown-item:focus{color:#1e2125;background-color:#e9ecef}.dropdown-item.active,.dropdown-item:active{color:#fff;text-decoration:none;background-color:#0d6efd}.dropdown-item.disabled,.dropdown-item:disabled{color:#adb5bd;pointer-events:none;background-color:transparent}.dropdown-menu.show{display:block}.dropdown-header{display:block;padding:.5rem 1rem;margin-bottom:0;font-size:0.875rem;color:#6c757d;white-space:nowrap}.dropdown-item-text{display:block;padding:.25rem 1rem;color:#212529}.dropdown-menu-dark{color:#dee2e6;background-color:#343a40;border-color:rgba(0,0,0,.15)}.dropdown-menu-dark .dropdown-item{color:#dee2e6}.dropdown-menu-dark .dropdown-item:hover,.dropdown-menu-dark .dropdown-item:focus{color:#fff;background-color:rgba(255,255,255,.15)}.dropdown-menu-dark .dropdown-item.active,.dropdown-menu-dark .dropdown-item:active{color:#fff;background-color:#0d6efd}.dropdown-menu-dark .dropdown-item.disabled,.dropdown-menu-dark .dropdown-item:disabled{color:#adb5bd}.dropdown-menu-dark .dropdown-divider{border-color:rgba(0,0,0,.15)}.dropdown-menu-dark .dropdown-item-text{color:#dee2e6}.dropdown-menu-dark .dropdown-header{color:#adb5bd}.btn-group,.btn-group-vertical{position:relative;display:inline-flex;vertical-align:middle}.btn-group>.btn,.btn-group-vertical>.btn{position:relative;flex:1 1 auto;-webkit-flex:1 1 auto}.btn-group>.btn-check:checked+.btn,.btn-group>.btn-check:focus+.btn,.btn-group>.btn:hover,.btn-group>.btn:focus,.btn-group>.btn:active,.btn-group>.btn.active,.btn-group-vertical>.btn-check:checked+.btn,.btn-group-vertical>.btn-check:focus+.btn,.btn-group-vertical>.btn:hover,.btn-group-vertical>.btn:focus,.btn-group-vertical>.btn:active,.btn-group-vertical>.btn.active{z-index:1}.btn-toolbar{display:flex;display:-webkit-flex;flex-wrap:wrap;-webkit-flex-wrap:wrap;justify-content:flex-start;-webkit-justify-content:flex-start}.btn-toolbar .input-group{width:auto}.btn-group>.btn:not(:first-child),.btn-group>.btn-group:not(:first-child){margin-left:-1px}.btn-group>.btn:not(:last-child):not(.dropdown-toggle),.btn-group>.btn-group:not(:last-child)>.btn{border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn:nth-child(n+3),.btn-group>:not(.btn-check)+.btn,.btn-group>.btn-group:not(:first-child)>.btn{border-top-left-radius:0;border-bottom-left-radius:0}.dropdown-toggle-split{padding-right:.5625rem;padding-left:.5625rem}.dropdown-toggle-split::after,.dropup .dropdown-toggle-split::after,.dropend .dropdown-toggle-split::after{margin-left:0}.dropstart 
.dropdown-toggle-split::before{margin-right:0}.btn-sm+.dropdown-toggle-split,.btn-group-sm>.btn+.dropdown-toggle-split{padding-right:.375rem;padding-left:.375rem}.btn-lg+.dropdown-toggle-split,.btn-group-lg>.btn+.dropdown-toggle-split{padding-right:.75rem;padding-left:.75rem}.btn-group-vertical{flex-direction:column;-webkit-flex-direction:column;align-items:flex-start;-webkit-align-items:flex-start;justify-content:center;-webkit-justify-content:center}.btn-group-vertical>.btn,.btn-group-vertical>.btn-group{width:100%}.btn-group-vertical>.btn:not(:first-child),.btn-group-vertical>.btn-group:not(:first-child){margin-top:-1px}.btn-group-vertical>.btn:not(:last-child):not(.dropdown-toggle),.btn-group-vertical>.btn-group:not(:last-child)>.btn{border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn~.btn,.btn-group-vertical>.btn-group:not(:first-child)>.btn{border-top-left-radius:0;border-top-right-radius:0}.nav{display:flex;display:-webkit-flex;flex-wrap:wrap;-webkit-flex-wrap:wrap;padding-left:0;margin-bottom:0;list-style:none}.nav-link{display:block;padding:.5rem 1rem;color:#0d6efd;text-decoration:none;-webkit-text-decoration:none;-moz-text-decoration:none;-ms-text-decoration:none;-o-text-decoration:none;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out}@media(prefers-reduced-motion: reduce){.nav-link{transition:none}}.nav-link:hover,.nav-link:focus{color:#0a58ca}.nav-link.disabled{color:#6c757d;pointer-events:none;cursor:default}.nav-tabs{border-bottom:1px solid #dee2e6}.nav-tabs .nav-link{margin-bottom:-1px;background:none;border:1px solid transparent;border-top-left-radius:.25rem;border-top-right-radius:.25rem}.nav-tabs .nav-link:hover,.nav-tabs .nav-link:focus{border-color:#e9ecef #e9ecef #dee2e6;isolation:isolate}.nav-tabs .nav-link.disabled{color:#6c757d;background-color:transparent;border-color:transparent}.nav-tabs .nav-link.active,.nav-tabs .nav-item.show .nav-link{color:#495057;background-color:#fff;border-color:#dee2e6 #dee2e6 #fff}.nav-tabs .dropdown-menu{margin-top:-1px;border-top-left-radius:0;border-top-right-radius:0}.nav-pills .nav-link{background:none;border:0;border-radius:.25rem}.nav-pills .nav-link.active,.nav-pills .show>.nav-link{color:#fff;background-color:#0d6efd}.nav-fill>.nav-link,.nav-fill .nav-item{flex:1 1 auto;-webkit-flex:1 1 auto;text-align:center}.nav-justified>.nav-link,.nav-justified .nav-item{flex-basis:0;-webkit-flex-basis:0;flex-grow:1;-webkit-flex-grow:1;text-align:center}.nav-fill .nav-item .nav-link,.nav-justified .nav-item 
.nav-link{width:100%}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.navbar{position:relative;display:flex;display:-webkit-flex;flex-wrap:wrap;-webkit-flex-wrap:wrap;align-items:center;-webkit-align-items:center;justify-content:space-between;-webkit-justify-content:space-between;padding-top:.5rem;padding-bottom:.5rem}.navbar>.container-xxl,.navbar>.container-xl,.navbar>.container-lg,.navbar>.container-md,.navbar>.container-sm,.navbar>.container,.navbar>.container-fluid{display:flex;display:-webkit-flex;flex-wrap:inherit;-webkit-flex-wrap:inherit;align-items:center;-webkit-align-items:center;justify-content:space-between;-webkit-justify-content:space-between}.navbar-brand{padding-top:.3125rem;padding-bottom:.3125rem;margin-right:1rem;font-size:1.25rem;text-decoration:none;-webkit-text-decoration:none;-moz-text-decoration:none;-ms-text-decoration:none;-o-text-decoration:none;white-space:nowrap}.navbar-nav{display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;padding-left:0;margin-bottom:0;list-style:none}.navbar-nav .nav-link{padding-right:0;padding-left:0}.navbar-nav .dropdown-menu{position:static}.navbar-text{padding-top:.5rem;padding-bottom:.5rem}.navbar-collapse{flex-basis:100%;-webkit-flex-basis:100%;flex-grow:1;-webkit-flex-grow:1;align-items:center;-webkit-align-items:center}.navbar-toggler{padding:.25rem .75rem;font-size:1.25rem;line-height:1;background-color:transparent;border:1px solid transparent;border-radius:.25rem;transition:box-shadow .15s ease-in-out}@media(prefers-reduced-motion: reduce){.navbar-toggler{transition:none}}.navbar-toggler:hover{text-decoration:none}.navbar-toggler:focus{text-decoration:none;outline:0;box-shadow:0 0 0 .25rem}.navbar-toggler-icon{display:inline-block;width:1.5em;height:1.5em;vertical-align:middle;background-repeat:no-repeat;background-position:center;background-size:100%}.navbar-nav-scroll{max-height:var(--bs-scroll-height, 75vh);overflow-y:auto}@media(min-width: 576px){.navbar-expand-sm{flex-wrap:nowrap;-webkit-flex-wrap:nowrap;justify-content:flex-start;-webkit-justify-content:flex-start}.navbar-expand-sm .navbar-nav{flex-direction:row;-webkit-flex-direction:row}.navbar-expand-sm .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-sm .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-sm .navbar-nav-scroll{overflow:visible}.navbar-expand-sm .navbar-collapse{display:flex !important;display:-webkit-flex !important;flex-basis:auto;-webkit-flex-basis:auto}.navbar-expand-sm .navbar-toggler{display:none}.navbar-expand-sm .offcanvas-header{display:none}.navbar-expand-sm .offcanvas{position:inherit;bottom:0;z-index:1000;flex-grow:1;-webkit-flex-grow:1;visibility:visible !important;background-color:transparent;border-right:0;border-left:0;transition:none;transform:none}.navbar-expand-sm .offcanvas-top,.navbar-expand-sm .offcanvas-bottom{height:auto;border-top:0;border-bottom:0}.navbar-expand-sm .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible}}@media(min-width: 768px){.navbar-expand-md{flex-wrap:nowrap;-webkit-flex-wrap:nowrap;justify-content:flex-start;-webkit-justify-content:flex-start}.navbar-expand-md .navbar-nav{flex-direction:row;-webkit-flex-direction:row}.navbar-expand-md .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-md .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-md .navbar-nav-scroll{overflow:visible}.navbar-expand-md .navbar-collapse{display:flex 
!important;display:-webkit-flex !important;flex-basis:auto;-webkit-flex-basis:auto}.navbar-expand-md .navbar-toggler{display:none}.navbar-expand-md .offcanvas-header{display:none}.navbar-expand-md .offcanvas{position:inherit;bottom:0;z-index:1000;flex-grow:1;-webkit-flex-grow:1;visibility:visible !important;background-color:transparent;border-right:0;border-left:0;transition:none;transform:none}.navbar-expand-md .offcanvas-top,.navbar-expand-md .offcanvas-bottom{height:auto;border-top:0;border-bottom:0}.navbar-expand-md .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible}}@media(min-width: 992px){.navbar-expand-lg{flex-wrap:nowrap;-webkit-flex-wrap:nowrap;justify-content:flex-start;-webkit-justify-content:flex-start}.navbar-expand-lg .navbar-nav{flex-direction:row;-webkit-flex-direction:row}.navbar-expand-lg .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-lg .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-lg .navbar-nav-scroll{overflow:visible}.navbar-expand-lg .navbar-collapse{display:flex !important;display:-webkit-flex !important;flex-basis:auto;-webkit-flex-basis:auto}.navbar-expand-lg .navbar-toggler{display:none}.navbar-expand-lg .offcanvas-header{display:none}.navbar-expand-lg .offcanvas{position:inherit;bottom:0;z-index:1000;flex-grow:1;-webkit-flex-grow:1;visibility:visible !important;background-color:transparent;border-right:0;border-left:0;transition:none;transform:none}.navbar-expand-lg .offcanvas-top,.navbar-expand-lg .offcanvas-bottom{height:auto;border-top:0;border-bottom:0}.navbar-expand-lg .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible}}@media(min-width: 1200px){.navbar-expand-xl{flex-wrap:nowrap;-webkit-flex-wrap:nowrap;justify-content:flex-start;-webkit-justify-content:flex-start}.navbar-expand-xl .navbar-nav{flex-direction:row;-webkit-flex-direction:row}.navbar-expand-xl .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-xl .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-xl .navbar-nav-scroll{overflow:visible}.navbar-expand-xl .navbar-collapse{display:flex !important;display:-webkit-flex !important;flex-basis:auto;-webkit-flex-basis:auto}.navbar-expand-xl .navbar-toggler{display:none}.navbar-expand-xl .offcanvas-header{display:none}.navbar-expand-xl .offcanvas{position:inherit;bottom:0;z-index:1000;flex-grow:1;-webkit-flex-grow:1;visibility:visible !important;background-color:transparent;border-right:0;border-left:0;transition:none;transform:none}.navbar-expand-xl .offcanvas-top,.navbar-expand-xl .offcanvas-bottom{height:auto;border-top:0;border-bottom:0}.navbar-expand-xl .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible}}@media(min-width: 1400px){.navbar-expand-xxl{flex-wrap:nowrap;-webkit-flex-wrap:nowrap;justify-content:flex-start;-webkit-justify-content:flex-start}.navbar-expand-xxl .navbar-nav{flex-direction:row;-webkit-flex-direction:row}.navbar-expand-xxl .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-xxl .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-xxl .navbar-nav-scroll{overflow:visible}.navbar-expand-xxl .navbar-collapse{display:flex !important;display:-webkit-flex !important;flex-basis:auto;-webkit-flex-basis:auto}.navbar-expand-xxl .navbar-toggler{display:none}.navbar-expand-xxl .offcanvas-header{display:none}.navbar-expand-xxl 
.offcanvas{position:inherit;bottom:0;z-index:1000;flex-grow:1;-webkit-flex-grow:1;visibility:visible !important;background-color:transparent;border-right:0;border-left:0;transition:none;transform:none}.navbar-expand-xxl .offcanvas-top,.navbar-expand-xxl .offcanvas-bottom{height:auto;border-top:0;border-bottom:0}.navbar-expand-xxl .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible}}.navbar-expand{flex-wrap:nowrap;-webkit-flex-wrap:nowrap;justify-content:flex-start;-webkit-justify-content:flex-start}.navbar-expand .navbar-nav{flex-direction:row;-webkit-flex-direction:row}.navbar-expand .navbar-nav .dropdown-menu{position:absolute}.navbar-expand .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand .navbar-nav-scroll{overflow:visible}.navbar-expand .navbar-collapse{display:flex !important;display:-webkit-flex !important;flex-basis:auto;-webkit-flex-basis:auto}.navbar-expand .navbar-toggler{display:none}.navbar-expand .offcanvas-header{display:none}.navbar-expand .offcanvas{position:inherit;bottom:0;z-index:1000;flex-grow:1;-webkit-flex-grow:1;visibility:visible !important;background-color:transparent;border-right:0;border-left:0;transition:none;transform:none}.navbar-expand .offcanvas-top,.navbar-expand .offcanvas-bottom{height:auto;border-top:0;border-bottom:0}.navbar-expand .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible}.navbar-light{background-color:#517699}.navbar-light .navbar-brand{color:#fdfefe}.navbar-light .navbar-brand:hover,.navbar-light .navbar-brand:focus{color:#fdfefe}.navbar-light .navbar-nav .nav-link{color:#fdfefe}.navbar-light .navbar-nav .nav-link:hover,.navbar-light .navbar-nav .nav-link:focus{color:rgba(253,254,254,.8)}.navbar-light .navbar-nav .nav-link.disabled{color:rgba(253,254,254,.75)}.navbar-light .navbar-nav .show>.nav-link,.navbar-light .navbar-nav .nav-link.active{color:#fdfefe}.navbar-light .navbar-toggler{color:#fdfefe;border-color:rgba(253,254,254,.4)}.navbar-light .navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 30 30'%3e%3cpath stroke='%23fdfefe' stroke-linecap='round' stroke-miterlimit='10' stroke-width='2' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-light .navbar-text{color:#fdfefe}.navbar-light .navbar-text a,.navbar-light .navbar-text a:hover,.navbar-light .navbar-text a:focus{color:#fdfefe}.navbar-dark{background-color:#517699}.navbar-dark .navbar-brand{color:#fdfefe}.navbar-dark .navbar-brand:hover,.navbar-dark .navbar-brand:focus{color:#fdfefe}.navbar-dark .navbar-nav .nav-link{color:#fdfefe}.navbar-dark .navbar-nav .nav-link:hover,.navbar-dark .navbar-nav .nav-link:focus{color:rgba(253,254,254,.8)}.navbar-dark .navbar-nav .nav-link.disabled{color:rgba(253,254,254,.75)}.navbar-dark .navbar-nav .show>.nav-link,.navbar-dark .navbar-nav .active>.nav-link,.navbar-dark .navbar-nav .nav-link.active{color:#fdfefe}.navbar-dark .navbar-toggler{color:#fdfefe;border-color:rgba(253,254,254,.4)}.navbar-dark .navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 30 30'%3e%3cpath stroke='%23fdfefe' stroke-linecap='round' stroke-miterlimit='10' stroke-width='2' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-dark .navbar-text{color:#fdfefe}.navbar-dark .navbar-text a,.navbar-dark .navbar-text a:hover,.navbar-dark .navbar-text 
a:focus{color:#fdfefe}.card{position:relative;display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;min-width:0;word-wrap:break-word;background-color:#fff;background-clip:border-box;border:1px solid rgba(0,0,0,.125);border-radius:.25rem}.card>hr{margin-right:0;margin-left:0}.card>.list-group{border-top:inherit;border-bottom:inherit}.card>.list-group:first-child{border-top-width:0;border-top-left-radius:calc(0.25rem - 1px);border-top-right-radius:calc(0.25rem - 1px)}.card>.list-group:last-child{border-bottom-width:0;border-bottom-right-radius:calc(0.25rem - 1px);border-bottom-left-radius:calc(0.25rem - 1px)}.card>.card-header+.list-group,.card>.list-group+.card-footer{border-top:0}.card-body{flex:1 1 auto;-webkit-flex:1 1 auto;padding:1rem 1rem}.card-title{margin-bottom:.5rem}.card-subtitle{margin-top:-0.25rem;margin-bottom:0}.card-text:last-child{margin-bottom:0}.card-link+.card-link{margin-left:1rem}.card-header{padding:.5rem 1rem;margin-bottom:0;background-color:rgba(0,0,0,.03);border-bottom:1px solid rgba(0,0,0,.125)}.card-header:first-child{border-radius:calc(0.25rem - 1px) calc(0.25rem - 1px) 0 0}.card-footer{padding:.5rem 1rem;background-color:rgba(0,0,0,.03);border-top:1px solid rgba(0,0,0,.125)}.card-footer:last-child{border-radius:0 0 calc(0.25rem - 1px) calc(0.25rem - 1px)}.card-header-tabs{margin-right:-0.5rem;margin-bottom:-0.5rem;margin-left:-0.5rem;border-bottom:0}.card-header-pills{margin-right:-0.5rem;margin-left:-0.5rem}.card-img-overlay{position:absolute;top:0;right:0;bottom:0;left:0;padding:1rem;border-radius:calc(0.25rem - 1px)}.card-img,.card-img-top,.card-img-bottom{width:100%}.card-img,.card-img-top{border-top-left-radius:calc(0.25rem - 1px);border-top-right-radius:calc(0.25rem - 1px)}.card-img,.card-img-bottom{border-bottom-right-radius:calc(0.25rem - 1px);border-bottom-left-radius:calc(0.25rem - 1px)}.card-group>.card{margin-bottom:.75rem}@media(min-width: 576px){.card-group{display:flex;display:-webkit-flex;flex-flow:row wrap;-webkit-flex-flow:row wrap}.card-group>.card{flex:1 0 0%;-webkit-flex:1 0 0%;margin-bottom:0}.card-group>.card+.card{margin-left:0;border-left:0}.card-group>.card:not(:last-child){border-top-right-radius:0;border-bottom-right-radius:0}.card-group>.card:not(:last-child) .card-img-top,.card-group>.card:not(:last-child) .card-header{border-top-right-radius:0}.card-group>.card:not(:last-child) .card-img-bottom,.card-group>.card:not(:last-child) .card-footer{border-bottom-right-radius:0}.card-group>.card:not(:first-child){border-top-left-radius:0;border-bottom-left-radius:0}.card-group>.card:not(:first-child) .card-img-top,.card-group>.card:not(:first-child) .card-header{border-top-left-radius:0}.card-group>.card:not(:first-child) .card-img-bottom,.card-group>.card:not(:first-child) .card-footer{border-bottom-left-radius:0}}.accordion-button{position:relative;display:flex;display:-webkit-flex;align-items:center;-webkit-align-items:center;width:100%;padding:1rem 1.25rem;font-size:1rem;color:#212529;text-align:left;background-color:#fff;border:0;border-radius:0;overflow-anchor:none;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out,border-radius .15s ease}@media(prefers-reduced-motion: reduce){.accordion-button{transition:none}}.accordion-button:not(.collapsed){color:#0c63e4;background-color:#e7f1ff;box-shadow:inset 0 -1px 0 rgba(0,0,0,.125)}.accordion-button:not(.collapsed)::after{background-image:url("data:image/svg+xml,%3csvg 
xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%230c63e4'%3e%3cpath fill-rule='evenodd' d='M1.646 4.646a.5.5 0 0 1 .708 0L8 10.293l5.646-5.647a.5.5 0 0 1 .708.708l-6 6a.5.5 0 0 1-.708 0l-6-6a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e");transform:rotate(-180deg)}.accordion-button::after{flex-shrink:0;-webkit-flex-shrink:0;width:1.25rem;height:1.25rem;margin-left:auto;content:"";background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23212529'%3e%3cpath fill-rule='evenodd' d='M1.646 4.646a.5.5 0 0 1 .708 0L8 10.293l5.646-5.647a.5.5 0 0 1 .708.708l-6 6a.5.5 0 0 1-.708 0l-6-6a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e");background-repeat:no-repeat;background-size:1.25rem;transition:transform .2s ease-in-out}@media(prefers-reduced-motion: reduce){.accordion-button::after{transition:none}}.accordion-button:hover{z-index:2}.accordion-button:focus{z-index:3;border-color:#86b7fe;outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25)}.accordion-header{margin-bottom:0}.accordion-item{background-color:#fff;border:1px solid rgba(0,0,0,.125)}.accordion-item:first-of-type{border-top-left-radius:.25rem;border-top-right-radius:.25rem}.accordion-item:first-of-type .accordion-button{border-top-left-radius:calc(0.25rem - 1px);border-top-right-radius:calc(0.25rem - 1px)}.accordion-item:not(:first-of-type){border-top:0}.accordion-item:last-of-type{border-bottom-right-radius:.25rem;border-bottom-left-radius:.25rem}.accordion-item:last-of-type .accordion-button.collapsed{border-bottom-right-radius:calc(0.25rem - 1px);border-bottom-left-radius:calc(0.25rem - 1px)}.accordion-item:last-of-type .accordion-collapse{border-bottom-right-radius:.25rem;border-bottom-left-radius:.25rem}.accordion-body{padding:1rem 1.25rem}.accordion-flush .accordion-collapse{border-width:0}.accordion-flush .accordion-item{border-right:0;border-left:0;border-radius:0}.accordion-flush .accordion-item:first-child{border-top:0}.accordion-flush .accordion-item:last-child{border-bottom:0}.accordion-flush .accordion-item .accordion-button{border-radius:0}.breadcrumb{display:flex;display:-webkit-flex;flex-wrap:wrap;-webkit-flex-wrap:wrap;padding:0 0;margin-bottom:1rem;list-style:none}.breadcrumb-item+.breadcrumb-item{padding-left:.5rem}.breadcrumb-item+.breadcrumb-item::before{float:left;padding-right:.5rem;color:#6c757d;content:var(--bs-breadcrumb-divider, "/") /* rtl: var(--bs-breadcrumb-divider, "/") */}.breadcrumb-item.active{color:#6c757d}.pagination{display:flex;display:-webkit-flex;padding-left:0;list-style:none}.page-link{position:relative;display:block;color:#0d6efd;text-decoration:none;-webkit-text-decoration:none;-moz-text-decoration:none;-ms-text-decoration:none;-o-text-decoration:none;background-color:#fff;border:1px solid #dee2e6;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media(prefers-reduced-motion: reduce){.page-link{transition:none}}.page-link:hover{z-index:2;color:#0a58ca;background-color:#e9ecef;border-color:#dee2e6}.page-link:focus{z-index:3;color:#0a58ca;background-color:#e9ecef;outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25)}.page-item:not(:first-child) .page-link{margin-left:-1px}.page-item.active .page-link{z-index:3;color:#fff;background-color:#0d6efd;border-color:#0d6efd}.page-item.disabled .page-link{color:#6c757d;pointer-events:none;background-color:#fff;border-color:#dee2e6}.page-link{padding:.375rem .75rem}.page-item:first-child 
.page-link{border-top-left-radius:.25rem;border-bottom-left-radius:.25rem}.page-item:last-child .page-link{border-top-right-radius:.25rem;border-bottom-right-radius:.25rem}.pagination-lg .page-link{padding:.75rem 1.5rem;font-size:1.25rem}.pagination-lg .page-item:first-child .page-link{border-top-left-radius:.3rem;border-bottom-left-radius:.3rem}.pagination-lg .page-item:last-child .page-link{border-top-right-radius:.3rem;border-bottom-right-radius:.3rem}.pagination-sm .page-link{padding:.25rem .5rem;font-size:0.875rem}.pagination-sm .page-item:first-child .page-link{border-top-left-radius:.2rem;border-bottom-left-radius:.2rem}.pagination-sm .page-item:last-child .page-link{border-top-right-radius:.2rem;border-bottom-right-radius:.2rem}.badge{display:inline-block;padding:.35em .65em;font-size:0.75em;font-weight:700;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:baseline;border-radius:.25rem}.badge:empty{display:none}.btn .badge{position:relative;top:-1px}.alert{position:relative;padding:1rem 1rem;margin-bottom:1rem;border:1px solid transparent;border-radius:.25rem}.alert-heading{color:inherit}.alert-link{font-weight:700}.alert-dismissible{padding-right:3rem}.alert-dismissible .btn-close{position:absolute;top:0;right:0;z-index:2;padding:1.25rem 1rem}.alert-default{color:#595a5c;background-color:#f8f9fa;border-color:#f5f6f8}.alert-default .alert-link{color:#47484a}.alert-primary{color:#084298;background-color:#cfe2ff;border-color:#b6d4fe}.alert-primary .alert-link{color:#06357a}.alert-secondary{color:#41464b;background-color:#e2e3e5;border-color:#d3d6d8}.alert-secondary .alert-link{color:#34383c}.alert-success{color:#0f5132;background-color:#d1e7dd;border-color:#badbcc}.alert-success .alert-link{color:#0c4128}.alert-info{color:#055160;background-color:#cff4fc;border-color:#b6effb}.alert-info .alert-link{color:#04414d}.alert-warning{color:#664d03;background-color:#fff3cd;border-color:#ffecb5}.alert-warning .alert-link{color:#523e02}.alert-danger{color:#842029;background-color:#f8d7da;border-color:#f5c2c7}.alert-danger .alert-link{color:#6a1a21}.alert-light{color:#636464;background-color:#fefefe;border-color:#fdfdfe}.alert-light .alert-link{color:#4f5050}.alert-dark{color:#141619;background-color:#d3d3d4;border-color:#bcbebf}.alert-dark .alert-link{color:#101214}@keyframes progress-bar-stripes{0%{background-position-x:1rem}}.progress{display:flex;display:-webkit-flex;height:1rem;overflow:hidden;font-size:0.75rem;background-color:#e9ecef;border-radius:.25rem}.progress-bar{display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;justify-content:center;-webkit-justify-content:center;overflow:hidden;color:#fff;text-align:center;white-space:nowrap;background-color:#0d6efd;transition:width .6s ease}@media(prefers-reduced-motion: reduce){.progress-bar{transition:none}}.progress-bar-striped{background-image:linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-size:1rem 1rem}.progress-bar-animated{animation:1s linear infinite progress-bar-stripes}@media(prefers-reduced-motion: reduce){.progress-bar-animated{animation:none}}.list-group{display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;padding-left:0;margin-bottom:0;border-radius:.25rem}.list-group-numbered{list-style-type:none;counter-reset:section}.list-group-numbered>li::before{content:counters(section, ".") ". 
";counter-increment:section}.list-group-item-action{width:100%;color:#495057;text-align:inherit}.list-group-item-action:hover,.list-group-item-action:focus{z-index:1;color:#495057;text-decoration:none;background-color:#f8f9fa}.list-group-item-action:active{color:#212529;background-color:#e9ecef}.list-group-item{position:relative;display:block;padding:.5rem 1rem;color:#212529;text-decoration:none;-webkit-text-decoration:none;-moz-text-decoration:none;-ms-text-decoration:none;-o-text-decoration:none;background-color:#fff;border:1px solid rgba(0,0,0,.125)}.list-group-item:first-child{border-top-left-radius:inherit;border-top-right-radius:inherit}.list-group-item:last-child{border-bottom-right-radius:inherit;border-bottom-left-radius:inherit}.list-group-item.disabled,.list-group-item:disabled{color:#6c757d;pointer-events:none;background-color:#fff}.list-group-item.active{z-index:2;color:#fff;background-color:#0d6efd;border-color:#0d6efd}.list-group-item+.list-group-item{border-top-width:0}.list-group-item+.list-group-item.active{margin-top:-1px;border-top-width:1px}.list-group-horizontal{flex-direction:row;-webkit-flex-direction:row}.list-group-horizontal>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal>.list-group-item.active{margin-top:0}.list-group-horizontal>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}@media(min-width: 576px){.list-group-horizontal-sm{flex-direction:row;-webkit-flex-direction:row}.list-group-horizontal-sm>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal-sm>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal-sm>.list-group-item.active{margin-top:0}.list-group-horizontal-sm>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-sm>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}@media(min-width: 768px){.list-group-horizontal-md{flex-direction:row;-webkit-flex-direction:row}.list-group-horizontal-md>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal-md>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal-md>.list-group-item.active{margin-top:0}.list-group-horizontal-md>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-md>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}@media(min-width: 992px){.list-group-horizontal-lg{flex-direction:row;-webkit-flex-direction:row}.list-group-horizontal-lg>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal-lg>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal-lg>.list-group-item.active{margin-top:0}.list-group-horizontal-lg>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-lg>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}@media(min-width: 
1200px){.list-group-horizontal-xl{flex-direction:row;-webkit-flex-direction:row}.list-group-horizontal-xl>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal-xl>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal-xl>.list-group-item.active{margin-top:0}.list-group-horizontal-xl>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-xl>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}@media(min-width: 1400px){.list-group-horizontal-xxl{flex-direction:row;-webkit-flex-direction:row}.list-group-horizontal-xxl>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal-xxl>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal-xxl>.list-group-item.active{margin-top:0}.list-group-horizontal-xxl>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-xxl>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}.list-group-flush{border-radius:0}.list-group-flush>.list-group-item{border-width:0 0 1px}.list-group-flush>.list-group-item:last-child{border-bottom-width:0}.list-group-item-default{color:#595a5c;background-color:#f8f9fa}.list-group-item-default.list-group-item-action:hover,.list-group-item-default.list-group-item-action:focus{color:#595a5c;background-color:#dfe0e1}.list-group-item-default.list-group-item-action.active{color:#fff;background-color:#595a5c;border-color:#595a5c}.list-group-item-primary{color:#084298;background-color:#cfe2ff}.list-group-item-primary.list-group-item-action:hover,.list-group-item-primary.list-group-item-action:focus{color:#084298;background-color:#bacbe6}.list-group-item-primary.list-group-item-action.active{color:#fff;background-color:#084298;border-color:#084298}.list-group-item-secondary{color:#41464b;background-color:#e2e3e5}.list-group-item-secondary.list-group-item-action:hover,.list-group-item-secondary.list-group-item-action:focus{color:#41464b;background-color:#cbccce}.list-group-item-secondary.list-group-item-action.active{color:#fff;background-color:#41464b;border-color:#41464b}.list-group-item-success{color:#0f5132;background-color:#d1e7dd}.list-group-item-success.list-group-item-action:hover,.list-group-item-success.list-group-item-action:focus{color:#0f5132;background-color:#bcd0c7}.list-group-item-success.list-group-item-action.active{color:#fff;background-color:#0f5132;border-color:#0f5132}.list-group-item-info{color:#055160;background-color:#cff4fc}.list-group-item-info.list-group-item-action:hover,.list-group-item-info.list-group-item-action:focus{color:#055160;background-color:#badce3}.list-group-item-info.list-group-item-action.active{color:#fff;background-color:#055160;border-color:#055160}.list-group-item-warning{color:#664d03;background-color:#fff3cd}.list-group-item-warning.list-group-item-action:hover,.list-group-item-warning.list-group-item-action:focus{color:#664d03;background-color:#e6dbb9}.list-group-item-warning.list-group-item-action.active{color:#fff;background-color:#664d03;border-color:#664d03}.list-group-item-danger{color:#842029;background-color:#f8d7da}.list-group-item-danger.list-group-item-action:hover,.list-group-item-danger.list-group-item-action:focus{color:#842029;background-color:#dfc2c4}.list-group-item-danger.list-group-item-action.active{color:#fff;background
-color:#842029;border-color:#842029}.list-group-item-light{color:#636464;background-color:#fefefe}.list-group-item-light.list-group-item-action:hover,.list-group-item-light.list-group-item-action:focus{color:#636464;background-color:#e5e5e5}.list-group-item-light.list-group-item-action.active{color:#fff;background-color:#636464;border-color:#636464}.list-group-item-dark{color:#141619;background-color:#d3d3d4}.list-group-item-dark.list-group-item-action:hover,.list-group-item-dark.list-group-item-action:focus{color:#141619;background-color:#bebebf}.list-group-item-dark.list-group-item-action.active{color:#fff;background-color:#141619;border-color:#141619}.btn-close{box-sizing:content-box;width:1em;height:1em;padding:.25em .25em;color:#000;background:transparent url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23000'%3e%3cpath d='M.293.293a1 1 0 011.414 0L8 6.586 14.293.293a1 1 0 111.414 1.414L9.414 8l6.293 6.293a1 1 0 01-1.414 1.414L8 9.414l-6.293 6.293a1 1 0 01-1.414-1.414L6.586 8 .293 1.707a1 1 0 010-1.414z'/%3e%3c/svg%3e") center/1em auto no-repeat;border:0;border-radius:.25rem;opacity:.5}.btn-close:hover{color:#000;text-decoration:none;opacity:.75}.btn-close:focus{outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25);opacity:1}.btn-close:disabled,.btn-close.disabled{pointer-events:none;user-select:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;-o-user-select:none;opacity:.25}.btn-close-white{filter:invert(1) grayscale(100%) brightness(200%)}.toast{width:350px;max-width:100%;font-size:0.875rem;pointer-events:auto;background-color:rgba(255,255,255,.85);background-clip:padding-box;border:1px solid rgba(0,0,0,.1);box-shadow:0 .5rem 1rem rgba(0,0,0,.15);border-radius:.25rem}.toast.showing{opacity:0}.toast:not(.show){display:none}.toast-container{width:max-content;width:-webkit-max-content;width:-moz-max-content;width:-ms-max-content;width:-o-max-content;max-width:100%;pointer-events:none}.toast-container>:not(:last-child){margin-bottom:.75rem}.toast-header{display:flex;display:-webkit-flex;align-items:center;-webkit-align-items:center;padding:.5rem .75rem;color:#6c757d;background-color:rgba(255,255,255,.85);background-clip:padding-box;border-bottom:1px solid rgba(0,0,0,.05);border-top-left-radius:calc(0.25rem - 1px);border-top-right-radius:calc(0.25rem - 1px)}.toast-header .btn-close{margin-right:-0.375rem;margin-left:.75rem}.toast-body{padding:.75rem;word-wrap:break-word}.modal{position:fixed;top:0;left:0;z-index:1055;display:none;width:100%;height:100%;overflow-x:hidden;overflow-y:auto;outline:0}.modal-dialog{position:relative;width:auto;margin:.5rem;pointer-events:none}.modal.fade .modal-dialog{transition:transform .3s ease-out;transform:translate(0, -50px)}@media(prefers-reduced-motion: reduce){.modal.fade .modal-dialog{transition:none}}.modal.show .modal-dialog{transform:none}.modal.modal-static .modal-dialog{transform:scale(1.02)}.modal-dialog-scrollable{height:calc(100% - 1rem)}.modal-dialog-scrollable .modal-content{max-height:100%;overflow:hidden}.modal-dialog-scrollable .modal-body{overflow-y:auto}.modal-dialog-centered{display:flex;display:-webkit-flex;align-items:center;-webkit-align-items:center;min-height:calc(100% - 1rem)}.modal-content{position:relative;display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;width:100%;pointer-events:auto;background-color:#fff;background-clip:padding-box;border:1px solid 
rgba(0,0,0,.2);border-radius:.3rem;outline:0}.modal-backdrop{position:fixed;top:0;left:0;z-index:1050;width:100vw;height:100vh;background-color:#000}.modal-backdrop.fade{opacity:0}.modal-backdrop.show{opacity:.5}.modal-header{display:flex;display:-webkit-flex;flex-shrink:0;-webkit-flex-shrink:0;align-items:center;-webkit-align-items:center;justify-content:space-between;-webkit-justify-content:space-between;padding:1rem 1rem;border-bottom:1px solid #dee2e6;border-top-left-radius:calc(0.3rem - 1px);border-top-right-radius:calc(0.3rem - 1px)}.modal-header .btn-close{padding:.5rem .5rem;margin:-0.5rem -0.5rem -0.5rem auto}.modal-title{margin-bottom:0;line-height:1.5}.modal-body{position:relative;flex:1 1 auto;-webkit-flex:1 1 auto;padding:1rem}.modal-footer{display:flex;display:-webkit-flex;flex-wrap:wrap;-webkit-flex-wrap:wrap;flex-shrink:0;-webkit-flex-shrink:0;align-items:center;-webkit-align-items:center;justify-content:flex-end;-webkit-justify-content:flex-end;padding:.75rem;border-top:1px solid #dee2e6;border-bottom-right-radius:calc(0.3rem - 1px);border-bottom-left-radius:calc(0.3rem - 1px)}.modal-footer>*{margin:.25rem}@media(min-width: 576px){.modal-dialog{max-width:500px;margin:1.75rem auto}.modal-dialog-scrollable{height:calc(100% - 3.5rem)}.modal-dialog-centered{min-height:calc(100% - 3.5rem)}.modal-sm{max-width:300px}}@media(min-width: 992px){.modal-lg,.modal-xl{max-width:800px}}@media(min-width: 1200px){.modal-xl{max-width:1140px}}.modal-fullscreen{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen .modal-content{height:100%;border:0;border-radius:0}.modal-fullscreen .modal-header{border-radius:0}.modal-fullscreen .modal-body{overflow-y:auto}.modal-fullscreen .modal-footer{border-radius:0}@media(max-width: 575.98px){.modal-fullscreen-sm-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-sm-down .modal-content{height:100%;border:0;border-radius:0}.modal-fullscreen-sm-down .modal-header{border-radius:0}.modal-fullscreen-sm-down .modal-body{overflow-y:auto}.modal-fullscreen-sm-down .modal-footer{border-radius:0}}@media(max-width: 767.98px){.modal-fullscreen-md-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-md-down .modal-content{height:100%;border:0;border-radius:0}.modal-fullscreen-md-down .modal-header{border-radius:0}.modal-fullscreen-md-down .modal-body{overflow-y:auto}.modal-fullscreen-md-down .modal-footer{border-radius:0}}@media(max-width: 991.98px){.modal-fullscreen-lg-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-lg-down .modal-content{height:100%;border:0;border-radius:0}.modal-fullscreen-lg-down .modal-header{border-radius:0}.modal-fullscreen-lg-down .modal-body{overflow-y:auto}.modal-fullscreen-lg-down .modal-footer{border-radius:0}}@media(max-width: 1199.98px){.modal-fullscreen-xl-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-xl-down .modal-content{height:100%;border:0;border-radius:0}.modal-fullscreen-xl-down .modal-header{border-radius:0}.modal-fullscreen-xl-down .modal-body{overflow-y:auto}.modal-fullscreen-xl-down .modal-footer{border-radius:0}}@media(max-width: 1399.98px){.modal-fullscreen-xxl-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-xxl-down .modal-content{height:100%;border:0;border-radius:0}.modal-fullscreen-xxl-down .modal-header{border-radius:0}.modal-fullscreen-xxl-down .modal-body{overflow-y:auto}.modal-fullscreen-xxl-down 
.modal-footer{border-radius:0}}.tooltip{position:absolute;z-index:1080;display:block;margin:0;font-family:var(--bs-font-sans-serif);font-style:normal;font-weight:400;line-height:1.5;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;white-space:normal;line-break:auto;font-size:0.875rem;word-wrap:break-word;opacity:0}.tooltip.show{opacity:.9}.tooltip .tooltip-arrow{position:absolute;display:block;width:.8rem;height:.4rem}.tooltip .tooltip-arrow::before{position:absolute;content:"";border-color:transparent;border-style:solid}.bs-tooltip-top,.bs-tooltip-auto[data-popper-placement^=top]{padding:.4rem 0}.bs-tooltip-top .tooltip-arrow,.bs-tooltip-auto[data-popper-placement^=top] .tooltip-arrow{bottom:0}.bs-tooltip-top .tooltip-arrow::before,.bs-tooltip-auto[data-popper-placement^=top] .tooltip-arrow::before{top:-1px;border-width:.4rem .4rem 0;border-top-color:#000}.bs-tooltip-end,.bs-tooltip-auto[data-popper-placement^=right]{padding:0 .4rem}.bs-tooltip-end .tooltip-arrow,.bs-tooltip-auto[data-popper-placement^=right] .tooltip-arrow{left:0;width:.4rem;height:.8rem}.bs-tooltip-end .tooltip-arrow::before,.bs-tooltip-auto[data-popper-placement^=right] .tooltip-arrow::before{right:-1px;border-width:.4rem .4rem .4rem 0;border-right-color:#000}.bs-tooltip-bottom,.bs-tooltip-auto[data-popper-placement^=bottom]{padding:.4rem 0}.bs-tooltip-bottom .tooltip-arrow,.bs-tooltip-auto[data-popper-placement^=bottom] .tooltip-arrow{top:0}.bs-tooltip-bottom .tooltip-arrow::before,.bs-tooltip-auto[data-popper-placement^=bottom] .tooltip-arrow::before{bottom:-1px;border-width:0 .4rem .4rem;border-bottom-color:#000}.bs-tooltip-start,.bs-tooltip-auto[data-popper-placement^=left]{padding:0 .4rem}.bs-tooltip-start .tooltip-arrow,.bs-tooltip-auto[data-popper-placement^=left] .tooltip-arrow{right:0;width:.4rem;height:.8rem}.bs-tooltip-start .tooltip-arrow::before,.bs-tooltip-auto[data-popper-placement^=left] .tooltip-arrow::before{left:-1px;border-width:.4rem 0 .4rem .4rem;border-left-color:#000}.tooltip-inner{max-width:200px;padding:.25rem .5rem;color:#fff;text-align:center;background-color:#000;border-radius:.25rem}.popover{position:absolute;top:0;left:0 /* rtl:ignore */;z-index:1070;display:block;max-width:276px;font-family:var(--bs-font-sans-serif);font-style:normal;font-weight:400;line-height:1.5;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;white-space:normal;line-break:auto;font-size:0.875rem;word-wrap:break-word;background-color:#fff;background-clip:padding-box;border:1px solid rgba(0,0,0,.2);border-radius:.3rem}.popover .popover-arrow{position:absolute;display:block;width:1rem;height:.5rem}.popover .popover-arrow::before,.popover .popover-arrow::after{position:absolute;display:block;content:"";border-color:transparent;border-style:solid}.bs-popover-top>.popover-arrow,.bs-popover-auto[data-popper-placement^=top]>.popover-arrow{bottom:calc(-0.5rem - 1px)}.bs-popover-top>.popover-arrow::before,.bs-popover-auto[data-popper-placement^=top]>.popover-arrow::before{bottom:0;border-width:.5rem .5rem 0;border-top-color:rgba(0,0,0,.25)}.bs-popover-top>.popover-arrow::after,.bs-popover-auto[data-popper-placement^=top]>.popover-arrow::after{bottom:1px;border-width:.5rem .5rem 0;border-top-color:#fff}.bs-popover-end>.popover-arrow,.bs-popover-auto[data-popper-placement^=right]>.popover-arrow{left:calc(-0.5rem - 
1px);width:.5rem;height:1rem}.bs-popover-end>.popover-arrow::before,.bs-popover-auto[data-popper-placement^=right]>.popover-arrow::before{left:0;border-width:.5rem .5rem .5rem 0;border-right-color:rgba(0,0,0,.25)}.bs-popover-end>.popover-arrow::after,.bs-popover-auto[data-popper-placement^=right]>.popover-arrow::after{left:1px;border-width:.5rem .5rem .5rem 0;border-right-color:#fff}.bs-popover-bottom>.popover-arrow,.bs-popover-auto[data-popper-placement^=bottom]>.popover-arrow{top:calc(-0.5rem - 1px)}.bs-popover-bottom>.popover-arrow::before,.bs-popover-auto[data-popper-placement^=bottom]>.popover-arrow::before{top:0;border-width:0 .5rem .5rem .5rem;border-bottom-color:rgba(0,0,0,.25)}.bs-popover-bottom>.popover-arrow::after,.bs-popover-auto[data-popper-placement^=bottom]>.popover-arrow::after{top:1px;border-width:0 .5rem .5rem .5rem;border-bottom-color:#fff}.bs-popover-bottom .popover-header::before,.bs-popover-auto[data-popper-placement^=bottom] .popover-header::before{position:absolute;top:0;left:50%;display:block;width:1rem;margin-left:-0.5rem;content:"";border-bottom:1px solid #f0f0f0}.bs-popover-start>.popover-arrow,.bs-popover-auto[data-popper-placement^=left]>.popover-arrow{right:calc(-0.5rem - 1px);width:.5rem;height:1rem}.bs-popover-start>.popover-arrow::before,.bs-popover-auto[data-popper-placement^=left]>.popover-arrow::before{right:0;border-width:.5rem 0 .5rem .5rem;border-left-color:rgba(0,0,0,.25)}.bs-popover-start>.popover-arrow::after,.bs-popover-auto[data-popper-placement^=left]>.popover-arrow::after{right:1px;border-width:.5rem 0 .5rem .5rem;border-left-color:#fff}.popover-header{padding:.5rem 1rem;margin-bottom:0;font-size:1rem;background-color:#f0f0f0;border-bottom:1px solid rgba(0,0,0,.2);border-top-left-radius:calc(0.3rem - 1px);border-top-right-radius:calc(0.3rem - 1px)}.popover-header:empty{display:none}.popover-body{padding:1rem 1rem;color:#212529}.carousel{position:relative}.carousel.pointer-event{touch-action:pan-y;-webkit-touch-action:pan-y;-moz-touch-action:pan-y;-ms-touch-action:pan-y;-o-touch-action:pan-y}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel-inner::after{display:block;clear:both;content:""}.carousel-item{position:relative;display:none;float:left;width:100%;margin-right:-100%;backface-visibility:hidden;-webkit-backface-visibility:hidden;-moz-backface-visibility:hidden;-ms-backface-visibility:hidden;-o-backface-visibility:hidden;transition:transform .6s ease-in-out}@media(prefers-reduced-motion: reduce){.carousel-item{transition:none}}.carousel-item.active,.carousel-item-next,.carousel-item-prev{display:block}.carousel-item-next:not(.carousel-item-start),.active.carousel-item-end{transform:translateX(100%)}.carousel-item-prev:not(.carousel-item-end),.active.carousel-item-start{transform:translateX(-100%)}.carousel-fade .carousel-item{opacity:0;transition-property:opacity;transform:none}.carousel-fade .carousel-item.active,.carousel-fade .carousel-item-next.carousel-item-start,.carousel-fade .carousel-item-prev.carousel-item-end{z-index:1;opacity:1}.carousel-fade .active.carousel-item-start,.carousel-fade .active.carousel-item-end{z-index:0;opacity:0;transition:opacity 0s .6s}@media(prefers-reduced-motion: reduce){.carousel-fade .active.carousel-item-start,.carousel-fade 
.active.carousel-item-end{transition:none}}.carousel-control-prev,.carousel-control-next{position:absolute;top:0;bottom:0;z-index:1;display:flex;display:-webkit-flex;align-items:center;-webkit-align-items:center;justify-content:center;-webkit-justify-content:center;width:15%;padding:0;color:#fff;text-align:center;background:none;border:0;opacity:.5;transition:opacity .15s ease}@media(prefers-reduced-motion: reduce){.carousel-control-prev,.carousel-control-next{transition:none}}.carousel-control-prev:hover,.carousel-control-prev:focus,.carousel-control-next:hover,.carousel-control-next:focus{color:#fff;text-decoration:none;outline:0;opacity:.9}.carousel-control-prev{left:0}.carousel-control-next{right:0}.carousel-control-prev-icon,.carousel-control-next-icon{display:inline-block;width:2rem;height:2rem;background-repeat:no-repeat;background-position:50%;background-size:100% 100%}.carousel-control-prev-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23ffffff'%3e%3cpath d='M11.354 1.646a.5.5 0 0 1 0 .708L5.707 8l5.647 5.646a.5.5 0 0 1-.708.708l-6-6a.5.5 0 0 1 0-.708l6-6a.5.5 0 0 1 .708 0z'/%3e%3c/svg%3e")}.carousel-control-next-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23ffffff'%3e%3cpath d='M4.646 1.646a.5.5 0 0 1 .708 0l6 6a.5.5 0 0 1 0 .708l-6 6a.5.5 0 0 1-.708-.708L10.293 8 4.646 2.354a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e")}.carousel-indicators{position:absolute;right:0;bottom:0;left:0;z-index:2;display:flex;display:-webkit-flex;justify-content:center;-webkit-justify-content:center;padding:0;margin-right:15%;margin-bottom:1rem;margin-left:15%;list-style:none}.carousel-indicators [data-bs-target]{box-sizing:content-box;flex:0 1 auto;-webkit-flex:0 1 auto;width:30px;height:3px;padding:0;margin-right:3px;margin-left:3px;text-indent:-999px;cursor:pointer;background-color:#fff;background-clip:padding-box;border:0;border-top:10px solid transparent;border-bottom:10px solid transparent;opacity:.5;transition:opacity .6s ease}@media(prefers-reduced-motion: reduce){.carousel-indicators [data-bs-target]{transition:none}}.carousel-indicators .active{opacity:1}.carousel-caption{position:absolute;right:15%;bottom:1.25rem;left:15%;padding-top:1.25rem;padding-bottom:1.25rem;color:#fff;text-align:center}.carousel-dark .carousel-control-prev-icon,.carousel-dark .carousel-control-next-icon{filter:invert(1) grayscale(100)}.carousel-dark .carousel-indicators [data-bs-target]{background-color:#000}.carousel-dark .carousel-caption{color:#000}@keyframes spinner-border{to{transform:rotate(360deg) /* rtl:ignore */}}.spinner-border{display:inline-block;width:2rem;height:2rem;vertical-align:-0.125em;border:.25em solid currentColor;border-right-color:transparent;border-radius:50%;animation:.75s linear infinite spinner-border}.spinner-border-sm{width:1rem;height:1rem;border-width:.2em}@keyframes spinner-grow{0%{transform:scale(0)}50%{opacity:1;transform:none}}.spinner-grow{display:inline-block;width:2rem;height:2rem;vertical-align:-0.125em;background-color:currentColor;border-radius:50%;opacity:0;animation:.75s linear infinite spinner-grow}.spinner-grow-sm{width:1rem;height:1rem}@media(prefers-reduced-motion: 
reduce){.spinner-border,.spinner-grow{animation-duration:1.5s;-webkit-animation-duration:1.5s;-moz-animation-duration:1.5s;-ms-animation-duration:1.5s;-o-animation-duration:1.5s}}.offcanvas{position:fixed;bottom:0;z-index:1045;display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;max-width:100%;visibility:hidden;background-color:#fff;background-clip:padding-box;outline:0;transition:transform .3s ease-in-out}@media(prefers-reduced-motion: reduce){.offcanvas{transition:none}}.offcanvas-backdrop{position:fixed;top:0;left:0;z-index:1040;width:100vw;height:100vh;background-color:#000}.offcanvas-backdrop.fade{opacity:0}.offcanvas-backdrop.show{opacity:.5}.offcanvas-header{display:flex;display:-webkit-flex;align-items:center;-webkit-align-items:center;justify-content:space-between;-webkit-justify-content:space-between;padding:1rem 1rem}.offcanvas-header .btn-close{padding:.5rem .5rem;margin-top:-0.5rem;margin-right:-0.5rem;margin-bottom:-0.5rem}.offcanvas-title{margin-bottom:0;line-height:1.5}.offcanvas-body{flex-grow:1;-webkit-flex-grow:1;padding:1rem 1rem;overflow-y:auto}.offcanvas-start{top:0;left:0;width:400px;border-right:1px solid rgba(0,0,0,.2);transform:translateX(-100%)}.offcanvas-end{top:0;right:0;width:400px;border-left:1px solid rgba(0,0,0,.2);transform:translateX(100%)}.offcanvas-top{top:0;right:0;left:0;height:30vh;max-height:100%;border-bottom:1px solid rgba(0,0,0,.2);transform:translateY(-100%)}.offcanvas-bottom{right:0;left:0;height:30vh;max-height:100%;border-top:1px solid rgba(0,0,0,.2);transform:translateY(100%)}.offcanvas.show{transform:none}.placeholder{display:inline-block;min-height:1em;vertical-align:middle;cursor:wait;background-color:currentColor;opacity:.5}.placeholder.btn::before{display:inline-block;content:""}.placeholder-xs{min-height:.6em}.placeholder-sm{min-height:.8em}.placeholder-lg{min-height:1.2em}.placeholder-glow .placeholder{animation:placeholder-glow 2s ease-in-out infinite}@keyframes placeholder-glow{50%{opacity:.2}}.placeholder-wave{mask-image:linear-gradient(130deg, #000 55%, rgba(0, 0, 0, 0.8) 75%, #000 95%);-webkit-mask-image:linear-gradient(130deg, #000 55%, rgba(0, 0, 0, 0.8) 75%, #000 95%);mask-size:200% 100%;-webkit-mask-size:200% 100%;animation:placeholder-wave 2s linear infinite}@keyframes placeholder-wave{100%{mask-position:-200% 0%;-webkit-mask-position:-200% 0%}}.clearfix::after{display:block;clear:both;content:""}.link-default{color:#dee2e6}.link-default:hover,.link-default:focus{color:#e5e8eb}.link-primary{color:#0d6efd}.link-primary:hover,.link-primary:focus{color:#0a58ca}.link-secondary{color:#6c757d}.link-secondary:hover,.link-secondary:focus{color:#565e64}.link-success{color:#198754}.link-success:hover,.link-success:focus{color:#146c43}.link-info{color:#0dcaf0}.link-info:hover,.link-info:focus{color:#3dd5f3}.link-warning{color:#ffc107}.link-warning:hover,.link-warning:focus{color:#ffcd39}.link-danger{color:#dc3545}.link-danger:hover,.link-danger:focus{color:#b02a37}.link-light{color:#f8f9fa}.link-light:hover,.link-light:focus{color:#f9fafb}.link-dark{color:#212529}.link-dark:hover,.link-dark:focus{color:#1a1e21}.ratio{position:relative;width:100%}.ratio::before{display:block;padding-top:var(--bs-aspect-ratio);content:""}.ratio>*{position:absolute;top:0;left:0;width:100%;height:100%}.ratio-1x1{--bs-aspect-ratio: 100%}.ratio-4x3{--bs-aspect-ratio: calc(3 / 4 * 100%)}.ratio-16x9{--bs-aspect-ratio: calc(9 / 16 * 100%)}.ratio-21x9{--bs-aspect-ratio: calc(9 / 21 * 
100%)}.fixed-top{position:fixed;top:0;right:0;left:0;z-index:1030}.fixed-bottom{position:fixed;right:0;bottom:0;left:0;z-index:1030}.sticky-top{position:sticky;top:0;z-index:1020}@media(min-width: 576px){.sticky-sm-top{position:sticky;top:0;z-index:1020}}@media(min-width: 768px){.sticky-md-top{position:sticky;top:0;z-index:1020}}@media(min-width: 992px){.sticky-lg-top{position:sticky;top:0;z-index:1020}}@media(min-width: 1200px){.sticky-xl-top{position:sticky;top:0;z-index:1020}}@media(min-width: 1400px){.sticky-xxl-top{position:sticky;top:0;z-index:1020}}.hstack{display:flex;display:-webkit-flex;flex-direction:row;-webkit-flex-direction:row;align-items:center;-webkit-align-items:center;align-self:stretch;-webkit-align-self:stretch}.vstack{display:flex;display:-webkit-flex;flex:1 1 auto;-webkit-flex:1 1 auto;flex-direction:column;-webkit-flex-direction:column;align-self:stretch;-webkit-align-self:stretch}.visually-hidden,.visually-hidden-focusable:not(:focus):not(:focus-within){position:absolute !important;width:1px !important;height:1px !important;padding:0 !important;margin:-1px !important;overflow:hidden !important;clip:rect(0, 0, 0, 0) !important;white-space:nowrap !important;border:0 !important}.stretched-link::after{position:absolute;top:0;right:0;bottom:0;left:0;z-index:1;content:""}.text-truncate{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.vr{display:inline-block;align-self:stretch;-webkit-align-self:stretch;width:1px;min-height:1em;background-color:currentColor;opacity:.25}.align-baseline{vertical-align:baseline !important}.align-top{vertical-align:top !important}.align-middle{vertical-align:middle !important}.align-bottom{vertical-align:bottom !important}.align-text-bottom{vertical-align:text-bottom !important}.align-text-top{vertical-align:text-top !important}.float-start{float:left !important}.float-end{float:right !important}.float-none{float:none !important}.opacity-0{opacity:0 !important}.opacity-25{opacity:.25 !important}.opacity-50{opacity:.5 !important}.opacity-75{opacity:.75 !important}.opacity-100{opacity:1 !important}.overflow-auto{overflow:auto !important}.overflow-hidden{overflow:hidden !important}.overflow-visible{overflow:visible !important}.overflow-scroll{overflow:scroll !important}.d-inline{display:inline !important}.d-inline-block{display:inline-block !important}.d-block{display:block !important}.d-grid{display:grid !important}.d-table{display:table !important}.d-table-row{display:table-row !important}.d-table-cell{display:table-cell !important}.d-flex{display:flex !important}.d-inline-flex{display:inline-flex !important}.d-none{display:none !important}.shadow{box-shadow:0 .5rem 1rem rgba(0,0,0,.15) !important}.shadow-sm{box-shadow:0 .125rem .25rem rgba(0,0,0,.075) !important}.shadow-lg{box-shadow:0 1rem 3rem rgba(0,0,0,.175) !important}.shadow-none{box-shadow:none !important}.position-static{position:static !important}.position-relative{position:relative !important}.position-absolute{position:absolute !important}.position-fixed{position:fixed !important}.position-sticky{position:sticky !important}.top-0{top:0 !important}.top-50{top:50% !important}.top-100{top:100% !important}.bottom-0{bottom:0 !important}.bottom-50{bottom:50% !important}.bottom-100{bottom:100% !important}.start-0{left:0 !important}.start-50{left:50% !important}.start-100{left:100% !important}.end-0{right:0 !important}.end-50{right:50% !important}.end-100{right:100% !important}.translate-middle{transform:translate(-50%, -50%) 
!important}.translate-middle-x{transform:translateX(-50%) !important}.translate-middle-y{transform:translateY(-50%) !important}.border{border:1px solid #dee2e6 !important}.border-0{border:0 !important}.border-top{border-top:1px solid #dee2e6 !important}.border-top-0{border-top:0 !important}.border-end{border-right:1px solid #dee2e6 !important}.border-end-0{border-right:0 !important}.border-bottom{border-bottom:1px solid #dee2e6 !important}.border-bottom-0{border-bottom:0 !important}.border-start{border-left:1px solid #dee2e6 !important}.border-start-0{border-left:0 !important}.border-default{border-color:#dee2e6 !important}.border-primary{border-color:#0d6efd !important}.border-secondary{border-color:#6c757d !important}.border-success{border-color:#198754 !important}.border-info{border-color:#0dcaf0 !important}.border-warning{border-color:#ffc107 !important}.border-danger{border-color:#dc3545 !important}.border-light{border-color:#f8f9fa !important}.border-dark{border-color:#212529 !important}.border-white{border-color:#fff !important}.border-1{border-width:1px !important}.border-2{border-width:2px !important}.border-3{border-width:3px !important}.border-4{border-width:4px !important}.border-5{border-width:5px !important}.w-25{width:25% !important}.w-50{width:50% !important}.w-75{width:75% !important}.w-100{width:100% !important}.w-auto{width:auto !important}.mw-100{max-width:100% !important}.vw-100{width:100vw !important}.min-vw-100{min-width:100vw !important}.h-25{height:25% !important}.h-50{height:50% !important}.h-75{height:75% !important}.h-100{height:100% !important}.h-auto{height:auto !important}.mh-100{max-height:100% !important}.vh-100{height:100vh !important}.min-vh-100{min-height:100vh !important}.flex-fill{flex:1 1 auto !important}.flex-row{flex-direction:row !important}.flex-column{flex-direction:column !important}.flex-row-reverse{flex-direction:row-reverse !important}.flex-column-reverse{flex-direction:column-reverse !important}.flex-grow-0{flex-grow:0 !important}.flex-grow-1{flex-grow:1 !important}.flex-shrink-0{flex-shrink:0 !important}.flex-shrink-1{flex-shrink:1 !important}.flex-wrap{flex-wrap:wrap !important}.flex-nowrap{flex-wrap:nowrap !important}.flex-wrap-reverse{flex-wrap:wrap-reverse !important}.gap-0{gap:0 !important}.gap-1{gap:.25rem !important}.gap-2{gap:.5rem !important}.gap-3{gap:1rem !important}.gap-4{gap:1.5rem !important}.gap-5{gap:3rem !important}.justify-content-start{justify-content:flex-start !important}.justify-content-end{justify-content:flex-end !important}.justify-content-center{justify-content:center !important}.justify-content-between{justify-content:space-between !important}.justify-content-around{justify-content:space-around !important}.justify-content-evenly{justify-content:space-evenly !important}.align-items-start{align-items:flex-start !important}.align-items-end{align-items:flex-end !important}.align-items-center{align-items:center !important}.align-items-baseline{align-items:baseline !important}.align-items-stretch{align-items:stretch !important}.align-content-start{align-content:flex-start !important}.align-content-end{align-content:flex-end !important}.align-content-center{align-content:center !important}.align-content-between{align-content:space-between !important}.align-content-around{align-content:space-around !important}.align-content-stretch{align-content:stretch !important}.align-self-auto{align-self:auto !important}.align-self-start{align-self:flex-start !important}.align-self-end{align-self:flex-end 
!important}.align-self-center{align-self:center !important}.align-self-baseline{align-self:baseline !important}.align-self-stretch{align-self:stretch !important}.order-first{order:-1 !important}.order-0{order:0 !important}.order-1{order:1 !important}.order-2{order:2 !important}.order-3{order:3 !important}.order-4{order:4 !important}.order-5{order:5 !important}.order-last{order:6 !important}.m-0{margin:0 !important}.m-1{margin:.25rem !important}.m-2{margin:.5rem !important}.m-3{margin:1rem !important}.m-4{margin:1.5rem !important}.m-5{margin:3rem !important}.m-auto{margin:auto !important}.mx-0{margin-right:0 !important;margin-left:0 !important}.mx-1{margin-right:.25rem !important;margin-left:.25rem !important}.mx-2{margin-right:.5rem !important;margin-left:.5rem !important}.mx-3{margin-right:1rem !important;margin-left:1rem !important}.mx-4{margin-right:1.5rem !important;margin-left:1.5rem !important}.mx-5{margin-right:3rem !important;margin-left:3rem !important}.mx-auto{margin-right:auto !important;margin-left:auto !important}.my-0{margin-top:0 !important;margin-bottom:0 !important}.my-1{margin-top:.25rem !important;margin-bottom:.25rem !important}.my-2{margin-top:.5rem !important;margin-bottom:.5rem !important}.my-3{margin-top:1rem !important;margin-bottom:1rem !important}.my-4{margin-top:1.5rem !important;margin-bottom:1.5rem !important}.my-5{margin-top:3rem !important;margin-bottom:3rem !important}.my-auto{margin-top:auto !important;margin-bottom:auto !important}.mt-0{margin-top:0 !important}.mt-1{margin-top:.25rem !important}.mt-2{margin-top:.5rem !important}.mt-3{margin-top:1rem !important}.mt-4{margin-top:1.5rem !important}.mt-5{margin-top:3rem !important}.mt-auto{margin-top:auto !important}.me-0{margin-right:0 !important}.me-1{margin-right:.25rem !important}.me-2{margin-right:.5rem !important}.me-3{margin-right:1rem !important}.me-4{margin-right:1.5rem !important}.me-5{margin-right:3rem !important}.me-auto{margin-right:auto !important}.mb-0{margin-bottom:0 !important}.mb-1{margin-bottom:.25rem !important}.mb-2{margin-bottom:.5rem !important}.mb-3{margin-bottom:1rem !important}.mb-4{margin-bottom:1.5rem !important}.mb-5{margin-bottom:3rem !important}.mb-auto{margin-bottom:auto !important}.ms-0{margin-left:0 !important}.ms-1{margin-left:.25rem !important}.ms-2{margin-left:.5rem !important}.ms-3{margin-left:1rem !important}.ms-4{margin-left:1.5rem !important}.ms-5{margin-left:3rem !important}.ms-auto{margin-left:auto !important}.p-0{padding:0 !important}.p-1{padding:.25rem !important}.p-2{padding:.5rem !important}.p-3{padding:1rem !important}.p-4{padding:1.5rem !important}.p-5{padding:3rem !important}.px-0{padding-right:0 !important;padding-left:0 !important}.px-1{padding-right:.25rem !important;padding-left:.25rem !important}.px-2{padding-right:.5rem !important;padding-left:.5rem !important}.px-3{padding-right:1rem !important;padding-left:1rem !important}.px-4{padding-right:1.5rem !important;padding-left:1.5rem !important}.px-5{padding-right:3rem !important;padding-left:3rem !important}.py-0{padding-top:0 !important;padding-bottom:0 !important}.py-1{padding-top:.25rem !important;padding-bottom:.25rem !important}.py-2{padding-top:.5rem !important;padding-bottom:.5rem !important}.py-3{padding-top:1rem !important;padding-bottom:1rem !important}.py-4{padding-top:1.5rem !important;padding-bottom:1.5rem !important}.py-5{padding-top:3rem !important;padding-bottom:3rem !important}.pt-0{padding-top:0 !important}.pt-1{padding-top:.25rem !important}.pt-2{padding-top:.5rem 
!important}.pt-3{padding-top:1rem !important}.pt-4{padding-top:1.5rem !important}.pt-5{padding-top:3rem !important}.pe-0{padding-right:0 !important}.pe-1{padding-right:.25rem !important}.pe-2{padding-right:.5rem !important}.pe-3{padding-right:1rem !important}.pe-4{padding-right:1.5rem !important}.pe-5{padding-right:3rem !important}.pb-0{padding-bottom:0 !important}.pb-1{padding-bottom:.25rem !important}.pb-2{padding-bottom:.5rem !important}.pb-3{padding-bottom:1rem !important}.pb-4{padding-bottom:1.5rem !important}.pb-5{padding-bottom:3rem !important}.ps-0{padding-left:0 !important}.ps-1{padding-left:.25rem !important}.ps-2{padding-left:.5rem !important}.ps-3{padding-left:1rem !important}.ps-4{padding-left:1.5rem !important}.ps-5{padding-left:3rem !important}.font-monospace{font-family:var(--bs-font-monospace) !important}.fs-1{font-size:calc(1.345rem + 1.14vw) !important}.fs-2{font-size:calc(1.3rem + 0.6vw) !important}.fs-3{font-size:calc(1.275rem + 0.3vw) !important}.fs-4{font-size:1.25rem !important}.fs-5{font-size:1.1rem !important}.fs-6{font-size:1rem !important}.fst-italic{font-style:italic !important}.fst-normal{font-style:normal !important}.fw-light{font-weight:300 !important}.fw-lighter{font-weight:lighter !important}.fw-normal{font-weight:400 !important}.fw-bold{font-weight:700 !important}.fw-bolder{font-weight:bolder !important}.lh-1{line-height:1 !important}.lh-sm{line-height:1.25 !important}.lh-base{line-height:1.5 !important}.lh-lg{line-height:2 !important}.text-start{text-align:left !important}.text-end{text-align:right !important}.text-center{text-align:center !important}.text-decoration-none{text-decoration:none !important}.text-decoration-underline{text-decoration:underline !important}.text-decoration-line-through{text-decoration:line-through !important}.text-lowercase{text-transform:lowercase !important}.text-uppercase{text-transform:uppercase !important}.text-capitalize{text-transform:capitalize !important}.text-wrap{white-space:normal !important}.text-nowrap{white-space:nowrap !important}.text-break{word-wrap:break-word !important;word-break:break-word !important}.text-default{--bs-text-opacity: 1;color:rgba(var(--bs-default-rgb), var(--bs-text-opacity)) !important}.text-primary{--bs-text-opacity: 1;color:rgba(var(--bs-primary-rgb), var(--bs-text-opacity)) !important}.text-secondary{--bs-text-opacity: 1;color:rgba(var(--bs-secondary-rgb), var(--bs-text-opacity)) !important}.text-success{--bs-text-opacity: 1;color:rgba(var(--bs-success-rgb), var(--bs-text-opacity)) !important}.text-info{--bs-text-opacity: 1;color:rgba(var(--bs-info-rgb), var(--bs-text-opacity)) !important}.text-warning{--bs-text-opacity: 1;color:rgba(var(--bs-warning-rgb), var(--bs-text-opacity)) !important}.text-danger{--bs-text-opacity: 1;color:rgba(var(--bs-danger-rgb), var(--bs-text-opacity)) !important}.text-light{--bs-text-opacity: 1;color:rgba(var(--bs-light-rgb), var(--bs-text-opacity)) !important}.text-dark{--bs-text-opacity: 1;color:rgba(var(--bs-dark-rgb), var(--bs-text-opacity)) !important}.text-black{--bs-text-opacity: 1;color:rgba(var(--bs-black-rgb), var(--bs-text-opacity)) !important}.text-white{--bs-text-opacity: 1;color:rgba(var(--bs-white-rgb), var(--bs-text-opacity)) !important}.text-body{--bs-text-opacity: 1;color:rgba(var(--bs-body-color-rgb), var(--bs-text-opacity)) !important}.text-muted{--bs-text-opacity: 1;color:#6c757d !important}.text-black-50{--bs-text-opacity: 1;color:rgba(0,0,0,.5) !important}.text-white-50{--bs-text-opacity: 1;color:rgba(255,255,255,.5) 
!important}.text-reset{--bs-text-opacity: 1;color:inherit !important}.text-opacity-25{--bs-text-opacity: 0.25}.text-opacity-50{--bs-text-opacity: 0.5}.text-opacity-75{--bs-text-opacity: 0.75}.text-opacity-100{--bs-text-opacity: 1}.bg-default{--bs-bg-opacity: 1;background-color:rgba(var(--bs-default-rgb), var(--bs-bg-opacity)) !important}.bg-primary{--bs-bg-opacity: 1;background-color:rgba(var(--bs-primary-rgb), var(--bs-bg-opacity)) !important}.bg-secondary{--bs-bg-opacity: 1;background-color:rgba(var(--bs-secondary-rgb), var(--bs-bg-opacity)) !important}.bg-success{--bs-bg-opacity: 1;background-color:rgba(var(--bs-success-rgb), var(--bs-bg-opacity)) !important}.bg-info{--bs-bg-opacity: 1;background-color:rgba(var(--bs-info-rgb), var(--bs-bg-opacity)) !important}.bg-warning{--bs-bg-opacity: 1;background-color:rgba(var(--bs-warning-rgb), var(--bs-bg-opacity)) !important}.bg-danger{--bs-bg-opacity: 1;background-color:rgba(var(--bs-danger-rgb), var(--bs-bg-opacity)) !important}.bg-light{--bs-bg-opacity: 1;background-color:rgba(var(--bs-light-rgb), var(--bs-bg-opacity)) !important}.bg-dark{--bs-bg-opacity: 1;background-color:rgba(var(--bs-dark-rgb), var(--bs-bg-opacity)) !important}.bg-black{--bs-bg-opacity: 1;background-color:rgba(var(--bs-black-rgb), var(--bs-bg-opacity)) !important}.bg-white{--bs-bg-opacity: 1;background-color:rgba(var(--bs-white-rgb), var(--bs-bg-opacity)) !important}.bg-body{--bs-bg-opacity: 1;background-color:rgba(var(--bs-body-bg-rgb), var(--bs-bg-opacity)) !important}.bg-transparent{--bs-bg-opacity: 1;background-color:transparent !important}.bg-opacity-10{--bs-bg-opacity: 0.1}.bg-opacity-25{--bs-bg-opacity: 0.25}.bg-opacity-50{--bs-bg-opacity: 0.5}.bg-opacity-75{--bs-bg-opacity: 0.75}.bg-opacity-100{--bs-bg-opacity: 1}.bg-gradient{background-image:var(--bs-gradient) !important}.user-select-all{user-select:all !important}.user-select-auto{user-select:auto !important}.user-select-none{user-select:none !important}.pe-none{pointer-events:none !important}.pe-auto{pointer-events:auto !important}.rounded{border-radius:.25rem !important}.rounded-0{border-radius:0 !important}.rounded-1{border-radius:.2rem !important}.rounded-2{border-radius:.25rem !important}.rounded-3{border-radius:.3rem !important}.rounded-circle{border-radius:50% !important}.rounded-pill{border-radius:50rem !important}.rounded-top{border-top-left-radius:.25rem !important;border-top-right-radius:.25rem !important}.rounded-end{border-top-right-radius:.25rem !important;border-bottom-right-radius:.25rem !important}.rounded-bottom{border-bottom-right-radius:.25rem !important;border-bottom-left-radius:.25rem !important}.rounded-start{border-bottom-left-radius:.25rem !important;border-top-left-radius:.25rem !important}.visible{visibility:visible !important}.invisible{visibility:hidden !important}@media(min-width: 576px){.float-sm-start{float:left !important}.float-sm-end{float:right !important}.float-sm-none{float:none !important}.d-sm-inline{display:inline !important}.d-sm-inline-block{display:inline-block !important}.d-sm-block{display:block !important}.d-sm-grid{display:grid !important}.d-sm-table{display:table !important}.d-sm-table-row{display:table-row !important}.d-sm-table-cell{display:table-cell !important}.d-sm-flex{display:flex !important}.d-sm-inline-flex{display:inline-flex !important}.d-sm-none{display:none !important}.flex-sm-fill{flex:1 1 auto !important}.flex-sm-row{flex-direction:row !important}.flex-sm-column{flex-direction:column !important}.flex-sm-row-reverse{flex-direction:row-reverse 
!important}.flex-sm-column-reverse{flex-direction:column-reverse !important}.flex-sm-grow-0{flex-grow:0 !important}.flex-sm-grow-1{flex-grow:1 !important}.flex-sm-shrink-0{flex-shrink:0 !important}.flex-sm-shrink-1{flex-shrink:1 !important}.flex-sm-wrap{flex-wrap:wrap !important}.flex-sm-nowrap{flex-wrap:nowrap !important}.flex-sm-wrap-reverse{flex-wrap:wrap-reverse !important}.gap-sm-0{gap:0 !important}.gap-sm-1{gap:.25rem !important}.gap-sm-2{gap:.5rem !important}.gap-sm-3{gap:1rem !important}.gap-sm-4{gap:1.5rem !important}.gap-sm-5{gap:3rem !important}.justify-content-sm-start{justify-content:flex-start !important}.justify-content-sm-end{justify-content:flex-end !important}.justify-content-sm-center{justify-content:center !important}.justify-content-sm-between{justify-content:space-between !important}.justify-content-sm-around{justify-content:space-around !important}.justify-content-sm-evenly{justify-content:space-evenly !important}.align-items-sm-start{align-items:flex-start !important}.align-items-sm-end{align-items:flex-end !important}.align-items-sm-center{align-items:center !important}.align-items-sm-baseline{align-items:baseline !important}.align-items-sm-stretch{align-items:stretch !important}.align-content-sm-start{align-content:flex-start !important}.align-content-sm-end{align-content:flex-end !important}.align-content-sm-center{align-content:center !important}.align-content-sm-between{align-content:space-between !important}.align-content-sm-around{align-content:space-around !important}.align-content-sm-stretch{align-content:stretch !important}.align-self-sm-auto{align-self:auto !important}.align-self-sm-start{align-self:flex-start !important}.align-self-sm-end{align-self:flex-end !important}.align-self-sm-center{align-self:center !important}.align-self-sm-baseline{align-self:baseline !important}.align-self-sm-stretch{align-self:stretch !important}.order-sm-first{order:-1 !important}.order-sm-0{order:0 !important}.order-sm-1{order:1 !important}.order-sm-2{order:2 !important}.order-sm-3{order:3 !important}.order-sm-4{order:4 !important}.order-sm-5{order:5 !important}.order-sm-last{order:6 !important}.m-sm-0{margin:0 !important}.m-sm-1{margin:.25rem !important}.m-sm-2{margin:.5rem !important}.m-sm-3{margin:1rem !important}.m-sm-4{margin:1.5rem !important}.m-sm-5{margin:3rem !important}.m-sm-auto{margin:auto !important}.mx-sm-0{margin-right:0 !important;margin-left:0 !important}.mx-sm-1{margin-right:.25rem !important;margin-left:.25rem !important}.mx-sm-2{margin-right:.5rem !important;margin-left:.5rem !important}.mx-sm-3{margin-right:1rem !important;margin-left:1rem !important}.mx-sm-4{margin-right:1.5rem !important;margin-left:1.5rem !important}.mx-sm-5{margin-right:3rem !important;margin-left:3rem !important}.mx-sm-auto{margin-right:auto !important;margin-left:auto !important}.my-sm-0{margin-top:0 !important;margin-bottom:0 !important}.my-sm-1{margin-top:.25rem !important;margin-bottom:.25rem !important}.my-sm-2{margin-top:.5rem !important;margin-bottom:.5rem !important}.my-sm-3{margin-top:1rem !important;margin-bottom:1rem !important}.my-sm-4{margin-top:1.5rem !important;margin-bottom:1.5rem !important}.my-sm-5{margin-top:3rem !important;margin-bottom:3rem !important}.my-sm-auto{margin-top:auto !important;margin-bottom:auto !important}.mt-sm-0{margin-top:0 !important}.mt-sm-1{margin-top:.25rem !important}.mt-sm-2{margin-top:.5rem !important}.mt-sm-3{margin-top:1rem !important}.mt-sm-4{margin-top:1.5rem !important}.mt-sm-5{margin-top:3rem !important}.mt-sm-auto{margin-top:auto 
!important}.me-sm-0{margin-right:0 !important}.me-sm-1{margin-right:.25rem !important}.me-sm-2{margin-right:.5rem !important}.me-sm-3{margin-right:1rem !important}.me-sm-4{margin-right:1.5rem !important}.me-sm-5{margin-right:3rem !important}.me-sm-auto{margin-right:auto !important}.mb-sm-0{margin-bottom:0 !important}.mb-sm-1{margin-bottom:.25rem !important}.mb-sm-2{margin-bottom:.5rem !important}.mb-sm-3{margin-bottom:1rem !important}.mb-sm-4{margin-bottom:1.5rem !important}.mb-sm-5{margin-bottom:3rem !important}.mb-sm-auto{margin-bottom:auto !important}.ms-sm-0{margin-left:0 !important}.ms-sm-1{margin-left:.25rem !important}.ms-sm-2{margin-left:.5rem !important}.ms-sm-3{margin-left:1rem !important}.ms-sm-4{margin-left:1.5rem !important}.ms-sm-5{margin-left:3rem !important}.ms-sm-auto{margin-left:auto !important}.p-sm-0{padding:0 !important}.p-sm-1{padding:.25rem !important}.p-sm-2{padding:.5rem !important}.p-sm-3{padding:1rem !important}.p-sm-4{padding:1.5rem !important}.p-sm-5{padding:3rem !important}.px-sm-0{padding-right:0 !important;padding-left:0 !important}.px-sm-1{padding-right:.25rem !important;padding-left:.25rem !important}.px-sm-2{padding-right:.5rem !important;padding-left:.5rem !important}.px-sm-3{padding-right:1rem !important;padding-left:1rem !important}.px-sm-4{padding-right:1.5rem !important;padding-left:1.5rem !important}.px-sm-5{padding-right:3rem !important;padding-left:3rem !important}.py-sm-0{padding-top:0 !important;padding-bottom:0 !important}.py-sm-1{padding-top:.25rem !important;padding-bottom:.25rem !important}.py-sm-2{padding-top:.5rem !important;padding-bottom:.5rem !important}.py-sm-3{padding-top:1rem !important;padding-bottom:1rem !important}.py-sm-4{padding-top:1.5rem !important;padding-bottom:1.5rem !important}.py-sm-5{padding-top:3rem !important;padding-bottom:3rem !important}.pt-sm-0{padding-top:0 !important}.pt-sm-1{padding-top:.25rem !important}.pt-sm-2{padding-top:.5rem !important}.pt-sm-3{padding-top:1rem !important}.pt-sm-4{padding-top:1.5rem !important}.pt-sm-5{padding-top:3rem !important}.pe-sm-0{padding-right:0 !important}.pe-sm-1{padding-right:.25rem !important}.pe-sm-2{padding-right:.5rem !important}.pe-sm-3{padding-right:1rem !important}.pe-sm-4{padding-right:1.5rem !important}.pe-sm-5{padding-right:3rem !important}.pb-sm-0{padding-bottom:0 !important}.pb-sm-1{padding-bottom:.25rem !important}.pb-sm-2{padding-bottom:.5rem !important}.pb-sm-3{padding-bottom:1rem !important}.pb-sm-4{padding-bottom:1.5rem !important}.pb-sm-5{padding-bottom:3rem !important}.ps-sm-0{padding-left:0 !important}.ps-sm-1{padding-left:.25rem !important}.ps-sm-2{padding-left:.5rem !important}.ps-sm-3{padding-left:1rem !important}.ps-sm-4{padding-left:1.5rem !important}.ps-sm-5{padding-left:3rem !important}.text-sm-start{text-align:left !important}.text-sm-end{text-align:right !important}.text-sm-center{text-align:center !important}}@media(min-width: 768px){.float-md-start{float:left !important}.float-md-end{float:right !important}.float-md-none{float:none !important}.d-md-inline{display:inline !important}.d-md-inline-block{display:inline-block !important}.d-md-block{display:block !important}.d-md-grid{display:grid !important}.d-md-table{display:table !important}.d-md-table-row{display:table-row !important}.d-md-table-cell{display:table-cell !important}.d-md-flex{display:flex !important}.d-md-inline-flex{display:inline-flex !important}.d-md-none{display:none !important}.flex-md-fill{flex:1 1 auto !important}.flex-md-row{flex-direction:row 
!important}.flex-md-column{flex-direction:column !important}.flex-md-row-reverse{flex-direction:row-reverse !important}.flex-md-column-reverse{flex-direction:column-reverse !important}.flex-md-grow-0{flex-grow:0 !important}.flex-md-grow-1{flex-grow:1 !important}.flex-md-shrink-0{flex-shrink:0 !important}.flex-md-shrink-1{flex-shrink:1 !important}.flex-md-wrap{flex-wrap:wrap !important}.flex-md-nowrap{flex-wrap:nowrap !important}.flex-md-wrap-reverse{flex-wrap:wrap-reverse !important}.gap-md-0{gap:0 !important}.gap-md-1{gap:.25rem !important}.gap-md-2{gap:.5rem !important}.gap-md-3{gap:1rem !important}.gap-md-4{gap:1.5rem !important}.gap-md-5{gap:3rem !important}.justify-content-md-start{justify-content:flex-start !important}.justify-content-md-end{justify-content:flex-end !important}.justify-content-md-center{justify-content:center !important}.justify-content-md-between{justify-content:space-between !important}.justify-content-md-around{justify-content:space-around !important}.justify-content-md-evenly{justify-content:space-evenly !important}.align-items-md-start{align-items:flex-start !important}.align-items-md-end{align-items:flex-end !important}.align-items-md-center{align-items:center !important}.align-items-md-baseline{align-items:baseline !important}.align-items-md-stretch{align-items:stretch !important}.align-content-md-start{align-content:flex-start !important}.align-content-md-end{align-content:flex-end !important}.align-content-md-center{align-content:center !important}.align-content-md-between{align-content:space-between !important}.align-content-md-around{align-content:space-around !important}.align-content-md-stretch{align-content:stretch !important}.align-self-md-auto{align-self:auto !important}.align-self-md-start{align-self:flex-start !important}.align-self-md-end{align-self:flex-end !important}.align-self-md-center{align-self:center !important}.align-self-md-baseline{align-self:baseline !important}.align-self-md-stretch{align-self:stretch !important}.order-md-first{order:-1 !important}.order-md-0{order:0 !important}.order-md-1{order:1 !important}.order-md-2{order:2 !important}.order-md-3{order:3 !important}.order-md-4{order:4 !important}.order-md-5{order:5 !important}.order-md-last{order:6 !important}.m-md-0{margin:0 !important}.m-md-1{margin:.25rem !important}.m-md-2{margin:.5rem !important}.m-md-3{margin:1rem !important}.m-md-4{margin:1.5rem !important}.m-md-5{margin:3rem !important}.m-md-auto{margin:auto !important}.mx-md-0{margin-right:0 !important;margin-left:0 !important}.mx-md-1{margin-right:.25rem !important;margin-left:.25rem !important}.mx-md-2{margin-right:.5rem !important;margin-left:.5rem !important}.mx-md-3{margin-right:1rem !important;margin-left:1rem !important}.mx-md-4{margin-right:1.5rem !important;margin-left:1.5rem !important}.mx-md-5{margin-right:3rem !important;margin-left:3rem !important}.mx-md-auto{margin-right:auto !important;margin-left:auto !important}.my-md-0{margin-top:0 !important;margin-bottom:0 !important}.my-md-1{margin-top:.25rem !important;margin-bottom:.25rem !important}.my-md-2{margin-top:.5rem !important;margin-bottom:.5rem !important}.my-md-3{margin-top:1rem !important;margin-bottom:1rem !important}.my-md-4{margin-top:1.5rem !important;margin-bottom:1.5rem !important}.my-md-5{margin-top:3rem !important;margin-bottom:3rem !important}.my-md-auto{margin-top:auto !important;margin-bottom:auto !important}.mt-md-0{margin-top:0 !important}.mt-md-1{margin-top:.25rem !important}.mt-md-2{margin-top:.5rem !important}.mt-md-3{margin-top:1rem 
!important}.mt-md-4{margin-top:1.5rem !important}.mt-md-5{margin-top:3rem !important}.mt-md-auto{margin-top:auto !important}.me-md-0{margin-right:0 !important}.me-md-1{margin-right:.25rem !important}.me-md-2{margin-right:.5rem !important}.me-md-3{margin-right:1rem !important}.me-md-4{margin-right:1.5rem !important}.me-md-5{margin-right:3rem !important}.me-md-auto{margin-right:auto !important}.mb-md-0{margin-bottom:0 !important}.mb-md-1{margin-bottom:.25rem !important}.mb-md-2{margin-bottom:.5rem !important}.mb-md-3{margin-bottom:1rem !important}.mb-md-4{margin-bottom:1.5rem !important}.mb-md-5{margin-bottom:3rem !important}.mb-md-auto{margin-bottom:auto !important}.ms-md-0{margin-left:0 !important}.ms-md-1{margin-left:.25rem !important}.ms-md-2{margin-left:.5rem !important}.ms-md-3{margin-left:1rem !important}.ms-md-4{margin-left:1.5rem !important}.ms-md-5{margin-left:3rem !important}.ms-md-auto{margin-left:auto !important}.p-md-0{padding:0 !important}.p-md-1{padding:.25rem !important}.p-md-2{padding:.5rem !important}.p-md-3{padding:1rem !important}.p-md-4{padding:1.5rem !important}.p-md-5{padding:3rem !important}.px-md-0{padding-right:0 !important;padding-left:0 !important}.px-md-1{padding-right:.25rem !important;padding-left:.25rem !important}.px-md-2{padding-right:.5rem !important;padding-left:.5rem !important}.px-md-3{padding-right:1rem !important;padding-left:1rem !important}.px-md-4{padding-right:1.5rem !important;padding-left:1.5rem !important}.px-md-5{padding-right:3rem !important;padding-left:3rem !important}.py-md-0{padding-top:0 !important;padding-bottom:0 !important}.py-md-1{padding-top:.25rem !important;padding-bottom:.25rem !important}.py-md-2{padding-top:.5rem !important;padding-bottom:.5rem !important}.py-md-3{padding-top:1rem !important;padding-bottom:1rem !important}.py-md-4{padding-top:1.5rem !important;padding-bottom:1.5rem !important}.py-md-5{padding-top:3rem !important;padding-bottom:3rem !important}.pt-md-0{padding-top:0 !important}.pt-md-1{padding-top:.25rem !important}.pt-md-2{padding-top:.5rem !important}.pt-md-3{padding-top:1rem !important}.pt-md-4{padding-top:1.5rem !important}.pt-md-5{padding-top:3rem !important}.pe-md-0{padding-right:0 !important}.pe-md-1{padding-right:.25rem !important}.pe-md-2{padding-right:.5rem !important}.pe-md-3{padding-right:1rem !important}.pe-md-4{padding-right:1.5rem !important}.pe-md-5{padding-right:3rem !important}.pb-md-0{padding-bottom:0 !important}.pb-md-1{padding-bottom:.25rem !important}.pb-md-2{padding-bottom:.5rem !important}.pb-md-3{padding-bottom:1rem !important}.pb-md-4{padding-bottom:1.5rem !important}.pb-md-5{padding-bottom:3rem !important}.ps-md-0{padding-left:0 !important}.ps-md-1{padding-left:.25rem !important}.ps-md-2{padding-left:.5rem !important}.ps-md-3{padding-left:1rem !important}.ps-md-4{padding-left:1.5rem !important}.ps-md-5{padding-left:3rem !important}.text-md-start{text-align:left !important}.text-md-end{text-align:right !important}.text-md-center{text-align:center !important}}@media(min-width: 992px){.float-lg-start{float:left !important}.float-lg-end{float:right !important}.float-lg-none{float:none !important}.d-lg-inline{display:inline !important}.d-lg-inline-block{display:inline-block !important}.d-lg-block{display:block !important}.d-lg-grid{display:grid !important}.d-lg-table{display:table !important}.d-lg-table-row{display:table-row !important}.d-lg-table-cell{display:table-cell !important}.d-lg-flex{display:flex !important}.d-lg-inline-flex{display:inline-flex !important}.d-lg-none{display:none 
!important}.flex-lg-fill{flex:1 1 auto !important}.flex-lg-row{flex-direction:row !important}.flex-lg-column{flex-direction:column !important}.flex-lg-row-reverse{flex-direction:row-reverse !important}.flex-lg-column-reverse{flex-direction:column-reverse !important}.flex-lg-grow-0{flex-grow:0 !important}.flex-lg-grow-1{flex-grow:1 !important}.flex-lg-shrink-0{flex-shrink:0 !important}.flex-lg-shrink-1{flex-shrink:1 !important}.flex-lg-wrap{flex-wrap:wrap !important}.flex-lg-nowrap{flex-wrap:nowrap !important}.flex-lg-wrap-reverse{flex-wrap:wrap-reverse !important}.gap-lg-0{gap:0 !important}.gap-lg-1{gap:.25rem !important}.gap-lg-2{gap:.5rem !important}.gap-lg-3{gap:1rem !important}.gap-lg-4{gap:1.5rem !important}.gap-lg-5{gap:3rem !important}.justify-content-lg-start{justify-content:flex-start !important}.justify-content-lg-end{justify-content:flex-end !important}.justify-content-lg-center{justify-content:center !important}.justify-content-lg-between{justify-content:space-between !important}.justify-content-lg-around{justify-content:space-around !important}.justify-content-lg-evenly{justify-content:space-evenly !important}.align-items-lg-start{align-items:flex-start !important}.align-items-lg-end{align-items:flex-end !important}.align-items-lg-center{align-items:center !important}.align-items-lg-baseline{align-items:baseline !important}.align-items-lg-stretch{align-items:stretch !important}.align-content-lg-start{align-content:flex-start !important}.align-content-lg-end{align-content:flex-end !important}.align-content-lg-center{align-content:center !important}.align-content-lg-between{align-content:space-between !important}.align-content-lg-around{align-content:space-around !important}.align-content-lg-stretch{align-content:stretch !important}.align-self-lg-auto{align-self:auto !important}.align-self-lg-start{align-self:flex-start !important}.align-self-lg-end{align-self:flex-end !important}.align-self-lg-center{align-self:center !important}.align-self-lg-baseline{align-self:baseline !important}.align-self-lg-stretch{align-self:stretch !important}.order-lg-first{order:-1 !important}.order-lg-0{order:0 !important}.order-lg-1{order:1 !important}.order-lg-2{order:2 !important}.order-lg-3{order:3 !important}.order-lg-4{order:4 !important}.order-lg-5{order:5 !important}.order-lg-last{order:6 !important}.m-lg-0{margin:0 !important}.m-lg-1{margin:.25rem !important}.m-lg-2{margin:.5rem !important}.m-lg-3{margin:1rem !important}.m-lg-4{margin:1.5rem !important}.m-lg-5{margin:3rem !important}.m-lg-auto{margin:auto !important}.mx-lg-0{margin-right:0 !important;margin-left:0 !important}.mx-lg-1{margin-right:.25rem !important;margin-left:.25rem !important}.mx-lg-2{margin-right:.5rem !important;margin-left:.5rem !important}.mx-lg-3{margin-right:1rem !important;margin-left:1rem !important}.mx-lg-4{margin-right:1.5rem !important;margin-left:1.5rem !important}.mx-lg-5{margin-right:3rem !important;margin-left:3rem !important}.mx-lg-auto{margin-right:auto !important;margin-left:auto !important}.my-lg-0{margin-top:0 !important;margin-bottom:0 !important}.my-lg-1{margin-top:.25rem !important;margin-bottom:.25rem !important}.my-lg-2{margin-top:.5rem !important;margin-bottom:.5rem !important}.my-lg-3{margin-top:1rem !important;margin-bottom:1rem !important}.my-lg-4{margin-top:1.5rem !important;margin-bottom:1.5rem !important}.my-lg-5{margin-top:3rem !important;margin-bottom:3rem !important}.my-lg-auto{margin-top:auto !important;margin-bottom:auto !important}.mt-lg-0{margin-top:0 
!important}.mt-lg-1{margin-top:.25rem !important}.mt-lg-2{margin-top:.5rem !important}.mt-lg-3{margin-top:1rem !important}.mt-lg-4{margin-top:1.5rem !important}.mt-lg-5{margin-top:3rem !important}.mt-lg-auto{margin-top:auto !important}.me-lg-0{margin-right:0 !important}.me-lg-1{margin-right:.25rem !important}.me-lg-2{margin-right:.5rem !important}.me-lg-3{margin-right:1rem !important}.me-lg-4{margin-right:1.5rem !important}.me-lg-5{margin-right:3rem !important}.me-lg-auto{margin-right:auto !important}.mb-lg-0{margin-bottom:0 !important}.mb-lg-1{margin-bottom:.25rem !important}.mb-lg-2{margin-bottom:.5rem !important}.mb-lg-3{margin-bottom:1rem !important}.mb-lg-4{margin-bottom:1.5rem !important}.mb-lg-5{margin-bottom:3rem !important}.mb-lg-auto{margin-bottom:auto !important}.ms-lg-0{margin-left:0 !important}.ms-lg-1{margin-left:.25rem !important}.ms-lg-2{margin-left:.5rem !important}.ms-lg-3{margin-left:1rem !important}.ms-lg-4{margin-left:1.5rem !important}.ms-lg-5{margin-left:3rem !important}.ms-lg-auto{margin-left:auto !important}.p-lg-0{padding:0 !important}.p-lg-1{padding:.25rem !important}.p-lg-2{padding:.5rem !important}.p-lg-3{padding:1rem !important}.p-lg-4{padding:1.5rem !important}.p-lg-5{padding:3rem !important}.px-lg-0{padding-right:0 !important;padding-left:0 !important}.px-lg-1{padding-right:.25rem !important;padding-left:.25rem !important}.px-lg-2{padding-right:.5rem !important;padding-left:.5rem !important}.px-lg-3{padding-right:1rem !important;padding-left:1rem !important}.px-lg-4{padding-right:1.5rem !important;padding-left:1.5rem !important}.px-lg-5{padding-right:3rem !important;padding-left:3rem !important}.py-lg-0{padding-top:0 !important;padding-bottom:0 !important}.py-lg-1{padding-top:.25rem !important;padding-bottom:.25rem !important}.py-lg-2{padding-top:.5rem !important;padding-bottom:.5rem !important}.py-lg-3{padding-top:1rem !important;padding-bottom:1rem !important}.py-lg-4{padding-top:1.5rem !important;padding-bottom:1.5rem !important}.py-lg-5{padding-top:3rem !important;padding-bottom:3rem !important}.pt-lg-0{padding-top:0 !important}.pt-lg-1{padding-top:.25rem !important}.pt-lg-2{padding-top:.5rem !important}.pt-lg-3{padding-top:1rem !important}.pt-lg-4{padding-top:1.5rem !important}.pt-lg-5{padding-top:3rem !important}.pe-lg-0{padding-right:0 !important}.pe-lg-1{padding-right:.25rem !important}.pe-lg-2{padding-right:.5rem !important}.pe-lg-3{padding-right:1rem !important}.pe-lg-4{padding-right:1.5rem !important}.pe-lg-5{padding-right:3rem !important}.pb-lg-0{padding-bottom:0 !important}.pb-lg-1{padding-bottom:.25rem !important}.pb-lg-2{padding-bottom:.5rem !important}.pb-lg-3{padding-bottom:1rem !important}.pb-lg-4{padding-bottom:1.5rem !important}.pb-lg-5{padding-bottom:3rem !important}.ps-lg-0{padding-left:0 !important}.ps-lg-1{padding-left:.25rem !important}.ps-lg-2{padding-left:.5rem !important}.ps-lg-3{padding-left:1rem !important}.ps-lg-4{padding-left:1.5rem !important}.ps-lg-5{padding-left:3rem !important}.text-lg-start{text-align:left !important}.text-lg-end{text-align:right !important}.text-lg-center{text-align:center !important}}@media(min-width: 1200px){.float-xl-start{float:left !important}.float-xl-end{float:right !important}.float-xl-none{float:none !important}.d-xl-inline{display:inline !important}.d-xl-inline-block{display:inline-block !important}.d-xl-block{display:block !important}.d-xl-grid{display:grid !important}.d-xl-table{display:table !important}.d-xl-table-row{display:table-row !important}.d-xl-table-cell{display:table-cell 
!important}.d-xl-flex{display:flex !important}.d-xl-inline-flex{display:inline-flex !important}.d-xl-none{display:none !important}.flex-xl-fill{flex:1 1 auto !important}.flex-xl-row{flex-direction:row !important}.flex-xl-column{flex-direction:column !important}.flex-xl-row-reverse{flex-direction:row-reverse !important}.flex-xl-column-reverse{flex-direction:column-reverse !important}.flex-xl-grow-0{flex-grow:0 !important}.flex-xl-grow-1{flex-grow:1 !important}.flex-xl-shrink-0{flex-shrink:0 !important}.flex-xl-shrink-1{flex-shrink:1 !important}.flex-xl-wrap{flex-wrap:wrap !important}.flex-xl-nowrap{flex-wrap:nowrap !important}.flex-xl-wrap-reverse{flex-wrap:wrap-reverse !important}.gap-xl-0{gap:0 !important}.gap-xl-1{gap:.25rem !important}.gap-xl-2{gap:.5rem !important}.gap-xl-3{gap:1rem !important}.gap-xl-4{gap:1.5rem !important}.gap-xl-5{gap:3rem !important}.justify-content-xl-start{justify-content:flex-start !important}.justify-content-xl-end{justify-content:flex-end !important}.justify-content-xl-center{justify-content:center !important}.justify-content-xl-between{justify-content:space-between !important}.justify-content-xl-around{justify-content:space-around !important}.justify-content-xl-evenly{justify-content:space-evenly !important}.align-items-xl-start{align-items:flex-start !important}.align-items-xl-end{align-items:flex-end !important}.align-items-xl-center{align-items:center !important}.align-items-xl-baseline{align-items:baseline !important}.align-items-xl-stretch{align-items:stretch !important}.align-content-xl-start{align-content:flex-start !important}.align-content-xl-end{align-content:flex-end !important}.align-content-xl-center{align-content:center !important}.align-content-xl-between{align-content:space-between !important}.align-content-xl-around{align-content:space-around !important}.align-content-xl-stretch{align-content:stretch !important}.align-self-xl-auto{align-self:auto !important}.align-self-xl-start{align-self:flex-start !important}.align-self-xl-end{align-self:flex-end !important}.align-self-xl-center{align-self:center !important}.align-self-xl-baseline{align-self:baseline !important}.align-self-xl-stretch{align-self:stretch !important}.order-xl-first{order:-1 !important}.order-xl-0{order:0 !important}.order-xl-1{order:1 !important}.order-xl-2{order:2 !important}.order-xl-3{order:3 !important}.order-xl-4{order:4 !important}.order-xl-5{order:5 !important}.order-xl-last{order:6 !important}.m-xl-0{margin:0 !important}.m-xl-1{margin:.25rem !important}.m-xl-2{margin:.5rem !important}.m-xl-3{margin:1rem !important}.m-xl-4{margin:1.5rem !important}.m-xl-5{margin:3rem !important}.m-xl-auto{margin:auto !important}.mx-xl-0{margin-right:0 !important;margin-left:0 !important}.mx-xl-1{margin-right:.25rem !important;margin-left:.25rem !important}.mx-xl-2{margin-right:.5rem !important;margin-left:.5rem !important}.mx-xl-3{margin-right:1rem !important;margin-left:1rem !important}.mx-xl-4{margin-right:1.5rem !important;margin-left:1.5rem !important}.mx-xl-5{margin-right:3rem !important;margin-left:3rem !important}.mx-xl-auto{margin-right:auto !important;margin-left:auto !important}.my-xl-0{margin-top:0 !important;margin-bottom:0 !important}.my-xl-1{margin-top:.25rem !important;margin-bottom:.25rem !important}.my-xl-2{margin-top:.5rem !important;margin-bottom:.5rem !important}.my-xl-3{margin-top:1rem !important;margin-bottom:1rem !important}.my-xl-4{margin-top:1.5rem !important;margin-bottom:1.5rem !important}.my-xl-5{margin-top:3rem !important;margin-bottom:3rem 
!important}.my-xl-auto{margin-top:auto !important;margin-bottom:auto !important}.mt-xl-0{margin-top:0 !important}.mt-xl-1{margin-top:.25rem !important}.mt-xl-2{margin-top:.5rem !important}.mt-xl-3{margin-top:1rem !important}.mt-xl-4{margin-top:1.5rem !important}.mt-xl-5{margin-top:3rem !important}.mt-xl-auto{margin-top:auto !important}.me-xl-0{margin-right:0 !important}.me-xl-1{margin-right:.25rem !important}.me-xl-2{margin-right:.5rem !important}.me-xl-3{margin-right:1rem !important}.me-xl-4{margin-right:1.5rem !important}.me-xl-5{margin-right:3rem !important}.me-xl-auto{margin-right:auto !important}.mb-xl-0{margin-bottom:0 !important}.mb-xl-1{margin-bottom:.25rem !important}.mb-xl-2{margin-bottom:.5rem !important}.mb-xl-3{margin-bottom:1rem !important}.mb-xl-4{margin-bottom:1.5rem !important}.mb-xl-5{margin-bottom:3rem !important}.mb-xl-auto{margin-bottom:auto !important}.ms-xl-0{margin-left:0 !important}.ms-xl-1{margin-left:.25rem !important}.ms-xl-2{margin-left:.5rem !important}.ms-xl-3{margin-left:1rem !important}.ms-xl-4{margin-left:1.5rem !important}.ms-xl-5{margin-left:3rem !important}.ms-xl-auto{margin-left:auto !important}.p-xl-0{padding:0 !important}.p-xl-1{padding:.25rem !important}.p-xl-2{padding:.5rem !important}.p-xl-3{padding:1rem !important}.p-xl-4{padding:1.5rem !important}.p-xl-5{padding:3rem !important}.px-xl-0{padding-right:0 !important;padding-left:0 !important}.px-xl-1{padding-right:.25rem !important;padding-left:.25rem !important}.px-xl-2{padding-right:.5rem !important;padding-left:.5rem !important}.px-xl-3{padding-right:1rem !important;padding-left:1rem !important}.px-xl-4{padding-right:1.5rem !important;padding-left:1.5rem !important}.px-xl-5{padding-right:3rem !important;padding-left:3rem !important}.py-xl-0{padding-top:0 !important;padding-bottom:0 !important}.py-xl-1{padding-top:.25rem !important;padding-bottom:.25rem !important}.py-xl-2{padding-top:.5rem !important;padding-bottom:.5rem !important}.py-xl-3{padding-top:1rem !important;padding-bottom:1rem !important}.py-xl-4{padding-top:1.5rem !important;padding-bottom:1.5rem !important}.py-xl-5{padding-top:3rem !important;padding-bottom:3rem !important}.pt-xl-0{padding-top:0 !important}.pt-xl-1{padding-top:.25rem !important}.pt-xl-2{padding-top:.5rem !important}.pt-xl-3{padding-top:1rem !important}.pt-xl-4{padding-top:1.5rem !important}.pt-xl-5{padding-top:3rem !important}.pe-xl-0{padding-right:0 !important}.pe-xl-1{padding-right:.25rem !important}.pe-xl-2{padding-right:.5rem !important}.pe-xl-3{padding-right:1rem !important}.pe-xl-4{padding-right:1.5rem !important}.pe-xl-5{padding-right:3rem !important}.pb-xl-0{padding-bottom:0 !important}.pb-xl-1{padding-bottom:.25rem !important}.pb-xl-2{padding-bottom:.5rem !important}.pb-xl-3{padding-bottom:1rem !important}.pb-xl-4{padding-bottom:1.5rem !important}.pb-xl-5{padding-bottom:3rem !important}.ps-xl-0{padding-left:0 !important}.ps-xl-1{padding-left:.25rem !important}.ps-xl-2{padding-left:.5rem !important}.ps-xl-3{padding-left:1rem !important}.ps-xl-4{padding-left:1.5rem !important}.ps-xl-5{padding-left:3rem !important}.text-xl-start{text-align:left !important}.text-xl-end{text-align:right !important}.text-xl-center{text-align:center !important}}@media(min-width: 1400px){.float-xxl-start{float:left !important}.float-xxl-end{float:right !important}.float-xxl-none{float:none !important}.d-xxl-inline{display:inline !important}.d-xxl-inline-block{display:inline-block !important}.d-xxl-block{display:block !important}.d-xxl-grid{display:grid 
!important}.d-xxl-table{display:table !important}.d-xxl-table-row{display:table-row !important}.d-xxl-table-cell{display:table-cell !important}.d-xxl-flex{display:flex !important}.d-xxl-inline-flex{display:inline-flex !important}.d-xxl-none{display:none !important}.flex-xxl-fill{flex:1 1 auto !important}.flex-xxl-row{flex-direction:row !important}.flex-xxl-column{flex-direction:column !important}.flex-xxl-row-reverse{flex-direction:row-reverse !important}.flex-xxl-column-reverse{flex-direction:column-reverse !important}.flex-xxl-grow-0{flex-grow:0 !important}.flex-xxl-grow-1{flex-grow:1 !important}.flex-xxl-shrink-0{flex-shrink:0 !important}.flex-xxl-shrink-1{flex-shrink:1 !important}.flex-xxl-wrap{flex-wrap:wrap !important}.flex-xxl-nowrap{flex-wrap:nowrap !important}.flex-xxl-wrap-reverse{flex-wrap:wrap-reverse !important}.gap-xxl-0{gap:0 !important}.gap-xxl-1{gap:.25rem !important}.gap-xxl-2{gap:.5rem !important}.gap-xxl-3{gap:1rem !important}.gap-xxl-4{gap:1.5rem !important}.gap-xxl-5{gap:3rem !important}.justify-content-xxl-start{justify-content:flex-start !important}.justify-content-xxl-end{justify-content:flex-end !important}.justify-content-xxl-center{justify-content:center !important}.justify-content-xxl-between{justify-content:space-between !important}.justify-content-xxl-around{justify-content:space-around !important}.justify-content-xxl-evenly{justify-content:space-evenly !important}.align-items-xxl-start{align-items:flex-start !important}.align-items-xxl-end{align-items:flex-end !important}.align-items-xxl-center{align-items:center !important}.align-items-xxl-baseline{align-items:baseline !important}.align-items-xxl-stretch{align-items:stretch !important}.align-content-xxl-start{align-content:flex-start !important}.align-content-xxl-end{align-content:flex-end !important}.align-content-xxl-center{align-content:center !important}.align-content-xxl-between{align-content:space-between !important}.align-content-xxl-around{align-content:space-around !important}.align-content-xxl-stretch{align-content:stretch !important}.align-self-xxl-auto{align-self:auto !important}.align-self-xxl-start{align-self:flex-start !important}.align-self-xxl-end{align-self:flex-end !important}.align-self-xxl-center{align-self:center !important}.align-self-xxl-baseline{align-self:baseline !important}.align-self-xxl-stretch{align-self:stretch !important}.order-xxl-first{order:-1 !important}.order-xxl-0{order:0 !important}.order-xxl-1{order:1 !important}.order-xxl-2{order:2 !important}.order-xxl-3{order:3 !important}.order-xxl-4{order:4 !important}.order-xxl-5{order:5 !important}.order-xxl-last{order:6 !important}.m-xxl-0{margin:0 !important}.m-xxl-1{margin:.25rem !important}.m-xxl-2{margin:.5rem !important}.m-xxl-3{margin:1rem !important}.m-xxl-4{margin:1.5rem !important}.m-xxl-5{margin:3rem !important}.m-xxl-auto{margin:auto !important}.mx-xxl-0{margin-right:0 !important;margin-left:0 !important}.mx-xxl-1{margin-right:.25rem !important;margin-left:.25rem !important}.mx-xxl-2{margin-right:.5rem !important;margin-left:.5rem !important}.mx-xxl-3{margin-right:1rem !important;margin-left:1rem !important}.mx-xxl-4{margin-right:1.5rem !important;margin-left:1.5rem !important}.mx-xxl-5{margin-right:3rem !important;margin-left:3rem !important}.mx-xxl-auto{margin-right:auto !important;margin-left:auto !important}.my-xxl-0{margin-top:0 !important;margin-bottom:0 !important}.my-xxl-1{margin-top:.25rem !important;margin-bottom:.25rem !important}.my-xxl-2{margin-top:.5rem !important;margin-bottom:.5rem 
!important}.my-xxl-3{margin-top:1rem !important;margin-bottom:1rem !important}.my-xxl-4{margin-top:1.5rem !important;margin-bottom:1.5rem !important}.my-xxl-5{margin-top:3rem !important;margin-bottom:3rem !important}.my-xxl-auto{margin-top:auto !important;margin-bottom:auto !important}.mt-xxl-0{margin-top:0 !important}.mt-xxl-1{margin-top:.25rem !important}.mt-xxl-2{margin-top:.5rem !important}.mt-xxl-3{margin-top:1rem !important}.mt-xxl-4{margin-top:1.5rem !important}.mt-xxl-5{margin-top:3rem !important}.mt-xxl-auto{margin-top:auto !important}.me-xxl-0{margin-right:0 !important}.me-xxl-1{margin-right:.25rem !important}.me-xxl-2{margin-right:.5rem !important}.me-xxl-3{margin-right:1rem !important}.me-xxl-4{margin-right:1.5rem !important}.me-xxl-5{margin-right:3rem !important}.me-xxl-auto{margin-right:auto !important}.mb-xxl-0{margin-bottom:0 !important}.mb-xxl-1{margin-bottom:.25rem !important}.mb-xxl-2{margin-bottom:.5rem !important}.mb-xxl-3{margin-bottom:1rem !important}.mb-xxl-4{margin-bottom:1.5rem !important}.mb-xxl-5{margin-bottom:3rem !important}.mb-xxl-auto{margin-bottom:auto !important}.ms-xxl-0{margin-left:0 !important}.ms-xxl-1{margin-left:.25rem !important}.ms-xxl-2{margin-left:.5rem !important}.ms-xxl-3{margin-left:1rem !important}.ms-xxl-4{margin-left:1.5rem !important}.ms-xxl-5{margin-left:3rem !important}.ms-xxl-auto{margin-left:auto !important}.p-xxl-0{padding:0 !important}.p-xxl-1{padding:.25rem !important}.p-xxl-2{padding:.5rem !important}.p-xxl-3{padding:1rem !important}.p-xxl-4{padding:1.5rem !important}.p-xxl-5{padding:3rem !important}.px-xxl-0{padding-right:0 !important;padding-left:0 !important}.px-xxl-1{padding-right:.25rem !important;padding-left:.25rem !important}.px-xxl-2{padding-right:.5rem !important;padding-left:.5rem !important}.px-xxl-3{padding-right:1rem !important;padding-left:1rem !important}.px-xxl-4{padding-right:1.5rem !important;padding-left:1.5rem !important}.px-xxl-5{padding-right:3rem !important;padding-left:3rem !important}.py-xxl-0{padding-top:0 !important;padding-bottom:0 !important}.py-xxl-1{padding-top:.25rem !important;padding-bottom:.25rem !important}.py-xxl-2{padding-top:.5rem !important;padding-bottom:.5rem !important}.py-xxl-3{padding-top:1rem !important;padding-bottom:1rem !important}.py-xxl-4{padding-top:1.5rem !important;padding-bottom:1.5rem !important}.py-xxl-5{padding-top:3rem !important;padding-bottom:3rem !important}.pt-xxl-0{padding-top:0 !important}.pt-xxl-1{padding-top:.25rem !important}.pt-xxl-2{padding-top:.5rem !important}.pt-xxl-3{padding-top:1rem !important}.pt-xxl-4{padding-top:1.5rem !important}.pt-xxl-5{padding-top:3rem !important}.pe-xxl-0{padding-right:0 !important}.pe-xxl-1{padding-right:.25rem !important}.pe-xxl-2{padding-right:.5rem !important}.pe-xxl-3{padding-right:1rem !important}.pe-xxl-4{padding-right:1.5rem !important}.pe-xxl-5{padding-right:3rem !important}.pb-xxl-0{padding-bottom:0 !important}.pb-xxl-1{padding-bottom:.25rem !important}.pb-xxl-2{padding-bottom:.5rem !important}.pb-xxl-3{padding-bottom:1rem !important}.pb-xxl-4{padding-bottom:1.5rem !important}.pb-xxl-5{padding-bottom:3rem !important}.ps-xxl-0{padding-left:0 !important}.ps-xxl-1{padding-left:.25rem !important}.ps-xxl-2{padding-left:.5rem !important}.ps-xxl-3{padding-left:1rem !important}.ps-xxl-4{padding-left:1.5rem !important}.ps-xxl-5{padding-left:3rem !important}.text-xxl-start{text-align:left !important}.text-xxl-end{text-align:right !important}.text-xxl-center{text-align:center 
!important}}.bg-default{color:#000}.bg-primary{color:#fff}.bg-secondary{color:#fff}.bg-success{color:#fff}.bg-info{color:#000}.bg-warning{color:#000}.bg-danger{color:#fff}.bg-light{color:#000}.bg-dark{color:#fff}@media(min-width: 1200px){.fs-1{font-size:2.2rem !important}.fs-2{font-size:1.75rem !important}.fs-3{font-size:1.5rem !important}}@media print{.d-print-inline{display:inline !important}.d-print-inline-block{display:inline-block !important}.d-print-block{display:block !important}.d-print-grid{display:grid !important}.d-print-table{display:table !important}.d-print-table-row{display:table-row !important}.d-print-table-cell{display:table-cell !important}.d-print-flex{display:flex !important}.d-print-inline-flex{display:inline-flex !important}.d-print-none{display:none !important}}.tippy-box[data-theme~=quarto]{background-color:#fff;color:#212529;border-radius:.25rem;border:solid 1px #dee2e6;font-size:.875rem}.tippy-box[data-theme~=quarto] .tippy-arrow{color:#dee2e6}.tippy-box[data-placement^=bottom]>.tippy-arrow{top:-1px}.tippy-box[data-placement^=bottom]>.tippy-content{padding:.75em 1em;z-index:1}.top-right{position:absolute;top:1em;right:1em}.hidden{display:none !important}.quarto-layout-panel{margin-bottom:1em}.quarto-layout-panel>figure{width:100%}.quarto-layout-panel>figure>figcaption,.quarto-layout-panel>.panel-caption{margin-top:10pt}.quarto-layout-panel>.table-caption{margin-top:0px}.table-caption p{margin-bottom:.5em}.quarto-layout-row{display:flex;flex-direction:row;align-items:flex-start}.quarto-layout-valign-top{align-items:flex-start}.quarto-layout-valign-bottom{align-items:flex-end}.quarto-layout-valign-center{align-items:center}.quarto-layout-cell{position:relative;margin-right:20px}.quarto-layout-cell:last-child{margin-right:0}.quarto-layout-cell figure,.quarto-layout-cell>p{margin:.2em}.quarto-layout-cell img{max-width:100%}.quarto-layout-cell .html-widget{width:100% !important}.quarto-layout-cell div figure p{margin:0}.quarto-layout-cell figure{display:inline-block;margin-inline-start:0;margin-inline-end:0}.quarto-layout-cell table{display:inline-table}.quarto-layout-cell-subref figcaption,figure .quarto-layout-row figure figcaption{text-align:center;font-style:italic}.quarto-figure{position:relative;margin-bottom:1em}.quarto-figure>figure{width:100%;margin-bottom:0}.quarto-figure-left>figure>p{text-align:left}.quarto-figure-center>figure>p{text-align:center}.quarto-figure-right>figure>p{text-align:right}figure>p:empty{display:none}figure>p:first-child{margin-top:0;margin-bottom:0}figure>figcaption{margin-top:.5em}div[id^=tbl-]{position:relative}.quarto-figure>.anchorjs-link,div[id^=tbl-]>.anchorjs-link{position:absolute;top:0;right:0}.quarto-figure:hover>.anchorjs-link,div[id^=tbl-]:hover>.anchorjs-link,h2:hover>.anchorjs-link,.h2:hover>.anchorjs-link,h3:hover>.anchorjs-link,.h3:hover>.anchorjs-link,h4:hover>.anchorjs-link,.h4:hover>.anchorjs-link,h5:hover>.anchorjs-link,.h5:hover>.anchorjs-link,h6:hover>.anchorjs-link,.h6:hover>.anchorjs-link,.reveal-anchorjs-link>.anchorjs-link{opacity:1}#title-block-header{margin-block-end:1rem;position:relative;margin-top:-1px}#title-block-header .abstract{margin-block-start:1rem}#title-block-header .abstract .abstract-title{font-weight:600}#title-block-header a{text-decoration:none}#title-block-header .author,#title-block-header .date,#title-block-header .doi{margin-block-end:.2rem}#title-block-header .quarto-title-block>div{display:flex}#title-block-header .quarto-title-block>div>h1,#title-block-header 
.quarto-title-block>div>.h1{flex-grow:1}#title-block-header .quarto-title-block>div>button{flex-shrink:0;height:2.25rem;margin-top:0}@media(min-width: 992px){#title-block-header .quarto-title-block>div>button{margin-top:5px}}tr.header>th>p:last-of-type{margin-bottom:0px}table,.table{caption-side:top;margin-bottom:1.5rem}caption,.table-caption{text-align:center}.utterances{max-width:none;margin-left:-8px}iframe{margin-bottom:1em}details{margin-bottom:1em}details[show]{margin-bottom:0}details>summary{color:#6c757d}details>summary>p:only-child{display:inline}pre.sourceCode,code.sourceCode{position:relative}code{white-space:pre}@media print{code{white-space:pre-wrap}}pre>code{display:block}pre>code.sourceCode{white-space:pre}pre>code.sourceCode>span>a:first-child::before{text-decoration:none}pre.code-overflow-wrap>code.sourceCode{white-space:pre-wrap}pre.code-overflow-scroll>code.sourceCode{white-space:pre}code a:any-link{color:inherit;text-decoration:none}code a:hover{color:inherit;text-decoration:underline}ul.task-list{padding-left:1em}[data-tippy-root]{display:inline-block}.tippy-content .footnote-back{display:none}.quarto-embedded-source-code{display:none}.quarto-unresolved-ref{font-weight:600}.quarto-cover-image{max-width:35%;float:right;margin-left:30px}.cell-output-display .widget-subarea{margin-bottom:1em}.cell-output-display:not(.no-overflow-x){overflow-x:auto}.panel-input{margin-bottom:1em}.panel-input>div,.panel-input>div>div{display:inline-block;vertical-align:top;padding-right:12px}.panel-input>p:last-child{margin-bottom:0}.layout-sidebar{margin-bottom:1em}.layout-sidebar .tab-content{border:none}.tab-content>.page-columns.active{display:grid}div.sourceCode>iframe{width:100%;height:300px;margin-bottom:-0.5em}div.ansi-escaped-output{font-family:monospace;display:block}/*! 
+* +* ansi colors from IPython notebook's +* +*/.ansi-black-fg{color:#3e424d}.ansi-black-bg{background-color:#3e424d}.ansi-black-intense-fg{color:#282c36}.ansi-black-intense-bg{background-color:#282c36}.ansi-red-fg{color:#e75c58}.ansi-red-bg{background-color:#e75c58}.ansi-red-intense-fg{color:#b22b31}.ansi-red-intense-bg{background-color:#b22b31}.ansi-green-fg{color:#00a250}.ansi-green-bg{background-color:#00a250}.ansi-green-intense-fg{color:#007427}.ansi-green-intense-bg{background-color:#007427}.ansi-yellow-fg{color:#ddb62b}.ansi-yellow-bg{background-color:#ddb62b}.ansi-yellow-intense-fg{color:#b27d12}.ansi-yellow-intense-bg{background-color:#b27d12}.ansi-blue-fg{color:#208ffb}.ansi-blue-bg{background-color:#208ffb}.ansi-blue-intense-fg{color:#0065ca}.ansi-blue-intense-bg{background-color:#0065ca}.ansi-magenta-fg{color:#d160c4}.ansi-magenta-bg{background-color:#d160c4}.ansi-magenta-intense-fg{color:#a03196}.ansi-magenta-intense-bg{background-color:#a03196}.ansi-cyan-fg{color:#60c6c8}.ansi-cyan-bg{background-color:#60c6c8}.ansi-cyan-intense-fg{color:#258f8f}.ansi-cyan-intense-bg{background-color:#258f8f}.ansi-white-fg{color:#c5c1b4}.ansi-white-bg{background-color:#c5c1b4}.ansi-white-intense-fg{color:#a1a6b2}.ansi-white-intense-bg{background-color:#a1a6b2}.ansi-default-inverse-fg{color:#fff}.ansi-default-inverse-bg{background-color:#000}.ansi-bold{font-weight:bold}.ansi-underline{text-decoration:underline}:root{--quarto-body-bg: #ffffff;--quarto-body-color: #212529;--quarto-text-muted: #6c757d;--quarto-border-color: #dee2e6;--quarto-border-width: 1px;--quarto-border-radius: 0.25rem}.code-copy-button{position:absolute;top:0;right:0;border:0;margin-top:5px;margin-right:5px;background-color:transparent}.code-copy-button:focus{outline:none}pre.sourceCode:hover>.code-copy-button>.bi::before{display:inline-block;height:1rem;width:1rem;content:"";vertical-align:-0.125em;background-image:url('data:image/svg+xml,');background-repeat:no-repeat;background-size:1rem 1rem}pre.sourceCode:hover>.code-copy-button-checked>.bi::before{background-image:url('data:image/svg+xml,')}pre.sourceCode:hover>.code-copy-button:hover>.bi::before{background-image:url('data:image/svg+xml,')}pre.sourceCode:hover>.code-copy-button-checked:hover>.bi::before{background-image:url('data:image/svg+xml,')}main ol ol,main ul ul,main ol ul,main ul ol{margin-bottom:1em}body{margin:0}main.page-columns>header>h1.title,main.page-columns>header>.title.h1{margin-bottom:0}@media(min-width: 992px){body .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset] 35px [body-start-outset] 35px [body-start] 1.5em [body-content-start] minmax(500px, calc(850px - 3em)) [body-content-end] 1.5em [body-end] 35px [body-end-outset] minmax(75px, 145px) [page-end-inset] 35px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.fullcontent:not(.floating):not(.docked) .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset] 35px [body-start-outset] 35px [body-start] 1.5em [body-content-start] minmax(500px, calc(850px - 3em)) [body-content-end] 1.5em [body-end] 35px [body-end-outset] 35px [page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.slimcontent:not(.floating):not(.docked) .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset] 35px [body-start-outset] 35px [body-start] 1.5em [body-content-start] 
minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(0px, 200px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.listing:not(.floating):not(.docked) .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start] minmax(50px, 100px) [page-start-inset] 50px [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(500px, calc(1200px - 3em)) [body-content-end] 3em [body-end] 50px [body-end-outset] minmax(0px, 250px) [page-end-inset] 50px [page-end] 1fr [screen-end-inset] 1.5em [screen-end]}body:not(.floating):not(.docked) .page-columns.toc-left{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] 35px [page-start-inset] minmax(0px, 175px) [body-start-outset] 35px [body-start] 1.5em [body-content-start] minmax(450px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(0px, 200px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body:not(.floating):not(.docked) .page-columns.toc-left .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] 35px [page-start-inset] minmax(0px, 175px) [body-start-outset] 35px [body-start] 1.5em [body-content-start] minmax(450px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(0px, 200px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] minmax(25px, 50px) [page-start-inset] minmax(50px, 150px) [body-start-outset] minmax(25px, 50px) [body-start] 1.5em [body-content-start] minmax(500px, calc(800px - 3em)) [body-content-end] 1.5em [body-end] minmax(25px, 50px) [body-end-outset] minmax(50px, 150px) [page-end-inset] minmax(25px, 50px) [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start] minmax(50px, 100px) [page-start-inset] 50px [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(500px, calc( 1000px - 3em )) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(0px, 100px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked.fullcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start] minmax(50px, 100px) [page-start-inset] 50px [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(500px, calc( 1000px - 3em )) [body-content-end] 1.5em [body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating.fullcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] 50px [page-start-inset] minmax(50px, 150px) [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(500px, calc(800px - 3em)) [body-content-end] 1.5em [body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked.slimcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start] minmax(50px, 100px) [page-start-inset] 50px [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(450px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px 
[body-end-outset] minmax(0px, 200px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked.listing .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start] minmax(50px, 100px) [page-start-inset] 50px [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(500px, calc( 1000px - 3em )) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(0px, 100px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating.slimcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] 50px [page-start-inset] minmax(50px, 150px) [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(450px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(50px, 150px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating.listing .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] minmax(25px, 50px) [page-start-inset] minmax(50px, 150px) [body-start-outset] minmax(25px, 50px) [body-start] 1.5em [body-content-start] minmax(500px, calc(800px - 3em)) [body-content-end] 1.5em [body-end] minmax(25px, 50px) [body-end-outset] minmax(50px, 150px) [page-end-inset] minmax(25px, 50px) [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}}@media(max-width: 991.98px){body .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset] 5fr [body-start] 1.5em [body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 35px [body-end-outset] minmax(75px, 145px) [page-end-inset] 35px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.fullcontent:not(.floating):not(.docked) .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset] 5fr [body-start] 1.5em [body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.slimcontent:not(.floating):not(.docked) .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset] 5fr [body-start] 1.5em [body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 35px [body-end-outset] minmax(75px, 145px) [page-end-inset] 35px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.listing:not(.floating):not(.docked) .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset] 5fr [body-start] 1.5em [body-content-start] minmax(500px, calc(1200px - 3em)) [body-content-end body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body:not(.floating):not(.docked) .page-columns.toc-left{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] 35px [page-start-inset] minmax(0px, 145px) [body-start-outset] 35px [body-start] 1.5em [body-content-start] minmax(450px, calc(750px - 3em)) [body-content-end] 1.5em [body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body:not(.floating):not(.docked) .page-columns.toc-left 
.page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] 35px [page-start-inset] minmax(0px, 145px) [body-start-outset] 35px [body-start] 1.5em [body-content-start] minmax(450px, calc(750px - 3em)) [body-content-end] 1.5em [body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset body-start-outset body-start] 1em [body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(75px, 150px) [page-end-inset] 25px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset body-start body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(25px, 50px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked.fullcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset body-start body-content-start] minmax(500px, calc( 1000px - 3em )) [body-content-end] 1.5em [body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating.fullcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset body-start-outset body-start] 1em [body-content-start] minmax(500px, calc(800px - 3em)) [body-content-end] 1.5em [body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked.slimcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset body-start body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 35px [body-end-outset] minmax(75px, 145px) [page-end-inset] 35px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked.listing .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset body-start body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(25px, 50px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating.slimcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset body-start-outset body-start] 1em [body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 35px [body-end-outset] minmax(75px, 145px) [page-end-inset] 35px [page-end] 4fr [screen-end-inset] 1.5em [screen-end]}body.floating.listing .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset body-start-outset body-start] 1em [body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(75px, 150px) [page-end-inset] 25px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}}@media(max-width: 767.98px){body .page-columns,body.fullcontent:not(.floating):not(.docked) .page-columns,body.slimcontent:not(.floating):not(.docked) 
.page-columns,body.docked .page-columns,body.docked.slimcontent .page-columns,body.docked.fullcontent .page-columns,body.floating .page-columns,body.floating.slimcontent .page-columns,body.floating.fullcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset body-start body-content-start] minmax(0px, 1fr) [body-content-end body-end body-end-outset page-end-inset page-end screen-end-inset] 1.5em [screen-end]}body:not(.floating):not(.docked) .page-columns.toc-left{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset body-start body-content-start] minmax(0px, 1fr) [body-content-end body-end body-end-outset page-end-inset page-end screen-end-inset] 1.5em [screen-end]}body:not(.floating):not(.docked) .page-columns.toc-left .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset body-start body-content-start] minmax(0px, 1fr) [body-content-end body-end body-end-outset page-end-inset page-end screen-end-inset] 1.5em [screen-end]}nav[role=doc-toc]{display:none}}body,.page-row-navigation{grid-template-rows:[page-top] max-content [contents-top] max-content [contents-bottom] max-content [page-bottom]}.page-rows-contents{grid-template-rows:[content-top] minmax(max-content, 1fr) [content-bottom] minmax(60px, max-content) [page-bottom]}.page-full{grid-column:screen-start/screen-end !important}.page-columns>*{grid-column:body-content-start/body-content-end}.page-columns.column-page>*{grid-column:page-start/page-end}.page-columns.column-page-left>*{grid-column:page-start/body-content-end}.page-columns.column-page-right>*{grid-column:body-content-start/page-end}.page-rows{grid-auto-rows:auto}.header{grid-column:screen-start/screen-end;grid-row:page-top/contents-top}#quarto-content{padding:0;grid-column:screen-start/screen-end;grid-row:contents-top/contents-bottom}body.floating .sidebar.sidebar-navigation{grid-column:page-start/body-start;grid-row:content-top/page-bottom}body.docked .sidebar.sidebar-navigation{grid-column:screen-start/body-start;grid-row:content-top/page-bottom}.sidebar.toc-left{grid-column:page-start/body-start;grid-row:content-top/page-bottom}.sidebar.margin-sidebar{grid-column:body-end/page-end;grid-row:content-top/page-bottom}.page-columns .content{grid-column:body-content-start/body-content-end;grid-row:content-top/content-bottom;align-content:flex-start}.page-columns .page-navigation{grid-column:body-content-start/body-content-end;grid-row:content-bottom/page-bottom}.page-columns .footer{grid-column:screen-start/screen-end;grid-row:contents-bottom/page-bottom}.page-columns .column-body{grid-column:body-content-start/body-content-end}.page-columns .column-body-fullbleed{grid-column:body-start/body-end}.page-columns .column-body-outset{grid-column:body-start-outset/body-end-outset;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-body-outset table{background:#fff}.page-columns .column-body-outset-left{grid-column:body-start-outset/body-content-end;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-body-outset-left table{background:#fff}.page-columns .column-body-outset-right{grid-column:body-content-start/body-end-outset;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-body-outset-right table{background:#fff}.page-columns 
.column-page{grid-column:page-start/page-end;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-page table{background:#fff}.page-columns .column-page-inset{grid-column:page-start-inset/page-end-inset;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-page-inset table{background:#fff}.page-columns .column-page-inset-left{grid-column:page-start-inset/body-content-end;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-page-inset-left table{background:#fff}.page-columns .column-page-inset-right{grid-column:body-content-start/page-end-inset;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-page-inset-right figcaption table{background:#fff}.page-columns .column-page-left{grid-column:page-start/body-content-end;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-page-left table{background:#fff}.page-columns .column-page-right{grid-column:body-content-start/page-end;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-page-right figcaption table{background:#fff}#quarto-content.page-columns #quarto-margin-sidebar,#quarto-content.page-columns #quarto-sidebar{z-index:1}@media(max-width: 991.98px){#quarto-content.page-columns #quarto-margin-sidebar.collapse,#quarto-content.page-columns #quarto-sidebar.collapse{z-index:1055}}#quarto-content.page-columns main.column-page,#quarto-content.page-columns main.column-page-right,#quarto-content.page-columns main.column-page-left{z-index:0}.page-columns .column-screen-inset{grid-column:screen-start-inset/screen-end-inset;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-screen-inset table{background:#fff}.page-columns .column-screen-inset-left{grid-column:screen-start-inset/body-content-end;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-screen-inset-left table{background:#fff}.page-columns .column-screen-inset-right{grid-column:body-content-start/screen-end-inset;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-screen-inset-right table{background:#fff}.page-columns .column-screen{grid-column:screen-start/screen-end;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-screen table{background:#fff}.page-columns .column-screen-left{grid-column:screen-start/body-content-end;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-screen-left table{background:#fff}.page-columns .column-screen-right{grid-column:body-content-start/screen-end;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-screen-right table{background:#fff}.page-columns .column-screen-inset-shaded{grid-column:screen-start/screen-end;padding:1em;background:#f8f9fa;z-index:998;transform:translate3d(0, 0, 0);margin-bottom:1em}.zindex-content{z-index:998;transform:translate3d(0, 0, 0)}.zindex-modal{z-index:1055;transform:translate3d(0, 0, 0)}.zindex-over-content{z-index:999;transform:translate3d(0, 0, 0)}img.img-fluid.column-screen,img.img-fluid.column-screen-inset-shaded,img.img-fluid.column-screen-inset,img.img-fluid.column-screen-inset-left,img.img-fluid.column-screen-inset-right,img.img-fluid.column-screen-left,img.img-fluid.column-screen-right{width:100%}@media(min-width: 992px){.margin-caption,div.aside,aside,.column-margin{grid-column:body-end/page-end !important;z-index:998}.column-sidebar{grid-column:page-start/body-start !important;z-index:998}.column-leftmargin{grid-column:screen-start-inset/body-start !important;z-index:998}.no-row-height{height:1em;overflow:visible}}@media(max-width: 
991.98px){.margin-caption,div.aside,aside,.column-margin{grid-column:body-end/page-end !important;z-index:998}.no-row-height{height:1em;overflow:visible}.page-columns.page-full{overflow:visible}.page-columns.toc-left .margin-caption,.page-columns.toc-left div.aside,.page-columns.toc-left aside,.page-columns.toc-left .column-margin{grid-column:body-content-start/body-content-end !important;z-index:998;transform:translate3d(0, 0, 0)}.page-columns.toc-left .no-row-height{height:initial;overflow:initial}}@media(max-width: 767.98px){.margin-caption,div.aside,aside,.column-margin{grid-column:body-content-start/body-content-end !important;z-index:998;transform:translate3d(0, 0, 0)}.no-row-height{height:initial;overflow:initial}#quarto-margin-sidebar{display:none}.hidden-sm{display:none}}.panel-grid{display:grid;grid-template-rows:repeat(1, 1fr);grid-template-columns:repeat(24, 1fr);gap:1em}.panel-grid .g-col-1{grid-column:auto/span 1}.panel-grid .g-col-2{grid-column:auto/span 2}.panel-grid .g-col-3{grid-column:auto/span 3}.panel-grid .g-col-4{grid-column:auto/span 4}.panel-grid .g-col-5{grid-column:auto/span 5}.panel-grid .g-col-6{grid-column:auto/span 6}.panel-grid .g-col-7{grid-column:auto/span 7}.panel-grid .g-col-8{grid-column:auto/span 8}.panel-grid .g-col-9{grid-column:auto/span 9}.panel-grid .g-col-10{grid-column:auto/span 10}.panel-grid .g-col-11{grid-column:auto/span 11}.panel-grid .g-col-12{grid-column:auto/span 12}.panel-grid .g-col-13{grid-column:auto/span 13}.panel-grid .g-col-14{grid-column:auto/span 14}.panel-grid .g-col-15{grid-column:auto/span 15}.panel-grid .g-col-16{grid-column:auto/span 16}.panel-grid .g-col-17{grid-column:auto/span 17}.panel-grid .g-col-18{grid-column:auto/span 18}.panel-grid .g-col-19{grid-column:auto/span 19}.panel-grid .g-col-20{grid-column:auto/span 20}.panel-grid .g-col-21{grid-column:auto/span 21}.panel-grid .g-col-22{grid-column:auto/span 22}.panel-grid .g-col-23{grid-column:auto/span 23}.panel-grid .g-col-24{grid-column:auto/span 24}.panel-grid .g-start-1{grid-column-start:1}.panel-grid .g-start-2{grid-column-start:2}.panel-grid .g-start-3{grid-column-start:3}.panel-grid .g-start-4{grid-column-start:4}.panel-grid .g-start-5{grid-column-start:5}.panel-grid .g-start-6{grid-column-start:6}.panel-grid .g-start-7{grid-column-start:7}.panel-grid .g-start-8{grid-column-start:8}.panel-grid .g-start-9{grid-column-start:9}.panel-grid .g-start-10{grid-column-start:10}.panel-grid .g-start-11{grid-column-start:11}.panel-grid .g-start-12{grid-column-start:12}.panel-grid .g-start-13{grid-column-start:13}.panel-grid .g-start-14{grid-column-start:14}.panel-grid .g-start-15{grid-column-start:15}.panel-grid .g-start-16{grid-column-start:16}.panel-grid .g-start-17{grid-column-start:17}.panel-grid .g-start-18{grid-column-start:18}.panel-grid .g-start-19{grid-column-start:19}.panel-grid .g-start-20{grid-column-start:20}.panel-grid .g-start-21{grid-column-start:21}.panel-grid .g-start-22{grid-column-start:22}.panel-grid .g-start-23{grid-column-start:23}@media(min-width: 576px){.panel-grid .g-col-sm-1{grid-column:auto/span 1}.panel-grid .g-col-sm-2{grid-column:auto/span 2}.panel-grid .g-col-sm-3{grid-column:auto/span 3}.panel-grid .g-col-sm-4{grid-column:auto/span 4}.panel-grid .g-col-sm-5{grid-column:auto/span 5}.panel-grid .g-col-sm-6{grid-column:auto/span 6}.panel-grid .g-col-sm-7{grid-column:auto/span 7}.panel-grid .g-col-sm-8{grid-column:auto/span 8}.panel-grid .g-col-sm-9{grid-column:auto/span 9}.panel-grid .g-col-sm-10{grid-column:auto/span 10}.panel-grid 
.g-col-sm-11{grid-column:auto/span 11}.panel-grid .g-col-sm-12{grid-column:auto/span 12}.panel-grid .g-col-sm-13{grid-column:auto/span 13}.panel-grid .g-col-sm-14{grid-column:auto/span 14}.panel-grid .g-col-sm-15{grid-column:auto/span 15}.panel-grid .g-col-sm-16{grid-column:auto/span 16}.panel-grid .g-col-sm-17{grid-column:auto/span 17}.panel-grid .g-col-sm-18{grid-column:auto/span 18}.panel-grid .g-col-sm-19{grid-column:auto/span 19}.panel-grid .g-col-sm-20{grid-column:auto/span 20}.panel-grid .g-col-sm-21{grid-column:auto/span 21}.panel-grid .g-col-sm-22{grid-column:auto/span 22}.panel-grid .g-col-sm-23{grid-column:auto/span 23}.panel-grid .g-col-sm-24{grid-column:auto/span 24}.panel-grid .g-start-sm-1{grid-column-start:1}.panel-grid .g-start-sm-2{grid-column-start:2}.panel-grid .g-start-sm-3{grid-column-start:3}.panel-grid .g-start-sm-4{grid-column-start:4}.panel-grid .g-start-sm-5{grid-column-start:5}.panel-grid .g-start-sm-6{grid-column-start:6}.panel-grid .g-start-sm-7{grid-column-start:7}.panel-grid .g-start-sm-8{grid-column-start:8}.panel-grid .g-start-sm-9{grid-column-start:9}.panel-grid .g-start-sm-10{grid-column-start:10}.panel-grid .g-start-sm-11{grid-column-start:11}.panel-grid .g-start-sm-12{grid-column-start:12}.panel-grid .g-start-sm-13{grid-column-start:13}.panel-grid .g-start-sm-14{grid-column-start:14}.panel-grid .g-start-sm-15{grid-column-start:15}.panel-grid .g-start-sm-16{grid-column-start:16}.panel-grid .g-start-sm-17{grid-column-start:17}.panel-grid .g-start-sm-18{grid-column-start:18}.panel-grid .g-start-sm-19{grid-column-start:19}.panel-grid .g-start-sm-20{grid-column-start:20}.panel-grid .g-start-sm-21{grid-column-start:21}.panel-grid .g-start-sm-22{grid-column-start:22}.panel-grid .g-start-sm-23{grid-column-start:23}}@media(min-width: 768px){.panel-grid .g-col-md-1{grid-column:auto/span 1}.panel-grid .g-col-md-2{grid-column:auto/span 2}.panel-grid .g-col-md-3{grid-column:auto/span 3}.panel-grid .g-col-md-4{grid-column:auto/span 4}.panel-grid .g-col-md-5{grid-column:auto/span 5}.panel-grid .g-col-md-6{grid-column:auto/span 6}.panel-grid .g-col-md-7{grid-column:auto/span 7}.panel-grid .g-col-md-8{grid-column:auto/span 8}.panel-grid .g-col-md-9{grid-column:auto/span 9}.panel-grid .g-col-md-10{grid-column:auto/span 10}.panel-grid .g-col-md-11{grid-column:auto/span 11}.panel-grid .g-col-md-12{grid-column:auto/span 12}.panel-grid .g-col-md-13{grid-column:auto/span 13}.panel-grid .g-col-md-14{grid-column:auto/span 14}.panel-grid .g-col-md-15{grid-column:auto/span 15}.panel-grid .g-col-md-16{grid-column:auto/span 16}.panel-grid .g-col-md-17{grid-column:auto/span 17}.panel-grid .g-col-md-18{grid-column:auto/span 18}.panel-grid .g-col-md-19{grid-column:auto/span 19}.panel-grid .g-col-md-20{grid-column:auto/span 20}.panel-grid .g-col-md-21{grid-column:auto/span 21}.panel-grid .g-col-md-22{grid-column:auto/span 22}.panel-grid .g-col-md-23{grid-column:auto/span 23}.panel-grid .g-col-md-24{grid-column:auto/span 24}.panel-grid .g-start-md-1{grid-column-start:1}.panel-grid .g-start-md-2{grid-column-start:2}.panel-grid .g-start-md-3{grid-column-start:3}.panel-grid .g-start-md-4{grid-column-start:4}.panel-grid .g-start-md-5{grid-column-start:5}.panel-grid .g-start-md-6{grid-column-start:6}.panel-grid .g-start-md-7{grid-column-start:7}.panel-grid .g-start-md-8{grid-column-start:8}.panel-grid .g-start-md-9{grid-column-start:9}.panel-grid .g-start-md-10{grid-column-start:10}.panel-grid .g-start-md-11{grid-column-start:11}.panel-grid .g-start-md-12{grid-column-start:12}.panel-grid 
.g-start-md-13{grid-column-start:13}.panel-grid .g-start-md-14{grid-column-start:14}.panel-grid .g-start-md-15{grid-column-start:15}.panel-grid .g-start-md-16{grid-column-start:16}.panel-grid .g-start-md-17{grid-column-start:17}.panel-grid .g-start-md-18{grid-column-start:18}.panel-grid .g-start-md-19{grid-column-start:19}.panel-grid .g-start-md-20{grid-column-start:20}.panel-grid .g-start-md-21{grid-column-start:21}.panel-grid .g-start-md-22{grid-column-start:22}.panel-grid .g-start-md-23{grid-column-start:23}}@media(min-width: 992px){.panel-grid .g-col-lg-1{grid-column:auto/span 1}.panel-grid .g-col-lg-2{grid-column:auto/span 2}.panel-grid .g-col-lg-3{grid-column:auto/span 3}.panel-grid .g-col-lg-4{grid-column:auto/span 4}.panel-grid .g-col-lg-5{grid-column:auto/span 5}.panel-grid .g-col-lg-6{grid-column:auto/span 6}.panel-grid .g-col-lg-7{grid-column:auto/span 7}.panel-grid .g-col-lg-8{grid-column:auto/span 8}.panel-grid .g-col-lg-9{grid-column:auto/span 9}.panel-grid .g-col-lg-10{grid-column:auto/span 10}.panel-grid .g-col-lg-11{grid-column:auto/span 11}.panel-grid .g-col-lg-12{grid-column:auto/span 12}.panel-grid .g-col-lg-13{grid-column:auto/span 13}.panel-grid .g-col-lg-14{grid-column:auto/span 14}.panel-grid .g-col-lg-15{grid-column:auto/span 15}.panel-grid .g-col-lg-16{grid-column:auto/span 16}.panel-grid .g-col-lg-17{grid-column:auto/span 17}.panel-grid .g-col-lg-18{grid-column:auto/span 18}.panel-grid .g-col-lg-19{grid-column:auto/span 19}.panel-grid .g-col-lg-20{grid-column:auto/span 20}.panel-grid .g-col-lg-21{grid-column:auto/span 21}.panel-grid .g-col-lg-22{grid-column:auto/span 22}.panel-grid .g-col-lg-23{grid-column:auto/span 23}.panel-grid .g-col-lg-24{grid-column:auto/span 24}.panel-grid .g-start-lg-1{grid-column-start:1}.panel-grid .g-start-lg-2{grid-column-start:2}.panel-grid .g-start-lg-3{grid-column-start:3}.panel-grid .g-start-lg-4{grid-column-start:4}.panel-grid .g-start-lg-5{grid-column-start:5}.panel-grid .g-start-lg-6{grid-column-start:6}.panel-grid .g-start-lg-7{grid-column-start:7}.panel-grid .g-start-lg-8{grid-column-start:8}.panel-grid .g-start-lg-9{grid-column-start:9}.panel-grid .g-start-lg-10{grid-column-start:10}.panel-grid .g-start-lg-11{grid-column-start:11}.panel-grid .g-start-lg-12{grid-column-start:12}.panel-grid .g-start-lg-13{grid-column-start:13}.panel-grid .g-start-lg-14{grid-column-start:14}.panel-grid .g-start-lg-15{grid-column-start:15}.panel-grid .g-start-lg-16{grid-column-start:16}.panel-grid .g-start-lg-17{grid-column-start:17}.panel-grid .g-start-lg-18{grid-column-start:18}.panel-grid .g-start-lg-19{grid-column-start:19}.panel-grid .g-start-lg-20{grid-column-start:20}.panel-grid .g-start-lg-21{grid-column-start:21}.panel-grid .g-start-lg-22{grid-column-start:22}.panel-grid .g-start-lg-23{grid-column-start:23}}@media(min-width: 1200px){.panel-grid .g-col-xl-1{grid-column:auto/span 1}.panel-grid .g-col-xl-2{grid-column:auto/span 2}.panel-grid .g-col-xl-3{grid-column:auto/span 3}.panel-grid .g-col-xl-4{grid-column:auto/span 4}.panel-grid .g-col-xl-5{grid-column:auto/span 5}.panel-grid .g-col-xl-6{grid-column:auto/span 6}.panel-grid .g-col-xl-7{grid-column:auto/span 7}.panel-grid .g-col-xl-8{grid-column:auto/span 8}.panel-grid .g-col-xl-9{grid-column:auto/span 9}.panel-grid .g-col-xl-10{grid-column:auto/span 10}.panel-grid .g-col-xl-11{grid-column:auto/span 11}.panel-grid .g-col-xl-12{grid-column:auto/span 12}.panel-grid .g-col-xl-13{grid-column:auto/span 13}.panel-grid .g-col-xl-14{grid-column:auto/span 14}.panel-grid 
.g-col-xl-15{grid-column:auto/span 15}.panel-grid .g-col-xl-16{grid-column:auto/span 16}.panel-grid .g-col-xl-17{grid-column:auto/span 17}.panel-grid .g-col-xl-18{grid-column:auto/span 18}.panel-grid .g-col-xl-19{grid-column:auto/span 19}.panel-grid .g-col-xl-20{grid-column:auto/span 20}.panel-grid .g-col-xl-21{grid-column:auto/span 21}.panel-grid .g-col-xl-22{grid-column:auto/span 22}.panel-grid .g-col-xl-23{grid-column:auto/span 23}.panel-grid .g-col-xl-24{grid-column:auto/span 24}.panel-grid .g-start-xl-1{grid-column-start:1}.panel-grid .g-start-xl-2{grid-column-start:2}.panel-grid .g-start-xl-3{grid-column-start:3}.panel-grid .g-start-xl-4{grid-column-start:4}.panel-grid .g-start-xl-5{grid-column-start:5}.panel-grid .g-start-xl-6{grid-column-start:6}.panel-grid .g-start-xl-7{grid-column-start:7}.panel-grid .g-start-xl-8{grid-column-start:8}.panel-grid .g-start-xl-9{grid-column-start:9}.panel-grid .g-start-xl-10{grid-column-start:10}.panel-grid .g-start-xl-11{grid-column-start:11}.panel-grid .g-start-xl-12{grid-column-start:12}.panel-grid .g-start-xl-13{grid-column-start:13}.panel-grid .g-start-xl-14{grid-column-start:14}.panel-grid .g-start-xl-15{grid-column-start:15}.panel-grid .g-start-xl-16{grid-column-start:16}.panel-grid .g-start-xl-17{grid-column-start:17}.panel-grid .g-start-xl-18{grid-column-start:18}.panel-grid .g-start-xl-19{grid-column-start:19}.panel-grid .g-start-xl-20{grid-column-start:20}.panel-grid .g-start-xl-21{grid-column-start:21}.panel-grid .g-start-xl-22{grid-column-start:22}.panel-grid .g-start-xl-23{grid-column-start:23}}@media(min-width: 1400px){.panel-grid .g-col-xxl-1{grid-column:auto/span 1}.panel-grid .g-col-xxl-2{grid-column:auto/span 2}.panel-grid .g-col-xxl-3{grid-column:auto/span 3}.panel-grid .g-col-xxl-4{grid-column:auto/span 4}.panel-grid .g-col-xxl-5{grid-column:auto/span 5}.panel-grid .g-col-xxl-6{grid-column:auto/span 6}.panel-grid .g-col-xxl-7{grid-column:auto/span 7}.panel-grid .g-col-xxl-8{grid-column:auto/span 8}.panel-grid .g-col-xxl-9{grid-column:auto/span 9}.panel-grid .g-col-xxl-10{grid-column:auto/span 10}.panel-grid .g-col-xxl-11{grid-column:auto/span 11}.panel-grid .g-col-xxl-12{grid-column:auto/span 12}.panel-grid .g-col-xxl-13{grid-column:auto/span 13}.panel-grid .g-col-xxl-14{grid-column:auto/span 14}.panel-grid .g-col-xxl-15{grid-column:auto/span 15}.panel-grid .g-col-xxl-16{grid-column:auto/span 16}.panel-grid .g-col-xxl-17{grid-column:auto/span 17}.panel-grid .g-col-xxl-18{grid-column:auto/span 18}.panel-grid .g-col-xxl-19{grid-column:auto/span 19}.panel-grid .g-col-xxl-20{grid-column:auto/span 20}.panel-grid .g-col-xxl-21{grid-column:auto/span 21}.panel-grid .g-col-xxl-22{grid-column:auto/span 22}.panel-grid .g-col-xxl-23{grid-column:auto/span 23}.panel-grid .g-col-xxl-24{grid-column:auto/span 24}.panel-grid .g-start-xxl-1{grid-column-start:1}.panel-grid .g-start-xxl-2{grid-column-start:2}.panel-grid .g-start-xxl-3{grid-column-start:3}.panel-grid .g-start-xxl-4{grid-column-start:4}.panel-grid .g-start-xxl-5{grid-column-start:5}.panel-grid .g-start-xxl-6{grid-column-start:6}.panel-grid .g-start-xxl-7{grid-column-start:7}.panel-grid .g-start-xxl-8{grid-column-start:8}.panel-grid .g-start-xxl-9{grid-column-start:9}.panel-grid .g-start-xxl-10{grid-column-start:10}.panel-grid .g-start-xxl-11{grid-column-start:11}.panel-grid .g-start-xxl-12{grid-column-start:12}.panel-grid .g-start-xxl-13{grid-column-start:13}.panel-grid .g-start-xxl-14{grid-column-start:14}.panel-grid .g-start-xxl-15{grid-column-start:15}.panel-grid 
.g-start-xxl-16{grid-column-start:16}.panel-grid .g-start-xxl-17{grid-column-start:17}.panel-grid .g-start-xxl-18{grid-column-start:18}.panel-grid .g-start-xxl-19{grid-column-start:19}.panel-grid .g-start-xxl-20{grid-column-start:20}.panel-grid .g-start-xxl-21{grid-column-start:21}.panel-grid .g-start-xxl-22{grid-column-start:22}.panel-grid .g-start-xxl-23{grid-column-start:23}}main{margin-top:1em;margin-bottom:1em}h1,.h1,h2,.h2{margin-top:2rem;margin-bottom:1rem}h1.title,.title.h1{margin-top:0}h2,.h2{border-bottom:1px solid #dee2e6;padding-bottom:.5rem}h3,.h3,h4,.h4{margin-top:1.5rem}.header-section-number{color:#5a6570}.nav-link.active .header-section-number{color:inherit}mark,.mark{padding:0em}.panel-caption,caption,.figure-caption{font-size:1rem}.panel-caption,.figure-caption,figcaption{color:#5a6570}.table-caption,caption{color:#212529}.quarto-layout-cell[data-ref-parent] caption{color:#5a6570}.column-margin figcaption,.margin-caption,div.aside,aside,.column-margin{color:#5a6570;font-size:.825rem}.panel-caption.margin-caption{text-align:inherit}.column-margin.column-container p{margin-bottom:0}.column-margin.column-container>*:not(.collapse){padding-top:.5em;padding-bottom:.5em;display:block}.column-margin.column-container>*.collapse:not(.show){display:none}@media(min-width: 768px){.column-margin.column-container .callout-margin-content:first-child{margin-top:4.5em}.column-margin.column-container .callout-margin-content-simple:first-child{margin-top:3.5em}}.margin-caption>*{padding-top:.5em;padding-bottom:.5em}@media(max-width: 767.98px){.quarto-layout-row{flex-direction:column}}.tab-content{margin-top:0px;border-left:#dee2e6 1px solid;border-right:#dee2e6 1px solid;border-bottom:#dee2e6 1px solid;margin-left:0;padding:1em;margin-bottom:1em}@media(max-width: 767.98px){.layout-sidebar{margin-left:0;margin-right:0}}.panel-sidebar,.panel-sidebar .form-control,.panel-input,.panel-input .form-control,.selectize-dropdown{font-size:.9rem}.panel-sidebar .form-control,.panel-input .form-control{padding-top:.1rem}.tab-pane div.sourceCode{margin-top:0px}.tab-pane>p{padding-top:1em}.tab-content>.tab-pane:not(.active){display:none !important}div.sourceCode{background-color:rgba(233,236,239,.65);border:1px solid rgba(233,236,239,.65);border-radius:.25rem}pre.sourceCode{background-color:transparent}pre.sourceCode{border:none;font-size:.875em;overflow:visible !important;padding:.4em}.callout pre.sourceCode{padding-left:0}div.sourceCode{overflow-y:hidden}.callout div.sourceCode{margin-left:initial}.blockquote{font-size:inherit;padding-left:1rem;padding-right:1.5rem;color:#5a6570}.blockquote h1:first-child,.blockquote .h1:first-child,.blockquote h2:first-child,.blockquote .h2:first-child,.blockquote h3:first-child,.blockquote .h3:first-child,.blockquote h4:first-child,.blockquote .h4:first-child,.blockquote h5:first-child,.blockquote .h5:first-child{margin-top:0}pre{background-color:initial;padding:initial;border:initial}p code:not(.sourceCode),li code:not(.sourceCode){background-color:#f6f6f6;padding:.2em}nav p code:not(.sourceCode),nav li code:not(.sourceCode){background-color:transparent;padding:0}#quarto-embedded-source-code-modal>.modal-dialog{max-width:1000px;padding-left:1.75rem;padding-right:1.75rem}#quarto-embedded-source-code-modal>.modal-dialog>.modal-content>.modal-body{padding:0}#quarto-embedded-source-code-modal>.modal-dialog>.modal-content>.modal-body div.sourceCode{margin:0;padding:.2rem 
.2rem;border-radius:0px;border:none}#quarto-embedded-source-code-modal>.modal-dialog>.modal-content>.modal-header{padding:.7rem}.code-tools-button{font-size:1rem;padding:.15rem .15rem;margin-left:5px;color:#6c757d;background-color:transparent;transition:initial;cursor:pointer}.code-tools-button>.bi::before{display:inline-block;height:1rem;width:1rem;content:"";vertical-align:-0.125em;background-image:url('data:image/svg+xml,');background-repeat:no-repeat;background-size:1rem 1rem}.code-tools-button:hover>.bi::before{background-image:url('data:image/svg+xml,')}#quarto-embedded-source-code-modal .code-copy-button>.bi::before{background-image:url('data:image/svg+xml,')}#quarto-embedded-source-code-modal .code-copy-button-checked>.bi::before{background-image:url('data:image/svg+xml,')}.sidebar{will-change:top;transition:top 200ms linear;position:sticky;overflow-y:auto;padding-top:1.2em;max-height:100vh}.sidebar.toc-left,.sidebar.margin-sidebar{top:0px;padding-top:1em}.sidebar.toc-left>*,.sidebar.margin-sidebar>*{padding-top:.5em}.sidebar.quarto-banner-title-block-sidebar>*{padding-top:1.65em}.sidebar nav[role=doc-toc]>h2,.sidebar nav[role=doc-toc]>.h2{font-size:.875rem;font-weight:400;margin-bottom:.5rem;margin-top:.3rem;font-family:inherit;border-bottom:0;padding-bottom:0;padding-top:0px}.sidebar nav[role=doc-toc]>ul a{border-left:1px solid #e9ecef;padding-left:.6rem}.sidebar nav[role=doc-toc]>ul a:empty{display:none}.sidebar nav[role=doc-toc] ul{padding-left:0;list-style:none;font-size:.875rem;font-weight:300}.sidebar nav[role=doc-toc]>ul li a{line-height:1.1rem;padding-bottom:.2rem;padding-top:.2rem;color:inherit}.sidebar nav[role=doc-toc] ul>li>ul>li>a{padding-left:1.2em}.sidebar nav[role=doc-toc] ul>li>ul>li>ul>li>a{padding-left:2.4em}.sidebar nav[role=doc-toc] ul>li>ul>li>ul>li>ul>li>a{padding-left:3.6em}.sidebar nav[role=doc-toc] ul>li>ul>li>ul>li>ul>li>ul>li>a{padding-left:4.8em}.sidebar nav[role=doc-toc] ul>li>ul>li>ul>li>ul>li>ul>li>ul>li>a{padding-left:6em}.sidebar nav[role=doc-toc] ul>li>ul>li>a.active{border-left:1px solid #0d6efd;color:#0d6efd !important}.sidebar nav[role=doc-toc] ul>li>a.active{border-left:1px solid #0d6efd;color:#0d6efd !important}kbd,.kbd{color:#212529;background-color:#f8f9fa;border:1px solid;border-radius:5px;border-color:#dee2e6}div.hanging-indent{margin-left:1em;text-indent:-1em}.citation a,.footnote-ref{text-decoration:none}.footnotes ol{padding-left:1em}.tippy-content>*{margin-bottom:.7em}.tippy-content>*:last-child{margin-bottom:0}.table a{word-break:break-word}.table>:not(:first-child){border-top-width:1px;border-top-color:#dee2e6}.table>thead{border-bottom:1px solid currentColor}.table>tbody{border-top:1px solid #dee2e6}.callout{margin-top:1.25rem;margin-bottom:1.25rem;border-radius:.25rem}.callout.callout-style-simple{padding:.4em .7em;border-left:5px solid;border-right:1px solid #dee2e6;border-top:1px solid #dee2e6;border-bottom:1px solid #dee2e6}.callout.callout-style-default{border-left:5px solid;border-right:1px solid #dee2e6;border-top:1px solid #dee2e6;border-bottom:1px solid #dee2e6}.callout .callout-body-container{flex-grow:1}.callout.callout-style-simple .callout-body{font-size:.9rem;font-weight:400}.callout.callout-style-default .callout-body{font-size:.9rem;font-weight:400}.callout.callout-captioned .callout-body{margin-top:.2em}.callout:not(.no-icon).callout-captioned.callout-style-simple 
.callout-body{padding-left:1.6em}.callout.callout-captioned>.callout-header{padding-top:.2em;margin-bottom:-0.2em}.callout.callout-style-simple>div.callout-header{border-bottom:none;font-size:.9rem;font-weight:600;opacity:75%}.callout.callout-style-default>div.callout-header{border-bottom:none;font-weight:600;opacity:85%;font-size:.9rem;padding-left:.5em;padding-right:.5em}.callout.callout-style-default div.callout-body{padding-left:.5em;padding-right:.5em}.callout.callout-style-default div.callout-body>:first-child{margin-top:.5em}.callout>div.callout-header[data-bs-toggle=collapse]{cursor:pointer}.callout.callout-style-default .callout-header[aria-expanded=false],.callout.callout-style-default .callout-header[aria-expanded=true]{padding-top:0px;margin-bottom:0px;align-items:center}.callout.callout-captioned .callout-body>:last-child:not(.sourceCode),.callout.callout-captioned .callout-body>div>:last-child:not(.sourceCode){margin-bottom:.5rem}.callout:not(.callout-captioned) .callout-body>:first-child,.callout:not(.callout-captioned) .callout-body>div>:first-child{margin-top:.25rem}.callout:not(.callout-captioned) .callout-body>:last-child,.callout:not(.callout-captioned) .callout-body>div>:last-child{margin-bottom:.2rem}.callout.callout-style-simple .callout-icon::before,.callout.callout-style-simple .callout-toggle::before{height:1rem;width:1rem;display:inline-block;content:"";background-repeat:no-repeat;background-size:1rem 1rem}.callout.callout-style-default .callout-icon::before,.callout.callout-style-default .callout-toggle::before{height:.9rem;width:.9rem;display:inline-block;content:"";background-repeat:no-repeat;background-size:.9rem .9rem}.callout.callout-style-default .callout-toggle::before{margin-top:5px}.callout .callout-btn-toggle .callout-toggle::before{transition:transform .2s linear}.callout .callout-header[aria-expanded=false] .callout-toggle::before{transform:rotate(-90deg)}.callout .callout-header[aria-expanded=true] .callout-toggle::before{transform:none}.callout.callout-style-simple:not(.no-icon) div.callout-icon-container{padding-top:.2em;padding-right:.55em}.callout.callout-style-default:not(.no-icon) div.callout-icon-container{padding-top:.1em;padding-right:.35em}.callout.callout-style-default:not(.no-icon) div.callout-caption-container{margin-top:-1px}.callout.callout-style-default.callout-caution:not(.no-icon) div.callout-icon-container{padding-top:.3em;padding-right:.35em}.callout>.callout-body>.callout-icon-container>.no-icon,.callout>.callout-header>.callout-icon-container>.no-icon{display:none}div.callout.callout{border-left-color:#6c757d}div.callout.callout-style-default>.callout-header{background-color:#6c757d}div.callout-note.callout{border-left-color:#0d6efd}div.callout-note.callout-style-default>.callout-header{background-color:#e7f1ff}div.callout-note:not(.callout-captioned) .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-note.callout-captioned .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-note .callout-toggle::before{background-image:url('data:image/svg+xml,')}div.callout-tip.callout{border-left-color:#198754}div.callout-tip.callout-style-default>.callout-header{background-color:#e8f3ee}div.callout-tip:not(.callout-captioned) .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-tip.callout-captioned .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-tip 
.callout-toggle::before{background-image:url('data:image/svg+xml,')}div.callout-warning.callout{border-left-color:#ffc107}div.callout-warning.callout-style-default>.callout-header{background-color:#fff9e6}div.callout-warning:not(.callout-captioned) .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-warning.callout-captioned .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-warning .callout-toggle::before{background-image:url('data:image/svg+xml,')}div.callout-caution.callout{border-left-color:#fd7e14}div.callout-caution.callout-style-default>.callout-header{background-color:#fff2e8}div.callout-caution:not(.callout-captioned) .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-caution.callout-captioned .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-caution .callout-toggle::before{background-image:url('data:image/svg+xml,')}div.callout-important.callout{border-left-color:#dc3545}div.callout-important.callout-style-default>.callout-header{background-color:#fcebec}div.callout-important:not(.callout-captioned) .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-important.callout-captioned .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-important .callout-toggle::before{background-image:url('data:image/svg+xml,')}.quarto-toggle-container{display:flex}@media(min-width: 992px){.navbar .quarto-color-scheme-toggle{padding-left:.5rem;padding-right:.5rem}}@media(max-width: 767.98px){.navbar .quarto-color-scheme-toggle{padding-left:0;padding-right:0;padding-bottom:.5em}}.quarto-reader-toggle .bi::before,.quarto-color-scheme-toggle .bi::before{display:inline-block;height:1rem;width:1rem;content:"";background-repeat:no-repeat;background-size:1rem 1rem}.navbar-collapse .quarto-color-scheme-toggle{padding-left:.6rem;padding-right:0;margin-top:-12px}.sidebar-navigation{padding-left:20px}.sidebar-navigation .quarto-color-scheme-toggle .bi::before{padding-top:.2rem;margin-bottom:-0.2rem}.sidebar-tools-main .quarto-color-scheme-toggle .bi::before{padding-top:.2rem;margin-bottom:-0.2rem}.navbar .quarto-color-scheme-toggle .bi::before{padding-top:7px;margin-bottom:-7px;padding-left:2px;margin-right:2px}.navbar .quarto-color-scheme-toggle:not(.alternate) .bi::before{background-image:url('data:image/svg+xml,')}.navbar .quarto-color-scheme-toggle.alternate .bi::before{background-image:url('data:image/svg+xml,')}.sidebar-navigation .quarto-color-scheme-toggle:not(.alternate) .bi::before{background-image:url('data:image/svg+xml,')}.sidebar-navigation .quarto-color-scheme-toggle.alternate .bi::before{background-image:url('data:image/svg+xml,')}.quarto-sidebar-toggle{border-color:#dee2e6;border-bottom-left-radius:.25rem;border-bottom-right-radius:.25rem;border-style:solid;border-width:1px;overflow:hidden;border-top-width:0px;padding-top:0px !important}.quarto-sidebar-toggle-title{cursor:pointer;padding-bottom:2px;margin-left:.25em;text-align:center;font-weight:400;font-size:.775em}#quarto-content .quarto-sidebar-toggle{background:#fafafa}#quarto-content .quarto-sidebar-toggle-title{color:#212529}.quarto-sidebar-toggle-icon{color:#dee2e6;margin-right:.5em;float:right;transition:transform .2s ease}.quarto-sidebar-toggle-icon::before{padding-top:5px}.quarto-sidebar-toggle.expanded .quarto-sidebar-toggle-icon{transform:rotate(-180deg)}.quarto-sidebar-toggle.expanded .quarto-sidebar-toggle-title{border-bottom:solid #dee2e6 
1px}.quarto-sidebar-toggle-contents{background-color:#fff;padding-right:10px;padding-left:10px;margin-top:0px !important;transition:max-height .5s ease}.quarto-sidebar-toggle.expanded .quarto-sidebar-toggle-contents{padding-top:1em;padding-bottom:10px}.quarto-sidebar-toggle:not(.expanded) .quarto-sidebar-toggle-contents{padding-top:0px !important;padding-bottom:0px}nav[role=doc-toc]{z-index:1020}#quarto-sidebar>*,nav[role=doc-toc]>*{transition:opacity .1s ease,border .1s ease}#quarto-sidebar.slow>*,nav[role=doc-toc].slow>*{transition:opacity .4s ease,border .4s ease}.quarto-color-scheme-toggle:not(.alternate).top-right .bi::before{background-image:url('data:image/svg+xml,')}.quarto-color-scheme-toggle.alternate.top-right .bi::before{background-image:url('data:image/svg+xml,')}#quarto-appendix.default{border-top:1px solid #dee2e6}#quarto-appendix.default{background-color:#fff;padding-top:1.5em;margin-top:2em;z-index:998}#quarto-appendix.default .quarto-appendix-heading{margin-top:0;line-height:1.4em;font-weight:600;opacity:.9;border-bottom:none;margin-bottom:0}#quarto-appendix.default .footnotes ol,#quarto-appendix.default .footnotes ol li>p:last-of-type,#quarto-appendix.default .quarto-appendix-contents>p:last-of-type{margin-bottom:0}#quarto-appendix.default .quarto-appendix-secondary-label{margin-bottom:.4em}#quarto-appendix.default .quarto-appendix-bibtex{font-size:.7em;padding:1em;border:solid 1px #dee2e6;margin-bottom:1em}#quarto-appendix.default .quarto-appendix-bibtex code.sourceCode{white-space:pre-wrap}#quarto-appendix.default .quarto-appendix-citeas{font-size:.9em;padding:1em;border:solid 1px #dee2e6;margin-bottom:1em}#quarto-appendix.default .quarto-appendix-heading{font-size:1em !important}#quarto-appendix.default *[role=doc-endnotes]>ol,#quarto-appendix.default .quarto-appendix-contents>*:not(h2):not(.h2){font-size:.9em}#quarto-appendix.default section{padding-bottom:1.5em}#quarto-appendix.default section *[role=doc-endnotes],#quarto-appendix.default section>*:not(a){opacity:.9;word-wrap:break-word}.btn.btn-quarto,div.cell-output-display .btn-quarto{color:#fefefe;background-color:#6c757d;border-color:#6c757d}.btn.btn-quarto:hover,div.cell-output-display .btn-quarto:hover{color:#fefefe;background-color:#828a91;border-color:#7b838a}.btn-check:focus+.btn.btn-quarto,.btn.btn-quarto:focus,.btn-check:focus+div.cell-output-display .btn-quarto,div.cell-output-display .btn-quarto:focus{color:#fefefe;background-color:#828a91;border-color:#7b838a;box-shadow:0 0 0 .25rem rgba(130,138,144,.5)}.btn-check:checked+.btn.btn-quarto,.btn-check:active+.btn.btn-quarto,.btn.btn-quarto:active,.btn.btn-quarto.active,.show>.btn.btn-quarto.dropdown-toggle,.btn-check:checked+div.cell-output-display .btn-quarto,.btn-check:active+div.cell-output-display .btn-quarto,div.cell-output-display .btn-quarto:active,div.cell-output-display .btn-quarto.active,.show>div.cell-output-display .btn-quarto.dropdown-toggle{color:#000;background-color:#899197;border-color:#7b838a}.btn-check:checked+.btn.btn-quarto:focus,.btn-check:active+.btn.btn-quarto:focus,.btn.btn-quarto:active:focus,.btn.btn-quarto.active:focus,.show>.btn.btn-quarto.dropdown-toggle:focus,.btn-check:checked+div.cell-output-display .btn-quarto:focus,.btn-check:active+div.cell-output-display .btn-quarto:focus,div.cell-output-display .btn-quarto:active:focus,div.cell-output-display .btn-quarto.active:focus,.show>div.cell-output-display .btn-quarto.dropdown-toggle:focus{box-shadow:0 0 0 .25rem 
rgba(130,138,144,.5)}.btn.btn-quarto:disabled,.btn.btn-quarto.disabled,div.cell-output-display .btn-quarto:disabled,div.cell-output-display .btn-quarto.disabled{color:#fff;background-color:#6c757d;border-color:#6c757d}nav.quarto-secondary-nav.color-navbar{background-color:#517699;color:#fdfefe}nav.quarto-secondary-nav.color-navbar h1,nav.quarto-secondary-nav.color-navbar .h1,nav.quarto-secondary-nav.color-navbar .quarto-btn-toggle{color:#fdfefe}@media(max-width: 991.98px){body.nav-sidebar .quarto-title-banner,body.nav-sidebar .quarto-title-banner{display:none}}p.subtitle{margin-top:.25em;margin-bottom:.5em}code a:any-link{color:inherit;text-decoration-color:#6c757d}/*! light */a.external:after{display:inline-block;height:.75rem;width:.75rem;margin-bottom:.15em;margin-left:.25em;content:"";vertical-align:-0.125em;background-image:url('data:image/svg+xml,');background-repeat:no-repeat;background-size:.75rem .75rem}a.external:after:hover{cursor:pointer}.quarto-ext-icon{display:inline-block;font-size:.75em;padding-left:.3em}.quarto-title-banner{margin-bottom:1em;color:#fdfefe;background:#517699}.quarto-title-banner .code-tools-button{color:#b9dcdc}.quarto-title-banner .code-tools-button:hover{color:#fdfefe}.quarto-title-banner .code-tools-button>.bi::before{background-image:url('data:image/svg+xml,')}.quarto-title-banner .code-tools-button:hover>.bi::before{background-image:url('data:image/svg+xml,')}.quarto-title-banner .quarto-title .title{font-weight:600}.quarto-title-banner .quarto-categories{margin-top:.75em}@media(min-width: 992px){.quarto-title-banner{padding-top:2.5em;padding-bottom:2.5em}}@media(max-width: 991.98px){.quarto-title-banner{padding-top:1em;padding-bottom:1em}}main.quarto-banner-title-block section:first-of-type h2:first-of-type,main.quarto-banner-title-block section:first-of-type .h2:first-of-type,main.quarto-banner-title-block section:first-of-type h3:first-of-type,main.quarto-banner-title-block section:first-of-type .h3:first-of-type,main.quarto-banner-title-block section:first-of-type h4:first-of-type,main.quarto-banner-title-block section:first-of-type .h4:first-of-type{margin-top:0}.quarto-title .quarto-categories{display:flex;column-gap:.4em;padding-bottom:.5em;margin-top:.25em}.quarto-title .quarto-categories .quarto-category{padding:.25em .75em;font-size:.65em;text-transform:uppercase;border:solid 1px;border-radius:.25rem;opacity:.6}.quarto-title .quarto-categories .quarto-category a{color:inherit}#title-block-header.quarto-title-block.default .quarto-title-meta{display:grid;grid-template-columns:repeat(2, 1fr)}#title-block-header.quarto-title-block.default .quarto-title .title{margin-bottom:0}#title-block-header.quarto-title-block.default .quarto-title-author-orcid img{margin-top:-5px}#title-block-header.quarto-title-block.default .quarto-description p:last-of-type{margin-bottom:0}#title-block-header.quarto-title-block.default .quarto-title-meta-contents p,#title-block-header.quarto-title-block.default .quarto-title-authors p,#title-block-header.quarto-title-block.default .quarto-title-affiliations p{margin-bottom:.1em}#title-block-header.quarto-title-block.default .quarto-title-meta-heading{text-transform:uppercase;margin-top:1em;font-size:.8em;opacity:.8;font-weight:400}#title-block-header.quarto-title-block.default .quarto-title-meta-contents{font-size:.9em}#title-block-header.quarto-title-block.default .quarto-title-meta-contents a{color:#212529}#title-block-header.quarto-title-block.default .quarto-title-meta-contents 
p.affiliation:last-of-type{margin-bottom:.7em}#title-block-header.quarto-title-block.default p.affiliation{margin-bottom:.1em}#title-block-header.quarto-title-block.default .description,#title-block-header.quarto-title-block.default .abstract{margin-top:0}#title-block-header.quarto-title-block.default .description>p,#title-block-header.quarto-title-block.default .abstract>p{font-size:.9em}#title-block-header.quarto-title-block.default .description>p:last-of-type,#title-block-header.quarto-title-block.default .abstract>p:last-of-type{margin-bottom:0}#title-block-header.quarto-title-block.default .description .abstract-title,#title-block-header.quarto-title-block.default .abstract .abstract-title{margin-top:1em;text-transform:uppercase;font-size:.8em;opacity:.8;font-weight:400}#title-block-header.quarto-title-block.default .quarto-title-meta-author{display:grid;grid-template-columns:1fr 1fr}/*# sourceMappingURL=397ef2e52d54cf686e4908b90039e9db.css.map */ diff --git a/resources/stac_mount_save_files/libs/bootstrap/bootstrap.min.js b/resources/stac_mount_save_files/libs/bootstrap/bootstrap.min.js new file mode 100644 index 0000000..cc0a255 --- /dev/null +++ b/resources/stac_mount_save_files/libs/bootstrap/bootstrap.min.js @@ -0,0 +1,7 @@ +/*! + * Bootstrap v5.1.3 (https://getbootstrap.com/) + * Copyright 2011-2021 The Bootstrap Authors (https://github.com/twbs/bootstrap/graphs/contributors) + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE) + */ +!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):(t="undefined"!=typeof globalThis?globalThis:t||self).bootstrap=e()}(this,(function(){"use strict";const t="transitionend",e=t=>{let e=t.getAttribute("data-bs-target");if(!e||"#"===e){let i=t.getAttribute("href");if(!i||!i.includes("#")&&!i.startsWith("."))return null;i.includes("#")&&!i.startsWith("#")&&(i=`#${i.split("#")[1]}`),e=i&&"#"!==i?i.trim():null}return e},i=t=>{const i=e(t);return i&&document.querySelector(i)?i:null},n=t=>{const i=e(t);return i?document.querySelector(i):null},s=e=>{e.dispatchEvent(new Event(t))},o=t=>!(!t||"object"!=typeof t)&&(void 0!==t.jquery&&(t=t[0]),void 0!==t.nodeType),r=t=>o(t)?t.jquery?t[0]:t:"string"==typeof t&&t.length>0?document.querySelector(t):null,a=(t,e,i)=>{Object.keys(i).forEach((n=>{const s=i[n],r=e[n],a=r&&o(r)?"element":null==(l=r)?`${l}`:{}.toString.call(l).match(/\s([a-z]+)/i)[1].toLowerCase();var l;if(!new RegExp(s).test(a))throw new TypeError(`${t.toUpperCase()}: Option "${n}" provided type "${a}" but expected type "${s}".`)}))},l=t=>!(!o(t)||0===t.getClientRects().length)&&"visible"===getComputedStyle(t).getPropertyValue("visibility"),c=t=>!t||t.nodeType!==Node.ELEMENT_NODE||!!t.classList.contains("disabled")||(void 0!==t.disabled?t.disabled:t.hasAttribute("disabled")&&"false"!==t.getAttribute("disabled")),h=t=>{if(!document.documentElement.attachShadow)return null;if("function"==typeof t.getRootNode){const e=t.getRootNode();return e instanceof ShadowRoot?e:null}return t instanceof ShadowRoot?t:t.parentNode?h(t.parentNode):null},d=()=>{},u=t=>{t.offsetHeight},f=()=>{const{jQuery:t}=window;return t&&!document.body.hasAttribute("data-bs-no-jquery")?t:null},p=[],m=()=>"rtl"===document.documentElement.dir,g=t=>{var e;e=()=>{const e=f();if(e){const 
i=t.NAME,n=e.fn[i];e.fn[i]=t.jQueryInterface,e.fn[i].Constructor=t,e.fn[i].noConflict=()=>(e.fn[i]=n,t.jQueryInterface)}},"loading"===document.readyState?(p.length||document.addEventListener("DOMContentLoaded",(()=>{p.forEach((t=>t()))})),p.push(e)):e()},_=t=>{"function"==typeof t&&t()},b=(e,i,n=!0)=>{if(!n)return void _(e);const o=(t=>{if(!t)return 0;let{transitionDuration:e,transitionDelay:i}=window.getComputedStyle(t);const n=Number.parseFloat(e),s=Number.parseFloat(i);return n||s?(e=e.split(",")[0],i=i.split(",")[0],1e3*(Number.parseFloat(e)+Number.parseFloat(i))):0})(i)+5;let r=!1;const a=({target:n})=>{n===i&&(r=!0,i.removeEventListener(t,a),_(e))};i.addEventListener(t,a),setTimeout((()=>{r||s(i)}),o)},v=(t,e,i,n)=>{let s=t.indexOf(e);if(-1===s)return t[!i&&n?t.length-1:0];const o=t.length;return s+=i?1:-1,n&&(s=(s+o)%o),t[Math.max(0,Math.min(s,o-1))]},y=/[^.]*(?=\..*)\.|.*/,w=/\..*/,E=/::\d+$/,A={};let T=1;const O={mouseenter:"mouseover",mouseleave:"mouseout"},C=/^(mouseenter|mouseleave)/i,k=new Set(["click","dblclick","mouseup","mousedown","contextmenu","mousewheel","DOMMouseScroll","mouseover","mouseout","mousemove","selectstart","selectend","keydown","keypress","keyup","orientationchange","touchstart","touchmove","touchend","touchcancel","pointerdown","pointermove","pointerup","pointerleave","pointercancel","gesturestart","gesturechange","gestureend","focus","blur","change","reset","select","submit","focusin","focusout","load","unload","beforeunload","resize","move","DOMContentLoaded","readystatechange","error","abort","scroll"]);function L(t,e){return e&&`${e}::${T++}`||t.uidEvent||T++}function x(t){const e=L(t);return t.uidEvent=e,A[e]=A[e]||{},A[e]}function D(t,e,i=null){const n=Object.keys(t);for(let s=0,o=n.length;sfunction(e){if(!e.relatedTarget||e.relatedTarget!==e.delegateTarget&&!e.delegateTarget.contains(e.relatedTarget))return t.call(this,e)};n?n=t(n):i=t(i)}const[o,r,a]=S(e,i,n),l=x(t),c=l[a]||(l[a]={}),h=D(c,r,o?i:null);if(h)return void(h.oneOff=h.oneOff&&s);const d=L(r,e.replace(y,"")),u=o?function(t,e,i){return function n(s){const o=t.querySelectorAll(e);for(let{target:r}=s;r&&r!==this;r=r.parentNode)for(let a=o.length;a--;)if(o[a]===r)return s.delegateTarget=r,n.oneOff&&j.off(t,s.type,e,i),i.apply(r,[s]);return null}}(t,i,n):function(t,e){return function i(n){return n.delegateTarget=t,i.oneOff&&j.off(t,n.type,e),e.apply(t,[n])}}(t,i);u.delegationSelector=o?i:null,u.originalHandler=r,u.oneOff=s,u.uidEvent=d,c[d]=u,t.addEventListener(a,u,o)}function I(t,e,i,n,s){const o=D(e[i],n,s);o&&(t.removeEventListener(i,o,Boolean(s)),delete e[i][o.uidEvent])}function P(t){return t=t.replace(w,""),O[t]||t}const j={on(t,e,i,n){N(t,e,i,n,!1)},one(t,e,i,n){N(t,e,i,n,!0)},off(t,e,i,n){if("string"!=typeof e||!t)return;const[s,o,r]=S(e,i,n),a=r!==e,l=x(t),c=e.startsWith(".");if(void 0!==o){if(!l||!l[r])return;return void I(t,l,r,o,s?i:null)}c&&Object.keys(l).forEach((i=>{!function(t,e,i,n){const s=e[i]||{};Object.keys(s).forEach((o=>{if(o.includes(n)){const n=s[o];I(t,e,i,n.originalHandler,n.delegationSelector)}}))}(t,l,i,e.slice(1))}));const h=l[r]||{};Object.keys(h).forEach((i=>{const n=i.replace(E,"");if(!a||e.includes(n)){const e=h[i];I(t,l,r,e.originalHandler,e.delegationSelector)}}))},trigger(t,e,i){if("string"!=typeof e||!t)return null;const n=f(),s=P(e),o=e!==s,r=k.has(s);let a,l=!0,c=!0,h=!1,d=null;return 
o&&n&&(a=n.Event(e,i),n(t).trigger(a),l=!a.isPropagationStopped(),c=!a.isImmediatePropagationStopped(),h=a.isDefaultPrevented()),r?(d=document.createEvent("HTMLEvents"),d.initEvent(s,l,!0)):d=new CustomEvent(e,{bubbles:l,cancelable:!0}),void 0!==i&&Object.keys(i).forEach((t=>{Object.defineProperty(d,t,{get:()=>i[t]})})),h&&d.preventDefault(),c&&t.dispatchEvent(d),d.defaultPrevented&&void 0!==a&&a.preventDefault(),d}},M=new Map,H={set(t,e,i){M.has(t)||M.set(t,new Map);const n=M.get(t);n.has(e)||0===n.size?n.set(e,i):console.error(`Bootstrap doesn't allow more than one instance per element. Bound instance: ${Array.from(n.keys())[0]}.`)},get:(t,e)=>M.has(t)&&M.get(t).get(e)||null,remove(t,e){if(!M.has(t))return;const i=M.get(t);i.delete(e),0===i.size&&M.delete(t)}};class B{constructor(t){(t=r(t))&&(this._element=t,H.set(this._element,this.constructor.DATA_KEY,this))}dispose(){H.remove(this._element,this.constructor.DATA_KEY),j.off(this._element,this.constructor.EVENT_KEY),Object.getOwnPropertyNames(this).forEach((t=>{this[t]=null}))}_queueCallback(t,e,i=!0){b(t,e,i)}static getInstance(t){return H.get(r(t),this.DATA_KEY)}static getOrCreateInstance(t,e={}){return this.getInstance(t)||new this(t,"object"==typeof e?e:null)}static get VERSION(){return"5.1.3"}static get NAME(){throw new Error('You have to implement the static method "NAME", for each component!')}static get DATA_KEY(){return`bs.${this.NAME}`}static get EVENT_KEY(){return`.${this.DATA_KEY}`}}const R=(t,e="hide")=>{const i=`click.dismiss${t.EVENT_KEY}`,s=t.NAME;j.on(document,i,`[data-bs-dismiss="${s}"]`,(function(i){if(["A","AREA"].includes(this.tagName)&&i.preventDefault(),c(this))return;const o=n(this)||this.closest(`.${s}`);t.getOrCreateInstance(o)[e]()}))};class W extends B{static get NAME(){return"alert"}close(){if(j.trigger(this._element,"close.bs.alert").defaultPrevented)return;this._element.classList.remove("show");const t=this._element.classList.contains("fade");this._queueCallback((()=>this._destroyElement()),this._element,t)}_destroyElement(){this._element.remove(),j.trigger(this._element,"closed.bs.alert"),this.dispose()}static jQueryInterface(t){return this.each((function(){const e=W.getOrCreateInstance(this);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}R(W,"close"),g(W);const $='[data-bs-toggle="button"]';class z extends B{static get NAME(){return"button"}toggle(){this._element.setAttribute("aria-pressed",this._element.classList.toggle("active"))}static jQueryInterface(t){return this.each((function(){const e=z.getOrCreateInstance(this);"toggle"===t&&e[t]()}))}}function q(t){return"true"===t||"false"!==t&&(t===Number(t).toString()?Number(t):""===t||"null"===t?null:t)}function F(t){return t.replace(/[A-Z]/g,(t=>`-${t.toLowerCase()}`))}j.on(document,"click.bs.button.data-api",$,(t=>{t.preventDefault();const e=t.target.closest($);z.getOrCreateInstance(e).toggle()})),g(z);const U={setDataAttribute(t,e,i){t.setAttribute(`data-bs-${F(e)}`,i)},removeDataAttribute(t,e){t.removeAttribute(`data-bs-${F(e)}`)},getDataAttributes(t){if(!t)return{};const e={};return Object.keys(t.dataset).filter((t=>t.startsWith("bs"))).forEach((i=>{let n=i.replace(/^bs/,"");n=n.charAt(0).toLowerCase()+n.slice(1,n.length),e[n]=q(t.dataset[i])})),e},getDataAttribute:(t,e)=>q(t.getAttribute(`data-bs-${F(e)}`)),offset(t){const 
e=t.getBoundingClientRect();return{top:e.top+window.pageYOffset,left:e.left+window.pageXOffset}},position:t=>({top:t.offsetTop,left:t.offsetLeft})},V={find:(t,e=document.documentElement)=>[].concat(...Element.prototype.querySelectorAll.call(e,t)),findOne:(t,e=document.documentElement)=>Element.prototype.querySelector.call(e,t),children:(t,e)=>[].concat(...t.children).filter((t=>t.matches(e))),parents(t,e){const i=[];let n=t.parentNode;for(;n&&n.nodeType===Node.ELEMENT_NODE&&3!==n.nodeType;)n.matches(e)&&i.push(n),n=n.parentNode;return i},prev(t,e){let i=t.previousElementSibling;for(;i;){if(i.matches(e))return[i];i=i.previousElementSibling}return[]},next(t,e){let i=t.nextElementSibling;for(;i;){if(i.matches(e))return[i];i=i.nextElementSibling}return[]},focusableChildren(t){const e=["a","button","input","textarea","select","details","[tabindex]",'[contenteditable="true"]'].map((t=>`${t}:not([tabindex^="-"])`)).join(", ");return this.find(e,t).filter((t=>!c(t)&&l(t)))}},K="carousel",X={interval:5e3,keyboard:!0,slide:!1,pause:"hover",wrap:!0,touch:!0},Y={interval:"(number|boolean)",keyboard:"boolean",slide:"(boolean|string)",pause:"(string|boolean)",wrap:"boolean",touch:"boolean"},Q="next",G="prev",Z="left",J="right",tt={ArrowLeft:J,ArrowRight:Z},et="slid.bs.carousel",it="active",nt=".active.carousel-item";class st extends B{constructor(t,e){super(t),this._items=null,this._interval=null,this._activeElement=null,this._isPaused=!1,this._isSliding=!1,this.touchTimeout=null,this.touchStartX=0,this.touchDeltaX=0,this._config=this._getConfig(e),this._indicatorsElement=V.findOne(".carousel-indicators",this._element),this._touchSupported="ontouchstart"in document.documentElement||navigator.maxTouchPoints>0,this._pointerEvent=Boolean(window.PointerEvent),this._addEventListeners()}static get Default(){return X}static get NAME(){return K}next(){this._slide(Q)}nextWhenVisible(){!document.hidden&&l(this._element)&&this.next()}prev(){this._slide(G)}pause(t){t||(this._isPaused=!0),V.findOne(".carousel-item-next, .carousel-item-prev",this._element)&&(s(this._element),this.cycle(!0)),clearInterval(this._interval),this._interval=null}cycle(t){t||(this._isPaused=!1),this._interval&&(clearInterval(this._interval),this._interval=null),this._config&&this._config.interval&&!this._isPaused&&(this._updateInterval(),this._interval=setInterval((document.visibilityState?this.nextWhenVisible:this.next).bind(this),this._config.interval))}to(t){this._activeElement=V.findOne(nt,this._element);const e=this._getItemIndex(this._activeElement);if(t>this._items.length-1||t<0)return;if(this._isSliding)return void j.one(this._element,et,(()=>this.to(t)));if(e===t)return this.pause(),void this.cycle();const i=t>e?Q:G;this._slide(i,this._items[t])}_getConfig(t){return t={...X,...U.getDataAttributes(this._element),..."object"==typeof t?t:{}},a(K,t,Y),t}_handleSwipe(){const t=Math.abs(this.touchDeltaX);if(t<=40)return;const e=t/this.touchDeltaX;this.touchDeltaX=0,e&&this._slide(e>0?J:Z)}_addEventListeners(){this._config.keyboard&&j.on(this._element,"keydown.bs.carousel",(t=>this._keydown(t))),"hover"===this._config.pause&&(j.on(this._element,"mouseenter.bs.carousel",(t=>this.pause(t))),j.on(this._element,"mouseleave.bs.carousel",(t=>this.cycle(t)))),this._config.touch&&this._touchSupported&&this._addTouchEventListeners()}_addTouchEventListeners(){const 
t=t=>this._pointerEvent&&("pen"===t.pointerType||"touch"===t.pointerType),e=e=>{t(e)?this.touchStartX=e.clientX:this._pointerEvent||(this.touchStartX=e.touches[0].clientX)},i=t=>{this.touchDeltaX=t.touches&&t.touches.length>1?0:t.touches[0].clientX-this.touchStartX},n=e=>{t(e)&&(this.touchDeltaX=e.clientX-this.touchStartX),this._handleSwipe(),"hover"===this._config.pause&&(this.pause(),this.touchTimeout&&clearTimeout(this.touchTimeout),this.touchTimeout=setTimeout((t=>this.cycle(t)),500+this._config.interval))};V.find(".carousel-item img",this._element).forEach((t=>{j.on(t,"dragstart.bs.carousel",(t=>t.preventDefault()))})),this._pointerEvent?(j.on(this._element,"pointerdown.bs.carousel",(t=>e(t))),j.on(this._element,"pointerup.bs.carousel",(t=>n(t))),this._element.classList.add("pointer-event")):(j.on(this._element,"touchstart.bs.carousel",(t=>e(t))),j.on(this._element,"touchmove.bs.carousel",(t=>i(t))),j.on(this._element,"touchend.bs.carousel",(t=>n(t))))}_keydown(t){if(/input|textarea/i.test(t.target.tagName))return;const e=tt[t.key];e&&(t.preventDefault(),this._slide(e))}_getItemIndex(t){return this._items=t&&t.parentNode?V.find(".carousel-item",t.parentNode):[],this._items.indexOf(t)}_getItemByOrder(t,e){const i=t===Q;return v(this._items,e,i,this._config.wrap)}_triggerSlideEvent(t,e){const i=this._getItemIndex(t),n=this._getItemIndex(V.findOne(nt,this._element));return j.trigger(this._element,"slide.bs.carousel",{relatedTarget:t,direction:e,from:n,to:i})}_setActiveIndicatorElement(t){if(this._indicatorsElement){const e=V.findOne(".active",this._indicatorsElement);e.classList.remove(it),e.removeAttribute("aria-current");const i=V.find("[data-bs-target]",this._indicatorsElement);for(let e=0;e{j.trigger(this._element,et,{relatedTarget:o,direction:d,from:s,to:r})};if(this._element.classList.contains("slide")){o.classList.add(h),u(o),n.classList.add(c),o.classList.add(c);const t=()=>{o.classList.remove(c,h),o.classList.add(it),n.classList.remove(it,h,c),this._isSliding=!1,setTimeout(f,0)};this._queueCallback(t,n,!0)}else n.classList.remove(it),o.classList.add(it),this._isSliding=!1,f();a&&this.cycle()}_directionToOrder(t){return[J,Z].includes(t)?m()?t===Z?G:Q:t===Z?Q:G:t}_orderToDirection(t){return[Q,G].includes(t)?m()?t===G?Z:J:t===G?J:Z:t}static carouselInterface(t,e){const i=st.getOrCreateInstance(t,e);let{_config:n}=i;"object"==typeof e&&(n={...n,...e});const s="string"==typeof e?e:n.slide;if("number"==typeof e)i.to(e);else if("string"==typeof s){if(void 0===i[s])throw new TypeError(`No method named "${s}"`);i[s]()}else n.interval&&n.ride&&(i.pause(),i.cycle())}static jQueryInterface(t){return this.each((function(){st.carouselInterface(this,t)}))}static dataApiClickHandler(t){const e=n(this);if(!e||!e.classList.contains("carousel"))return;const i={...U.getDataAttributes(e),...U.getDataAttributes(this)},s=this.getAttribute("data-bs-slide-to");s&&(i.interval=!1),st.carouselInterface(e,i),s&&st.getInstance(e).to(s),t.preventDefault()}}j.on(document,"click.bs.carousel.data-api","[data-bs-slide], [data-bs-slide-to]",st.dataApiClickHandler),j.on(window,"load.bs.carousel.data-api",(()=>{const t=V.find('[data-bs-ride="carousel"]');for(let e=0,i=t.length;et===this._element));null!==s&&o.length&&(this._selector=s,this._triggerArray.push(e))}this._initializeChildren(),this._config.parent||this._addAriaAndCollapsedClass(this._triggerArray,this._isShown()),this._config.toggle&&this.toggle()}static get Default(){return rt}static get NAME(){return 
ot}toggle(){this._isShown()?this.hide():this.show()}show(){if(this._isTransitioning||this._isShown())return;let t,e=[];if(this._config.parent){const t=V.find(ut,this._config.parent);e=V.find(".collapse.show, .collapse.collapsing",this._config.parent).filter((e=>!t.includes(e)))}const i=V.findOne(this._selector);if(e.length){const n=e.find((t=>i!==t));if(t=n?pt.getInstance(n):null,t&&t._isTransitioning)return}if(j.trigger(this._element,"show.bs.collapse").defaultPrevented)return;e.forEach((e=>{i!==e&&pt.getOrCreateInstance(e,{toggle:!1}).hide(),t||H.set(e,"bs.collapse",null)}));const n=this._getDimension();this._element.classList.remove(ct),this._element.classList.add(ht),this._element.style[n]=0,this._addAriaAndCollapsedClass(this._triggerArray,!0),this._isTransitioning=!0;const s=`scroll${n[0].toUpperCase()+n.slice(1)}`;this._queueCallback((()=>{this._isTransitioning=!1,this._element.classList.remove(ht),this._element.classList.add(ct,lt),this._element.style[n]="",j.trigger(this._element,"shown.bs.collapse")}),this._element,!0),this._element.style[n]=`${this._element[s]}px`}hide(){if(this._isTransitioning||!this._isShown())return;if(j.trigger(this._element,"hide.bs.collapse").defaultPrevented)return;const t=this._getDimension();this._element.style[t]=`${this._element.getBoundingClientRect()[t]}px`,u(this._element),this._element.classList.add(ht),this._element.classList.remove(ct,lt);const e=this._triggerArray.length;for(let t=0;t{this._isTransitioning=!1,this._element.classList.remove(ht),this._element.classList.add(ct),j.trigger(this._element,"hidden.bs.collapse")}),this._element,!0)}_isShown(t=this._element){return t.classList.contains(lt)}_getConfig(t){return(t={...rt,...U.getDataAttributes(this._element),...t}).toggle=Boolean(t.toggle),t.parent=r(t.parent),a(ot,t,at),t}_getDimension(){return this._element.classList.contains("collapse-horizontal")?"width":"height"}_initializeChildren(){if(!this._config.parent)return;const t=V.find(ut,this._config.parent);V.find(ft,this._config.parent).filter((e=>!t.includes(e))).forEach((t=>{const e=n(t);e&&this._addAriaAndCollapsedClass([t],this._isShown(e))}))}_addAriaAndCollapsedClass(t,e){t.length&&t.forEach((t=>{e?t.classList.remove(dt):t.classList.add(dt),t.setAttribute("aria-expanded",e)}))}static jQueryInterface(t){return this.each((function(){const e={};"string"==typeof t&&/show|hide/.test(t)&&(e.toggle=!1);const i=pt.getOrCreateInstance(this,e);if("string"==typeof t){if(void 0===i[t])throw new TypeError(`No method named "${t}"`);i[t]()}}))}}j.on(document,"click.bs.collapse.data-api",ft,(function(t){("A"===t.target.tagName||t.delegateTarget&&"A"===t.delegateTarget.tagName)&&t.preventDefault();const e=i(this);V.find(e).forEach((t=>{pt.getOrCreateInstance(t,{toggle:!1}).toggle()}))})),g(pt);var mt="top",gt="bottom",_t="right",bt="left",vt="auto",yt=[mt,gt,_t,bt],wt="start",Et="end",At="clippingParents",Tt="viewport",Ot="popper",Ct="reference",kt=yt.reduce((function(t,e){return t.concat([e+"-"+wt,e+"-"+Et])}),[]),Lt=[].concat(yt,[vt]).reduce((function(t,e){return t.concat([e,e+"-"+wt,e+"-"+Et])}),[]),xt="beforeRead",Dt="read",St="afterRead",Nt="beforeMain",It="main",Pt="afterMain",jt="beforeWrite",Mt="write",Ht="afterWrite",Bt=[xt,Dt,St,Nt,It,Pt,jt,Mt,Ht];function Rt(t){return t?(t.nodeName||"").toLowerCase():null}function Wt(t){if(null==t)return window;if("[object Window]"!==t.toString()){var e=t.ownerDocument;return e&&e.defaultView||window}return t}function $t(t){return t instanceof Wt(t).Element||t instanceof Element}function zt(t){return t 
instanceof Wt(t).HTMLElement||t instanceof HTMLElement}function qt(t){return"undefined"!=typeof ShadowRoot&&(t instanceof Wt(t).ShadowRoot||t instanceof ShadowRoot)}const Ft={name:"applyStyles",enabled:!0,phase:"write",fn:function(t){var e=t.state;Object.keys(e.elements).forEach((function(t){var i=e.styles[t]||{},n=e.attributes[t]||{},s=e.elements[t];zt(s)&&Rt(s)&&(Object.assign(s.style,i),Object.keys(n).forEach((function(t){var e=n[t];!1===e?s.removeAttribute(t):s.setAttribute(t,!0===e?"":e)})))}))},effect:function(t){var e=t.state,i={popper:{position:e.options.strategy,left:"0",top:"0",margin:"0"},arrow:{position:"absolute"},reference:{}};return Object.assign(e.elements.popper.style,i.popper),e.styles=i,e.elements.arrow&&Object.assign(e.elements.arrow.style,i.arrow),function(){Object.keys(e.elements).forEach((function(t){var n=e.elements[t],s=e.attributes[t]||{},o=Object.keys(e.styles.hasOwnProperty(t)?e.styles[t]:i[t]).reduce((function(t,e){return t[e]="",t}),{});zt(n)&&Rt(n)&&(Object.assign(n.style,o),Object.keys(s).forEach((function(t){n.removeAttribute(t)})))}))}},requires:["computeStyles"]};function Ut(t){return t.split("-")[0]}function Vt(t,e){var i=t.getBoundingClientRect();return{width:i.width/1,height:i.height/1,top:i.top/1,right:i.right/1,bottom:i.bottom/1,left:i.left/1,x:i.left/1,y:i.top/1}}function Kt(t){var e=Vt(t),i=t.offsetWidth,n=t.offsetHeight;return Math.abs(e.width-i)<=1&&(i=e.width),Math.abs(e.height-n)<=1&&(n=e.height),{x:t.offsetLeft,y:t.offsetTop,width:i,height:n}}function Xt(t,e){var i=e.getRootNode&&e.getRootNode();if(t.contains(e))return!0;if(i&&qt(i)){var n=e;do{if(n&&t.isSameNode(n))return!0;n=n.parentNode||n.host}while(n)}return!1}function Yt(t){return Wt(t).getComputedStyle(t)}function Qt(t){return["table","td","th"].indexOf(Rt(t))>=0}function Gt(t){return(($t(t)?t.ownerDocument:t.document)||window.document).documentElement}function Zt(t){return"html"===Rt(t)?t:t.assignedSlot||t.parentNode||(qt(t)?t.host:null)||Gt(t)}function Jt(t){return zt(t)&&"fixed"!==Yt(t).position?t.offsetParent:null}function te(t){for(var e=Wt(t),i=Jt(t);i&&Qt(i)&&"static"===Yt(i).position;)i=Jt(i);return i&&("html"===Rt(i)||"body"===Rt(i)&&"static"===Yt(i).position)?e:i||function(t){var e=-1!==navigator.userAgent.toLowerCase().indexOf("firefox");if(-1!==navigator.userAgent.indexOf("Trident")&&zt(t)&&"fixed"===Yt(t).position)return null;for(var i=Zt(t);zt(i)&&["html","body"].indexOf(Rt(i))<0;){var n=Yt(i);if("none"!==n.transform||"none"!==n.perspective||"paint"===n.contain||-1!==["transform","perspective"].indexOf(n.willChange)||e&&"filter"===n.willChange||e&&n.filter&&"none"!==n.filter)return i;i=i.parentNode}return null}(t)||e}function ee(t){return["top","bottom"].indexOf(t)>=0?"x":"y"}var ie=Math.max,ne=Math.min,se=Math.round;function oe(t,e,i){return ie(t,ne(e,i))}function re(t){return Object.assign({},{top:0,right:0,bottom:0,left:0},t)}function ae(t,e){return e.reduce((function(e,i){return e[i]=t,e}),{})}const le={name:"arrow",enabled:!0,phase:"main",fn:function(t){var e,i=t.state,n=t.name,s=t.options,o=i.elements.arrow,r=i.modifiersData.popperOffsets,a=Ut(i.placement),l=ee(a),c=[bt,_t].indexOf(a)>=0?"height":"width";if(o&&r){var h=function(t,e){return re("number"!=typeof(t="function"==typeof 
t?t(Object.assign({},e.rects,{placement:e.placement})):t)?t:ae(t,yt))}(s.padding,i),d=Kt(o),u="y"===l?mt:bt,f="y"===l?gt:_t,p=i.rects.reference[c]+i.rects.reference[l]-r[l]-i.rects.popper[c],m=r[l]-i.rects.reference[l],g=te(o),_=g?"y"===l?g.clientHeight||0:g.clientWidth||0:0,b=p/2-m/2,v=h[u],y=_-d[c]-h[f],w=_/2-d[c]/2+b,E=oe(v,w,y),A=l;i.modifiersData[n]=((e={})[A]=E,e.centerOffset=E-w,e)}},effect:function(t){var e=t.state,i=t.options.element,n=void 0===i?"[data-popper-arrow]":i;null!=n&&("string"!=typeof n||(n=e.elements.popper.querySelector(n)))&&Xt(e.elements.popper,n)&&(e.elements.arrow=n)},requires:["popperOffsets"],requiresIfExists:["preventOverflow"]};function ce(t){return t.split("-")[1]}var he={top:"auto",right:"auto",bottom:"auto",left:"auto"};function de(t){var e,i=t.popper,n=t.popperRect,s=t.placement,o=t.variation,r=t.offsets,a=t.position,l=t.gpuAcceleration,c=t.adaptive,h=t.roundOffsets,d=!0===h?function(t){var e=t.x,i=t.y,n=window.devicePixelRatio||1;return{x:se(se(e*n)/n)||0,y:se(se(i*n)/n)||0}}(r):"function"==typeof h?h(r):r,u=d.x,f=void 0===u?0:u,p=d.y,m=void 0===p?0:p,g=r.hasOwnProperty("x"),_=r.hasOwnProperty("y"),b=bt,v=mt,y=window;if(c){var w=te(i),E="clientHeight",A="clientWidth";w===Wt(i)&&"static"!==Yt(w=Gt(i)).position&&"absolute"===a&&(E="scrollHeight",A="scrollWidth"),w=w,s!==mt&&(s!==bt&&s!==_t||o!==Et)||(v=gt,m-=w[E]-n.height,m*=l?1:-1),s!==bt&&(s!==mt&&s!==gt||o!==Et)||(b=_t,f-=w[A]-n.width,f*=l?1:-1)}var T,O=Object.assign({position:a},c&&he);return l?Object.assign({},O,((T={})[v]=_?"0":"",T[b]=g?"0":"",T.transform=(y.devicePixelRatio||1)<=1?"translate("+f+"px, "+m+"px)":"translate3d("+f+"px, "+m+"px, 0)",T)):Object.assign({},O,((e={})[v]=_?m+"px":"",e[b]=g?f+"px":"",e.transform="",e))}const ue={name:"computeStyles",enabled:!0,phase:"beforeWrite",fn:function(t){var e=t.state,i=t.options,n=i.gpuAcceleration,s=void 0===n||n,o=i.adaptive,r=void 0===o||o,a=i.roundOffsets,l=void 0===a||a,c={placement:Ut(e.placement),variation:ce(e.placement),popper:e.elements.popper,popperRect:e.rects.popper,gpuAcceleration:s};null!=e.modifiersData.popperOffsets&&(e.styles.popper=Object.assign({},e.styles.popper,de(Object.assign({},c,{offsets:e.modifiersData.popperOffsets,position:e.options.strategy,adaptive:r,roundOffsets:l})))),null!=e.modifiersData.arrow&&(e.styles.arrow=Object.assign({},e.styles.arrow,de(Object.assign({},c,{offsets:e.modifiersData.arrow,position:"absolute",adaptive:!1,roundOffsets:l})))),e.attributes.popper=Object.assign({},e.attributes.popper,{"data-popper-placement":e.placement})},data:{}};var fe={passive:!0};const pe={name:"eventListeners",enabled:!0,phase:"write",fn:function(){},effect:function(t){var e=t.state,i=t.instance,n=t.options,s=n.scroll,o=void 0===s||s,r=n.resize,a=void 0===r||r,l=Wt(e.elements.popper),c=[].concat(e.scrollParents.reference,e.scrollParents.popper);return o&&c.forEach((function(t){t.addEventListener("scroll",i.update,fe)})),a&&l.addEventListener("resize",i.update,fe),function(){o&&c.forEach((function(t){t.removeEventListener("scroll",i.update,fe)})),a&&l.removeEventListener("resize",i.update,fe)}},data:{}};var me={left:"right",right:"left",bottom:"top",top:"bottom"};function ge(t){return t.replace(/left|right|bottom|top/g,(function(t){return me[t]}))}var _e={start:"end",end:"start"};function be(t){return t.replace(/start|end/g,(function(t){return _e[t]}))}function ve(t){var e=Wt(t);return{scrollLeft:e.pageXOffset,scrollTop:e.pageYOffset}}function ye(t){return Vt(Gt(t)).left+ve(t).scrollLeft}function we(t){var 
e=Yt(t),i=e.overflow,n=e.overflowX,s=e.overflowY;return/auto|scroll|overlay|hidden/.test(i+s+n)}function Ee(t){return["html","body","#document"].indexOf(Rt(t))>=0?t.ownerDocument.body:zt(t)&&we(t)?t:Ee(Zt(t))}function Ae(t,e){var i;void 0===e&&(e=[]);var n=Ee(t),s=n===(null==(i=t.ownerDocument)?void 0:i.body),o=Wt(n),r=s?[o].concat(o.visualViewport||[],we(n)?n:[]):n,a=e.concat(r);return s?a:a.concat(Ae(Zt(r)))}function Te(t){return Object.assign({},t,{left:t.x,top:t.y,right:t.x+t.width,bottom:t.y+t.height})}function Oe(t,e){return e===Tt?Te(function(t){var e=Wt(t),i=Gt(t),n=e.visualViewport,s=i.clientWidth,o=i.clientHeight,r=0,a=0;return n&&(s=n.width,o=n.height,/^((?!chrome|android).)*safari/i.test(navigator.userAgent)||(r=n.offsetLeft,a=n.offsetTop)),{width:s,height:o,x:r+ye(t),y:a}}(t)):zt(e)?function(t){var e=Vt(t);return e.top=e.top+t.clientTop,e.left=e.left+t.clientLeft,e.bottom=e.top+t.clientHeight,e.right=e.left+t.clientWidth,e.width=t.clientWidth,e.height=t.clientHeight,e.x=e.left,e.y=e.top,e}(e):Te(function(t){var e,i=Gt(t),n=ve(t),s=null==(e=t.ownerDocument)?void 0:e.body,o=ie(i.scrollWidth,i.clientWidth,s?s.scrollWidth:0,s?s.clientWidth:0),r=ie(i.scrollHeight,i.clientHeight,s?s.scrollHeight:0,s?s.clientHeight:0),a=-n.scrollLeft+ye(t),l=-n.scrollTop;return"rtl"===Yt(s||i).direction&&(a+=ie(i.clientWidth,s?s.clientWidth:0)-o),{width:o,height:r,x:a,y:l}}(Gt(t)))}function Ce(t){var e,i=t.reference,n=t.element,s=t.placement,o=s?Ut(s):null,r=s?ce(s):null,a=i.x+i.width/2-n.width/2,l=i.y+i.height/2-n.height/2;switch(o){case mt:e={x:a,y:i.y-n.height};break;case gt:e={x:a,y:i.y+i.height};break;case _t:e={x:i.x+i.width,y:l};break;case bt:e={x:i.x-n.width,y:l};break;default:e={x:i.x,y:i.y}}var c=o?ee(o):null;if(null!=c){var h="y"===c?"height":"width";switch(r){case wt:e[c]=e[c]-(i[h]/2-n[h]/2);break;case Et:e[c]=e[c]+(i[h]/2-n[h]/2)}}return e}function ke(t,e){void 0===e&&(e={});var i=e,n=i.placement,s=void 0===n?t.placement:n,o=i.boundary,r=void 0===o?At:o,a=i.rootBoundary,l=void 0===a?Tt:a,c=i.elementContext,h=void 0===c?Ot:c,d=i.altBoundary,u=void 0!==d&&d,f=i.padding,p=void 0===f?0:f,m=re("number"!=typeof p?p:ae(p,yt)),g=h===Ot?Ct:Ot,_=t.rects.popper,b=t.elements[u?g:h],v=function(t,e,i){var n="clippingParents"===e?function(t){var e=Ae(Zt(t)),i=["absolute","fixed"].indexOf(Yt(t).position)>=0&&zt(t)?te(t):t;return $t(i)?e.filter((function(t){return $t(t)&&Xt(t,i)&&"body"!==Rt(t)})):[]}(t):[].concat(e),s=[].concat(n,[i]),o=s[0],r=s.reduce((function(e,i){var n=Oe(t,i);return e.top=ie(n.top,e.top),e.right=ne(n.right,e.right),e.bottom=ne(n.bottom,e.bottom),e.left=ie(n.left,e.left),e}),Oe(t,o));return r.width=r.right-r.left,r.height=r.bottom-r.top,r.x=r.left,r.y=r.top,r}($t(b)?b:b.contextElement||Gt(t.elements.popper),r,l),y=Vt(t.elements.reference),w=Ce({reference:y,element:_,strategy:"absolute",placement:s}),E=Te(Object.assign({},_,w)),A=h===Ot?E:y,T={top:v.top-A.top+m.top,bottom:A.bottom-v.bottom+m.bottom,left:v.left-A.left+m.left,right:A.right-v.right+m.right},O=t.modifiersData.offset;if(h===Ot&&O){var C=O[s];Object.keys(T).forEach((function(t){var e=[_t,gt].indexOf(t)>=0?1:-1,i=[mt,gt].indexOf(t)>=0?"y":"x";T[t]+=C[i]*e}))}return T}function Le(t,e){void 0===e&&(e={});var i=e,n=i.placement,s=i.boundary,o=i.rootBoundary,r=i.padding,a=i.flipVariations,l=i.allowedAutoPlacements,c=void 0===l?Lt:l,h=ce(n),d=h?a?kt:kt.filter((function(t){return ce(t)===h})):yt,u=d.filter((function(t){return c.indexOf(t)>=0}));0===u.length&&(u=d);var f=u.reduce((function(e,i){return 
e[i]=ke(t,{placement:i,boundary:s,rootBoundary:o,padding:r})[Ut(i)],e}),{});return Object.keys(f).sort((function(t,e){return f[t]-f[e]}))}const xe={name:"flip",enabled:!0,phase:"main",fn:function(t){var e=t.state,i=t.options,n=t.name;if(!e.modifiersData[n]._skip){for(var s=i.mainAxis,o=void 0===s||s,r=i.altAxis,a=void 0===r||r,l=i.fallbackPlacements,c=i.padding,h=i.boundary,d=i.rootBoundary,u=i.altBoundary,f=i.flipVariations,p=void 0===f||f,m=i.allowedAutoPlacements,g=e.options.placement,_=Ut(g),b=l||(_!==g&&p?function(t){if(Ut(t)===vt)return[];var e=ge(t);return[be(t),e,be(e)]}(g):[ge(g)]),v=[g].concat(b).reduce((function(t,i){return t.concat(Ut(i)===vt?Le(e,{placement:i,boundary:h,rootBoundary:d,padding:c,flipVariations:p,allowedAutoPlacements:m}):i)}),[]),y=e.rects.reference,w=e.rects.popper,E=new Map,A=!0,T=v[0],O=0;O=0,D=x?"width":"height",S=ke(e,{placement:C,boundary:h,rootBoundary:d,altBoundary:u,padding:c}),N=x?L?_t:bt:L?gt:mt;y[D]>w[D]&&(N=ge(N));var I=ge(N),P=[];if(o&&P.push(S[k]<=0),a&&P.push(S[N]<=0,S[I]<=0),P.every((function(t){return t}))){T=C,A=!1;break}E.set(C,P)}if(A)for(var j=function(t){var e=v.find((function(e){var i=E.get(e);if(i)return i.slice(0,t).every((function(t){return t}))}));if(e)return T=e,"break"},M=p?3:1;M>0&&"break"!==j(M);M--);e.placement!==T&&(e.modifiersData[n]._skip=!0,e.placement=T,e.reset=!0)}},requiresIfExists:["offset"],data:{_skip:!1}};function De(t,e,i){return void 0===i&&(i={x:0,y:0}),{top:t.top-e.height-i.y,right:t.right-e.width+i.x,bottom:t.bottom-e.height+i.y,left:t.left-e.width-i.x}}function Se(t){return[mt,_t,gt,bt].some((function(e){return t[e]>=0}))}const Ne={name:"hide",enabled:!0,phase:"main",requiresIfExists:["preventOverflow"],fn:function(t){var e=t.state,i=t.name,n=e.rects.reference,s=e.rects.popper,o=e.modifiersData.preventOverflow,r=ke(e,{elementContext:"reference"}),a=ke(e,{altBoundary:!0}),l=De(r,n),c=De(a,s,o),h=Se(l),d=Se(c);e.modifiersData[i]={referenceClippingOffsets:l,popperEscapeOffsets:c,isReferenceHidden:h,hasPopperEscaped:d},e.attributes.popper=Object.assign({},e.attributes.popper,{"data-popper-reference-hidden":h,"data-popper-escaped":d})}},Ie={name:"offset",enabled:!0,phase:"main",requires:["popperOffsets"],fn:function(t){var e=t.state,i=t.options,n=t.name,s=i.offset,o=void 0===s?[0,0]:s,r=Lt.reduce((function(t,i){return t[i]=function(t,e,i){var n=Ut(t),s=[bt,mt].indexOf(n)>=0?-1:1,o="function"==typeof i?i(Object.assign({},e,{placement:t})):i,r=o[0],a=o[1];return r=r||0,a=(a||0)*s,[bt,_t].indexOf(n)>=0?{x:a,y:r}:{x:r,y:a}}(i,e.rects,o),t}),{}),a=r[e.placement],l=a.x,c=a.y;null!=e.modifiersData.popperOffsets&&(e.modifiersData.popperOffsets.x+=l,e.modifiersData.popperOffsets.y+=c),e.modifiersData[n]=r}},Pe={name:"popperOffsets",enabled:!0,phase:"read",fn:function(t){var e=t.state,i=t.name;e.modifiersData[i]=Ce({reference:e.rects.reference,element:e.rects.popper,strategy:"absolute",placement:e.placement})},data:{}},je={name:"preventOverflow",enabled:!0,phase:"main",fn:function(t){var e=t.state,i=t.options,n=t.name,s=i.mainAxis,o=void 0===s||s,r=i.altAxis,a=void 0!==r&&r,l=i.boundary,c=i.rootBoundary,h=i.altBoundary,d=i.padding,u=i.tether,f=void 0===u||u,p=i.tetherOffset,m=void 0===p?0:p,g=ke(e,{boundary:l,rootBoundary:c,padding:d,altBoundary:h}),_=Ut(e.placement),b=ce(e.placement),v=!b,y=ee(_),w="x"===y?"y":"x",E=e.modifiersData.popperOffsets,A=e.rects.reference,T=e.rects.popper,O="function"==typeof m?m(Object.assign({},e.rects,{placement:e.placement})):m,C={x:0,y:0};if(E){if(o||a){var 
k="y"===y?mt:bt,L="y"===y?gt:_t,x="y"===y?"height":"width",D=E[y],S=E[y]+g[k],N=E[y]-g[L],I=f?-T[x]/2:0,P=b===wt?A[x]:T[x],j=b===wt?-T[x]:-A[x],M=e.elements.arrow,H=f&&M?Kt(M):{width:0,height:0},B=e.modifiersData["arrow#persistent"]?e.modifiersData["arrow#persistent"].padding:{top:0,right:0,bottom:0,left:0},R=B[k],W=B[L],$=oe(0,A[x],H[x]),z=v?A[x]/2-I-$-R-O:P-$-R-O,q=v?-A[x]/2+I+$+W+O:j+$+W+O,F=e.elements.arrow&&te(e.elements.arrow),U=F?"y"===y?F.clientTop||0:F.clientLeft||0:0,V=e.modifiersData.offset?e.modifiersData.offset[e.placement][y]:0,K=E[y]+z-V-U,X=E[y]+q-V;if(o){var Y=oe(f?ne(S,K):S,D,f?ie(N,X):N);E[y]=Y,C[y]=Y-D}if(a){var Q="x"===y?mt:bt,G="x"===y?gt:_t,Z=E[w],J=Z+g[Q],tt=Z-g[G],et=oe(f?ne(J,K):J,Z,f?ie(tt,X):tt);E[w]=et,C[w]=et-Z}}e.modifiersData[n]=C}},requiresIfExists:["offset"]};function Me(t,e,i){void 0===i&&(i=!1);var n=zt(e);zt(e)&&function(t){var e=t.getBoundingClientRect();e.width,t.offsetWidth,e.height,t.offsetHeight}(e);var s,o,r=Gt(e),a=Vt(t),l={scrollLeft:0,scrollTop:0},c={x:0,y:0};return(n||!n&&!i)&&(("body"!==Rt(e)||we(r))&&(l=(s=e)!==Wt(s)&&zt(s)?{scrollLeft:(o=s).scrollLeft,scrollTop:o.scrollTop}:ve(s)),zt(e)?((c=Vt(e)).x+=e.clientLeft,c.y+=e.clientTop):r&&(c.x=ye(r))),{x:a.left+l.scrollLeft-c.x,y:a.top+l.scrollTop-c.y,width:a.width,height:a.height}}function He(t){var e=new Map,i=new Set,n=[];function s(t){i.add(t.name),[].concat(t.requires||[],t.requiresIfExists||[]).forEach((function(t){if(!i.has(t)){var n=e.get(t);n&&s(n)}})),n.push(t)}return t.forEach((function(t){e.set(t.name,t)})),t.forEach((function(t){i.has(t.name)||s(t)})),n}var Be={placement:"bottom",modifiers:[],strategy:"absolute"};function Re(){for(var t=arguments.length,e=new Array(t),i=0;ij.on(t,"mouseover",d))),this._element.focus(),this._element.setAttribute("aria-expanded",!0),this._menu.classList.add(Je),this._element.classList.add(Je),j.trigger(this._element,"shown.bs.dropdown",t)}hide(){if(c(this._element)||!this._isShown(this._menu))return;const t={relatedTarget:this._element};this._completeHide(t)}dispose(){this._popper&&this._popper.destroy(),super.dispose()}update(){this._inNavbar=this._detectNavbar(),this._popper&&this._popper.update()}_completeHide(t){j.trigger(this._element,"hide.bs.dropdown",t).defaultPrevented||("ontouchstart"in document.documentElement&&[].concat(...document.body.children).forEach((t=>j.off(t,"mouseover",d))),this._popper&&this._popper.destroy(),this._menu.classList.remove(Je),this._element.classList.remove(Je),this._element.setAttribute("aria-expanded","false"),U.removeDataAttribute(this._menu,"popper"),j.trigger(this._element,"hidden.bs.dropdown",t))}_getConfig(t){if(t={...this.constructor.Default,...U.getDataAttributes(this._element),...t},a(Ue,t,this.constructor.DefaultType),"object"==typeof t.reference&&!o(t.reference)&&"function"!=typeof t.reference.getBoundingClientRect)throw new TypeError(`${Ue.toUpperCase()}: Option "reference" provided type "object" without a required "getBoundingClientRect" method.`);return t}_createPopper(t){if(void 0===Fe)throw new TypeError("Bootstrap's dropdowns require Popper (https://popper.js.org)");let e=this._element;"parent"===this._config.reference?e=t:o(this._config.reference)?e=r(this._config.reference):"object"==typeof this._config.reference&&(e=this._config.reference);const i=this._getPopperConfig(),n=i.modifiers.find((t=>"applyStyles"===t.name&&!1===t.enabled));this._popper=qe(e,this._menu,i),n&&U.setDataAttribute(this._menu,"popper","static")}_isShown(t=this._element){return 
t.classList.contains(Je)}_getMenuElement(){return V.next(this._element,ei)[0]}_getPlacement(){const t=this._element.parentNode;if(t.classList.contains("dropend"))return ri;if(t.classList.contains("dropstart"))return ai;const e="end"===getComputedStyle(this._menu).getPropertyValue("--bs-position").trim();return t.classList.contains("dropup")?e?ni:ii:e?oi:si}_detectNavbar(){return null!==this._element.closest(".navbar")}_getOffset(){const{offset:t}=this._config;return"string"==typeof t?t.split(",").map((t=>Number.parseInt(t,10))):"function"==typeof t?e=>t(e,this._element):t}_getPopperConfig(){const t={placement:this._getPlacement(),modifiers:[{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"offset",options:{offset:this._getOffset()}}]};return"static"===this._config.display&&(t.modifiers=[{name:"applyStyles",enabled:!1}]),{...t,..."function"==typeof this._config.popperConfig?this._config.popperConfig(t):this._config.popperConfig}}_selectMenuItem({key:t,target:e}){const i=V.find(".dropdown-menu .dropdown-item:not(.disabled):not(:disabled)",this._menu).filter(l);i.length&&v(i,e,t===Ye,!i.includes(e)).focus()}static jQueryInterface(t){return this.each((function(){const e=hi.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}static clearMenus(t){if(t&&(2===t.button||"keyup"===t.type&&"Tab"!==t.key))return;const e=V.find(ti);for(let i=0,n=e.length;ie+t)),this._setElementAttributes(di,"paddingRight",(e=>e+t)),this._setElementAttributes(ui,"marginRight",(e=>e-t))}_disableOverFlow(){this._saveInitialAttribute(this._element,"overflow"),this._element.style.overflow="hidden"}_setElementAttributes(t,e,i){const n=this.getWidth();this._applyManipulationCallback(t,(t=>{if(t!==this._element&&window.innerWidth>t.clientWidth+n)return;this._saveInitialAttribute(t,e);const s=window.getComputedStyle(t)[e];t.style[e]=`${i(Number.parseFloat(s))}px`}))}reset(){this._resetElementAttributes(this._element,"overflow"),this._resetElementAttributes(this._element,"paddingRight"),this._resetElementAttributes(di,"paddingRight"),this._resetElementAttributes(ui,"marginRight")}_saveInitialAttribute(t,e){const i=t.style[e];i&&U.setDataAttribute(t,e,i)}_resetElementAttributes(t,e){this._applyManipulationCallback(t,(t=>{const i=U.getDataAttribute(t,e);void 0===i?t.style.removeProperty(e):(U.removeDataAttribute(t,e),t.style[e]=i)}))}_applyManipulationCallback(t,e){o(t)?e(t):V.find(t,this._element).forEach(e)}isOverflowing(){return this.getWidth()>0}}const pi={className:"modal-backdrop",isVisible:!0,isAnimated:!1,rootElement:"body",clickCallback:null},mi={className:"string",isVisible:"boolean",isAnimated:"boolean",rootElement:"(element|string)",clickCallback:"(function|null)"},gi="show",_i="mousedown.bs.backdrop";class bi{constructor(t){this._config=this._getConfig(t),this._isAppended=!1,this._element=null}show(t){this._config.isVisible?(this._append(),this._config.isAnimated&&u(this._getElement()),this._getElement().classList.add(gi),this._emulateAnimation((()=>{_(t)}))):_(t)}hide(t){this._config.isVisible?(this._getElement().classList.remove(gi),this._emulateAnimation((()=>{this.dispose(),_(t)}))):_(t)}_getElement(){if(!this._element){const t=document.createElement("div");t.className=this._config.className,this._config.isAnimated&&t.classList.add("fade"),this._element=t}return this._element}_getConfig(t){return(t={...pi,..."object"==typeof 
t?t:{}}).rootElement=r(t.rootElement),a("backdrop",t,mi),t}_append(){this._isAppended||(this._config.rootElement.append(this._getElement()),j.on(this._getElement(),_i,(()=>{_(this._config.clickCallback)})),this._isAppended=!0)}dispose(){this._isAppended&&(j.off(this._element,_i),this._element.remove(),this._isAppended=!1)}_emulateAnimation(t){b(t,this._getElement(),this._config.isAnimated)}}const vi={trapElement:null,autofocus:!0},yi={trapElement:"element",autofocus:"boolean"},wi=".bs.focustrap",Ei="backward";class Ai{constructor(t){this._config=this._getConfig(t),this._isActive=!1,this._lastTabNavDirection=null}activate(){const{trapElement:t,autofocus:e}=this._config;this._isActive||(e&&t.focus(),j.off(document,wi),j.on(document,"focusin.bs.focustrap",(t=>this._handleFocusin(t))),j.on(document,"keydown.tab.bs.focustrap",(t=>this._handleKeydown(t))),this._isActive=!0)}deactivate(){this._isActive&&(this._isActive=!1,j.off(document,wi))}_handleFocusin(t){const{target:e}=t,{trapElement:i}=this._config;if(e===document||e===i||i.contains(e))return;const n=V.focusableChildren(i);0===n.length?i.focus():this._lastTabNavDirection===Ei?n[n.length-1].focus():n[0].focus()}_handleKeydown(t){"Tab"===t.key&&(this._lastTabNavDirection=t.shiftKey?Ei:"forward")}_getConfig(t){return t={...vi,..."object"==typeof t?t:{}},a("focustrap",t,yi),t}}const Ti="modal",Oi="Escape",Ci={backdrop:!0,keyboard:!0,focus:!0},ki={backdrop:"(boolean|string)",keyboard:"boolean",focus:"boolean"},Li="hidden.bs.modal",xi="show.bs.modal",Di="resize.bs.modal",Si="click.dismiss.bs.modal",Ni="keydown.dismiss.bs.modal",Ii="mousedown.dismiss.bs.modal",Pi="modal-open",ji="show",Mi="modal-static";class Hi extends B{constructor(t,e){super(t),this._config=this._getConfig(e),this._dialog=V.findOne(".modal-dialog",this._element),this._backdrop=this._initializeBackDrop(),this._focustrap=this._initializeFocusTrap(),this._isShown=!1,this._ignoreBackdropClick=!1,this._isTransitioning=!1,this._scrollBar=new fi}static get Default(){return Ci}static get NAME(){return Ti}toggle(t){return this._isShown?this.hide():this.show(t)}show(t){this._isShown||this._isTransitioning||j.trigger(this._element,xi,{relatedTarget:t}).defaultPrevented||(this._isShown=!0,this._isAnimated()&&(this._isTransitioning=!0),this._scrollBar.hide(),document.body.classList.add(Pi),this._adjustDialog(),this._setEscapeEvent(),this._setResizeEvent(),j.on(this._dialog,Ii,(()=>{j.one(this._element,"mouseup.dismiss.bs.modal",(t=>{t.target===this._element&&(this._ignoreBackdropClick=!0)}))})),this._showBackdrop((()=>this._showElement(t))))}hide(){if(!this._isShown||this._isTransitioning)return;if(j.trigger(this._element,"hide.bs.modal").defaultPrevented)return;this._isShown=!1;const t=this._isAnimated();t&&(this._isTransitioning=!0),this._setEscapeEvent(),this._setResizeEvent(),this._focustrap.deactivate(),this._element.classList.remove(ji),j.off(this._element,Si),j.off(this._dialog,Ii),this._queueCallback((()=>this._hideModal()),this._element,t)}dispose(){[window,this._dialog].forEach((t=>j.off(t,".bs.modal"))),this._backdrop.dispose(),this._focustrap.deactivate(),super.dispose()}handleUpdate(){this._adjustDialog()}_initializeBackDrop(){return new bi({isVisible:Boolean(this._config.backdrop),isAnimated:this._isAnimated()})}_initializeFocusTrap(){return new Ai({trapElement:this._element})}_getConfig(t){return t={...Ci,...U.getDataAttributes(this._element),..."object"==typeof t?t:{}},a(Ti,t,ki),t}_showElement(t){const 
e=this._isAnimated(),i=V.findOne(".modal-body",this._dialog);this._element.parentNode&&this._element.parentNode.nodeType===Node.ELEMENT_NODE||document.body.append(this._element),this._element.style.display="block",this._element.removeAttribute("aria-hidden"),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.scrollTop=0,i&&(i.scrollTop=0),e&&u(this._element),this._element.classList.add(ji),this._queueCallback((()=>{this._config.focus&&this._focustrap.activate(),this._isTransitioning=!1,j.trigger(this._element,"shown.bs.modal",{relatedTarget:t})}),this._dialog,e)}_setEscapeEvent(){this._isShown?j.on(this._element,Ni,(t=>{this._config.keyboard&&t.key===Oi?(t.preventDefault(),this.hide()):this._config.keyboard||t.key!==Oi||this._triggerBackdropTransition()})):j.off(this._element,Ni)}_setResizeEvent(){this._isShown?j.on(window,Di,(()=>this._adjustDialog())):j.off(window,Di)}_hideModal(){this._element.style.display="none",this._element.setAttribute("aria-hidden",!0),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._isTransitioning=!1,this._backdrop.hide((()=>{document.body.classList.remove(Pi),this._resetAdjustments(),this._scrollBar.reset(),j.trigger(this._element,Li)}))}_showBackdrop(t){j.on(this._element,Si,(t=>{this._ignoreBackdropClick?this._ignoreBackdropClick=!1:t.target===t.currentTarget&&(!0===this._config.backdrop?this.hide():"static"===this._config.backdrop&&this._triggerBackdropTransition())})),this._backdrop.show(t)}_isAnimated(){return this._element.classList.contains("fade")}_triggerBackdropTransition(){if(j.trigger(this._element,"hidePrevented.bs.modal").defaultPrevented)return;const{classList:t,scrollHeight:e,style:i}=this._element,n=e>document.documentElement.clientHeight;!n&&"hidden"===i.overflowY||t.contains(Mi)||(n||(i.overflowY="hidden"),t.add(Mi),this._queueCallback((()=>{t.remove(Mi),n||this._queueCallback((()=>{i.overflowY=""}),this._dialog)}),this._dialog),this._element.focus())}_adjustDialog(){const t=this._element.scrollHeight>document.documentElement.clientHeight,e=this._scrollBar.getWidth(),i=e>0;(!i&&t&&!m()||i&&!t&&m())&&(this._element.style.paddingLeft=`${e}px`),(i&&!t&&!m()||!i&&t&&m())&&(this._element.style.paddingRight=`${e}px`)}_resetAdjustments(){this._element.style.paddingLeft="",this._element.style.paddingRight=""}static jQueryInterface(t,e){return this.each((function(){const i=Hi.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===i[t])throw new TypeError(`No method named "${t}"`);i[t](e)}}))}}j.on(document,"click.bs.modal.data-api",'[data-bs-toggle="modal"]',(function(t){const e=n(this);["A","AREA"].includes(this.tagName)&&t.preventDefault(),j.one(e,xi,(t=>{t.defaultPrevented||j.one(e,Li,(()=>{l(this)&&this.focus()}))}));const i=V.findOne(".modal.show");i&&Hi.getInstance(i).hide(),Hi.getOrCreateInstance(e).toggle(this)})),R(Hi),g(Hi);const Bi="offcanvas",Ri={backdrop:!0,keyboard:!0,scroll:!1},Wi={backdrop:"boolean",keyboard:"boolean",scroll:"boolean"},$i="show",zi=".offcanvas.show",qi="hidden.bs.offcanvas";class Fi extends B{constructor(t,e){super(t),this._config=this._getConfig(e),this._isShown=!1,this._backdrop=this._initializeBackDrop(),this._focustrap=this._initializeFocusTrap(),this._addEventListeners()}static get NAME(){return Bi}static get Default(){return Ri}toggle(t){return 
this._isShown?this.hide():this.show(t)}show(t){this._isShown||j.trigger(this._element,"show.bs.offcanvas",{relatedTarget:t}).defaultPrevented||(this._isShown=!0,this._element.style.visibility="visible",this._backdrop.show(),this._config.scroll||(new fi).hide(),this._element.removeAttribute("aria-hidden"),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.classList.add($i),this._queueCallback((()=>{this._config.scroll||this._focustrap.activate(),j.trigger(this._element,"shown.bs.offcanvas",{relatedTarget:t})}),this._element,!0))}hide(){this._isShown&&(j.trigger(this._element,"hide.bs.offcanvas").defaultPrevented||(this._focustrap.deactivate(),this._element.blur(),this._isShown=!1,this._element.classList.remove($i),this._backdrop.hide(),this._queueCallback((()=>{this._element.setAttribute("aria-hidden",!0),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._element.style.visibility="hidden",this._config.scroll||(new fi).reset(),j.trigger(this._element,qi)}),this._element,!0)))}dispose(){this._backdrop.dispose(),this._focustrap.deactivate(),super.dispose()}_getConfig(t){return t={...Ri,...U.getDataAttributes(this._element),..."object"==typeof t?t:{}},a(Bi,t,Wi),t}_initializeBackDrop(){return new bi({className:"offcanvas-backdrop",isVisible:this._config.backdrop,isAnimated:!0,rootElement:this._element.parentNode,clickCallback:()=>this.hide()})}_initializeFocusTrap(){return new Ai({trapElement:this._element})}_addEventListeners(){j.on(this._element,"keydown.dismiss.bs.offcanvas",(t=>{this._config.keyboard&&"Escape"===t.key&&this.hide()}))}static jQueryInterface(t){return this.each((function(){const e=Fi.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}j.on(document,"click.bs.offcanvas.data-api",'[data-bs-toggle="offcanvas"]',(function(t){const e=n(this);if(["A","AREA"].includes(this.tagName)&&t.preventDefault(),c(this))return;j.one(e,qi,(()=>{l(this)&&this.focus()}));const i=V.findOne(zi);i&&i!==e&&Fi.getInstance(i).hide(),Fi.getOrCreateInstance(e).toggle(this)})),j.on(window,"load.bs.offcanvas.data-api",(()=>V.find(zi).forEach((t=>Fi.getOrCreateInstance(t).show())))),R(Fi),g(Fi);const Ui=new Set(["background","cite","href","itemtype","longdesc","poster","src","xlink:href"]),Vi=/^(?:(?:https?|mailto|ftp|tel|file|sms):|[^#&/:?]*(?:[#/?]|$))/i,Ki=/^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[\d+/a-z]+=*$/i,Xi=(t,e)=>{const i=t.nodeName.toLowerCase();if(e.includes(i))return!Ui.has(i)||Boolean(Vi.test(t.nodeValue)||Ki.test(t.nodeValue));const n=e.filter((t=>t instanceof RegExp));for(let t=0,e=n.length;t{Xi(t,r)||i.removeAttribute(t.nodeName)}))}return n.body.innerHTML}const Qi="tooltip",Gi=new Set(["sanitize","allowList","sanitizeFn"]),Zi={animation:"boolean",template:"string",title:"(string|element|function)",trigger:"string",delay:"(number|object)",html:"boolean",selector:"(string|boolean)",placement:"(string|function)",offset:"(array|string|function)",container:"(string|element|boolean)",fallbackPlacements:"array",boundary:"(string|element)",customClass:"(string|function)",sanitize:"boolean",sanitizeFn:"(null|function)",allowList:"object",popperConfig:"(null|object|function)"},Ji={AUTO:"auto",TOP:"top",RIGHT:m()?"left":"right",BOTTOM:"bottom",LEFT:m()?"right":"left"},tn={animation:!0,template:'',trigger:"hover 
focus",title:"",delay:0,html:!1,selector:!1,placement:"top",offset:[0,0],container:!1,fallbackPlacements:["top","right","bottom","left"],boundary:"clippingParents",customClass:"",sanitize:!0,sanitizeFn:null,allowList:{"*":["class","dir","id","lang","role",/^aria-[\w-]*$/i],a:["target","href","title","rel"],area:[],b:[],br:[],col:[],code:[],div:[],em:[],hr:[],h1:[],h2:[],h3:[],h4:[],h5:[],h6:[],i:[],img:["src","srcset","alt","title","width","height"],li:[],ol:[],p:[],pre:[],s:[],small:[],span:[],sub:[],sup:[],strong:[],u:[],ul:[]},popperConfig:null},en={HIDE:"hide.bs.tooltip",HIDDEN:"hidden.bs.tooltip",SHOW:"show.bs.tooltip",SHOWN:"shown.bs.tooltip",INSERTED:"inserted.bs.tooltip",CLICK:"click.bs.tooltip",FOCUSIN:"focusin.bs.tooltip",FOCUSOUT:"focusout.bs.tooltip",MOUSEENTER:"mouseenter.bs.tooltip",MOUSELEAVE:"mouseleave.bs.tooltip"},nn="fade",sn="show",on="show",rn="out",an=".tooltip-inner",ln=".modal",cn="hide.bs.modal",hn="hover",dn="focus";class un extends B{constructor(t,e){if(void 0===Fe)throw new TypeError("Bootstrap's tooltips require Popper (https://popper.js.org)");super(t),this._isEnabled=!0,this._timeout=0,this._hoverState="",this._activeTrigger={},this._popper=null,this._config=this._getConfig(e),this.tip=null,this._setListeners()}static get Default(){return tn}static get NAME(){return Qi}static get Event(){return en}static get DefaultType(){return Zi}enable(){this._isEnabled=!0}disable(){this._isEnabled=!1}toggleEnabled(){this._isEnabled=!this._isEnabled}toggle(t){if(this._isEnabled)if(t){const e=this._initializeOnDelegatedTarget(t);e._activeTrigger.click=!e._activeTrigger.click,e._isWithActiveTrigger()?e._enter(null,e):e._leave(null,e)}else{if(this.getTipElement().classList.contains(sn))return void this._leave(null,this);this._enter(null,this)}}dispose(){clearTimeout(this._timeout),j.off(this._element.closest(ln),cn,this._hideModalHandler),this.tip&&this.tip.remove(),this._disposePopper(),super.dispose()}show(){if("none"===this._element.style.display)throw new Error("Please use show on visible elements");if(!this.isWithContent()||!this._isEnabled)return;const t=j.trigger(this._element,this.constructor.Event.SHOW),e=h(this._element),i=null===e?this._element.ownerDocument.documentElement.contains(this._element):e.contains(this._element);if(t.defaultPrevented||!i)return;"tooltip"===this.constructor.NAME&&this.tip&&this.getTitle()!==this.tip.querySelector(an).innerHTML&&(this._disposePopper(),this.tip.remove(),this.tip=null);const n=this.getTipElement(),s=(t=>{do{t+=Math.floor(1e6*Math.random())}while(document.getElementById(t));return t})(this.constructor.NAME);n.setAttribute("id",s),this._element.setAttribute("aria-describedby",s),this._config.animation&&n.classList.add(nn);const o="function"==typeof this._config.placement?this._config.placement.call(this,n,this._element):this._config.placement,r=this._getAttachment(o);this._addAttachmentClass(r);const{container:a}=this._config;H.set(n,this.constructor.DATA_KEY,this),this._element.ownerDocument.documentElement.contains(this.tip)||(a.append(n),j.trigger(this._element,this.constructor.Event.INSERTED)),this._popper?this._popper.update():this._popper=qe(this._element,n,this._getPopperConfig(r)),n.classList.add(sn);const l=this._resolvePossibleFunction(this._config.customClass);l&&n.classList.add(...l.split(" ")),"ontouchstart"in document.documentElement&&[].concat(...document.body.children).forEach((t=>{j.on(t,"mouseover",d)}));const c=this.tip.classList.contains(nn);this._queueCallback((()=>{const 
t=this._hoverState;this._hoverState=null,j.trigger(this._element,this.constructor.Event.SHOWN),t===rn&&this._leave(null,this)}),this.tip,c)}hide(){if(!this._popper)return;const t=this.getTipElement();if(j.trigger(this._element,this.constructor.Event.HIDE).defaultPrevented)return;t.classList.remove(sn),"ontouchstart"in document.documentElement&&[].concat(...document.body.children).forEach((t=>j.off(t,"mouseover",d))),this._activeTrigger.click=!1,this._activeTrigger.focus=!1,this._activeTrigger.hover=!1;const e=this.tip.classList.contains(nn);this._queueCallback((()=>{this._isWithActiveTrigger()||(this._hoverState!==on&&t.remove(),this._cleanTipClass(),this._element.removeAttribute("aria-describedby"),j.trigger(this._element,this.constructor.Event.HIDDEN),this._disposePopper())}),this.tip,e),this._hoverState=""}update(){null!==this._popper&&this._popper.update()}isWithContent(){return Boolean(this.getTitle())}getTipElement(){if(this.tip)return this.tip;const t=document.createElement("div");t.innerHTML=this._config.template;const e=t.children[0];return this.setContent(e),e.classList.remove(nn,sn),this.tip=e,this.tip}setContent(t){this._sanitizeAndSetContent(t,this.getTitle(),an)}_sanitizeAndSetContent(t,e,i){const n=V.findOne(i,t);e||!n?this.setElementContent(n,e):n.remove()}setElementContent(t,e){if(null!==t)return o(e)?(e=r(e),void(this._config.html?e.parentNode!==t&&(t.innerHTML="",t.append(e)):t.textContent=e.textContent)):void(this._config.html?(this._config.sanitize&&(e=Yi(e,this._config.allowList,this._config.sanitizeFn)),t.innerHTML=e):t.textContent=e)}getTitle(){const t=this._element.getAttribute("data-bs-original-title")||this._config.title;return this._resolvePossibleFunction(t)}updateAttachment(t){return"right"===t?"end":"left"===t?"start":t}_initializeOnDelegatedTarget(t,e){return e||this.constructor.getOrCreateInstance(t.delegateTarget,this._getDelegateConfig())}_getOffset(){const{offset:t}=this._config;return"string"==typeof t?t.split(",").map((t=>Number.parseInt(t,10))):"function"==typeof t?e=>t(e,this._element):t}_resolvePossibleFunction(t){return"function"==typeof t?t.call(this._element):t}_getPopperConfig(t){const e={placement:t,modifiers:[{name:"flip",options:{fallbackPlacements:this._config.fallbackPlacements}},{name:"offset",options:{offset:this._getOffset()}},{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"arrow",options:{element:`.${this.constructor.NAME}-arrow`}},{name:"onChange",enabled:!0,phase:"afterWrite",fn:t=>this._handlePopperPlacementChange(t)}],onFirstUpdate:t=>{t.options.placement!==t.placement&&this._handlePopperPlacementChange(t)}};return{...e,..."function"==typeof this._config.popperConfig?this._config.popperConfig(e):this._config.popperConfig}}_addAttachmentClass(t){this.getTipElement().classList.add(`${this._getBasicClassPrefix()}-${this.updateAttachment(t)}`)}_getAttachment(t){return Ji[t.toUpperCase()]}_setListeners(){this._config.trigger.split(" ").forEach((t=>{if("click"===t)j.on(this._element,this.constructor.Event.CLICK,this._config.selector,(t=>this.toggle(t)));else if("manual"!==t){const 
e=t===hn?this.constructor.Event.MOUSEENTER:this.constructor.Event.FOCUSIN,i=t===hn?this.constructor.Event.MOUSELEAVE:this.constructor.Event.FOCUSOUT;j.on(this._element,e,this._config.selector,(t=>this._enter(t))),j.on(this._element,i,this._config.selector,(t=>this._leave(t)))}})),this._hideModalHandler=()=>{this._element&&this.hide()},j.on(this._element.closest(ln),cn,this._hideModalHandler),this._config.selector?this._config={...this._config,trigger:"manual",selector:""}:this._fixTitle()}_fixTitle(){const t=this._element.getAttribute("title"),e=typeof this._element.getAttribute("data-bs-original-title");(t||"string"!==e)&&(this._element.setAttribute("data-bs-original-title",t||""),!t||this._element.getAttribute("aria-label")||this._element.textContent||this._element.setAttribute("aria-label",t),this._element.setAttribute("title",""))}_enter(t,e){e=this._initializeOnDelegatedTarget(t,e),t&&(e._activeTrigger["focusin"===t.type?dn:hn]=!0),e.getTipElement().classList.contains(sn)||e._hoverState===on?e._hoverState=on:(clearTimeout(e._timeout),e._hoverState=on,e._config.delay&&e._config.delay.show?e._timeout=setTimeout((()=>{e._hoverState===on&&e.show()}),e._config.delay.show):e.show())}_leave(t,e){e=this._initializeOnDelegatedTarget(t,e),t&&(e._activeTrigger["focusout"===t.type?dn:hn]=e._element.contains(t.relatedTarget)),e._isWithActiveTrigger()||(clearTimeout(e._timeout),e._hoverState=rn,e._config.delay&&e._config.delay.hide?e._timeout=setTimeout((()=>{e._hoverState===rn&&e.hide()}),e._config.delay.hide):e.hide())}_isWithActiveTrigger(){for(const t in this._activeTrigger)if(this._activeTrigger[t])return!0;return!1}_getConfig(t){const e=U.getDataAttributes(this._element);return Object.keys(e).forEach((t=>{Gi.has(t)&&delete e[t]})),(t={...this.constructor.Default,...e,..."object"==typeof t&&t?t:{}}).container=!1===t.container?document.body:r(t.container),"number"==typeof t.delay&&(t.delay={show:t.delay,hide:t.delay}),"number"==typeof t.title&&(t.title=t.title.toString()),"number"==typeof t.content&&(t.content=t.content.toString()),a(Qi,t,this.constructor.DefaultType),t.sanitize&&(t.template=Yi(t.template,t.allowList,t.sanitizeFn)),t}_getDelegateConfig(){const t={};for(const e in this._config)this.constructor.Default[e]!==this._config[e]&&(t[e]=this._config[e]);return t}_cleanTipClass(){const t=this.getTipElement(),e=new RegExp(`(^|\\s)${this._getBasicClassPrefix()}\\S+`,"g"),i=t.getAttribute("class").match(e);null!==i&&i.length>0&&i.map((t=>t.trim())).forEach((e=>t.classList.remove(e)))}_getBasicClassPrefix(){return"bs-tooltip"}_handlePopperPlacementChange(t){const{state:e}=t;e&&(this.tip=e.elements.popper,this._cleanTipClass(),this._addAttachmentClass(this._getAttachment(e.placement)))}_disposePopper(){this._popper&&(this._popper.destroy(),this._popper=null)}static jQueryInterface(t){return this.each((function(){const e=un.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}}g(un);const fn={...un.Default,placement:"right",offset:[0,8],trigger:"click",content:"",template:''},pn={...un.DefaultType,content:"(string|element|function)"},mn={HIDE:"hide.bs.popover",HIDDEN:"hidden.bs.popover",SHOW:"show.bs.popover",SHOWN:"shown.bs.popover",INSERTED:"inserted.bs.popover",CLICK:"click.bs.popover",FOCUSIN:"focusin.bs.popover",FOCUSOUT:"focusout.bs.popover",MOUSEENTER:"mouseenter.bs.popover",MOUSELEAVE:"mouseleave.bs.popover"};class gn extends un{static get Default(){return fn}static get NAME(){return"popover"}static get 
Event(){return mn}static get DefaultType(){return pn}isWithContent(){return this.getTitle()||this._getContent()}setContent(t){this._sanitizeAndSetContent(t,this.getTitle(),".popover-header"),this._sanitizeAndSetContent(t,this._getContent(),".popover-body")}_getContent(){return this._resolvePossibleFunction(this._config.content)}_getBasicClassPrefix(){return"bs-popover"}static jQueryInterface(t){return this.each((function(){const e=gn.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}}g(gn);const _n="scrollspy",bn={offset:10,method:"auto",target:""},vn={offset:"number",method:"string",target:"(string|element)"},yn="active",wn=".nav-link, .list-group-item, .dropdown-item",En="position";class An extends B{constructor(t,e){super(t),this._scrollElement="BODY"===this._element.tagName?window:this._element,this._config=this._getConfig(e),this._offsets=[],this._targets=[],this._activeTarget=null,this._scrollHeight=0,j.on(this._scrollElement,"scroll.bs.scrollspy",(()=>this._process())),this.refresh(),this._process()}static get Default(){return bn}static get NAME(){return _n}refresh(){const t=this._scrollElement===this._scrollElement.window?"offset":En,e="auto"===this._config.method?t:this._config.method,n=e===En?this._getScrollTop():0;this._offsets=[],this._targets=[],this._scrollHeight=this._getScrollHeight(),V.find(wn,this._config.target).map((t=>{const s=i(t),o=s?V.findOne(s):null;if(o){const t=o.getBoundingClientRect();if(t.width||t.height)return[U[e](o).top+n,s]}return null})).filter((t=>t)).sort(((t,e)=>t[0]-e[0])).forEach((t=>{this._offsets.push(t[0]),this._targets.push(t[1])}))}dispose(){j.off(this._scrollElement,".bs.scrollspy"),super.dispose()}_getConfig(t){return(t={...bn,...U.getDataAttributes(this._element),..."object"==typeof t&&t?t:{}}).target=r(t.target)||document.documentElement,a(_n,t,vn),t}_getScrollTop(){return this._scrollElement===window?this._scrollElement.pageYOffset:this._scrollElement.scrollTop}_getScrollHeight(){return this._scrollElement.scrollHeight||Math.max(document.body.scrollHeight,document.documentElement.scrollHeight)}_getOffsetHeight(){return this._scrollElement===window?window.innerHeight:this._scrollElement.getBoundingClientRect().height}_process(){const t=this._getScrollTop()+this._config.offset,e=this._getScrollHeight(),i=this._config.offset+e-this._getOffsetHeight();if(this._scrollHeight!==e&&this.refresh(),t>=i){const t=this._targets[this._targets.length-1];this._activeTarget!==t&&this._activate(t)}else{if(this._activeTarget&&t0)return this._activeTarget=null,void this._clear();for(let e=this._offsets.length;e--;)this._activeTarget!==this._targets[e]&&t>=this._offsets[e]&&(void 0===this._offsets[e+1]||t`${e}[data-bs-target="${t}"],${e}[href="${t}"]`)),i=V.findOne(e.join(","),this._config.target);i.classList.add(yn),i.classList.contains("dropdown-item")?V.findOne(".dropdown-toggle",i.closest(".dropdown")).classList.add(yn):V.parents(i,".nav, .list-group").forEach((t=>{V.prev(t,".nav-link, .list-group-item").forEach((t=>t.classList.add(yn))),V.prev(t,".nav-item").forEach((t=>{V.children(t,".nav-link").forEach((t=>t.classList.add(yn)))}))})),j.trigger(this._scrollElement,"activate.bs.scrollspy",{relatedTarget:t})}_clear(){V.find(wn,this._config.target).filter((t=>t.classList.contains(yn))).forEach((t=>t.classList.remove(yn)))}static jQueryInterface(t){return this.each((function(){const e=An.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method 
named "${t}"`);e[t]()}}))}}j.on(window,"load.bs.scrollspy.data-api",(()=>{V.find('[data-bs-spy="scroll"]').forEach((t=>new An(t)))})),g(An);const Tn="active",On="fade",Cn="show",kn=".active",Ln=":scope > li > .active";class xn extends B{static get NAME(){return"tab"}show(){if(this._element.parentNode&&this._element.parentNode.nodeType===Node.ELEMENT_NODE&&this._element.classList.contains(Tn))return;let t;const e=n(this._element),i=this._element.closest(".nav, .list-group");if(i){const e="UL"===i.nodeName||"OL"===i.nodeName?Ln:kn;t=V.find(e,i),t=t[t.length-1]}const s=t?j.trigger(t,"hide.bs.tab",{relatedTarget:this._element}):null;if(j.trigger(this._element,"show.bs.tab",{relatedTarget:t}).defaultPrevented||null!==s&&s.defaultPrevented)return;this._activate(this._element,i);const o=()=>{j.trigger(t,"hidden.bs.tab",{relatedTarget:this._element}),j.trigger(this._element,"shown.bs.tab",{relatedTarget:t})};e?this._activate(e,e.parentNode,o):o()}_activate(t,e,i){const n=(!e||"UL"!==e.nodeName&&"OL"!==e.nodeName?V.children(e,kn):V.find(Ln,e))[0],s=i&&n&&n.classList.contains(On),o=()=>this._transitionComplete(t,n,i);n&&s?(n.classList.remove(Cn),this._queueCallback(o,t,!0)):o()}_transitionComplete(t,e,i){if(e){e.classList.remove(Tn);const t=V.findOne(":scope > .dropdown-menu .active",e.parentNode);t&&t.classList.remove(Tn),"tab"===e.getAttribute("role")&&e.setAttribute("aria-selected",!1)}t.classList.add(Tn),"tab"===t.getAttribute("role")&&t.setAttribute("aria-selected",!0),u(t),t.classList.contains(On)&&t.classList.add(Cn);let n=t.parentNode;if(n&&"LI"===n.nodeName&&(n=n.parentNode),n&&n.classList.contains("dropdown-menu")){const e=t.closest(".dropdown");e&&V.find(".dropdown-toggle",e).forEach((t=>t.classList.add(Tn))),t.setAttribute("aria-expanded",!0)}i&&i()}static jQueryInterface(t){return this.each((function(){const e=xn.getOrCreateInstance(this);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}}j.on(document,"click.bs.tab.data-api",'[data-bs-toggle="tab"], [data-bs-toggle="pill"], [data-bs-toggle="list"]',(function(t){["A","AREA"].includes(this.tagName)&&t.preventDefault(),c(this)||xn.getOrCreateInstance(this).show()})),g(xn);const Dn="toast",Sn="hide",Nn="show",In="showing",Pn={animation:"boolean",autohide:"boolean",delay:"number"},jn={animation:!0,autohide:!0,delay:5e3};class Mn extends B{constructor(t,e){super(t),this._config=this._getConfig(e),this._timeout=null,this._hasMouseInteraction=!1,this._hasKeyboardInteraction=!1,this._setListeners()}static get DefaultType(){return Pn}static get Default(){return jn}static get NAME(){return Dn}show(){j.trigger(this._element,"show.bs.toast").defaultPrevented||(this._clearTimeout(),this._config.animation&&this._element.classList.add("fade"),this._element.classList.remove(Sn),u(this._element),this._element.classList.add(Nn),this._element.classList.add(In),this._queueCallback((()=>{this._element.classList.remove(In),j.trigger(this._element,"shown.bs.toast"),this._maybeScheduleHide()}),this._element,this._config.animation))}hide(){this._element.classList.contains(Nn)&&(j.trigger(this._element,"hide.bs.toast").defaultPrevented||(this._element.classList.add(In),this._queueCallback((()=>{this._element.classList.add(Sn),this._element.classList.remove(In),this._element.classList.remove(Nn),j.trigger(this._element,"hidden.bs.toast")}),this._element,this._config.animation)))}dispose(){this._clearTimeout(),this._element.classList.contains(Nn)&&this._element.classList.remove(Nn),super.dispose()}_getConfig(t){return 
t={...jn,...U.getDataAttributes(this._element),..."object"==typeof t&&t?t:{}},a(Dn,t,this.constructor.DefaultType),t}_maybeScheduleHide(){this._config.autohide&&(this._hasMouseInteraction||this._hasKeyboardInteraction||(this._timeout=setTimeout((()=>{this.hide()}),this._config.delay)))}_onInteraction(t,e){switch(t.type){case"mouseover":case"mouseout":this._hasMouseInteraction=e;break;case"focusin":case"focusout":this._hasKeyboardInteraction=e}if(e)return void this._clearTimeout();const i=t.relatedTarget;this._element===i||this._element.contains(i)||this._maybeScheduleHide()}_setListeners(){j.on(this._element,"mouseover.bs.toast",(t=>this._onInteraction(t,!0))),j.on(this._element,"mouseout.bs.toast",(t=>this._onInteraction(t,!1))),j.on(this._element,"focusin.bs.toast",(t=>this._onInteraction(t,!0))),j.on(this._element,"focusout.bs.toast",(t=>this._onInteraction(t,!1)))}_clearTimeout(){clearTimeout(this._timeout),this._timeout=null}static jQueryInterface(t){return this.each((function(){const e=Mn.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}return R(Mn),g(Mn),{Alert:W,Button:z,Carousel:st,Collapse:pt,Dropdown:hi,Modal:Hi,Offcanvas:Fi,Popover:gn,ScrollSpy:An,Tab:xn,Toast:Mn,Tooltip:un}})); +//# sourceMappingURL=bootstrap.bundle.min.js.map \ No newline at end of file diff --git a/resources/stac_mount_save_files/libs/clipboard/clipboard.min.js b/resources/stac_mount_save_files/libs/clipboard/clipboard.min.js new file mode 100644 index 0000000..41c6a0f --- /dev/null +++ b/resources/stac_mount_save_files/libs/clipboard/clipboard.min.js @@ -0,0 +1,7 @@ +/*! + * clipboard.js v2.0.10 + * https://clipboardjs.com/ + * + * Licensed MIT © Zeno Rocha + */ +!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.ClipboardJS=e():t.ClipboardJS=e()}(this,function(){return n={686:function(t,e,n){"use strict";n.d(e,{default:function(){return o}});var e=n(279),i=n.n(e),e=n(370),u=n.n(e),e=n(817),c=n.n(e);function a(t){try{return document.execCommand(t)}catch(t){return}}var f=function(t){t=c()(t);return a("cut"),t};var l=function(t){var 
e,n,o,r=1.anchorjs-link,.anchorjs-link:focus{opacity:1}",u.sheet.cssRules.length),u.sheet.insertRule("[data-anchorjs-icon]::after{content:attr(data-anchorjs-icon)}",u.sheet.cssRules.length),u.sheet.insertRule('@font-face{font-family:anchorjs-icons;src:url(data:n/a;base64,AAEAAAALAIAAAwAwT1MvMg8yG2cAAAE4AAAAYGNtYXDp3gC3AAABpAAAAExnYXNwAAAAEAAAA9wAAAAIZ2x5ZlQCcfwAAAH4AAABCGhlYWQHFvHyAAAAvAAAADZoaGVhBnACFwAAAPQAAAAkaG10eASAADEAAAGYAAAADGxvY2EACACEAAAB8AAAAAhtYXhwAAYAVwAAARgAAAAgbmFtZQGOH9cAAAMAAAAAunBvc3QAAwAAAAADvAAAACAAAQAAAAEAAHzE2p9fDzz1AAkEAAAAAADRecUWAAAAANQA6R8AAAAAAoACwAAAAAgAAgAAAAAAAAABAAADwP/AAAACgAAA/9MCrQABAAAAAAAAAAAAAAAAAAAAAwABAAAAAwBVAAIAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAMCQAGQAAUAAAKZAswAAACPApkCzAAAAesAMwEJAAAAAAAAAAAAAAAAAAAAARAAAAAAAAAAAAAAAAAAAAAAQAAg//0DwP/AAEADwABAAAAAAQAAAAAAAAAAAAAAIAAAAAAAAAIAAAACgAAxAAAAAwAAAAMAAAAcAAEAAwAAABwAAwABAAAAHAAEADAAAAAIAAgAAgAAACDpy//9//8AAAAg6cv//f///+EWNwADAAEAAAAAAAAAAAAAAAAACACEAAEAAAAAAAAAAAAAAAAxAAACAAQARAKAAsAAKwBUAAABIiYnJjQ3NzY2MzIWFxYUBwcGIicmNDc3NjQnJiYjIgYHBwYUFxYUBwYGIwciJicmNDc3NjIXFhQHBwYUFxYWMzI2Nzc2NCcmNDc2MhcWFAcHBgYjARQGDAUtLXoWOR8fORYtLTgKGwoKCjgaGg0gEhIgDXoaGgkJBQwHdR85Fi0tOAobCgoKOBoaDSASEiANehoaCQkKGwotLXoWOR8BMwUFLYEuehYXFxYugC44CQkKGwo4GkoaDQ0NDXoaShoKGwoFBe8XFi6ALjgJCQobCjgaShoNDQ0NehpKGgobCgoKLYEuehYXAAAADACWAAEAAAAAAAEACAAAAAEAAAAAAAIAAwAIAAEAAAAAAAMACAAAAAEAAAAAAAQACAAAAAEAAAAAAAUAAQALAAEAAAAAAAYACAAAAAMAAQQJAAEAEAAMAAMAAQQJAAIABgAcAAMAAQQJAAMAEAAMAAMAAQQJAAQAEAAMAAMAAQQJAAUAAgAiAAMAAQQJAAYAEAAMYW5jaG9yanM0MDBAAGEAbgBjAGgAbwByAGoAcwA0ADAAMABAAAAAAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAH//wAP) format("truetype")}',u.sheet.cssRules.length)),u=document.querySelectorAll("[id]"),t=[].map.call(u,function(A){return A.id}),i=0;i\]./()*\\\n\t\b\v\u00A0]/g,"-").replace(/-{2,}/g,"-").substring(0,this.options.truncate).replace(/^-+|-+$/gm,"").toLowerCase()},this.hasAnchorJSLink=function(A){var e=A.firstChild&&-1<(" "+A.firstChild.className+" ").indexOf(" anchorjs-link "),A=A.lastChild&&-1<(" "+A.lastChild.className+" ").indexOf(" anchorjs-link ");return e||A||!1}}}); +// @license-end \ No newline at end of file diff --git a/resources/stac_mount_save_files/libs/quarto-html/popper.min.js b/resources/stac_mount_save_files/libs/quarto-html/popper.min.js new file mode 100644 index 0000000..2269d66 --- /dev/null +++ b/resources/stac_mount_save_files/libs/quarto-html/popper.min.js @@ -0,0 +1,6 @@ +/** + * @popperjs/core v2.11.4 - MIT License + */ + +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self).Popper={})}(this,(function(e){"use strict";function t(e){if(null==e)return window;if("[object Window]"!==e.toString()){var t=e.ownerDocument;return t&&t.defaultView||window}return e}function n(e){return e instanceof t(e).Element||e instanceof Element}function r(e){return e instanceof t(e).HTMLElement||e instanceof HTMLElement}function o(e){return"undefined"!=typeof ShadowRoot&&(e instanceof t(e).ShadowRoot||e instanceof ShadowRoot)}var i=Math.max,a=Math.min,s=Math.round;function f(e,t){void 0===t&&(t=!1);var n=e.getBoundingClientRect(),o=1,i=1;if(r(e)&&t){var a=e.offsetHeight,f=e.offsetWidth;f>0&&(o=s(n.width)/f||1),a>0&&(i=s(n.height)/a||1)}return{width:n.width/o,height:n.height/i,top:n.top/i,right:n.right/o,bottom:n.bottom/i,left:n.left/o,x:n.left/o,y:n.top/i}}function c(e){var n=t(e);return{scrollLeft:n.pageXOffset,scrollTop:n.pageYOffset}}function p(e){return 
e?(e.nodeName||"").toLowerCase():null}function u(e){return((n(e)?e.ownerDocument:e.document)||window.document).documentElement}function l(e){return f(u(e)).left+c(e).scrollLeft}function d(e){return t(e).getComputedStyle(e)}function h(e){var t=d(e),n=t.overflow,r=t.overflowX,o=t.overflowY;return/auto|scroll|overlay|hidden/.test(n+o+r)}function m(e,n,o){void 0===o&&(o=!1);var i,a,d=r(n),m=r(n)&&function(e){var t=e.getBoundingClientRect(),n=s(t.width)/e.offsetWidth||1,r=s(t.height)/e.offsetHeight||1;return 1!==n||1!==r}(n),v=u(n),g=f(e,m),y={scrollLeft:0,scrollTop:0},b={x:0,y:0};return(d||!d&&!o)&&(("body"!==p(n)||h(v))&&(y=(i=n)!==t(i)&&r(i)?{scrollLeft:(a=i).scrollLeft,scrollTop:a.scrollTop}:c(i)),r(n)?((b=f(n,!0)).x+=n.clientLeft,b.y+=n.clientTop):v&&(b.x=l(v))),{x:g.left+y.scrollLeft-b.x,y:g.top+y.scrollTop-b.y,width:g.width,height:g.height}}function v(e){var t=f(e),n=e.offsetWidth,r=e.offsetHeight;return Math.abs(t.width-n)<=1&&(n=t.width),Math.abs(t.height-r)<=1&&(r=t.height),{x:e.offsetLeft,y:e.offsetTop,width:n,height:r}}function g(e){return"html"===p(e)?e:e.assignedSlot||e.parentNode||(o(e)?e.host:null)||u(e)}function y(e){return["html","body","#document"].indexOf(p(e))>=0?e.ownerDocument.body:r(e)&&h(e)?e:y(g(e))}function b(e,n){var r;void 0===n&&(n=[]);var o=y(e),i=o===(null==(r=e.ownerDocument)?void 0:r.body),a=t(o),s=i?[a].concat(a.visualViewport||[],h(o)?o:[]):o,f=n.concat(s);return i?f:f.concat(b(g(s)))}function x(e){return["table","td","th"].indexOf(p(e))>=0}function w(e){return r(e)&&"fixed"!==d(e).position?e.offsetParent:null}function O(e){for(var n=t(e),i=w(e);i&&x(i)&&"static"===d(i).position;)i=w(i);return i&&("html"===p(i)||"body"===p(i)&&"static"===d(i).position)?n:i||function(e){var t=-1!==navigator.userAgent.toLowerCase().indexOf("firefox");if(-1!==navigator.userAgent.indexOf("Trident")&&r(e)&&"fixed"===d(e).position)return null;var n=g(e);for(o(n)&&(n=n.host);r(n)&&["html","body"].indexOf(p(n))<0;){var i=d(n);if("none"!==i.transform||"none"!==i.perspective||"paint"===i.contain||-1!==["transform","perspective"].indexOf(i.willChange)||t&&"filter"===i.willChange||t&&i.filter&&"none"!==i.filter)return n;n=n.parentNode}return null}(e)||n}var j="top",E="bottom",D="right",A="left",L="auto",P=[j,E,D,A],M="start",k="end",W="viewport",B="popper",H=P.reduce((function(e,t){return e.concat([t+"-"+M,t+"-"+k])}),[]),T=[].concat(P,[L]).reduce((function(e,t){return e.concat([t,t+"-"+M,t+"-"+k])}),[]),R=["beforeRead","read","afterRead","beforeMain","main","afterMain","beforeWrite","write","afterWrite"];function S(e){var t=new Map,n=new Set,r=[];function o(e){n.add(e.name),[].concat(e.requires||[],e.requiresIfExists||[]).forEach((function(e){if(!n.has(e)){var r=t.get(e);r&&o(r)}})),r.push(e)}return e.forEach((function(e){t.set(e.name,e)})),e.forEach((function(e){n.has(e.name)||o(e)})),r}function C(e){return e.split("-")[0]}function q(e,t){var n=t.getRootNode&&t.getRootNode();if(e.contains(t))return!0;if(n&&o(n)){var r=t;do{if(r&&e.isSameNode(r))return!0;r=r.parentNode||r.host}while(r)}return!1}function V(e){return Object.assign({},e,{left:e.x,top:e.y,right:e.x+e.width,bottom:e.y+e.height})}function N(e,r){return r===W?V(function(e){var n=t(e),r=u(e),o=n.visualViewport,i=r.clientWidth,a=r.clientHeight,s=0,f=0;return o&&(i=o.width,a=o.height,/^((?!chrome|android).)*safari/i.test(navigator.userAgent)||(s=o.offsetLeft,f=o.offsetTop)),{width:i,height:a,x:s+l(e),y:f}}(e)):n(r)?function(e){var t=f(e);return 
t.top=t.top+e.clientTop,t.left=t.left+e.clientLeft,t.bottom=t.top+e.clientHeight,t.right=t.left+e.clientWidth,t.width=e.clientWidth,t.height=e.clientHeight,t.x=t.left,t.y=t.top,t}(r):V(function(e){var t,n=u(e),r=c(e),o=null==(t=e.ownerDocument)?void 0:t.body,a=i(n.scrollWidth,n.clientWidth,o?o.scrollWidth:0,o?o.clientWidth:0),s=i(n.scrollHeight,n.clientHeight,o?o.scrollHeight:0,o?o.clientHeight:0),f=-r.scrollLeft+l(e),p=-r.scrollTop;return"rtl"===d(o||n).direction&&(f+=i(n.clientWidth,o?o.clientWidth:0)-a),{width:a,height:s,x:f,y:p}}(u(e)))}function I(e,t,o){var s="clippingParents"===t?function(e){var t=b(g(e)),o=["absolute","fixed"].indexOf(d(e).position)>=0&&r(e)?O(e):e;return n(o)?t.filter((function(e){return n(e)&&q(e,o)&&"body"!==p(e)})):[]}(e):[].concat(t),f=[].concat(s,[o]),c=f[0],u=f.reduce((function(t,n){var r=N(e,n);return t.top=i(r.top,t.top),t.right=a(r.right,t.right),t.bottom=a(r.bottom,t.bottom),t.left=i(r.left,t.left),t}),N(e,c));return u.width=u.right-u.left,u.height=u.bottom-u.top,u.x=u.left,u.y=u.top,u}function _(e){return e.split("-")[1]}function F(e){return["top","bottom"].indexOf(e)>=0?"x":"y"}function U(e){var t,n=e.reference,r=e.element,o=e.placement,i=o?C(o):null,a=o?_(o):null,s=n.x+n.width/2-r.width/2,f=n.y+n.height/2-r.height/2;switch(i){case j:t={x:s,y:n.y-r.height};break;case E:t={x:s,y:n.y+n.height};break;case D:t={x:n.x+n.width,y:f};break;case A:t={x:n.x-r.width,y:f};break;default:t={x:n.x,y:n.y}}var c=i?F(i):null;if(null!=c){var p="y"===c?"height":"width";switch(a){case M:t[c]=t[c]-(n[p]/2-r[p]/2);break;case k:t[c]=t[c]+(n[p]/2-r[p]/2)}}return t}function z(e){return Object.assign({},{top:0,right:0,bottom:0,left:0},e)}function X(e,t){return t.reduce((function(t,n){return t[n]=e,t}),{})}function Y(e,t){void 0===t&&(t={});var r=t,o=r.placement,i=void 0===o?e.placement:o,a=r.boundary,s=void 0===a?"clippingParents":a,c=r.rootBoundary,p=void 0===c?W:c,l=r.elementContext,d=void 0===l?B:l,h=r.altBoundary,m=void 0!==h&&h,v=r.padding,g=void 0===v?0:v,y=z("number"!=typeof g?g:X(g,P)),b=d===B?"reference":B,x=e.rects.popper,w=e.elements[m?b:d],O=I(n(w)?w:w.contextElement||u(e.elements.popper),s,p),A=f(e.elements.reference),L=U({reference:A,element:x,strategy:"absolute",placement:i}),M=V(Object.assign({},x,L)),k=d===B?M:A,H={top:O.top-k.top+y.top,bottom:k.bottom-O.bottom+y.bottom,left:O.left-k.left+y.left,right:k.right-O.right+y.right},T=e.modifiersData.offset;if(d===B&&T){var R=T[i];Object.keys(H).forEach((function(e){var t=[D,E].indexOf(e)>=0?1:-1,n=[j,E].indexOf(e)>=0?"y":"x";H[e]+=R[n]*t}))}return H}var G={placement:"bottom",modifiers:[],strategy:"absolute"};function J(){for(var e=arguments.length,t=new Array(e),n=0;n=0?-1:1,i="function"==typeof n?n(Object.assign({},t,{placement:e})):n,a=i[0],s=i[1];return a=a||0,s=(s||0)*o,[A,D].indexOf(r)>=0?{x:s,y:a}:{x:a,y:s}}(n,t.rects,i),e}),{}),s=a[t.placement],f=s.x,c=s.y;null!=t.modifiersData.popperOffsets&&(t.modifiersData.popperOffsets.x+=f,t.modifiersData.popperOffsets.y+=c),t.modifiersData[r]=a}},ie={left:"right",right:"left",bottom:"top",top:"bottom"};function ae(e){return e.replace(/left|right|bottom|top/g,(function(e){return ie[e]}))}var se={start:"end",end:"start"};function fe(e){return e.replace(/start|end/g,(function(e){return se[e]}))}function ce(e,t){void 0===t&&(t={});var n=t,r=n.placement,o=n.boundary,i=n.rootBoundary,a=n.padding,s=n.flipVariations,f=n.allowedAutoPlacements,c=void 0===f?T:f,p=_(r),u=p?s?H:H.filter((function(e){return _(e)===p})):P,l=u.filter((function(e){return 
c.indexOf(e)>=0}));0===l.length&&(l=u);var d=l.reduce((function(t,n){return t[n]=Y(e,{placement:n,boundary:o,rootBoundary:i,padding:a})[C(n)],t}),{});return Object.keys(d).sort((function(e,t){return d[e]-d[t]}))}var pe={name:"flip",enabled:!0,phase:"main",fn:function(e){var t=e.state,n=e.options,r=e.name;if(!t.modifiersData[r]._skip){for(var o=n.mainAxis,i=void 0===o||o,a=n.altAxis,s=void 0===a||a,f=n.fallbackPlacements,c=n.padding,p=n.boundary,u=n.rootBoundary,l=n.altBoundary,d=n.flipVariations,h=void 0===d||d,m=n.allowedAutoPlacements,v=t.options.placement,g=C(v),y=f||(g===v||!h?[ae(v)]:function(e){if(C(e)===L)return[];var t=ae(e);return[fe(e),t,fe(t)]}(v)),b=[v].concat(y).reduce((function(e,n){return e.concat(C(n)===L?ce(t,{placement:n,boundary:p,rootBoundary:u,padding:c,flipVariations:h,allowedAutoPlacements:m}):n)}),[]),x=t.rects.reference,w=t.rects.popper,O=new Map,P=!0,k=b[0],W=0;W=0,S=R?"width":"height",q=Y(t,{placement:B,boundary:p,rootBoundary:u,altBoundary:l,padding:c}),V=R?T?D:A:T?E:j;x[S]>w[S]&&(V=ae(V));var N=ae(V),I=[];if(i&&I.push(q[H]<=0),s&&I.push(q[V]<=0,q[N]<=0),I.every((function(e){return e}))){k=B,P=!1;break}O.set(B,I)}if(P)for(var F=function(e){var t=b.find((function(t){var n=O.get(t);if(n)return n.slice(0,e).every((function(e){return e}))}));if(t)return k=t,"break"},U=h?3:1;U>0;U--){if("break"===F(U))break}t.placement!==k&&(t.modifiersData[r]._skip=!0,t.placement=k,t.reset=!0)}},requiresIfExists:["offset"],data:{_skip:!1}};function ue(e,t,n){return i(e,a(t,n))}var le={name:"preventOverflow",enabled:!0,phase:"main",fn:function(e){var t=e.state,n=e.options,r=e.name,o=n.mainAxis,s=void 0===o||o,f=n.altAxis,c=void 0!==f&&f,p=n.boundary,u=n.rootBoundary,l=n.altBoundary,d=n.padding,h=n.tether,m=void 0===h||h,g=n.tetherOffset,y=void 0===g?0:g,b=Y(t,{boundary:p,rootBoundary:u,padding:d,altBoundary:l}),x=C(t.placement),w=_(t.placement),L=!w,P=F(x),k="x"===P?"y":"x",W=t.modifiersData.popperOffsets,B=t.rects.reference,H=t.rects.popper,T="function"==typeof y?y(Object.assign({},t.rects,{placement:t.placement})):y,R="number"==typeof T?{mainAxis:T,altAxis:T}:Object.assign({mainAxis:0,altAxis:0},T),S=t.modifiersData.offset?t.modifiersData.offset[t.placement]:null,q={x:0,y:0};if(W){if(s){var V,N="y"===P?j:A,I="y"===P?E:D,U="y"===P?"height":"width",z=W[P],X=z+b[N],G=z-b[I],J=m?-H[U]/2:0,K=w===M?B[U]:H[U],Q=w===M?-H[U]:-B[U],Z=t.elements.arrow,$=m&&Z?v(Z):{width:0,height:0},ee=t.modifiersData["arrow#persistent"]?t.modifiersData["arrow#persistent"].padding:{top:0,right:0,bottom:0,left:0},te=ee[N],ne=ee[I],re=ue(0,B[U],$[U]),oe=L?B[U]/2-J-re-te-R.mainAxis:K-re-te-R.mainAxis,ie=L?-B[U]/2+J+re+ne+R.mainAxis:Q+re+ne+R.mainAxis,ae=t.elements.arrow&&O(t.elements.arrow),se=ae?"y"===P?ae.clientTop||0:ae.clientLeft||0:0,fe=null!=(V=null==S?void 0:S[P])?V:0,ce=z+ie-fe,pe=ue(m?a(X,z+oe-fe-se):X,z,m?i(G,ce):G);W[P]=pe,q[P]=pe-z}if(c){var le,de="x"===P?j:A,he="x"===P?E:D,me=W[k],ve="y"===k?"height":"width",ge=me+b[de],ye=me-b[he],be=-1!==[j,A].indexOf(x),xe=null!=(le=null==S?void 0:S[k])?le:0,we=be?ge:me-B[ve]-H[ve]-xe+R.altAxis,Oe=be?me+B[ve]+H[ve]-xe-R.altAxis:ye,je=m&&be?function(e,t,n){var r=ue(e,t,n);return r>n?n:r}(we,me,Oe):ue(m?we:ge,me,m?Oe:ye);W[k]=je,q[k]=je-me}t.modifiersData[r]=q}},requiresIfExists:["offset"]};var de={name:"arrow",enabled:!0,phase:"main",fn:function(e){var t,n=e.state,r=e.name,o=e.options,i=n.elements.arrow,a=n.modifiersData.popperOffsets,s=C(n.placement),f=F(s),c=[A,D].indexOf(s)>=0?"height":"width";if(i&&a){var p=function(e,t){return 
z("number"!=typeof(e="function"==typeof e?e(Object.assign({},t.rects,{placement:t.placement})):e)?e:X(e,P))}(o.padding,n),u=v(i),l="y"===f?j:A,d="y"===f?E:D,h=n.rects.reference[c]+n.rects.reference[f]-a[f]-n.rects.popper[c],m=a[f]-n.rects.reference[f],g=O(i),y=g?"y"===f?g.clientHeight||0:g.clientWidth||0:0,b=h/2-m/2,x=p[l],w=y-u[c]-p[d],L=y/2-u[c]/2+b,M=ue(x,L,w),k=f;n.modifiersData[r]=((t={})[k]=M,t.centerOffset=M-L,t)}},effect:function(e){var t=e.state,n=e.options.element,r=void 0===n?"[data-popper-arrow]":n;null!=r&&("string"!=typeof r||(r=t.elements.popper.querySelector(r)))&&q(t.elements.popper,r)&&(t.elements.arrow=r)},requires:["popperOffsets"],requiresIfExists:["preventOverflow"]};function he(e,t,n){return void 0===n&&(n={x:0,y:0}),{top:e.top-t.height-n.y,right:e.right-t.width+n.x,bottom:e.bottom-t.height+n.y,left:e.left-t.width-n.x}}function me(e){return[j,D,E,A].some((function(t){return e[t]>=0}))}var ve={name:"hide",enabled:!0,phase:"main",requiresIfExists:["preventOverflow"],fn:function(e){var t=e.state,n=e.name,r=t.rects.reference,o=t.rects.popper,i=t.modifiersData.preventOverflow,a=Y(t,{elementContext:"reference"}),s=Y(t,{altBoundary:!0}),f=he(a,r),c=he(s,o,i),p=me(f),u=me(c);t.modifiersData[n]={referenceClippingOffsets:f,popperEscapeOffsets:c,isReferenceHidden:p,hasPopperEscaped:u},t.attributes.popper=Object.assign({},t.attributes.popper,{"data-popper-reference-hidden":p,"data-popper-escaped":u})}},ge=K({defaultModifiers:[Z,$,ne,re]}),ye=[Z,$,ne,re,oe,pe,le,de,ve],be=K({defaultModifiers:ye});e.applyStyles=re,e.arrow=de,e.computeStyles=ne,e.createPopper=be,e.createPopperLite=ge,e.defaultModifiers=ye,e.detectOverflow=Y,e.eventListeners=Z,e.flip=pe,e.hide=ve,e.offset=oe,e.popperGenerator=K,e.popperOffsets=$,e.preventOverflow=le,Object.defineProperty(e,"__esModule",{value:!0})})); + diff --git a/resources/stac_mount_save_files/libs/quarto-html/quarto-syntax-highlighting.css b/resources/stac_mount_save_files/libs/quarto-html/quarto-syntax-highlighting.css new file mode 100644 index 0000000..36cb328 --- /dev/null +++ b/resources/stac_mount_save_files/libs/quarto-html/quarto-syntax-highlighting.css @@ -0,0 +1,171 @@ +/* quarto syntax highlight colors */ +:root { + --quarto-hl-ot-color: #003B4F; + --quarto-hl-at-color: #657422; + --quarto-hl-ss-color: #20794D; + --quarto-hl-an-color: #5E5E5E; + --quarto-hl-fu-color: #4758AB; + --quarto-hl-st-color: #20794D; + --quarto-hl-cf-color: #003B4F; + --quarto-hl-op-color: #5E5E5E; + --quarto-hl-er-color: #AD0000; + --quarto-hl-bn-color: #AD0000; + --quarto-hl-al-color: #AD0000; + --quarto-hl-va-color: #111111; + --quarto-hl-bu-color: inherit; + --quarto-hl-ex-color: inherit; + --quarto-hl-pp-color: #AD0000; + --quarto-hl-in-color: #5E5E5E; + --quarto-hl-vs-color: #20794D; + --quarto-hl-wa-color: #5E5E5E; + --quarto-hl-do-color: #5E5E5E; + --quarto-hl-im-color: #00769E; + --quarto-hl-ch-color: #20794D; + --quarto-hl-dt-color: #AD0000; + --quarto-hl-fl-color: #AD0000; + --quarto-hl-co-color: #5E5E5E; + --quarto-hl-cv-color: #5E5E5E; + --quarto-hl-cn-color: #8f5902; + --quarto-hl-sc-color: #5E5E5E; + --quarto-hl-dv-color: #AD0000; + --quarto-hl-kw-color: #003B4F; +} + +/* other quarto variables */ +:root { + --quarto-font-monospace: SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace; +} + +pre > code.sourceCode > span { + color: #003B4F; +} + +code span { + color: #003B4F; +} + +code.sourceCode > span { + color: #003B4F; +} + +div.sourceCode, +div.sourceCode pre.sourceCode { + color: #003B4F; +} + +code span.ot 
{ + color: #003B4F; +} + +code span.at { + color: #657422; +} + +code span.ss { + color: #20794D; +} + +code span.an { + color: #5E5E5E; +} + +code span.fu { + color: #4758AB; +} + +code span.st { + color: #20794D; +} + +code span.cf { + color: #003B4F; +} + +code span.op { + color: #5E5E5E; +} + +code span.er { + color: #AD0000; +} + +code span.bn { + color: #AD0000; +} + +code span.al { + color: #AD0000; +} + +code span.va { + color: #111111; +} + +code span.pp { + color: #AD0000; +} + +code span.in { + color: #5E5E5E; +} + +code span.vs { + color: #20794D; +} + +code span.wa { + color: #5E5E5E; + font-style: italic; +} + +code span.do { + color: #5E5E5E; + font-style: italic; +} + +code span.im { + color: #00769E; +} + +code span.ch { + color: #20794D; +} + +code span.dt { + color: #AD0000; +} + +code span.fl { + color: #AD0000; +} + +code span.co { + color: #5E5E5E; +} + +code span.cv { + color: #5E5E5E; + font-style: italic; +} + +code span.cn { + color: #8f5902; +} + +code span.sc { + color: #5E5E5E; +} + +code span.dv { + color: #AD0000; +} + +code span.kw { + color: #003B4F; +} + +.prevent-inlining { + content: " { + const sibling = el.previousElementSibling; + if (sibling && sibling.tagName === "A") { + return sibling.classList.contains("active"); + } else { + return false; + } + }; + + // fire slideEnter for bootstrap tab activations (for htmlwidget resize behavior) + function fireSlideEnter(e) { + const event = window.document.createEvent("Event"); + event.initEvent("slideenter", true, true); + window.document.dispatchEvent(event); + } + const tabs = window.document.querySelectorAll('a[data-bs-toggle="tab"]'); + tabs.forEach((tab) => { + tab.addEventListener("shown.bs.tab", fireSlideEnter); + }); + + // Track scrolling and mark TOC links as active + // get table of contents and sidebar (bail if we don't have at least one) + const tocLinks = tocEl + ? [...tocEl.querySelectorAll("a[data-scroll-target]")] + : []; + const makeActive = (link) => tocLinks[link].classList.add("active"); + const removeActive = (link) => tocLinks[link].classList.remove("active"); + const removeAllActive = () => + [...Array(tocLinks.length).keys()].forEach((link) => removeActive(link)); + + // activate the anchor for a section associated with this TOC entry + tocLinks.forEach((link) => { + link.addEventListener("click", () => { + if (link.href.indexOf("#") !== -1) { + const anchor = link.href.split("#")[1]; + const heading = window.document.querySelector( + `[data-anchor-id=${anchor}]` + ); + if (heading) { + // Add the class + heading.classList.add("reveal-anchorjs-link"); + + // function to show the anchor + const handleMouseout = () => { + heading.classList.remove("reveal-anchorjs-link"); + heading.removeEventListener("mouseout", handleMouseout); + }; + + // add a function to clear the anchor when the user mouses out of it + heading.addEventListener("mouseout", handleMouseout); + } + } + }); + }); + + const sections = tocLinks.map((link) => { + const target = link.getAttribute("data-scroll-target"); + if (target.startsWith("#")) { + return window.document.getElementById(decodeURI(`${target.slice(1)}`)); + } else { + return window.document.querySelector(decodeURI(`${target}`)); + } + }); + + const sectionMargin = 200; + let currentActive = 0; + // track whether we've initialized state the first time + let init = false; + + const updateActiveLink = () => { + // The index from bottom to top (e.g. 
reversed list) + let sectionIndex = -1; + if ( + window.innerHeight + window.pageYOffset >= + window.document.body.offsetHeight + ) { + sectionIndex = 0; + } else { + sectionIndex = [...sections].reverse().findIndex((section) => { + if (section) { + return window.pageYOffset >= section.offsetTop - sectionMargin; + } else { + return false; + } + }); + } + if (sectionIndex > -1) { + const current = sections.length - sectionIndex - 1; + if (current !== currentActive) { + removeAllActive(); + currentActive = current; + makeActive(current); + if (init) { + window.dispatchEvent(sectionChanged); + } + init = true; + } + } + }; + + const inHiddenRegion = (top, bottom, hiddenRegions) => { + for (const region of hiddenRegions) { + if (top <= region.bottom && bottom >= region.top) { + return true; + } + } + return false; + }; + + const categorySelector = "header.quarto-title-block .quarto-category"; + const activateCategories = (href) => { + // Find any categories + // Surround them with a link pointing back to: + // #category=Authoring + try { + const categoryEls = window.document.querySelectorAll(categorySelector); + for (const categoryEl of categoryEls) { + const categoryText = categoryEl.textContent; + if (categoryText) { + const link = `${href}#category=${encodeURIComponent(categoryText)}`; + const linkEl = window.document.createElement("a"); + linkEl.setAttribute("href", link); + for (const child of categoryEl.childNodes) { + linkEl.append(child); + } + categoryEl.appendChild(linkEl); + } + } + } catch { + // Ignore errors + } + }; + function hasTitleCategories() { + return window.document.querySelector(categorySelector) !== null; + } + + function offsetRelativeUrl(url) { + const offset = getMeta("quarto:offset"); + return offset ? offset + url : url; + } + + function offsetAbsoluteUrl(url) { + const offset = getMeta("quarto:offset"); + const baseUrl = new URL(offset, window.location); + + const projRelativeUrl = url.replace(baseUrl, ""); + if (projRelativeUrl.startsWith("/")) { + return projRelativeUrl; + } else { + return "/" + projRelativeUrl; + } + } + + // read a meta tag value + function getMeta(metaName) { + const metas = window.document.getElementsByTagName("meta"); + for (let i = 0; i < metas.length; i++) { + if (metas[i].getAttribute("name") === metaName) { + return metas[i].getAttribute("content"); + } + } + return ""; + } + + async function findAndActivateCategories() { + const currentPagePath = offsetAbsoluteUrl(window.location.href); + const response = await fetch(offsetRelativeUrl("listings.json")); + if (response.status == 200) { + return response.json().then(function (listingPaths) { + const listingHrefs = []; + for (const listingPath of listingPaths) { + const pathWithoutLeadingSlash = listingPath.listing.substring(1); + for (const item of listingPath.items) { + if ( + item === currentPagePath || + item === currentPagePath + "index.html" + ) { + // Resolve this path against the offset to be sure + // we already are using the correct path to the listing + // (this adjusts the listing urls to be rooted against + // whatever root the page is actually running against) + const relative = offsetRelativeUrl(pathWithoutLeadingSlash); + const baseUrl = window.location; + const resolvedPath = new URL(relative, baseUrl); + listingHrefs.push(resolvedPath.pathname); + break; + } + } + } + + // Look up the tree for a nearby linting and use that if we find one + const nearestListing = findNearestParentListing( + offsetAbsoluteUrl(window.location.pathname), + listingHrefs + ); + if 
(nearestListing) { + activateCategories(nearestListing); + } else { + // See if the referrer is a listing page for this item + const referredRelativePath = offsetAbsoluteUrl(document.referrer); + const referrerListing = listingHrefs.find((listingHref) => { + const isListingReferrer = + listingHref === referredRelativePath || + listingHref === referredRelativePath + "index.html"; + return isListingReferrer; + }); + + if (referrerListing) { + // Try to use the referrer if possible + activateCategories(referrerListing); + } else if (listingHrefs.length > 0) { + // Otherwise, just fall back to the first listing + activateCategories(listingHrefs[0]); + } + } + }); + } + } + if (hasTitleCategories()) { + findAndActivateCategories(); + } + + const findNearestParentListing = (href, listingHrefs) => { + if (!href || !listingHrefs) { + return undefined; + } + // Look up the tree for a nearby linting and use that if we find one + const relativeParts = href.substring(1).split("/"); + while (relativeParts.length > 0) { + const path = relativeParts.join("/"); + for (const listingHref of listingHrefs) { + if (listingHref.startsWith(path)) { + return listingHref; + } + } + relativeParts.pop(); + } + + return undefined; + }; + + const manageSidebarVisiblity = (el, placeholderDescriptor) => { + let isVisible = true; + + return (hiddenRegions) => { + if (el === null) { + return; + } + + // Find the last element of the TOC + const lastChildEl = el.lastElementChild; + + if (lastChildEl) { + // Find the top and bottom o the element that is being managed + const elTop = el.offsetTop; + const elBottom = + elTop + lastChildEl.offsetTop + lastChildEl.offsetHeight; + + // Converts the sidebar to a menu + const convertToMenu = () => { + for (const child of el.children) { + child.style.opacity = 0; + child.style.display = "none"; + } + + const toggleContainer = window.document.createElement("div"); + toggleContainer.style.width = "100%"; + toggleContainer.classList.add("zindex-over-content"); + toggleContainer.classList.add("quarto-sidebar-toggle"); + toggleContainer.classList.add("headroom-target"); // Marks this to be managed by headeroom + toggleContainer.id = placeholderDescriptor.id; + toggleContainer.style.position = "fixed"; + + const toggleIcon = window.document.createElement("i"); + toggleIcon.classList.add("quarto-sidebar-toggle-icon"); + toggleIcon.classList.add("bi"); + toggleIcon.classList.add("bi-caret-down-fill"); + + const toggleTitle = window.document.createElement("div"); + const titleEl = window.document.body.querySelector( + placeholderDescriptor.titleSelector + ); + if (titleEl) { + toggleTitle.append(titleEl.innerText, toggleIcon); + } + toggleTitle.classList.add("zindex-over-content"); + toggleTitle.classList.add("quarto-sidebar-toggle-title"); + toggleContainer.append(toggleTitle); + + const toggleContents = window.document.createElement("div"); + toggleContents.classList = el.classList; + toggleContents.classList.add("zindex-over-content"); + toggleContents.classList.add("quarto-sidebar-toggle-contents"); + for (const child of el.children) { + if (child.id === "toc-title") { + continue; + } + + const clone = child.cloneNode(true); + clone.style.opacity = 1; + clone.style.display = null; + toggleContents.append(clone); + } + toggleContents.style.height = "0px"; + toggleContainer.append(toggleContents); + el.parentElement.prepend(toggleContainer); + + // Process clicks + let tocShowing = false; + // Allow the caller to control whether this is dismissed + // when it is clicked (e.g. 
sidebar navigation supports + // opening and closing the nav tree, so don't dismiss on click) + const clickEl = placeholderDescriptor.dismissOnClick + ? toggleContainer + : toggleTitle; + + const closeToggle = () => { + if (tocShowing) { + toggleContainer.classList.remove("expanded"); + toggleContents.style.height = "0px"; + tocShowing = false; + } + }; + + const positionToggle = () => { + // position the element (top left of parent, same width as parent) + const elRect = el.getBoundingClientRect(); + toggleContainer.style.left = `${elRect.left}px`; + toggleContainer.style.top = `${elRect.top}px`; + toggleContainer.style.width = `${elRect.width}px`; + }; + + // Get rid of any expanded toggle if the user scrolls + window.document.addEventListener( + "scroll", + throttle(() => { + closeToggle(); + }, 50) + ); + + // Handle positioning of the toggle + window.addEventListener( + "resize", + throttle(() => { + positionToggle(); + }, 50) + ); + positionToggle(); + + // Process the click + clickEl.onclick = () => { + if (!tocShowing) { + toggleContainer.classList.add("expanded"); + toggleContents.style.height = null; + tocShowing = true; + } else { + closeToggle(); + } + }; + }; + + // Converts a sidebar from a menu back to a sidebar + const convertToSidebar = () => { + for (const child of el.children) { + child.style.opacity = 1; + clone.style.display = null; + } + + const placeholderEl = window.document.getElementById( + placeholderDescriptor.id + ); + if (placeholderEl) { + placeholderEl.remove(); + } + + el.classList.remove("rollup"); + }; + + if (isReaderMode()) { + convertToMenu(); + isVisible = false; + } else { + if (!isVisible) { + // If the element is current not visible reveal if there are + // no conflicts with overlay regions + if (!inHiddenRegion(elTop, elBottom, hiddenRegions)) { + convertToSidebar(); + isVisible = true; + } + } else { + // If the element is visible, hide it if it conflicts with overlay regions + // and insert a placeholder toggle (or if we're in reader mode) + if (inHiddenRegion(elTop, elBottom, hiddenRegions)) { + convertToMenu(); + isVisible = false; + } + } + } + } + }; + }; + + // Find any conflicting margin elements and add margins to the + // top to prevent overlap + const marginChildren = window.document.querySelectorAll( + ".column-margin.column-container > * " + ); + let lastBottom = 0; + for (const marginChild of marginChildren) { + const top = marginChild.getBoundingClientRect().top; + if (top < lastBottom) { + const margin = lastBottom - top; + marginChild.style.marginTop = `${margin}px`; + } + const styles = window.getComputedStyle(marginChild); + const marginTop = parseFloat(styles["marginTop"]); + + lastBottom = top + marginChild.getBoundingClientRect().height + marginTop; + } + + // Manage the visibility of the toc and the sidebar + const marginScrollVisibility = manageSidebarVisiblity(marginSidebarEl, { + id: "quarto-toc-toggle", + titleSelector: "#toc-title", + dismissOnClick: true, + }); + const sidebarScrollVisiblity = manageSidebarVisiblity(sidebarEl, { + id: "quarto-sidebarnav-toggle", + titleSelector: ".title", + dismissOnClick: false, + }); + let tocLeftScrollVisibility; + if (leftTocEl) { + tocLeftScrollVisibility = manageSidebarVisiblity(leftTocEl, { + id: "quarto-lefttoc-toggle", + titleSelector: "#toc-title", + dismissOnClick: true, + }); + } + + // Find the first element that uses formatting in special columns + const conflictingEls = window.document.body.querySelectorAll( + '[class^="column-"], [class*=" column-"], aside, 
[class*="margin-caption"], [class*=" margin-caption"], [class*="margin-ref"], [class*=" margin-ref"]' + ); + + // Filter all the possibly conflicting elements into ones + // the do conflict on the left or ride side + const arrConflictingEls = Array.from(conflictingEls); + const leftSideConflictEls = arrConflictingEls.filter((el) => { + if (el.tagName === "ASIDE") { + return false; + } + return Array.from(el.classList).find((className) => { + return ( + className !== "column-body" && + className.startsWith("column-") && + !className.endsWith("right") && + !className.endsWith("container") && + className !== "column-margin" + ); + }); + }); + const rightSideConflictEls = arrConflictingEls.filter((el) => { + if (el.tagName === "ASIDE") { + return true; + } + + const hasMarginCaption = Array.from(el.classList).find((className) => { + return className == "margin-caption"; + }); + if (hasMarginCaption) { + return true; + } + + return Array.from(el.classList).find((className) => { + return ( + className !== "column-body" && + !className.endsWith("container") && + className.startsWith("column-") && + !className.endsWith("left") + ); + }); + }); + + const kOverlapPaddingSize = 10; + function toRegions(els) { + return els.map((el) => { + const top = + el.getBoundingClientRect().top + + document.documentElement.scrollTop - + kOverlapPaddingSize; + return { + top, + bottom: top + el.scrollHeight + 2 * kOverlapPaddingSize, + }; + }); + } + + const hideOverlappedSidebars = () => { + marginScrollVisibility(toRegions(rightSideConflictEls)); + sidebarScrollVisiblity(toRegions(leftSideConflictEls)); + if (tocLeftScrollVisibility) { + tocLeftScrollVisibility(toRegions(leftSideConflictEls)); + } + }; + + window.quartoToggleReader = () => { + // Applies a slow class (or removes it) + // to update the transition speed + const slowTransition = (slow) => { + const manageTransition = (id, slow) => { + const el = document.getElementById(id); + if (el) { + if (slow) { + el.classList.add("slow"); + } else { + el.classList.remove("slow"); + } + } + }; + + manageTransition("TOC", slow); + manageTransition("quarto-sidebar", slow); + }; + + const readerMode = !isReaderMode(); + setReaderModeValue(readerMode); + + // If we're entering reader mode, slow the transition + if (readerMode) { + slowTransition(readerMode); + } + highlightReaderToggle(readerMode); + hideOverlappedSidebars(); + + // If we're exiting reader mode, restore the non-slow transition + if (!readerMode) { + slowTransition(!readerMode); + } + }; + + const highlightReaderToggle = (readerMode) => { + const els = document.querySelectorAll(".quarto-reader-toggle"); + if (els) { + els.forEach((el) => { + if (readerMode) { + el.classList.add("reader"); + } else { + el.classList.remove("reader"); + } + }); + } + }; + + const setReaderModeValue = (val) => { + if (window.location.protocol !== "file:") { + window.localStorage.setItem("quarto-reader-mode", val); + } else { + localReaderMode = val; + } + }; + + const isReaderMode = () => { + if (window.location.protocol !== "file:") { + return window.localStorage.getItem("quarto-reader-mode") === "true"; + } else { + return localReaderMode; + } + }; + let localReaderMode = null; + + // Walk the TOC and collapse/expand nodes + // Nodes are expanded if: + // - they are top level + // - they have children that are 'active' links + // - they are directly below an link that is 'active' + const walk = (el, depth) => { + // Tick depth when we enter a UL + if (el.tagName === "UL") { + depth = depth + 1; + } + + // It this is 
active link + let isActiveNode = false; + if (el.tagName === "A" && el.classList.contains("active")) { + isActiveNode = true; + } + + // See if there is an active child to this element + let hasActiveChild = false; + for (child of el.children) { + hasActiveChild = walk(child, depth) || hasActiveChild; + } + + // Process the collapse state if this is an UL + if (el.tagName === "UL") { + if (depth === 1 || hasActiveChild || prevSiblingIsActiveLink(el)) { + el.classList.remove("collapse"); + } else { + el.classList.add("collapse"); + } + + // untick depth when we leave a UL + depth = depth - 1; + } + return hasActiveChild || isActiveNode; + }; + + // walk the TOC and expand / collapse any items that should be shown + + if (tocEl) { + walk(tocEl, 0); + updateActiveLink(); + } + + // Throttle the scroll event and walk peridiocally + window.document.addEventListener( + "scroll", + throttle(() => { + if (tocEl) { + updateActiveLink(); + walk(tocEl, 0); + } + if (!isReaderMode()) { + hideOverlappedSidebars(); + } + }, 5) + ); + window.addEventListener( + "resize", + throttle(() => { + if (!isReaderMode()) { + hideOverlappedSidebars(); + } + }, 10) + ); + hideOverlappedSidebars(); + highlightReaderToggle(isReaderMode()); +}); + +function throttle(func, wait) { + let waiting = false; + return function () { + if (!waiting) { + func.apply(this, arguments); + waiting = true; + setTimeout(function () { + waiting = false; + }, wait); + } + }; +} diff --git a/resources/stac_mount_save_files/libs/quarto-html/tippy.css b/resources/stac_mount_save_files/libs/quarto-html/tippy.css new file mode 100644 index 0000000..e6ae635 --- /dev/null +++ b/resources/stac_mount_save_files/libs/quarto-html/tippy.css @@ -0,0 +1 @@ +.tippy-box[data-animation=fade][data-state=hidden]{opacity:0}[data-tippy-root]{max-width:calc(100vw - 10px)}.tippy-box{position:relative;background-color:#333;color:#fff;border-radius:4px;font-size:14px;line-height:1.4;white-space:normal;outline:0;transition-property:transform,visibility,opacity}.tippy-box[data-placement^=top]>.tippy-arrow{bottom:0}.tippy-box[data-placement^=top]>.tippy-arrow:before{bottom:-7px;left:0;border-width:8px 8px 0;border-top-color:initial;transform-origin:center top}.tippy-box[data-placement^=bottom]>.tippy-arrow{top:0}.tippy-box[data-placement^=bottom]>.tippy-arrow:before{top:-7px;left:0;border-width:0 8px 8px;border-bottom-color:initial;transform-origin:center bottom}.tippy-box[data-placement^=left]>.tippy-arrow{right:0}.tippy-box[data-placement^=left]>.tippy-arrow:before{border-width:8px 0 8px 8px;border-left-color:initial;right:-7px;transform-origin:center left}.tippy-box[data-placement^=right]>.tippy-arrow{left:0}.tippy-box[data-placement^=right]>.tippy-arrow:before{left:-7px;border-width:8px 8px 8px 0;border-right-color:initial;transform-origin:center right}.tippy-box[data-inertia][data-state=visible]{transition-timing-function:cubic-bezier(.54,1.5,.38,1.11)}.tippy-arrow{width:16px;height:16px;color:#333}.tippy-arrow:before{content:"";position:absolute;border-color:transparent;border-style:solid}.tippy-content{position:relative;padding:5px 9px;z-index:1} \ No newline at end of file diff --git a/resources/stac_mount_save_files/libs/quarto-html/tippy.umd.min.js b/resources/stac_mount_save_files/libs/quarto-html/tippy.umd.min.js new file mode 100644 index 0000000..ca292be --- /dev/null +++ b/resources/stac_mount_save_files/libs/quarto-html/tippy.umd.min.js @@ -0,0 +1,2 @@ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof 
module?module.exports=t(require("@popperjs/core")):"function"==typeof define&&define.amd?define(["@popperjs/core"],t):(e=e||self).tippy=t(e.Popper)}(this,(function(e){"use strict";var t={passive:!0,capture:!0},n=function(){return document.body};function r(e,t,n){if(Array.isArray(e)){var r=e[t];return null==r?Array.isArray(n)?n[t]:n:r}return e}function o(e,t){var n={}.toString.call(e);return 0===n.indexOf("[object")&&n.indexOf(t+"]")>-1}function i(e,t){return"function"==typeof e?e.apply(void 0,t):e}function a(e,t){return 0===t?e:function(r){clearTimeout(n),n=setTimeout((function(){e(r)}),t)};var n}function s(e,t){var n=Object.assign({},e);return t.forEach((function(e){delete n[e]})),n}function u(e){return[].concat(e)}function c(e,t){-1===e.indexOf(t)&&e.push(t)}function p(e){return e.split("-")[0]}function f(e){return[].slice.call(e)}function l(e){return Object.keys(e).reduce((function(t,n){return void 0!==e[n]&&(t[n]=e[n]),t}),{})}function d(){return document.createElement("div")}function v(e){return["Element","Fragment"].some((function(t){return o(e,t)}))}function m(e){return o(e,"MouseEvent")}function g(e){return!(!e||!e._tippy||e._tippy.reference!==e)}function h(e){return v(e)?[e]:function(e){return o(e,"NodeList")}(e)?f(e):Array.isArray(e)?e:f(document.querySelectorAll(e))}function b(e,t){e.forEach((function(e){e&&(e.style.transitionDuration=t+"ms")}))}function y(e,t){e.forEach((function(e){e&&e.setAttribute("data-state",t)}))}function w(e){var t,n=u(e)[0];return null!=n&&null!=(t=n.ownerDocument)&&t.body?n.ownerDocument:document}function E(e,t,n){var r=t+"EventListener";["transitionend","webkitTransitionEnd"].forEach((function(t){e[r](t,n)}))}function O(e,t){for(var n=t;n;){var r;if(e.contains(n))return!0;n=null==n.getRootNode||null==(r=n.getRootNode())?void 0:r.host}return!1}var x={isTouch:!1},C=0;function T(){x.isTouch||(x.isTouch=!0,window.performance&&document.addEventListener("mousemove",A))}function A(){var e=performance.now();e-C<20&&(x.isTouch=!1,document.removeEventListener("mousemove",A)),C=e}function L(){var e=document.activeElement;if(g(e)){var t=e._tippy;e.blur&&!t.state.isVisible&&e.blur()}}var D=!!("undefined"!=typeof window&&"undefined"!=typeof document)&&!!window.msCrypto,R=Object.assign({appendTo:n,aria:{content:"auto",expanded:"auto"},delay:0,duration:[300,250],getReferenceClientRect:null,hideOnClick:!0,ignoreAttributes:!1,interactive:!1,interactiveBorder:2,interactiveDebounce:0,moveTransition:"",offset:[0,10],onAfterUpdate:function(){},onBeforeUpdate:function(){},onCreate:function(){},onDestroy:function(){},onHidden:function(){},onHide:function(){},onMount:function(){},onShow:function(){},onShown:function(){},onTrigger:function(){},onUntrigger:function(){},onClickOutside:function(){},placement:"top",plugins:[],popperOptions:{},render:null,showOnCreate:!1,touch:!0,trigger:"mouseenter focus",triggerTarget:null},{animateFill:!1,followCursor:!1,inlinePositioning:!1,sticky:!1},{allowHTML:!1,animation:"fade",arrow:!0,content:"",inertia:!1,maxWidth:350,role:"tooltip",theme:"",zIndex:9999}),k=Object.keys(R);function P(e){var t=(e.plugins||[]).reduce((function(t,n){var r,o=n.name,i=n.defaultValue;o&&(t[o]=void 0!==e[o]?e[o]:null!=(r=R[o])?r:i);return t}),{});return Object.assign({},e,t)}function j(e,t){var n=Object.assign({},t,{content:i(t.content,[e])},t.ignoreAttributes?{}:function(e,t){return(t?Object.keys(P(Object.assign({},R,{plugins:t}))):k).reduce((function(t,n){var r=(e.getAttribute("data-tippy-"+n)||"").trim();if(!r)return t;if("content"===n)t[n]=r;else 
try{t[n]=JSON.parse(r)}catch(e){t[n]=r}return t}),{})}(e,t.plugins));return n.aria=Object.assign({},R.aria,n.aria),n.aria={expanded:"auto"===n.aria.expanded?t.interactive:n.aria.expanded,content:"auto"===n.aria.content?t.interactive?null:"describedby":n.aria.content},n}function M(e,t){e.innerHTML=t}function V(e){var t=d();return!0===e?t.className="tippy-arrow":(t.className="tippy-svg-arrow",v(e)?t.appendChild(e):M(t,e)),t}function I(e,t){v(t.content)?(M(e,""),e.appendChild(t.content)):"function"!=typeof t.content&&(t.allowHTML?M(e,t.content):e.textContent=t.content)}function S(e){var t=e.firstElementChild,n=f(t.children);return{box:t,content:n.find((function(e){return e.classList.contains("tippy-content")})),arrow:n.find((function(e){return e.classList.contains("tippy-arrow")||e.classList.contains("tippy-svg-arrow")})),backdrop:n.find((function(e){return e.classList.contains("tippy-backdrop")}))}}function N(e){var t=d(),n=d();n.className="tippy-box",n.setAttribute("data-state","hidden"),n.setAttribute("tabindex","-1");var r=d();function o(n,r){var o=S(t),i=o.box,a=o.content,s=o.arrow;r.theme?i.setAttribute("data-theme",r.theme):i.removeAttribute("data-theme"),"string"==typeof r.animation?i.setAttribute("data-animation",r.animation):i.removeAttribute("data-animation"),r.inertia?i.setAttribute("data-inertia",""):i.removeAttribute("data-inertia"),i.style.maxWidth="number"==typeof r.maxWidth?r.maxWidth+"px":r.maxWidth,r.role?i.setAttribute("role",r.role):i.removeAttribute("role"),n.content===r.content&&n.allowHTML===r.allowHTML||I(a,e.props),r.arrow?s?n.arrow!==r.arrow&&(i.removeChild(s),i.appendChild(V(r.arrow))):i.appendChild(V(r.arrow)):s&&i.removeChild(s)}return r.className="tippy-content",r.setAttribute("data-state","hidden"),I(r,e.props),t.appendChild(n),n.appendChild(r),o(e.props,e.props),{popper:t,onUpdate:o}}N.$$tippy=!0;var B=1,H=[],U=[];function _(o,s){var v,g,h,C,T,A,L,k,M=j(o,Object.assign({},R,P(l(s)))),V=!1,I=!1,N=!1,_=!1,F=[],W=a(we,M.interactiveDebounce),X=B++,Y=(k=M.plugins).filter((function(e,t){return k.indexOf(e)===t})),$={id:X,reference:o,popper:d(),popperInstance:null,props:M,state:{isEnabled:!0,isVisible:!1,isDestroyed:!1,isMounted:!1,isShown:!1},plugins:Y,clearDelayTimeouts:function(){clearTimeout(v),clearTimeout(g),cancelAnimationFrame(h)},setProps:function(e){if($.state.isDestroyed)return;ae("onBeforeUpdate",[$,e]),be();var t=$.props,n=j(o,Object.assign({},t,l(e),{ignoreAttributes:!0}));$.props=n,he(),t.interactiveDebounce!==n.interactiveDebounce&&(ce(),W=a(we,n.interactiveDebounce));t.triggerTarget&&!n.triggerTarget?u(t.triggerTarget).forEach((function(e){e.removeAttribute("aria-expanded")})):n.triggerTarget&&o.removeAttribute("aria-expanded");ue(),ie(),J&&J(t,n);$.popperInstance&&(Ce(),Ae().forEach((function(e){requestAnimationFrame(e._tippy.popperInstance.forceUpdate)})));ae("onAfterUpdate",[$,e])},setContent:function(e){$.setProps({content:e})},show:function(){var e=$.state.isVisible,t=$.state.isDestroyed,o=!$.state.isEnabled,a=x.isTouch&&!$.props.touch,s=r($.props.duration,0,R.duration);if(e||t||o||a)return;if(te().hasAttribute("disabled"))return;if(ae("onShow",[$],!1),!1===$.props.onShow($))return;$.state.isVisible=!0,ee()&&(z.style.visibility="visible");ie(),de(),$.state.isMounted||(z.style.transition="none");if(ee()){var u=re(),p=u.box,f=u.content;b([p,f],0)}A=function(){var e;if($.state.isVisible&&!_){if(_=!0,z.offsetHeight,z.style.transition=$.props.moveTransition,ee()&&$.props.animation){var 
t=re(),n=t.box,r=t.content;b([n,r],s),y([n,r],"visible")}se(),ue(),c(U,$),null==(e=$.popperInstance)||e.forceUpdate(),ae("onMount",[$]),$.props.animation&&ee()&&function(e,t){me(e,t)}(s,(function(){$.state.isShown=!0,ae("onShown",[$])}))}},function(){var e,t=$.props.appendTo,r=te();e=$.props.interactive&&t===n||"parent"===t?r.parentNode:i(t,[r]);e.contains(z)||e.appendChild(z);$.state.isMounted=!0,Ce()}()},hide:function(){var e=!$.state.isVisible,t=$.state.isDestroyed,n=!$.state.isEnabled,o=r($.props.duration,1,R.duration);if(e||t||n)return;if(ae("onHide",[$],!1),!1===$.props.onHide($))return;$.state.isVisible=!1,$.state.isShown=!1,_=!1,V=!1,ee()&&(z.style.visibility="hidden");if(ce(),ve(),ie(!0),ee()){var i=re(),a=i.box,s=i.content;$.props.animation&&(b([a,s],o),y([a,s],"hidden"))}se(),ue(),$.props.animation?ee()&&function(e,t){me(e,(function(){!$.state.isVisible&&z.parentNode&&z.parentNode.contains(z)&&t()}))}(o,$.unmount):$.unmount()},hideWithInteractivity:function(e){ne().addEventListener("mousemove",W),c(H,W),W(e)},enable:function(){$.state.isEnabled=!0},disable:function(){$.hide(),$.state.isEnabled=!1},unmount:function(){$.state.isVisible&&$.hide();if(!$.state.isMounted)return;Te(),Ae().forEach((function(e){e._tippy.unmount()})),z.parentNode&&z.parentNode.removeChild(z);U=U.filter((function(e){return e!==$})),$.state.isMounted=!1,ae("onHidden",[$])},destroy:function(){if($.state.isDestroyed)return;$.clearDelayTimeouts(),$.unmount(),be(),delete o._tippy,$.state.isDestroyed=!0,ae("onDestroy",[$])}};if(!M.render)return $;var q=M.render($),z=q.popper,J=q.onUpdate;z.setAttribute("data-tippy-root",""),z.id="tippy-"+$.id,$.popper=z,o._tippy=$,z._tippy=$;var G=Y.map((function(e){return e.fn($)})),K=o.hasAttribute("aria-expanded");return he(),ue(),ie(),ae("onCreate",[$]),M.showOnCreate&&Le(),z.addEventListener("mouseenter",(function(){$.props.interactive&&$.state.isVisible&&$.clearDelayTimeouts()})),z.addEventListener("mouseleave",(function(){$.props.interactive&&$.props.trigger.indexOf("mouseenter")>=0&&ne().addEventListener("mousemove",W)})),$;function Q(){var e=$.props.touch;return Array.isArray(e)?e:[e,0]}function Z(){return"hold"===Q()[0]}function ee(){var e;return!(null==(e=$.props.render)||!e.$$tippy)}function te(){return L||o}function ne(){var e=te().parentNode;return e?w(e):document}function re(){return S(z)}function oe(e){return $.state.isMounted&&!$.state.isVisible||x.isTouch||C&&"focus"===C.type?0:r($.props.delay,e?0:1,R.delay)}function ie(e){void 0===e&&(e=!1),z.style.pointerEvents=$.props.interactive&&!e?"":"none",z.style.zIndex=""+$.props.zIndex}function ae(e,t,n){var r;(void 0===n&&(n=!0),G.forEach((function(n){n[e]&&n[e].apply(n,t)})),n)&&(r=$.props)[e].apply(r,t)}function se(){var e=$.props.aria;if(e.content){var t="aria-"+e.content,n=z.id;u($.props.triggerTarget||o).forEach((function(e){var r=e.getAttribute(t);if($.state.isVisible)e.setAttribute(t,r?r+" "+n:n);else{var o=r&&r.replace(n,"").trim();o?e.setAttribute(t,o):e.removeAttribute(t)}}))}}function ue(){!K&&$.props.aria.expanded&&u($.props.triggerTarget||o).forEach((function(e){$.props.interactive?e.setAttribute("aria-expanded",$.state.isVisible&&e===te()?"true":"false"):e.removeAttribute("aria-expanded")}))}function ce(){ne().removeEventListener("mousemove",W),H=H.filter((function(e){return e!==W}))}function pe(e){if(!x.isTouch||!N&&"mousedown"!==e.type){var t=e.composedPath&&e.composedPath()[0]||e.target;if(!$.props.interactive||!O(z,t)){if(u($.props.triggerTarget||o).some((function(e){return 
O(e,t)}))){if(x.isTouch)return;if($.state.isVisible&&$.props.trigger.indexOf("click")>=0)return}else ae("onClickOutside",[$,e]);!0===$.props.hideOnClick&&($.clearDelayTimeouts(),$.hide(),I=!0,setTimeout((function(){I=!1})),$.state.isMounted||ve())}}}function fe(){N=!0}function le(){N=!1}function de(){var e=ne();e.addEventListener("mousedown",pe,!0),e.addEventListener("touchend",pe,t),e.addEventListener("touchstart",le,t),e.addEventListener("touchmove",fe,t)}function ve(){var e=ne();e.removeEventListener("mousedown",pe,!0),e.removeEventListener("touchend",pe,t),e.removeEventListener("touchstart",le,t),e.removeEventListener("touchmove",fe,t)}function me(e,t){var n=re().box;function r(e){e.target===n&&(E(n,"remove",r),t())}if(0===e)return t();E(n,"remove",T),E(n,"add",r),T=r}function ge(e,t,n){void 0===n&&(n=!1),u($.props.triggerTarget||o).forEach((function(r){r.addEventListener(e,t,n),F.push({node:r,eventType:e,handler:t,options:n})}))}function he(){var e;Z()&&(ge("touchstart",ye,{passive:!0}),ge("touchend",Ee,{passive:!0})),(e=$.props.trigger,e.split(/\s+/).filter(Boolean)).forEach((function(e){if("manual"!==e)switch(ge(e,ye),e){case"mouseenter":ge("mouseleave",Ee);break;case"focus":ge(D?"focusout":"blur",Oe);break;case"focusin":ge("focusout",Oe)}}))}function be(){F.forEach((function(e){var t=e.node,n=e.eventType,r=e.handler,o=e.options;t.removeEventListener(n,r,o)})),F=[]}function ye(e){var t,n=!1;if($.state.isEnabled&&!xe(e)&&!I){var r="focus"===(null==(t=C)?void 0:t.type);C=e,L=e.currentTarget,ue(),!$.state.isVisible&&m(e)&&H.forEach((function(t){return t(e)})),"click"===e.type&&($.props.trigger.indexOf("mouseenter")<0||V)&&!1!==$.props.hideOnClick&&$.state.isVisible?n=!0:Le(e),"click"===e.type&&(V=!n),n&&!r&&De(e)}}function we(e){var t=e.target,n=te().contains(t)||z.contains(t);"mousemove"===e.type&&n||function(e,t){var n=t.clientX,r=t.clientY;return e.every((function(e){var t=e.popperRect,o=e.popperState,i=e.props.interactiveBorder,a=p(o.placement),s=o.modifiersData.offset;if(!s)return!0;var u="bottom"===a?s.top.y:0,c="top"===a?s.bottom.y:0,f="right"===a?s.left.x:0,l="left"===a?s.right.x:0,d=t.top-r+u>i,v=r-t.bottom-c>i,m=t.left-n+f>i,g=n-t.right-l>i;return d||v||m||g}))}(Ae().concat(z).map((function(e){var t,n=null==(t=e._tippy.popperInstance)?void 0:t.state;return n?{popperRect:e.getBoundingClientRect(),popperState:n,props:M}:null})).filter(Boolean),e)&&(ce(),De(e))}function Ee(e){xe(e)||$.props.trigger.indexOf("click")>=0&&V||($.props.interactive?$.hideWithInteractivity(e):De(e))}function Oe(e){$.props.trigger.indexOf("focusin")<0&&e.target!==te()||$.props.interactive&&e.relatedTarget&&z.contains(e.relatedTarget)||De(e)}function xe(e){return!!x.isTouch&&Z()!==e.type.indexOf("touch")>=0}function Ce(){Te();var t=$.props,n=t.popperOptions,r=t.placement,i=t.offset,a=t.getReferenceClientRect,s=t.moveTransition,u=ee()?S(z).arrow:null,c=a?{getBoundingClientRect:a,contextElement:a.contextElement||te()}:o,p=[{name:"offset",options:{offset:i}},{name:"preventOverflow",options:{padding:{top:2,bottom:2,left:5,right:5}}},{name:"flip",options:{padding:5}},{name:"computeStyles",options:{adaptive:!s}},{name:"$$tippy",enabled:!0,phase:"beforeWrite",requires:["computeStyles"],fn:function(e){var t=e.state;if(ee()){var 
n=re().box;["placement","reference-hidden","escaped"].forEach((function(e){"placement"===e?n.setAttribute("data-placement",t.placement):t.attributes.popper["data-popper-"+e]?n.setAttribute("data-"+e,""):n.removeAttribute("data-"+e)})),t.attributes.popper={}}}}];ee()&&u&&p.push({name:"arrow",options:{element:u,padding:3}}),p.push.apply(p,(null==n?void 0:n.modifiers)||[]),$.popperInstance=e.createPopper(c,z,Object.assign({},n,{placement:r,onFirstUpdate:A,modifiers:p}))}function Te(){$.popperInstance&&($.popperInstance.destroy(),$.popperInstance=null)}function Ae(){return f(z.querySelectorAll("[data-tippy-root]"))}function Le(e){$.clearDelayTimeouts(),e&&ae("onTrigger",[$,e]),de();var t=oe(!0),n=Q(),r=n[0],o=n[1];x.isTouch&&"hold"===r&&o&&(t=o),t?v=setTimeout((function(){$.show()}),t):$.show()}function De(e){if($.clearDelayTimeouts(),ae("onUntrigger",[$,e]),$.state.isVisible){if(!($.props.trigger.indexOf("mouseenter")>=0&&$.props.trigger.indexOf("click")>=0&&["mouseleave","mousemove"].indexOf(e.type)>=0&&V)){var t=oe(!1);t?g=setTimeout((function(){$.state.isVisible&&$.hide()}),t):h=requestAnimationFrame((function(){$.hide()}))}}else ve()}}function F(e,n){void 0===n&&(n={});var r=R.plugins.concat(n.plugins||[]);document.addEventListener("touchstart",T,t),window.addEventListener("blur",L);var o=Object.assign({},n,{plugins:r}),i=h(e).reduce((function(e,t){var n=t&&_(t,o);return n&&e.push(n),e}),[]);return v(e)?i[0]:i}F.defaultProps=R,F.setDefaultProps=function(e){Object.keys(e).forEach((function(t){R[t]=e[t]}))},F.currentInput=x;var W=Object.assign({},e.applyStyles,{effect:function(e){var t=e.state,n={popper:{position:t.options.strategy,left:"0",top:"0",margin:"0"},arrow:{position:"absolute"},reference:{}};Object.assign(t.elements.popper.style,n.popper),t.styles=n,t.elements.arrow&&Object.assign(t.elements.arrow.style,n.arrow)}}),X={mouseover:"mouseenter",focusin:"focus",click:"click"};var Y={name:"animateFill",defaultValue:!1,fn:function(e){var t;if(null==(t=e.props.render)||!t.$$tippy)return{};var n=S(e.popper),r=n.box,o=n.content,i=e.props.animateFill?function(){var e=d();return e.className="tippy-backdrop",y([e],"hidden"),e}():null;return{onCreate:function(){i&&(r.insertBefore(i,r.firstElementChild),r.setAttribute("data-animatefill",""),r.style.overflow="hidden",e.setProps({arrow:!1,animation:"shift-away"}))},onMount:function(){if(i){var e=r.style.transitionDuration,t=Number(e.replace("ms",""));o.style.transitionDelay=Math.round(t/10)+"ms",i.style.transitionDuration=e,y([i],"visible")}},onShow:function(){i&&(i.style.transitionDuration="0ms")},onHide:function(){i&&y([i],"hidden")}}}};var $={clientX:0,clientY:0},q=[];function z(e){var t=e.clientX,n=e.clientY;$={clientX:t,clientY:n}}var J={name:"followCursor",defaultValue:!1,fn:function(e){var t=e.reference,n=w(e.props.triggerTarget||t),r=!1,o=!1,i=!0,a=e.props;function s(){return"initial"===e.props.followCursor&&e.state.isVisible}function u(){n.addEventListener("mousemove",f)}function c(){n.removeEventListener("mousemove",f)}function p(){r=!0,e.setProps({getReferenceClientRect:null}),r=!1}function f(n){var r=!n.target||t.contains(n.target),o=e.props.followCursor,i=n.clientX,a=n.clientY,s=t.getBoundingClientRect(),u=i-s.left,c=a-s.top;!r&&e.props.interactive||e.setProps({getReferenceClientRect:function(){var e=t.getBoundingClientRect(),n=i,r=a;"initial"===o&&(n=e.left+u,r=e.top+c);var 
s="horizontal"===o?e.top:r,p="vertical"===o?e.right:n,f="horizontal"===o?e.bottom:r,l="vertical"===o?e.left:n;return{width:p-l,height:f-s,top:s,right:p,bottom:f,left:l}}})}function l(){e.props.followCursor&&(q.push({instance:e,doc:n}),function(e){e.addEventListener("mousemove",z)}(n))}function d(){0===(q=q.filter((function(t){return t.instance!==e}))).filter((function(e){return e.doc===n})).length&&function(e){e.removeEventListener("mousemove",z)}(n)}return{onCreate:l,onDestroy:d,onBeforeUpdate:function(){a=e.props},onAfterUpdate:function(t,n){var i=n.followCursor;r||void 0!==i&&a.followCursor!==i&&(d(),i?(l(),!e.state.isMounted||o||s()||u()):(c(),p()))},onMount:function(){e.props.followCursor&&!o&&(i&&(f($),i=!1),s()||u())},onTrigger:function(e,t){m(t)&&($={clientX:t.clientX,clientY:t.clientY}),o="focus"===t.type},onHidden:function(){e.props.followCursor&&(p(),c(),i=!0)}}}};var G={name:"inlinePositioning",defaultValue:!1,fn:function(e){var t,n=e.reference;var r=-1,o=!1,i=[],a={name:"tippyInlinePositioning",enabled:!0,phase:"afterWrite",fn:function(o){var a=o.state;e.props.inlinePositioning&&(-1!==i.indexOf(a.placement)&&(i=[]),t!==a.placement&&-1===i.indexOf(a.placement)&&(i.push(a.placement),e.setProps({getReferenceClientRect:function(){return function(e){return function(e,t,n,r){if(n.length<2||null===e)return t;if(2===n.length&&r>=0&&n[0].left>n[1].right)return n[r]||t;switch(e){case"top":case"bottom":var o=n[0],i=n[n.length-1],a="top"===e,s=o.top,u=i.bottom,c=a?o.left:i.left,p=a?o.right:i.right;return{top:s,bottom:u,left:c,right:p,width:p-c,height:u-s};case"left":case"right":var f=Math.min.apply(Math,n.map((function(e){return e.left}))),l=Math.max.apply(Math,n.map((function(e){return e.right}))),d=n.filter((function(t){return"left"===e?t.left===f:t.right===l})),v=d[0].top,m=d[d.length-1].bottom;return{top:v,bottom:m,left:f,right:l,width:l-f,height:m-v};default:return t}}(p(e),n.getBoundingClientRect(),f(n.getClientRects()),r)}(a.placement)}})),t=a.placement)}};function s(){var t;o||(t=function(e,t){var n;return{popperOptions:Object.assign({},e.popperOptions,{modifiers:[].concat(((null==(n=e.popperOptions)?void 0:n.modifiers)||[]).filter((function(e){return e.name!==t.name})),[t])})}}(e.props,a),o=!0,e.setProps(t),o=!1)}return{onCreate:s,onAfterUpdate:s,onTrigger:function(t,n){if(m(n)){var o=f(e.reference.getClientRects()),i=o.find((function(e){return e.left-2<=n.clientX&&e.right+2>=n.clientX&&e.top-2<=n.clientY&&e.bottom+2>=n.clientY})),a=o.indexOf(i);r=a>-1?a:r}},onHidden:function(){r=-1}}}};var K={name:"sticky",defaultValue:!1,fn:function(e){var t=e.reference,n=e.popper;function r(t){return!0===e.props.sticky||e.props.sticky===t}var o=null,i=null;function a(){var s=r("reference")?(e.popperInstance?e.popperInstance.state.elements.reference:t).getBoundingClientRect():null,u=r("popper")?n.getBoundingClientRect():null;(s&&Q(o,s)||u&&Q(i,u))&&e.popperInstance&&e.popperInstance.update(),o=s,i=u,e.state.isMounted&&requestAnimationFrame(a)}return{onMount:function(){e.props.sticky&&a()}}}};function Q(e,t){return!e||!t||(e.top!==t.top||e.right!==t.right||e.bottom!==t.bottom||e.left!==t.left)}return F.setDefaultProps({plugins:[Y,J,G,K],render:N}),F.createSingleton=function(e,t){var n;void 0===t&&(t={});var r,o=e,i=[],a=[],c=t.overrides,p=[],f=!1;function l(){a=o.map((function(e){return u(e.props.triggerTarget||e.reference)})).reduce((function(e,t){return e.concat(t)}),[])}function v(){i=o.map((function(e){return e.reference}))}function 
m(e){o.forEach((function(t){e?t.enable():t.disable()}))}function g(e){return o.map((function(t){var n=t.setProps;return t.setProps=function(o){n(o),t.reference===r&&e.setProps(o)},function(){t.setProps=n}}))}function h(e,t){var n=a.indexOf(t);if(t!==r){r=t;var s=(c||[]).concat("content").reduce((function(e,t){return e[t]=o[n].props[t],e}),{});e.setProps(Object.assign({},s,{getReferenceClientRect:"function"==typeof s.getReferenceClientRect?s.getReferenceClientRect:function(){var e;return null==(e=i[n])?void 0:e.getBoundingClientRect()}}))}}m(!1),v(),l();var b={fn:function(){return{onDestroy:function(){m(!0)},onHidden:function(){r=null},onClickOutside:function(e){e.props.showOnCreate&&!f&&(f=!0,r=null)},onShow:function(e){e.props.showOnCreate&&!f&&(f=!0,h(e,i[0]))},onTrigger:function(e,t){h(e,t.currentTarget)}}}},y=F(d(),Object.assign({},s(t,["overrides"]),{plugins:[b].concat(t.plugins||[]),triggerTarget:a,popperOptions:Object.assign({},t.popperOptions,{modifiers:[].concat((null==(n=t.popperOptions)?void 0:n.modifiers)||[],[W])})})),w=y.show;y.show=function(e){if(w(),!r&&null==e)return h(y,i[0]);if(!r||null!=e){if("number"==typeof e)return i[e]&&h(y,i[e]);if(o.indexOf(e)>=0){var t=e.reference;return h(y,t)}return i.indexOf(e)>=0?h(y,e):void 0}},y.showNext=function(){var e=i[0];if(!r)return y.show(0);var t=i.indexOf(r);y.show(i[t+1]||e)},y.showPrevious=function(){var e=i[i.length-1];if(!r)return y.show(e);var t=i.indexOf(r),n=i[t-1]||e;y.show(n)};var E=y.setProps;return y.setProps=function(e){c=e.overrides||c,E(e)},y.setInstances=function(e){m(!0),p.forEach((function(e){return e()})),o=e,m(!1),v(),l(),p=g(y),y.setProps({triggerTarget:a})},p=g(y),y},F.delegate=function(e,n){var r=[],o=[],i=!1,a=n.target,c=s(n,["target"]),p=Object.assign({},c,{trigger:"manual",touch:!1}),f=Object.assign({touch:R.touch},c,{showOnCreate:!0}),l=F(e,p);function d(e){if(e.target&&!i){var t=e.target.closest(a);if(t){var r=t.getAttribute("data-tippy-trigger")||n.trigger||R.trigger;if(!t._tippy&&!("touchstart"===e.type&&"boolean"==typeof f.touch||"touchstart"!==e.type&&r.indexOf(X[e.type])<0)){var s=F(t,f);s&&(o=o.concat(s))}}}}function v(e,t,n,o){void 0===o&&(o=!1),e.addEventListener(t,n,o),r.push({node:e,eventType:t,handler:n,options:o})}return u(l).forEach((function(e){var n=e.destroy,a=e.enable,s=e.disable;e.destroy=function(e){void 0===e&&(e=!0),e&&o.forEach((function(e){e.destroy()})),o=[],r.forEach((function(e){var t=e.node,n=e.eventType,r=e.handler,o=e.options;t.removeEventListener(n,r,o)})),r=[],n()},e.enable=function(){a(),o.forEach((function(e){return e.enable()})),i=!1},e.disable=function(){s(),o.forEach((function(e){return e.disable()})),i=!0},function(e){var n=e.reference;v(n,"touchstart",d,t),v(n,"mouseover",d),v(n,"focusin",d),v(n,"click",d)}(e)})),l},F.hideAll=function(e){var t=void 0===e?{}:e,n=t.exclude,r=t.duration;U.forEach((function(e){var t=!1;if(n&&(t=g(n)?e.reference===n:e.popper===n.popper),!t){var o=e.props.duration;e.setProps({duration:r}),e.hide(),e.state.isDestroyed||e.setProps({duration:o})}}))},F.roundArrow='',F})); + diff --git a/resources/third_meeting_notes/index.html b/resources/third_meeting_notes/index.html new file mode 100644 index 0000000..e62b88c --- /dev/null +++ b/resources/third_meeting_notes/index.html @@ -0,0 +1,1446 @@ + + + + + + + + + + + + + + + + + + + + + + Primary Meeting Day 11-15: Finalization and Conclusion - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Primary Meeting Day 11-15: Finalization and Conclusion

Meeting Details
• Dates:
• Times:
• Location:
• Facilitator:

Attendees
• List of attendees

Daily Agenda

Day 11: Alignment and Focus

Realigning Objectives
• Review the project's main goals to ensure alignment with the final output.
• Address any misalignments or deviations from the original plan.

Prioritization of Tasks
• Identify critical tasks that need to be completed.
• Allocate resources and efforts to ensure these priorities are met.

Day 12-14: Intensive Work Period

Task Completion
• Dedicated time for team members to complete their individual contributions.
• Regular check-ins to track progress and address any blockers.

Integration of Work
• Begin to combine individual contributions into a cohesive whole.
• Review the integration to ensure consistency and coherency across the project.

Final Reviews and Edits
• Conduct thorough reviews of the project's outputs.
• Perform final edits to refine the quality of the work.

Day 15: Closure and Celebration

Final Presentation
• Present the completed project to the group.
• Discuss any last-minute adjustments or refinements needed.

Reflective Session
• Reflect on the achievements and learnings from the project.
• Share appreciation for the team's hard work and dedication.

Celebration
• Acknowledge the successful completion of the project.
• Plan for any dissemination of the project's findings or outputs.

Detailed Notes

Day 11 Notes
• ...

Day 12 Notes
• ...

Day 13 Notes
• ...

Day 14 Notes
• ...

Day 15 Notes
• ...

Action Items
• Finalize manuscript for publication: Assigned to - Deadline
• Prepare data for repository submission: Assigned to - Deadline
• Organize project materials for archival: Assigned to - Deadline
• ...

Reflections and Comments
• (Space for any additional thoughts, insights, or personal reflections on the meeting and the project as a whole.)

Next Steps
• Define the publication and dissemination plan.
• Outline any follow-up research or projects that have stemmed from this work.

Additional Documentation
• (Include or link to any additional documents, charts, or resources that were created or referenced during the meeting.)
    + + + + + + + + + + \ No newline at end of file diff --git a/resources/third_meeting_notes/third_meeting_notes.md b/resources/third_meeting_notes/third_meeting_notes.md new file mode 100644 index 0000000..19abeda --- /dev/null +++ b/resources/third_meeting_notes/third_meeting_notes.md @@ -0,0 +1,83 @@ +# Primary Meeting Day 11-15: Finalization and Conclusion + +## Meeting Details +- **Dates:** +- **Times:** +- **Location:** +- **Facilitator:** + +## Attendees +- List of attendees + +## Daily Agenda + +### Day 11: Alignment and Focus + +#### Realigning Objectives +- Review the project's main goals to ensure alignment with the final output. +- Address any misalignments or deviations from the original plan. + +#### Prioritization of Tasks +- Identify critical tasks that need to be completed. +- Allocate resources and efforts to ensure these priorities are met. + +### Day 12-14: Intensive Work Period + +#### Task Completion +- Dedicated time for team members to complete their individual contributions. +- Regular check-ins to track progress and address any blockers. + +#### Integration of Work +- Begin to combine individual contributions into a cohesive whole. +- Review the integration to ensure consistency and coherency across the project. + +#### Final Reviews and Edits +- Conduct thorough reviews of the project's outputs. +- Perform final edits to refine the quality of the work. + +### Day 15: Closure and Celebration + +#### Final Presentation +- Present the completed project to the group. +- Discuss any last-minute adjustments or refinements needed. + +#### Reflective Session +- Reflect on the achievements and learnings from the project. +- Share appreciation for the team's hard work and dedication. + +#### Celebration +- Acknowledge the successful completion of the project. +- Plan for any dissemination of the project's findings or outputs. + +## Detailed Notes + +### Day 11 Notes +- ... + +### Day 12 Notes +- ... + +### Day 13 Notes +- ... + +### Day 14 Notes +- ... + +### Day 15 Notes +- ... + +## Action Items +- [ ] Finalize manuscript for publication: Assigned to - Deadline +- [ ] Prepare data for repository submission: Assigned to - Deadline +- [ ] Organize project materials for archival: Assigned to - Deadline +- ... + +## Reflections and Comments +- (Space for any additional thoughts, insights, or personal reflections on the meeting and the project as a whole.) + +## Next Steps +- Define the publication and dissemination plan. +- Outline any follow-up research or projects that have stemmed from this work. + +## Additional Documentation +- (Include or link to any additional documents, charts, or resources that were created or referenced during the meeting.) diff --git a/resources/visualizations/index.html b/resources/visualizations/index.html new file mode 100644 index 0000000..20a8907 --- /dev/null +++ b/resources/visualizations/index.html @@ -0,0 +1,1405 @@ + + + + + + + + + + + + + + + + + + + + + + Visualization Strategy and Development Documentation - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Visualization Strategy and Development Documentation

Overview
• Brief overview of the visualization goals and their alignment with the overall project objectives.

Visualization Strategy

Identifying Key Messages
• Discuss main messages or insights to communicate through visualizations.
• Identify target audience and their specific needs.

Selecting Appropriate Visualization Types
• Explore different types of visualizations (charts, graphs, 3D, interactive elements) suitable for the data and message.
• Brainstorm creative visualization approaches.

Visualization Development

Code-Generated Visualizations
• Outline initial visualizations generated from the data pipeline.
• Include code snippets and explanations.
# Example Python code for a basic plot
import matplotlib.pyplot as plt
# 'data' is a placeholder; substitute the table produced by your data pipeline
data = {'x': [1, 2, 3, 4], 'y': [10, 20, 15, 25]}
plt.plot(data['x'], data['y'])
plt.show()
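For a slightly fuller starting point, the sketch below saves a pipeline-generated figure to disk. It assumes nothing about the project's actual pipeline: the column names, sample values, and output filename are placeholders.

```python
import matplotlib.pyplot as plt
import pandas as pd

# Placeholder table standing in for output from the data pipeline.
df = pd.DataFrame({
    "year": [2018, 2019, 2020, 2021, 2022],
    "interaction_count": [120, 135, 128, 150, 161],
})

fig, ax = plt.subplots(figsize=(6, 4))
ax.plot(df["year"], df["interaction_count"], marker="o")
ax.set_xlabel("Year")
ax.set_ylabel("Recorded interactions")
ax.set_title("Example pipeline output")
fig.savefig("example_pipeline_plot.png", dpi=300)  # hypothetical output filename
```

Saving with an explicit dpi keeps the exported figure consistent between local previews and anything pulled into a report or presentation.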

Enhancing Visualizations
• Steps for annotating, animating, or creating 3D, immersive, or interactive visualizations (a small annotation sketch follows this list).
• Discuss challenges and solutions in enhancing visuals.
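As one concrete illustration of the annotation step, this minimal sketch adds a labelled call-out to a basic matplotlib line plot; the data, label text, and coordinates are purely illustrative assumptions.

```python
import matplotlib.pyplot as plt

x = [1, 2, 3, 4, 5]
y = [2, 4, 3, 6, 5]

fig, ax = plt.subplots()
ax.plot(x, y, marker="o")
# Highlight one point of interest with an arrow and a short label (illustrative values only).
ax.annotate("notable value", xy=(4, 6), xytext=(2.0, 5.5),
            arrowprops=dict(arrowstyle="->"))
ax.set_title("Annotated example")
plt.show()
```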

Versioning and Iterations
• Document different versions and iterations of visualizations (one possible file-naming convention is sketched below).
• Reflect on improvements or changes in each version.
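A lightweight way to keep iterations distinguishable is to timestamp output filenames. The helper below is only a suggested convention, not part of the project's tooling; the figures/ directory and the naming pattern are assumptions.

```python
from datetime import datetime
from pathlib import Path

def versioned_figure_path(stem, outdir="figures"):
    """Return a timestamped path such as figures/stem_20240516-1030.png."""
    Path(outdir).mkdir(parents=True, exist_ok=True)
    stamp = datetime.now().strftime("%Y%m%d-%H%M")
    return Path(outdir) / f"{stem}_{stamp}.png"

# Usage (assuming `fig` is a matplotlib Figure):
# fig.savefig(versioned_figure_path("species_interactions"), dpi=300)
```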

Finalizing Visualizations
• Process of finalizing visuals for presentation or publication.
• Incorporate feedback from the team or test audiences.

Documentation of Tools and Resources
• List software, libraries, and tools used for visualization.
• Reference external resources or tutorials.

Conclusions
• Summarize the visualization process and its contributions to the project.
• Reflect on lessons learned and potential future improvements.

References
• Cite external sources, inspirations, or frameworks used in visualization.
    + + + + + + + + + + \ No newline at end of file diff --git a/resources/visualizations/visualizations.md b/resources/visualizations/visualizations.md new file mode 100644 index 0000000..2857a8b --- /dev/null +++ b/resources/visualizations/visualizations.md @@ -0,0 +1,50 @@ +# Visualization Strategy and Development Documentation + +## Overview +- Brief overview of the visualization goals and their alignment with the overall project objectives. + +## Visualization Strategy + +### Identifying Key Messages +- Discuss main messages or insights to communicate through visualizations. +- Identify target audience and their specific needs. + +### Selecting Appropriate Visualization Types +- Explore different types of visualizations (charts, graphs, 3D, interactive elements) suitable for the data and message. +- Brainstorm creative visualization approaches. + +## Visualization Development + +### Code-Generated Visualizations +- Outline initial visualizations generated from the data pipeline. +- Include code snippets and explanations. + +```python +# Example Python code for a basic plot +import matplotlib.pyplot as plt +plt.plot(data['x'], data['y']) +plt.show() +``` + +### Enhancing Visualizations +- Steps for annotating, animating, creating 3D, immersive, or interactive visualizations. +- Discuss challenges and solutions in enhancing visuals. + +### Versioning and Iterations +- Document different versions and iterations of visualizations. +- Reflect on improvements or changes in each version. + +### Finalizing Visualizations +- Process of finalizing visuals for presentation or publication. +- Feedback incorporation from team or test audiences. + +### Documentation of Tools and Resources +- List software, libraries, and tools used for visualization. +- Reference external resources or tutorials. + +### Conclusions +- Summarize the visualization process and contributions to the project. +- Reflect on lessons learned and potential future improvements. + +### References +- Cite external sources, inspirations, or frameworks used in visualization. diff --git a/resources/working_groups_and_postdocs/index.html b/resources/working_groups_and_postdocs/index.html new file mode 100644 index 0000000..c1a3629 --- /dev/null +++ b/resources/working_groups_and_postdocs/index.html @@ -0,0 +1,1379 @@ + + + + + + + + + + + + + + + + + + + + + + ESIIL Postdoctoral Researcher Responsibilities and Opportunities - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ESIIL Postdoctoral Researcher Responsibilities and Opportunities

Primary Responsibilities
• Independent Research:
  • Conducting self-proposed research projects.
  • Adhering to open data principles.
• Data and Code Storage:
  • Storing all research code and data in the designated ESIIL repository.
• Use of CyVerse:
  • Utilizing CyVerse as the primary computational platform.

Opportunities for Collaboration
• Joining Working Groups:
  • Opportunity to collaborate with working groups within ESIIL, subject to invitation.
• Networking and Collaboration:
  • Engaging in regular meetings and seminars for networking.

Additional Responsibilities
• Reviewing Working Group Applications:
  • Assisting in the review process of working group applications.
• Supporting Working Groups:
  • Providing support to working groups in various capacities, even if not an author.

Note
• Primary research commitments should be prioritized unless otherwise directed by supervisors or ESIIL's administrative body.

This framework ensures that ESIIL postdocs balance independent research with collaborative opportunities, adhere to open data principles, and use the designated platforms for their work.
    + + + + + + + + + + \ No newline at end of file diff --git a/resources/working_groups_and_postdocs/working_groups_and_postdocs.md b/resources/working_groups_and_postdocs/working_groups_and_postdocs.md new file mode 100644 index 0000000..ffa9ca5 --- /dev/null +++ b/resources/working_groups_and_postdocs/working_groups_and_postdocs.md @@ -0,0 +1,30 @@ +# ESIIL Postdoctoral Researcher Responsibilities and Opportunities + +## Primary Responsibilities + +- **Independent Research:** + - Conducting self-proposed research projects. + - Adhering to open data principles. +- **Data and Code Storage:** + - Storing all research code and data in the designated ESIIL repository. +- **Use of CyVerse:** + - Utilizing CyVerse as the primary computational platform. + +## Opportunities for Collaboration + +- **Joining Working Groups:** + - Opportunity to collaborate with working groups within ESIIL, subject to invitation. +- **Networking and Collaboration:** + - Engaging in regular meetings and seminars for networking. + +## Additional Responsibilities + +- **Reviewing Working Group Applications:** + - Assisting in the review process of working group applications. +- **Supporting Working Groups:** + - Providing support to working groups in various capacities, even if not an author. + +## Note +- Primary research commitments should be prioritized unless otherwise directed by supervisors or ESIIL's administrative body. + +This framework ensures that ESIIL postdocs balance independent research with collaborative opportunities, adhering to open data principles, and utilizing designated platforms for their work. diff --git a/search/search_index.json b/search/search_index.json new file mode 100644 index 0000000..c2d958b --- /dev/null +++ b/search/search_index.json @@ -0,0 +1 @@ +{"config":{"indexing":"full","lang":["en"],"min_search_length":3,"prebuild_index":false,"separator":"[\\s\\-]+"},"docs":[{"location":"","text":"Pollinating Resilient Interactions (PRI) \u00b6 Resilience of pollinator interactions in the face of climate change \u00b6 Welcome to the Pollinating Resilient Interactions repository, an integral part of ESIIL and Earth Lab's Forest Carbon Codefest. This repository is the central hub for our team, encompassing our project overview, team member information, codebase, and more... Google Drive Folder: https://drive.google.com/drive/folders/1LH9YAs8eZfam9B4oba-MAwjgD-MI3mQO CyVerse share drive location: /iplant/home/shared/iplant_esiil_summit/2024/group_04b Our Project \u00b6 Species interactions under climate change \u00b6 How will shifting climate conditions alter species interactions, and what impications will these changes have for ecosystem stability? Documentation \u00b6 Access detailed documentation on our GitHub Pages site . Group Members \u00b6 Member 1: Brooke Lamonte Long-Fox Member 2: Colleen Miller Member 3: Elsa Culler Member 4: Pablo Moreno Member 5: Miguel C Leon Member 6 Yahn-Jauh Su Member 7: Yu Liu [Link to more detailed bios or profiles if available and desired.] Code Repository Structure \u00b6 Data Processing : Scripts for cleaning, merging, and managing datasets. Analysis Code : Scripts for data analysis, statistical modeling, etc. Visualization : Code for creating figures, charts, and interactive visualizations. Meeting Notes and Agendas \u00b6 Regular updates to keep all group members informed and engaged with the project's progress and direction. Contributing to This Repository \u00b6 Contributions from all group members are welcome. 
Please adhere to these guidelines: Ensure commits have clear and concise messages. Document major changes in the meeting notes. Review and merge changes through pull requests for oversight. Getting Help \u00b6 If you encounter any issues or have questions, please refer to the ESIIL Support Page or contact the repository maintainers directly. Customize Your Repository \u00b6 Edit This Readme : Update with information specific to your project. Update Group Member Bios : Add detailed information about each group member's expertise and role. Organize Your Code : Use logical structure and clear naming conventions. Document Your Data : Include a data directory with README files for datasets. Outline Your Methods : Create a METHODS.md file for methodologies and tools. Set Up Project Management : Use 'Issues' and 'Projects' for task tracking. Add a License : Include an appropriate open-source license. Create Contribution Guidelines : Establish a CONTRIBUTING.md file. Review and Merge Workflow : Document your process for reviewing and merging changes. Establish Communication Channels : Set up channels like Slack or Discord for discussions. Remember, the goal is to make your repository clear, accessible, and useful for all current and future researchers. Happy researching!","title":"Home"},{"location":"#pollinating-resilient-interactions-pri","text":"","title":"Pollinating Resilient Interactions (PRI)"},{"location":"#resilience-of-pollinator-interactions-in-the-face-of-climate-change","text":"Welcome to the Pollinating Resilient Interactions repository, an integral part of ESIIL and Earth Lab's Forest Carbon Codefest. This repository is the central hub for our team, encompassing our project overview, team member information, codebase, and more... Google Drive Folder: https://drive.google.com/drive/folders/1LH9YAs8eZfam9B4oba-MAwjgD-MI3mQO CyVerse share drive location: /iplant/home/shared/iplant_esiil_summit/2024/group_04b","title":"Resilience of pollinator interactions in the face of climate change"},{"location":"#our-project","text":"","title":"Our Project"},{"location":"#species-interactions-under-climate-change","text":"How will shifting climate conditions alter species interactions, and what impications will these changes have for ecosystem stability?","title":"Species interactions under climate change"},{"location":"#documentation","text":"Access detailed documentation on our GitHub Pages site .","title":"Documentation"},{"location":"#group-members","text":"Member 1: Brooke Lamonte Long-Fox Member 2: Colleen Miller Member 3: Elsa Culler Member 4: Pablo Moreno Member 5: Miguel C Leon Member 6 Yahn-Jauh Su Member 7: Yu Liu [Link to more detailed bios or profiles if available and desired.]","title":"Group Members"},{"location":"#code-repository-structure","text":"Data Processing : Scripts for cleaning, merging, and managing datasets. Analysis Code : Scripts for data analysis, statistical modeling, etc. Visualization : Code for creating figures, charts, and interactive visualizations.","title":"Code Repository Structure"},{"location":"#meeting-notes-and-agendas","text":"Regular updates to keep all group members informed and engaged with the project's progress and direction.","title":"Meeting Notes and Agendas"},{"location":"#contributing-to-this-repository","text":"Contributions from all group members are welcome. Please adhere to these guidelines: Ensure commits have clear and concise messages. Document major changes in the meeting notes. 
Review and merge changes through pull requests for oversight.","title":"Contributing to This Repository"},{"location":"#getting-help","text":"If you encounter any issues or have questions, please refer to the ESIIL Support Page or contact the repository maintainers directly.","title":"Getting Help"},{"location":"#customize-your-repository","text":"Edit This Readme : Update with information specific to your project. Update Group Member Bios : Add detailed information about each group member's expertise and role. Organize Your Code : Use logical structure and clear naming conventions. Document Your Data : Include a data directory with README files for datasets. Outline Your Methods : Create a METHODS.md file for methodologies and tools. Set Up Project Management : Use 'Issues' and 'Projects' for task tracking. Add a License : Include an appropriate open-source license. Create Contribution Guidelines : Establish a CONTRIBUTING.md file. Review and Merge Workflow : Document your process for reviewing and merging changes. Establish Communication Channels : Set up channels like Slack or Discord for discussions. Remember, the goal is to make your repository clear, accessible, and useful for all current and future researchers. Happy researching!","title":"Customize Your Repository"},{"location":"agenda/","text":"ESIIL Innovation Summit - Agenda \u00b6 Big Data for Environmental Resilience and Adaptation Date: May 13-16, 2024 Location: SEEC Auditorium, University of Colorado Boulder Summit Website Goals of the 2024 ESIIL Innovation Summit \u00b6 Explore big data for environmental resilience and adaptation by identifying data synthesis opportunities and utilizing ESIIL cloud-compute capabilities. Promote best practices in ethical, open science, by supporting accessibility and usability of environmental data by all stakeholders. Champion ethical and equitable practices in environmental science, honoring data sovereignty and encouraging the responsible use of AI. Support diverse and inclusive teams by establishing collaborations around data-inspired themes across different disciplines, sectors, career stages, and backgrounds. Encourage the co-production of environmental knowledge with communities that are experiencing significant environmental challenges. Day Zero - May 13 th \u00b6 Time Event Location 9:00 AM - 12:00 PM MDT Leadership Program S372 (Viz Studio) 9:00 AM - 12:00 PM MDT Auditorium Set Up: Tables, Questions, Handouts, etc. 
SEEC Auditorium 12:00 - 1:00 PM MDT Facilitators Lunch 1:00 or 1:30 PM MDT Concurrent Optional Activities NEON Tour, HIKE 3:00 - 5:00 PM MDT Early Registration opens SEEC Atrium 3:00 - 4:00 PM MDT Technical Help Desk SEEC Auditorium 4:00 - 6:00 PM MDT Social Mixer SEEC Cafe Day One - May 14 th \u00b6 Time Event Location 8:30 AM MDT Registration SEEC Atrium 9:00 AM MDT Welcome & Opening Ceremony SEEC Auditorium 9:35 AM MDT Logistics and Planning Team Introductions SEEC Auditorium 9:45 AM MDT Positive Polarities SEEC Auditorium 10:00 AM MDT Navigating Miscommunications SEEC Auditorium 10:15 AM MDT Creating a shared language SEEC Auditorium 10:30 AM MDT Break SEEC Atrium 10:45 AM MDT Science of Team Science SEEC Auditorium 11:05 AM MDT Big Data for Resilience SEEC Auditorium 11:45 AM MDT Q&A SEEC Auditorium 12:15 PM MDT Group Photo SEEC Atrium 12:30 PM MDT Lunch SEEC Atrium 1:30 PM MDT Leveraging NEON to Understand Ecosystem Resilience Across Scales SEEC Auditorium 1:45 PM MDT Explore Topics in Resilience and Adaptation SEEC Auditorium 3:15 PM MDT Break SEEC Atrium 3:30 PM MDT Team Breakouts: Innovation Time Rooms available: S124, S127, S221, etc. 4:20 PM MDT Report Back SEEC Auditorium 4:50 PM MDT Whole Group Reflection SEEC Auditorium 4:55 PM MDT Day 1 Evaluation SEEC Auditorium 5:00 PM MDT Day 1 Close SEEC Auditorium Day Two - May 15 th \u00b6 Time Event Location 8:30 AM MDT Coffee & Tea SEEC Atrium 9:00 AM MDT Welcome Back SEEC Auditorium 9:20 AM MDT AI Research for Climate Change and Environmental Sustainability SEEC Auditorium 9:35 PM MDT Prepare for the day SEEC Auditorium 9:50 AM MDT Team Breakouts: Innovation Time Breakout Spaces with your Team 12:30 PM MDT Lunch SEEC Atrium 1:30 PM MDT Working Through the Groan Zone SEEC Auditorium 1:50 PM MDT Team Breakouts: Innovation Time Breakout Spaces with your Team 4:10 PM MDT Report Back SEEC Auditorium 4:50 PM MDT Whole Group Reflection SEEC Auditorium 5:00 PM MDT Day 2 Close Day Three - May 16 th \u00b6 Time Event Location 8:30 AM MDT Coffee & Tea SEEC Atrium 9:00 AM MDT Welcome Back SEEC Auditorium 9:15 AM MDT Final Team Breakout: Prepare for the Final Report Back Breakout Spaces with your Team 9:45 AM MDT Final Break SEEC Atrium 10:00 AM MDT Final Report back SEEC Auditorium 11:20 AM MDT What\u2019s Next? SEEC Auditorium 11:35 AM MDT Final Reflection SEEC Auditorium 11:50 PM MDT Closing SEEC Auditorium","title":"Agenda"},{"location":"agenda/#esiil-innovation-summit-agenda","text":"Big Data for Environmental Resilience and Adaptation Date: May 13-16, 2024 Location: SEEC Auditorium, University of Colorado Boulder Summit Website","title":"ESIIL Innovation Summit - Agenda"},{"location":"agenda/#goals-of-the-2024-esiil-innovation-summit","text":"Explore big data for environmental resilience and adaptation by identifying data synthesis opportunities and utilizing ESIIL cloud-compute capabilities. Promote best practices in ethical, open science, by supporting accessibility and usability of environmental data by all stakeholders. Champion ethical and equitable practices in environmental science, honoring data sovereignty and encouraging the responsible use of AI. Support diverse and inclusive teams by establishing collaborations around data-inspired themes across different disciplines, sectors, career stages, and backgrounds. 
Encourage the co-production of environmental knowledge with communities that are experiencing significant environmental challenges.","title":"Goals of the 2024 ESIIL Innovation Summit"},{"location":"agenda/#day-zero-may-13th","text":"Time Event Location 9:00 AM - 12:00 PM MDT Leadership Program S372 (Viz Studio) 9:00 AM - 12:00 PM MDT Auditorium Set Up: Tables, Questions, Handouts, etc. SEEC Auditorium 12:00 - 1:00 PM MDT Facilitators Lunch 1:00 or 1:30 PM MDT Concurrent Optional Activities NEON Tour, HIKE 3:00 - 5:00 PM MDT Early Registration opens SEEC Atrium 3:00 - 4:00 PM MDT Technical Help Desk SEEC Auditorium 4:00 - 6:00 PM MDT Social Mixer SEEC Cafe","title":"Day Zero - May 13th"},{"location":"agenda/#day-one-may-14th","text":"Time Event Location 8:30 AM MDT Registration SEEC Atrium 9:00 AM MDT Welcome & Opening Ceremony SEEC Auditorium 9:35 AM MDT Logistics and Planning Team Introductions SEEC Auditorium 9:45 AM MDT Positive Polarities SEEC Auditorium 10:00 AM MDT Navigating Miscommunications SEEC Auditorium 10:15 AM MDT Creating a shared language SEEC Auditorium 10:30 AM MDT Break SEEC Atrium 10:45 AM MDT Science of Team Science SEEC Auditorium 11:05 AM MDT Big Data for Resilience SEEC Auditorium 11:45 AM MDT Q&A SEEC Auditorium 12:15 PM MDT Group Photo SEEC Atrium 12:30 PM MDT Lunch SEEC Atrium 1:30 PM MDT Leveraging NEON to Understand Ecosystem Resilience Across Scales SEEC Auditorium 1:45 PM MDT Explore Topics in Resilience and Adaptation SEEC Auditorium 3:15 PM MDT Break SEEC Atrium 3:30 PM MDT Team Breakouts: Innovation Time Rooms available: S124, S127, S221, etc. 4:20 PM MDT Report Back SEEC Auditorium 4:50 PM MDT Whole Group Reflection SEEC Auditorium 4:55 PM MDT Day 1 Evaluation SEEC Auditorium 5:00 PM MDT Day 1 Close SEEC Auditorium","title":"Day One - May 14th"},{"location":"agenda/#day-two-may-15th","text":"Time Event Location 8:30 AM MDT Coffee & Tea SEEC Atrium 9:00 AM MDT Welcome Back SEEC Auditorium 9:20 AM MDT AI Research for Climate Change and Environmental Sustainability SEEC Auditorium 9:35 AM MDT Prepare for the day SEEC Auditorium 9:50 AM MDT Team Breakouts: Innovation Time Breakout Spaces with your Team 12:30 PM MDT Lunch SEEC Atrium 1:30 PM MDT Working Through the Groan Zone SEEC Auditorium 1:50 PM MDT Team Breakouts: Innovation Time Breakout Spaces with your Team 4:10 PM MDT Report Back SEEC Auditorium 4:50 PM MDT Whole Group Reflection SEEC Auditorium 5:00 PM MDT Day 2 Close","title":"Day Two - May 15th"},{"location":"agenda/#day-three-may-16th","text":"Time Event Location 8:30 AM MDT Coffee & Tea SEEC Atrium 9:00 AM MDT Welcome Back SEEC Auditorium 9:15 AM MDT Final Team Breakout: Prepare for the Final Report Back Breakout Spaces with your Team 9:45 AM MDT Final Break SEEC Atrium 10:00 AM MDT Final Report Back SEEC Auditorium 11:20 AM MDT What\u2019s Next? SEEC Auditorium 11:35 AM MDT Final Reflection SEEC Auditorium 11:50 AM MDT Closing SEEC Auditorium","title":"Day Three - May 16th"},{"location":"breakout/","text":"Breakout prompts & dedicated working space \u00b6 Dedicated working space \u00b6 Each team will have a room that has been reserved for their use at all team times. Those rooms will shift for each breakout time, shown below. However, your team is also welcome to explore the building and find other spaces that make you more comfortable or creative. For example, you may want to check out: The SEEC Cafe dining area (north end of the building, lots of windows in the eating room!) 
The SEEC lobby (both north and south) The Earth Lab conference room (S340). Note that this room may sometimes be reserved by Earth Lab staff. The southern end of the first floor, S148 Outside if it is sunny! The grass or SEEC courtyard! You are also welcome to use other rooms if they are available, but please be aware that other classes, study groups, or workshops may have reserved them and kick you out. Day 1 Team Dedicated Space Morning team time Team 1 S221 Team 2 S149 Team 3 C325 Team 4 S240 Team 5 Viz Studio (S372A) Team 6 Viz Studio (S372B) Afternoon team time Team 1 Viz Studio (S372B) Team 2 S221 Team 3 S149 Team 4 C325 Team 5 S240 Team 6 Viz Studio (S372A) Day 2 Team Dedicated Space Morning team time Team 1 Viz Studio (S372A) Team 2 Viz Studio (S372B) Team 3 S221 Team 4 S149 Team 5 C325 Team 6 S240 Afternoon team time Team 1 S240 Team 2 Viz Studio (S372A) Team 3 Viz Studio (S372B) Team 4 S221 Team 5 S149 Team 6 C325 Day 3 Team Dedicated Space Morning team time Team 1 C325 Team 2 S240 Team 3 Viz Studio (S372A) Team 4 Viz Studio (S372B) Team 5 S221 Team 6 S149 Breakout Prompts \u00b6 For ease of access to breakout group prompts throughout the codefest. Breakout #0: Virtual meeting #3 \u00b6 Introduce yourselves! Please briefly share: Your preferred name and where you are currently based A skill or area of expertise that you feel you are bringing to the table Something you are worried about regarding the codefest Something that brings you joy What is a topic that you are excited to investigate for two days related to forest carbon in the Southern Rocky Mountains? What datasets are you excited to potentially use? (Tuesday morning you will have ~2.5 hours to continue brainstorming, with a draft question ready by noon! So don't stress, this is just a first opportunity to get a sense of what your team is generally excited about.) Breakout #1 : Day 1 morning team time \u00b6 In-person introductions Who are you and why are you excited to be here? Establish team norms Note-taking and documenting the flow of ideas Expectations for work outside of official event hours Brainstorm What will your team project be for the next 2.5 days?! This should be a specific scientific question related to forest carbon in the Southern Rocky Mountains that you think is potentially answerable (at least in a very rough form) by the end of the event. Think about\u2026 What are you each excited about and what skills do you have around the table that can be leveraged? What datasets are you familiar with and/or excited to work with? Spin up some instances and get familiar with the data! Evaluation criteria (linked on the website!) Bring back One spokesperson to talk for 1 minute Your specific, answerable scientific question One \u2018need\u2019 that you see, whether that is help accessing an additional dataset, guidance on a dataset already available, or just your first step to get cracking Breakout #2 : Day 1 afternoon team time \u00b6 Establish: How are you going to divide work, responsibilities, and code workflows? How are you going to manage people working in different coding languages? Take time to explore the datasets you intend to use and make sure you know how to work with and visualize them. Map out an initial workflow. What are the steps you will need to take to get from start to 'finish'? Begin work! Breakout #3 : Day 2 morning team time \u00b6 Code, code, code! Focus on concrete, tractable problems, and don't get sucked into unnecessary coding or debugging. 
Is there an easier or faster way to answer your question? Graphics and deliverables are your friend! Demonstrate the progress you're making and remember to document what you are doing and WHY you're making the decisions you are. Keep your repo up to date! Breakout #4 : Day 2 afternoon team time \u00b6 Prepare your deliverables. What figures are necessary to tell the story of your project? What do you want in your presentation and on your team website? Breakout #5 : Day 3 morning team time \u00b6 Finalize all deliverables, push them to your website & GitHub, and finalize your presentation!","title":"Breakout prompts & dedicated working space"},{"location":"breakout/#breakout-prompts-dedicated-working-space","text":"","title":"Breakout prompts & dedicated working space"},{"location":"breakout/#dedicated-working-space","text":"Each team will have a room that has been reserved for their use at all team times. Those rooms will shift for each breakout time, shown below. However, your team is also welcome to explore the building and find other spaces that make you more comfortable or creative. For example, you may want to check out: The SEEC Cafe dining area (north end of the building, lots of windows in the eating room!) The SEEC lobby (both north and south) The Earth Lab conference room (S340). Note that this room may sometimes be reserved by Earth Lab staff. The southern end of the first floor, S148 Outside if it is sunny! The grass or SEEC courtyard! You are also welcome to use other rooms if they are available, but please be aware that other classes, study groups, or workshops may have reserved them and kick you out. Day 1 Team Dedicated Space Morning team time Team 1 S221 Team 2 S149 Team 3 C325 Team 4 S240 Team 5 Viz Studio (S372A) Team 6 Viz Studio (S372B) Afternoon team time Team 1 Viz Studio (S372B) Team 2 S221 Team 3 S149 Team 4 C325 Team 5 S240 Team 6 Viz Studio (S372A) Day 2 Team Dedicated Space Morning team time Team 1 Viz Studio (S372A) Team 2 Viz Studio (S372B) Team 3 S221 Team 4 S149 Team 5 C325 Team 6 S240 Afternoon team time Team 1 S240 Team 2 Viz Studio (S372A) Team 3 Viz Studio (S372B) Team 4 S221 Team 5 S149 Team 6 C325 Day 3 Team Dedicated Space Morning team time Team 1 C325 Team 2 S240 Team 3 Viz Studio (S372A) Team 4 Viz Studio (S372B) Team 5 S221 Team 6 S149","title":"Dedicated working space"},{"location":"breakout/#breakout-prompts","text":"For ease of access to breakout group prompts throughout the codefest.","title":"Breakout Prompts"},{"location":"breakout/#breakout-0-virtual-meeting-3","text":"Introduce yourselves! Please briefly share: Your preferred name and where you are currently based A skill or area of expertise that you feel you are bringing to the table Something you are worried about regarding the codefest Something that brings you joy What is a topic that you are excited to investigate for two days related to forest carbon in the Southern Rocky Mountains? What datasets are you excited to potentially use? (Tuesday morning you will have ~2.5 hours to continue brainstorming, with a draft question ready by noon! So don't stress, this is just a first opportunity to get a sense of what your team is generally excited about.)","title":"Breakout #0: Virtual meeting #3"},{"location":"breakout/#breakout-1-day-1-morning-team-time","text":"In-person introductions Who are you and why are you excited to be here? 
Establish team norms Note-taking and documenting the flow of ideas Expectations for work outside of official event hours Brainstorm What will your team project be for the next 2.5 days?! This should be a specific scientific question related to forest carbon in the Southern Rocky Mountains that you think is potentially answerable (at least in a very rough form) by the end of the event. Think about\u2026 What are you each excited about and what skills do you have around the table that can be leveraged? What datasets are you familiar with and/or excited to work with? Spin up some instances and get familiar with the data! Evaluation criteria (linked on the website!) Bring back One spokesperson to talk for 1 minute Your specific, answerable scientific question One \u2018need\u2019 that you see, whether that is help accessing an additional dataset, guidance on a dataset already available, or just your first step to get cracking","title":"Breakout #1: Day 1 morning team time"},{"location":"breakout/#breakout-2-day-1-afternoon-team-time","text":"Establish: How are you going to divide work, responsibilities, and code workflows? How are you going to manage people working in different coding languages? Take time to explore the datasets you intend to use and make sure you know how to work with and visualize them. Map out an initial workflow. What are the steps you will need to take to get from start to 'finish'? Begin work!","title":"Breakout #2: Day 1 afternoon team time"},{"location":"breakout/#breakout-3-day-2-morning-team-time","text":"Code, code, code! Focus on concrete, tractable problems, and don't get sucked into unnecessary coding or debugging. Is there an easier or faster way to answer your question? Graphics and deliverables are your friend! Demonstrate the progress you're making and remember to document what you are doing and WHY you're making the decisions you are. Keep your repo up to date!","title":"Breakout #3: Day 2 morning team time"},{"location":"breakout/#breakout-4-day-2-afternoon-team-time","text":"Prepare your deliverables. What figures are necessary to tell the story of your project? What do you want in your presentation and on your team website?","title":"Breakout #4: Day 2 afternoon team time"},{"location":"breakout/#breakout-5-day-3-morning-team-time","text":"Finalize all deliverables, push them to your website & GitHub, and finalize your presentation!","title":"Breakout #5: Day 3 morning team time"},{"location":"teams/","text":"Event Logistics \u00b6 Venue Information \u00b6 The ESIIL Innovation Summit will be held at the University of Colorado Boulder East Campus SEEC Building (4001 Discovery Dr, Boulder, CO 80303). Directions to the SEEC Building here. (Building Maps to follow)","title":"Event Logistics"},{"location":"teams/#event-logistics","text":"","title":"Event Logistics"},{"location":"teams/#venue-information","text":"The ESIIL Innovation Summit will be held at the University of Colorado Boulder East Campus SEEC Building (4001 Discovery Dr, Boulder, CO 80303). Directions to the SEEC Building here. (Building Maps to follow)","title":"Venue Information"},{"location":"virtual-meetings/","text":"Pre-Summit Virtual Meetings \u00b6 There are three virtual meetings associated with the 2024 ESIIL Summit. 
Virtual Meeting 1 \u00b6 Head in the Clouds: Navigating the Basics of Cloud Computing Date: April 24, 2024 Time: 12:00-2:00 PM MDT Virtual Meeting 1 Recording : https://www.youtube.com/watch?v=JxVPjDtIBmU Important Note: Please set up a GitHub account and a Cyverse account prior to this training. Virtual Meeting 2 \u00b6 Feet on the ground: Collaborating with Other People Using Cloud Computing Date: May 1, 2024 Time: 12:00-2:00 PM MDT Virtual Meeting 2 Recording : https://www.youtube.com/watch?v=213C7faZVFQ Virtual Meeting 3 \u00b6 Voices in Concert: Cultural Intelligence, the Art of Team Science, and Community Skills Date: May 6, 2024 Time: 9-11 AM MDT Virtual Meeting 3 Recording : https://youtu.be/Ea21i3do9sA Science of Team Science Slides Community Skills Slides","title":"Overview"},{"location":"virtual-meetings/#pre-summit-virtual-meetings","text":"There are three virtual meetings associated with the 2024 ESIIL Summit.","title":"Pre-Summit Virtual Meetings"},{"location":"virtual-meetings/#virtual-meeting-1","text":"Head in the Clouds: Navigating the Basics of Cloud Computing Date: April 24, 2024 Time: 12:00-2:00 PM MDT Virtual Meeting 1 Recording : https://www.youtube.com/watch?v=JxVPjDtIBmU Important Note: Please set up a GitHub account and a Cyverse account prior to this training.","title":"Virtual Meeting 1"},{"location":"virtual-meetings/#virtual-meeting-2","text":"Feet on the ground: Collaborating with Other People Using Cloud Computing Date: May 1, 2024 Time: 12:00-2:00 PM MDT Virtual Meeting 2 Recording : https://www.youtube.com/watch?v=213C7faZVFQ","title":"Virtual Meeting 2"},{"location":"virtual-meetings/#virtual-meeting-3","text":"Voices in Concert: Cultural Intelligence, the Art of Team Science, and Community Skills Date: May 6, 2024 Time: 9-11 AM MDT Virtual Meeting 3 Recording : https://youtu.be/Ea21i3do9sA Science of Team Science Slides Community Skills Slides","title":"Virtual Meeting 3"},{"location":"worksheet_redlining/","text":"Redlining \u00b6 Exploring the Impact of Historical Redlining on Urban Greenspace: A Collaborative Examination of Maps, Justice, and Resilience \u00b6 Introduction \u00b6 This group exploration delves into the long-term impacts of historical redlining on urban greenspace, emphasizing the powerful role of maps in shaping environmental and social landscapes. By drawing on the research by Nardone et al. (2021), you will collaboratively investigate how discriminatory practices encoded in maps have led to persistent disparities in urban settings. This exploration aims to uncover the resilience of communities in adapting to these entrenched injustices and to foster a deeper understanding of how mapping can serve both as a tool of exclusion and as a means for promoting social equity. Understanding Redlining as a Systemic Disturbance \u00b6 Redlining originated in the 1930s as a discriminatory practice where the Home Owners\u2019 Loan Corporation (HOLC) systematically denied mortgages or offered unfavorable terms based on racial and ethnic compositions. This methodical exclusion, executed through maps that color-coded \u201crisky\u201d investment areas in red, marked minority-populated areas, denying them crucial investment and development opportunities and initiating a profound and lasting disturbance in the urban fabric. Maps serve as powerful tools beyond navigation; they communicate and enforce control. 
By defining neighborhood boundaries through redlining, HOLC maps not only mirrored societal biases but also perpetuated and embedded them into the urban landscape. This manipulation of geographic data set a trajectory that limited economic growth, dictated the allocation of services, and influenced the development or deterioration of community infrastructure. Figure 1: 1938 Map of Atlanta uses colors as grades for neighborhoods. The red swaths identify each area with large African-American populations that were deemed \u201cless safe.\u201d [![](../assets/redlining/georectified-thumbnail.png)](https://storymaps.arcgis.com/stories/0f58d49c566b486482b3e64e9e5f7ac9) ArcGIS Story Map Explore the Story Map: Click on the image above to explore the interactive story map about [subject of the story map]. Resilience and Adaptation in Urban Environments \u00b6 The legacy of redlining presents both a challenge and an opportunity for resilience and adaptation. Economically and socially, redlining entrenched cycles of poverty and racial segregation, creating a resilient wealth gap that has been difficult to dismantle. Environmentally, the neighborhoods targeted by redlining continue to face significant challenges\u2014they generally feature less greenspace, suffer from higher pollution levels, and are more vulnerable to the impacts of climate change. These factors compound the health and wellness challenges faced by residents. Despite these adversities, urban communities have continually demonstrated remarkable resilience. Adaptation strategies, such as community-led green initiatives, urban agriculture, and grassroots activism, have emerged as responses to these systemic disturbances. By enhancing green infrastructure and advocating for equitable environmental policies, these communities strive to increase their resilience against both historical inequities and environmental challenges. [![](https://img.youtube.com/vi/O5FBJyqfoLM/hqdefault.jpg)](https://youtu.be/O5FBJyqfoLM) Watch the video Video Title: Exploring the Impacts of Historical Redlining on Urban Development Description: Click on the image above to watch a video that delves into the consequences of historical redlining and its ongoing impact on urban environments. This educational piece offers insights into how such discriminatory practices have shaped cities and what can be learned from them. The following group exercise will not only uncover the impact of redlining on urban greenspace but also highlight the adaptive strategies developed in response to this enduring disturbance. Through mapping and analysis, we aim to illustrate the powerful role that geographic data can play in understanding and fostering urban resilience and social equity. References \u00b6 Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. (2021). Redlines and Greenspace: The Relationship between Historical Redlining and 2010 Greenspace across the United States. Environmental Health Perspectives , 129(1), 017006. DOI:10.1289/EHP7495. Hoffman, J. S., Shandas, V., & Pendleton, N. (2020). The Effects of Historical Housing Policies on Resident Exposure to Intra-Urban Heat: A Study of 108 US Urban Areas. Climate , 8(1), 12. DOI:10.3390/cli8010012. Goals of this group activity \u00b6 The primary objectives of this tutorial are: 1. To practice coding in CyVerse. 2. To analyze the relationship between HOLC grades and the presence of urban greenspace. 3. To understand how historic policies continue to affect the spatial distribution of environmental amenities. 
Part 1: Accessing and Visualizing Historic Redlining Data \u00b6 We will begin by accessing HOLC maps from the Mapping Inequality project and overlaying this data with modern geographic datasets to visualize the historical impact on contemporary urban landscapes. Data Acquisition \u00b6 Download HOLC map shapefiles from the University of Richmond\u2019s Mapping Inequality Project. Utilize satellite imagery and other geospatial data to map current greenspace using the normalized difference vegetation index (NDVI). Analysis Methodology \u00b6 Replicate the approach used by Nardone et al. to calculate NDVI values for each HOLC neighborhood, assessing greenspace as a health-promoting resource. Employ statistical methods such as propensity score matching to control for confounding variables and estimate the true impact of HOLC grades on urban greenspace. img { width: 100%; } details summary { color: black; background-color: white; } details[open] summary { color: black; } R libraries we use in this analysis if ( ! requireNamespace ( \"tidytext\" , quietly = TRUE )) { install.packages ( \"tidytext\" ) } library ( tidytext ) ## Warning: package 'tidytext' was built under R version 4.3.2 library ( sf ) ## Warning: package 'sf' was built under R version 4.3.2 ## Linking to GEOS 3.11.0, GDAL 3.5.3, PROJ 9.1.0; sf_use_s2() is TRUE library ( ggplot2 ) ## Warning: package 'ggplot2' was built under R version 4.3.2 library ( ggthemes ) ## Warning: package 'ggthemes' was built under R version 4.3.2 library ( dplyr ) ## ## Attaching package: 'dplyr' ## The following objects are masked from 'package:stats': ## ## filter, lag ## The following objects are masked from 'package:base': ## ## intersect, setdiff, setequal, union library ( rstac ) ## Warning: package 'rstac' was built under R version 4.3.2 library ( gdalcubes ) ## Warning: package 'gdalcubes' was built under R version 4.3.2 library ( gdalUtils ) ## Please note that rgdal will be retired during October 2023, ## plan transition to sf/stars/terra functions using GDAL and PROJ ## at your earliest convenience. ## See https://r-spatial.org/r/2023/05/15/evolution4.html and https://github.com/r-spatial/evolution ## rgdal: version: 1.6-7, (SVN revision 1203) ## Geospatial Data Abstraction Library extensions to R successfully loaded ## Loaded GDAL runtime: GDAL 3.5.3, released 2022/10/21 ## Path to GDAL shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/rgdal/gdal ## GDAL does not use iconv for recoding strings. ## GDAL binary built with GEOS: TRUE ## Loaded PROJ runtime: Rel. 9.1.0, September 1st, 2022, [PJ_VERSION: 910] ## Path to PROJ shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/gdalcubes/proj ## PROJ CDN enabled: FALSE ## Linking to sp version:1.6-1 ## To mute warnings of possible GDAL/OSR exportToProj4() degradation, ## use options(\"rgdal_show_exportToProj4_warnings\"=\"none\") before loading sp or rgdal. 
## ## Attaching package: 'gdalUtils' ## The following object is masked from 'package:sf': ## ## gdal_rasterize library ( gdalcubes ) library ( colorspace ) library ( terra ) ## Warning: package 'terra' was built under R version 4.3.2 ## terra 1.7.71 ## ## Attaching package: 'terra' ## The following object is masked from 'package:colorspace': ## ## RGB ## The following objects are masked from 'package:gdalcubes': ## ## animate, crop, size library ( tidyterra ) ## ## Attaching package: 'tidyterra' ## The following object is masked from 'package:stats': ## ## filter library ( basemapR ) library ( tidytext ) library ( ggwordcloud ) library ( osmextract ) ## Data (c) OpenStreetMap contributors, ODbL 1.0. https://www.openstreetmap.org/copyright. ## Check the package website, https://docs.ropensci.org/osmextract/, for more details. library ( sf ) library ( ggplot2 ) library ( ggthemes ) library ( glue ) ## ## Attaching package: 'glue' ## The following object is masked from 'package:terra': ## ## trim library ( purrr ) FUNCTION: List cities where HOLC data are available # Function to get a list of unique cities and states from the redlining data get_city_state_list_from_redlining_data <- function () { # URL to the GeoJSON data url <- \"https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson\" # Read the GeoJSON file into an sf object redlining_data <- tryCatch ({ read_sf ( url ) }, error = function ( e ) { stop ( \"Error reading GeoJSON data: \" , e $ message ) }) # Check for the existence of 'city' and 'state' columns if ( ! all ( c ( \"city\" , \"state\" ) %in% names ( redlining_data ))) { stop ( \"The required columns 'city' and/or 'state' do not exist in the data.\" ) } # Extract a unique list of city and state pairs without the geometries city_state_df <- redlining_data %>% select ( city , state ) %>% st_set_geometry ( NULL ) %>% # Drop the geometry to avoid issues with invalid shapes distinct ( city , state ) %>% arrange ( state , city ) # Arrange the list alphabetically by state, then by city # Return the dataframe of unique city-state pairs return ( city_state_df ) } Stream list of available HOLC cities #Retrieve the list of cities and states city_state_list <- get_city_state_list_from_redlining_data () knitr :: kable ( city_state_list , format = \"markdown\" ) | city | state | |:---------------------------------|:------| | Birmingham | AL | | Mobile | AL | | Montgomery | AL | | Arkadelphia | AR | | Batesville | AR | | Camden | AR | | Conway | AR | | El Dorado | AR | | Fort Smith | AR | | Little Rock | AR | | Russellville | AR | | Texarkana | AR | | Phoenix | AZ | | Fresno | CA | | Los Angeles | CA | | Oakland | CA | | Sacramento | CA | | San Diego | CA | | San Francisco | CA | | San Jose | CA | | Stockton | CA | | Boulder | CO | | Colorado Springs | CO | | Denver | CO | | Fort Collins | CO | | Fort Morgan | CO | | Grand Junction | CO | | Greeley | CO | | Longmont | CO | | Pueblo | CO | | Bridgeport and Fairfield | CT | | Hartford | CT | | New Britain | CT | | New Haven | CT | | Stamford, Darien, and New Canaan | CT | | Waterbury | CT | | Crestview | FL | | Daytona Beach | FL | | DeFuniak Springs | FL | | DeLand | FL | | Jacksonville | FL | | Miami | FL | | New Smyrna | FL | | Orlando | FL | | Pensacola | FL | | St. 
Petersburg | FL | | Tampa | FL | | Atlanta | GA | | Augusta | GA | | Columbus | GA | | Macon | GA | | Savannah | GA | | Boone | IA | | Cedar Rapids | IA | | Council Bluffs | IA | | Davenport | IA | | Des Moines | IA | | Dubuque | IA | | Sioux City | IA | | Waterloo | IA | | Aurora | IL | | Chicago | IL | | Decatur | IL | | East St. Louis | IL | | Joliet | IL | | Peoria | IL | | Rockford | IL | | Springfield | IL | | Evansville | IN | | Fort Wayne | IN | | Indianapolis | IN | | Lake Co. Gary | IN | | Muncie | IN | | South Bend | IN | | Terre Haute | IN | | Atchison | KS | | Junction City | KS | | Topeka | KS | | Wichita | KS | | Covington | KY | | Lexington | KY | | Louisville | KY | | New Orleans | LA | | Shreveport | LA | | Arlington | MA | | Belmont | MA | | Boston | MA | | Braintree | MA | | Brockton | MA | | Brookline | MA | | Cambridge | MA | | Chelsea | MA | | Dedham | MA | | Everett | MA | | Fall River | MA | | Fitchburg | MA | | Haverhill | MA | | Holyoke Chicopee | MA | | Lawrence | MA | | Lexington | MA | | Lowell | MA | | Lynn | MA | | Malden | MA | | Medford | MA | | Melrose | MA | | Milton | MA | | Needham | MA | | New Bedford | MA | | Newton | MA | | Pittsfield | MA | | Quincy | MA | | Revere | MA | | Salem | MA | | Saugus | MA | | Somerville | MA | | Springfield | MA | | Waltham | MA | | Watertown | MA | | Winchester | MA | | Winthrop | MA | | Worcester | MA | | Baltimore | MD | | Augusta | ME | | Boothbay | ME | | Portland | ME | | Sanford | ME | | Waterville | ME | | Battle Creek | MI | | Bay City | MI | | Detroit | MI | | Flint | MI | | Grand Rapids | MI | | Jackson | MI | | Kalamazoo | MI | | Lansing | MI | | Muskegon | MI | | Pontiac | MI | | Saginaw | MI | | Austin | MN | | Duluth | MN | | Mankato | MN | | Minneapolis | MN | | Rochester | MN | | St. Cloud | MN | | St. Paul | MN | | Staples | MN | | Cape Girardeau | MO | | Carthage | MO | | Greater Kansas City | MO | | Joplin | MO | | Springfield | MO | | St. Joseph | MO | | St. Louis | MO | | Jackson | MS | | Asheville | NC | | Charlotte | NC | | Durham | NC | | Elizabeth City | NC | | Fayetteville | NC | | Goldsboro | NC | | Greensboro | NC | | Hendersonville | NC | | High Point | NC | | New Bern | NC | | Rocky Mount | NC | | Statesville | NC | | Winston-Salem | NC | | Fargo | ND | | Grand Forks | ND | | Minot | ND | | Williston | ND | | Lincoln | NE | | Omaha | NE | | Manchester | NH | | Atlantic City | NJ | | Bergen Co. | NJ | | Camden | NJ | | Essex Co. | NJ | | Hudson Co. | NJ | | Monmouth | NJ | | Passaic County | NJ | | Perth Amboy | NJ | | Trenton | NJ | | Union Co. | NJ | | Albany | NY | | Binghamton-Johnson City | NY | | Bronx | NY | | Brooklyn | NY | | Buffalo | NY | | Elmira | NY | | Jamestown | NY | | Lower Westchester Co. 
| NY | | Manhattan | NY | | Niagara Falls | NY | | Poughkeepsie | NY | | Queens | NY | | Rochester | NY | | Schenectady | NY | | Staten Island | NY | | Syracuse | NY | | Troy | NY | | Utica | NY | | Akron | OH | | Canton | OH | | Cleveland | OH | | Columbus | OH | | Dayton | OH | | Hamilton | OH | | Lima | OH | | Lorain | OH | | Portsmouth | OH | | Springfield | OH | | Toledo | OH | | Warren | OH | | Youngstown | OH | | Ada | OK | | Alva | OK | | Enid | OK | | Miami Ottawa County | OK | | Muskogee | OK | | Norman | OK | | Oklahoma City | OK | | South McAlester | OK | | Tulsa | OK | | Portland | OR | | Allentown | PA | | Altoona | PA | | Bethlehem | PA | | Chester | PA | | Erie | PA | | Harrisburg | PA | | Johnstown | PA | | Lancaster | PA | | McKeesport | PA | | New Castle | PA | | Philadelphia | PA | | Pittsburgh | PA | | Wilkes-Barre | PA | | York | PA | | Pawtucket & Central Falls | RI | | Providence | RI | | Woonsocket | RI | | Aiken | SC | | Charleston | SC | | Columbia | SC | | Greater Anderson | SC | | Greater Greenville | SC | | Orangeburg | SC | | Rock Hill | SC | | Spartanburg | SC | | Sumter | SC | | Aberdeen | SD | | Huron | SD | | Milbank | SD | | Mitchell | SD | | Rapid City | SD | | Sioux Falls | SD | | Vermillion | SD | | Watertown | SD | | Chattanooga | TN | | Elizabethton | TN | | Erwin | TN | | Greenville | TN | | Johnson City | TN | | Knoxville | TN | | Memphis | TN | | Nashville | TN | | Amarillo | TX | | Austin | TX | | Beaumont | TX | | Dallas | TX | | El Paso | TX | | Fort Worth | TX | | Galveston | TX | | Houston | TX | | Port Arthur | TX | | San Antonio | TX | | Waco | TX | | Wichita Falls | TX | | Ogden | UT | | Salt Lake City | UT | | Bristol | VA | | Danville | VA | | Harrisonburg | VA | | Lynchburg | VA | | Newport News | VA | | Norfolk | VA | | Petersburg | VA | | Phoebus | VA | | Richmond | VA | | Roanoke | VA | | Staunton | VA | | Bennington | VT | | Brattleboro | VT | | Burlington | VT | | Montpelier | VT | | Newport City | VT | | Poultney | VT | | Rutland | VT | | Springfield | VT | | St. Albans | VT | | St. Johnsbury | VT | | Windsor | VT | | Seattle | WA | | Spokane | WA | | Tacoma | WA | | Kenosha | WI | | Madison | WI | | Milwaukee Co. 
| WI | | Oshkosh | WI | | Racine | WI | | Charleston | WV | | Huntington | WV | | Wheeling | WV | FUNCTION: Stream HOLC data from a city # Function to load and filter redlining data by city load_city_redlining_data <- function ( city_name ) { # URL to the GeoJSON data url <- \"https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson\" # Read the GeoJSON file into an sf object redlining_data <- read_sf ( url ) # Filter the data for the specified city and non-empty grades city_redline <- redlining_data %>% filter ( city == city_name ) # Return the filtered data return ( city_redline ) } Stream HOLC data for Denver, CO # Load redlining data for Denver denver_redlining <- load_city_redlining_data ( \"Denver\" ) knitr :: kable ( head ( denver_redlining ), format = \"markdown\" ) | area_id | city | state | city_survey | cat | grade | label | res | com | ind | fill | GEOID10 | GISJOIN | calc_area | pct_tract | geometry | |--------:|:-------|:------|:------------|:-----|:------|:------|:-----|:------|:------|:---------|:------------|:---------------|-------------:|----------:|:-----------------------------| | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004104 | G0800310004104 | 1.525535e+01 | 0.00001 | MULTIPOLYGON (((-104.9125 3\u2026 | | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004201 | G0800310004201 | 3.987458e+05 | 0.20900 | MULTIPOLYGON (((-104.9246 3\u2026 | | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004304 | G0800310004304 | 1.554195e+05 | 0.05927 | MULTIPOLYGON (((-104.9125 3\u2026 | | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004202 | G0800310004202 | 1.117770e+06 | 0.57245 | MULTIPOLYGON (((-104.9125 3\u2026 | | 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \\#76a865 | 08031004302 | G0800310004302 | 3.133415e+05 | 0.28381 | MULTIPOLYGON (((-104.928 39\u2026 | | 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \\#76a865 | 08031004301 | G0800310004301 | 1.221218e+05 | 0.08622 | MULTIPOLYGON (((-104.9305 3\u2026 | FUNCTION: Get Points-of-Interest from city of interest get_places <- function ( polygon_layer , type = \"food\" ) { # Check if the input is an sf object if ( ! 
inherits ( polygon_layer , \"sf\" )) { stop ( \"The provided object is not an sf object.\" ) } # Create a bounding box from the input sf object bbox_here <- st_bbox ( polygon_layer ) |> st_as_sfc () if ( type == \"food\" ){ my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( shop IN ('supermarket', 'bodega', 'market', 'other_market', 'farm', 'garden_centre', 'doityourself', 'farm_supply', 'compost', 'mulch', 'fertilizer') OR amenity IN ('social_facility', 'market', 'restaurant', 'coffee') OR leisure = 'garden' OR landuse IN ('farm', 'farmland', 'row_crops', 'orchard_plantation', 'dairy_grazing') OR building IN ('brewery', 'winery', 'distillery') OR shop = 'greengrocer' OR amenity = 'marketplace' )\" title <- \"food\" } if ( type == \"processed_food\" ) { my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( amenity IN ('fast_food', 'cafe', 'pub') OR shop IN ('convenience', 'supermarket') OR shop = 'kiosk' )\" title <- \"Processed Food Locations\" } if ( type == \"natural_habitats\" ){ my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( boundary = 'protected_area' OR natural IN ('tree', 'wood') OR landuse = 'forest' OR leisure = 'park' )\" title <- \"Natural habitats or City owned trees\" } if ( type == \"roads\" ){ my_layer <- \"lines\" my_query <- \"SELECT * FROM lines WHERE ( highway IN ('motorway', 'trunk', 'primary', 'secondary', 'tertiary') )\" title <- \"Major roads\" } if ( type == \"rivers\" ){ my_layer <- \"lines\" my_query <- \"SELECT * FROM lines WHERE ( waterway IN ('river'))\" title <- \"Major rivers\" } if ( type == \"internet_access\" ) { my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( amenity IN ('library', 'cafe', 'community_centre', 'public_building') AND internet_access = 'yes' )\" title <- \"Internet Access Locations\" } if ( type == \"water_bodies\" ) { my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( natural IN ('water', 'lake', 'pond') OR water IN ('lake', 'pond') OR landuse = 'reservoir' )\" title <- \"Water Bodies\" } if ( type == \"government_buildings\" ) { my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( amenity IN ('townhall', 'courthouse', 'embassy', 'police', 'fire_station') OR building IN ('capitol', 'government') )\" title <- \"Government Buildings\" } # Use the bbox to get data with oe_get(), specifying the desired layer and a custom SQL query for fresh food places tryCatch ({ places <- oe_get ( place = bbox_here , layer = my_layer , # Adjusted layer; change as per actual data availability query = my_query , quiet = TRUE ) places <- st_make_valid ( places ) # Crop the data to the bounding box cropped_places <- st_crop ( places , bbox_here ) # Plotting the cropped fresh food places plot <- ggplot ( data = cropped_places ) + geom_sf ( fill = \"cornflowerblue\" , color = \"cornflowerblue\" ) + ggtitle ( title ) + theme_tufte () + theme ( legend.position = \"none\" , # Optionally hide the legend axis.text = element_blank (), # Remove axis text axis.title = element_blank (), # Remove axis titles axis.ticks = element_blank (), # Remove axis ticks plot.background = element_rect ( fill = \"white\" , color = NA ), # Set the plot background to white panel.background = element_rect ( fill = \"white\" , color = NA ), # Set the panel background to white panel.grid.major = element_blank (), # Remove major grid lines panel.grid.minor = element_blank (), ) # Save the plot as a PNG file png_filename 
<- paste0 ( title , \"_\" , Sys.Date (), \".png\" ) ggsave ( png_filename , plot , width = 10 , height = 8 , units = \"in\" ) # Return the cropped dataset return ( cropped_places ) }, error = function ( e ) { stop ( \"Failed to retrieve or plot data: \" , e $ message ) }) } FUNCTION: Plot POI over HOLC grades plot_city_redlining <- function ( redlining_data , filename = \"redlining_plot.png\" ) { # Fetch additional geographic data based on redlining data roads <- get_places ( redlining_data , type = \"roads\" ) rivers <- get_places ( redlining_data , type = \"rivers\" ) # Filter residential zones with valid grades and where city survey is TRUE residential_zones <- redlining_data %>% filter ( city_survey == TRUE & grade != \"\" ) # Colors for the grades colors <- c ( \"#76a865\" , \"#7cb5bd\" , \"#ffff00\" , \"#d9838d\" ) # Plot the data using ggplot2 plot <- ggplot () + geom_sf ( data = roads , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.5 , lwd = 1.1 ) + geom_sf ( data = residential_zones , aes ( fill = grade ), alpha = 0.5 ) + theme_tufte () + scale_fill_manual ( values = colors ) + labs ( fill = 'HOLC Categories' ) + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), panel.grid.major = element_blank (), panel.grid.minor = element_blank (), legend.position = \"right\" ) # Save the plot as a high-resolution PNG file ggsave ( filename , plot , width = 10 , height = 8 , units = \"in\" , dpi = 600 ) # Return the plot object if needed for further manipulation or checking return ( plot ) } Plot Denver Redlining denver_plot <- plot_city_redlining ( denver_redlining ) Stream amenities by category food <- get_places ( denver_redlining , type = \"food\" ) food_processed <- get_places ( denver_redlining , type = \"processed_food\" ) natural_habitats <- get_places ( denver_redlining , type = \"natural_habitats\" ) roads <- get_places ( denver_redlining , type = \"roads\" ) rivers <- get_places ( denver_redlining , type = \"rivers\" ) #water_bodies <- get_places(denver_redlining, type=\"water_bodies\") government_buildings <- get_places ( denver_redlining , type = \"government_buildings\" ) FUNCTION: Plot the HOLC grades individually split_plot <- function ( sf_data , roads , rivers ) { # Filter for grades A, B, C, and D sf_data_filtered <- sf_data %>% filter ( grade %in% c ( 'A' , 'B' , 'C' , 'D' )) # Define a color for each grade grade_colors <- c ( \"A\" = \"#76a865\" , \"B\" = \"#7cb5bd\" , \"C\" = \"#ffff00\" , \"D\" = \"#d9838d\" ) # Create the plot with panels for each grade plot <- ggplot ( data = sf_data_filtered ) + geom_sf ( data = roads , alpha = 0.1 , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.1 , lwd = 1.1 ) + geom_sf ( aes ( fill = grade )) + facet_wrap ( ~ grade , nrow = 1 ) + # Free scales for different zoom levels if needed scale_fill_manual ( values = grade_colors ) + theme_minimal () + labs ( fill = 'HOLC Grade' ) + theme_tufte () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"none\" , # Optionally hide the legend axis.text = element_blank (), # Remove axis text axis.title = element_blank (), # Remove axis titles axis.ticks = element_blank (), # Remove axis ticks panel.grid.major = element_blank (), # Remove major grid lines panel.grid.minor = element_blank ()) ggsave ( plot , filename = \"HOLC_grades_individually.png\" , width 
= 10 , height = 4 , units = \"in\" , dpi = 1200 ) return ( plot ) } Plot 4 HOLC grades individually plot_row <- split_plot ( denver_redlining , roads , rivers ) FUNCTION: Map an amenity over each grade individually process_and_plot_sf_layers <- function ( layer1 , layer2 , output_file = \"output_plot.png\" ) { # Make geometries valid layer1 <- st_make_valid ( layer1 ) layer2 <- st_make_valid ( layer2 ) # Optionally, simplify geometries to remove duplicate vertices layer1 <- st_simplify ( layer1 , preserveTopology = TRUE ) |> filter ( grade != \"\" ) # Prepare a list to store results results <- list () # Loop through each grade and perform operations for ( grade in c ( \"A\" , \"B\" , \"C\" , \"D\" )) { # Filter layer1 for current grade layer1_grade <- layer1 [ layer1 $ grade == grade , ] # Buffer the geometries of the current grade buffered_layer1_grade <- st_buffer ( layer1_grade , dist = 500 ) # Intersect with the second layer intersections <- st_intersects ( layer2 , buffered_layer1_grade , sparse = FALSE ) selected_polygons <- layer2 [ rowSums ( intersections ) > 0 , ] # Add a new column to store the grade information selected_polygons $ grade <- grade # Store the result results [[ grade ]] <- selected_polygons } # Combine all selected polygons from different grades into one sf object final_selected_polygons <- do.call ( rbind , results ) # Define colors for the grades grade_colors <- c ( \"A\" = \"grey\" , \"B\" = \"grey\" , \"C\" = \"grey\" , \"D\" = \"grey\" ) # Create the plot plot <- ggplot () + geom_sf ( data = roads , alpha = 0.05 , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.1 , lwd = 1.1 ) + geom_sf ( data = layer1 , fill = \"grey\" , color = \"grey\" , size = 0.1 ) + facet_wrap ( ~ grade , nrow = 1 ) + geom_sf ( data = final_selected_polygons , fill = \"green\" , color = \"green\" , size = 0.1 ) + facet_wrap ( ~ grade , nrow = 1 ) + #scale_fill_manual(values = grade_colors) + #scale_color_manual(values = grade_colors) + theme_minimal () + labs ( fill = 'HOLC Grade' ) + theme_tufte () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"none\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot as a high-resolution PNG file ggsave ( output_file , plot , width = 10 , height = 4 , units = \"in\" , dpi = 1200 ) # Return the plot for optional further use return ( list ( plot = plot , sf = final_selected_polygons )) } FUNCTION: Create word cloud per grade create_wordclouds_by_grade <- function ( sf_object , output_file = \"food_word_cloud_per_grade.png\" , title = \"Healthy food place names word cloud\" , max_size = 25 , col_select = \"name\" ) { # Extract relevant data and prepare text data text_data <- sf_object %>% select ( grade , col_select ) %>% filter ( ! is.na ( col_select )) %>% unnest_tokens ( output = \"word\" , input = col_select , token = \"words\" ) %>% count ( grade , word , sort = TRUE ) %>% ungroup () %>% filter ( n () > 1 ) # Filter to remove overly common or single-occurrence words # Ensure there are no NA values in the 'word' column text_data <- text_data %>% filter ( ! 
is.na ( word )) # Handle cases where text_data might be empty if ( nrow ( text_data ) == 0 ) { stop ( \"No data available for creating word clouds.\" ) } # Create a word cloud using ggplot2 and ggwordcloud p <- ggplot ( ) + geom_text_wordcloud_area ( data = text_data , aes ( label = word , size = n ), rm_outside = TRUE ) + scale_size_area ( max_size = max_size ) + facet_wrap ( ~ grade , nrow = 1 ) + scale_color_gradient ( low = \"darkred\" , high = \"red\" ) + theme_minimal () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), panel.spacing = unit ( 0.5 , \"lines\" ), plot.title = element_text ( size = 16 , face = \"bold\" ), legend.position = \"none\" ) + labs ( title = title ) # Attempt to save the plot and handle any errors tryCatch ({ ggsave ( output_file , p , width = 10 , height = 4 , units = \"in\" , dpi = 600 ) }, error = function ( e ) { cat ( \"Error in saving the plot: \" , e $ message , \"\\n\" ) }) return ( p ) } Map food over each grade individually layer1 <- denver_redlining layer2 <- food food_match <- process_and_plot_sf_layers ( layer1 , layer2 , \"food_match.png\" ) WORD CLOUD: Names of places with fresh food food_word_cloud <- create_wordclouds_by_grade ( food_match $ sf , output_file = \"food_word_cloud_per_grade.png\" ) Warning: Using an external vector in selections was deprecated in tidyselect 1.1.0. \u2139 Please use `all_of()` or `any_of()` instead. # Was: data %>% select(col_select) # Now: data %>% select(all_of(col_select)) See . Warning in wordcloud_boxes(data_points = points_valid_first, boxes = boxes, : Some words could not fit on page. They have been removed. Map processed food over each grade individually layer1 <- denver_redlining layer2 <- food_processed processed_food_match <- process_and_plot_sf_layers ( layer1 , layer2 , \"processed_food_match.png\" ) WORD CLOUD: Names of places with processed food processed_food_cloud <- create_wordclouds_by_grade ( processed_food_match $ sf , output_file = \"processed_food_word_cloud_per_grade.png\" , title = \"Processed food place names where larger text is more frequent\" , max_size = 17 ) Part 2: Integrating Environmental Data \u00b6 Data Processing \u00b6 Use satellite data from 2010 to analyze greenspace using NDVI, an index that measures the quantity of vegetation in an area. Apply methods to adjust for potential confounders as described in the study, ensuring that comparisons of greenspace across HOLC grades are valid and not biased by historical or socio-demographic factors. 
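The Data Processing step above (and the Analysis Methodology in Part 1) calls for adjusting NDVI comparisons across HOLC grades for potential confounders, for example with propensity score matching, but the worksheet does not include code for that step. Below is a minimal, illustrative sketch using the MatchIt package; it is not part of the original workflow, and the data frame `tract_data` and its columns (`grade`, `ndvi_mean`, `pop_density`, `median_income`) are hypothetical placeholders for whatever tract-level summaries you assemble from the HOLC and NDVI layers.

```r
# Hedged sketch: propensity score matching of formerly redlined (grade D) tracts
# to grade A tracts before comparing greenspace. `tract_data` and its column
# names are hypothetical placeholders, not objects created elsewhere in this worksheet.
library(MatchIt)
library(dplyr)

psm_input <- tract_data %>%
  filter(grade %in% c("A", "D")) %>%
  mutate(redlined = as.integer(grade == "D"))  # 1 = HOLC grade D, 0 = grade A

# Estimate a propensity score with a logistic model and do nearest-neighbor matching
m_out <- matchit(redlined ~ pop_density + median_income,
                 data = psm_input,
                 method = "nearest",
                 distance = "glm")

matched <- match.data(m_out)

# Compare mean NDVI between matched redlined and non-redlined tracts
matched %>%
  group_by(redlined) %>%
  summarise(mean_ndvi = mean(ndvi_mean, na.rm = TRUE))
```

Nearest-neighbor matching on a logistic-regression propensity score is only one reasonable choice; the point is that grade comparisons are made within matched sets rather than across raw, unbalanced tracts.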
Map natural habitats over each grade individually layer1 <- denver_redlining layer2 <- natural_habitats natural_habitats_match <- process_and_plot_sf_layers ( layer1 , layer2 , \"natural_habitats_match.png\" ) print ( natural_habitats_match $ plot ) ![](worksheet_redlining_files/figure-gfm/unnamed-chunk-18-1.png) WORD CLOUD: Name of natural habitat area natural_habitats_cloud <- create_wordclouds_by_grade ( natural_habitats_match $ sf , output_file = \"natural_habitats_word_cloud_per_grade.png\" , title = \"Natural habitats place names where larger text is more frequent\" , max_size = 35 ) FUNCTION: Stream NDVI data polygon_layer <- denver_redlining # Function to process satellite data based on an SF polygon's extent process_satellite_data <- function ( polygon_layer , start_date , end_date , assets , fps = 1 , output_file = \"anim.gif\" ) { # Record start time start_time <- Sys.time () # Calculate the bbox from the polygon layer bbox <- st_bbox ( polygon_layer ) s = stac ( \"https://earth-search.aws.element84.com/v0\" ) # Use stacR to search for Sentinel-2 images within the bbox and date range items = s |> stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( bbox [ \"xmin\" ], bbox [ \"ymin\" ], bbox [ \"xmax\" ], bbox [ \"ymax\" ]), datetime = paste ( start_date , end_date , sep = \"/\" ), limit = 500 ) %>% post_request () # Define mask for Sentinel-2 image quality #S2.mask <- image_mask(\"SCL\", values = c(3, 8, 9)) # Create a collection of images filtering by cloud cover col <- stac_image_collection ( items $ features , asset_names = assets , property_filter = function ( x ) { x [[ \"eo:cloud_cover\" ]] < 30 }) # Define a view for processing the data v <- cube_view ( srs = \"EPSG:4326\" , extent = list ( t0 = start_date , t1 = end_date , left = bbox [ \"xmin\" ], right = bbox [ \"xmax\" ], top = bbox [ \"ymax\" ], bottom = bbox [ \"ymin\" ]), dx = 0.001 , dy = 0.001 , dt = \"P1M\" , aggregation = \"median\" , resampling = \"bilinear\" ) # Calculate NDVI and create an animation ndvi_col <- function ( n ) { rev ( sequential_hcl ( n , \"Green-Yellow\" )) } #raster_cube(col, v, mask = S2.mask) %>% raster_cube ( col , v ) %>% select_bands ( c ( \"B04\" , \"B08\" )) %>% apply_pixel ( \"(B08-B04)/(B08+B04)\" , \"NDVI\" ) %>% gdalcubes :: animate ( col = ndvi_col , zlim = c ( -0.2 , 1 ), key.pos = 1 , save_as = output_file , fps = fps ) # Calculate processing time end_time <- Sys.time () processing_time <- difftime ( end_time , start_time ) # Return processing time return ( processing_time ) } Stream NDVI data: animation processing_time <- process_satellite_data ( denver_redlining , \"2022-05-31\" , \"2023-05-31\" , c ( \"B04\" , \"B08\" )) FUNCTION: Stream year average NDVI yearly_average_ndvi <- function ( polygon_layer , output_file = \"ndvi.png\" , dx = 0.01 , dy = 0.01 ) { # Record start time start_time <- Sys.time () # Calculate the bbox from the polygon layer bbox <- st_bbox ( polygon_layer ) s = stac ( \"https://earth-search.aws.element84.com/v0\" ) # Search for Sentinel-2 images within the bbox for June items <- s |> stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( bbox [ \"xmin\" ], bbox [ \"ymin\" ], bbox [ \"xmax\" ], bbox [ \"ymax\" ]), datetime = \"2023-01-01/2023-12-31\" , limit = 500 ) %>% post_request () # Create a collection of images filtering by cloud cover col <- stac_image_collection ( items $ features , asset_names = c ( \"B04\" , \"B08\" ), property_filter = function ( x ) { x [[ \"eo:cloud_cover\" ]] < 80 }) # Define a view for processing 
the data specifically for June v <- cube_view ( srs = \"EPSG:4326\" , extent = list ( t0 = \"2023-01-01\" , t1 = \"2023-12-31\" , left = bbox [ \"xmin\" ], right = bbox [ \"xmax\" ], top = bbox [ \"ymax\" ], bottom = bbox [ \"ymin\" ]), dx = dx , dy = dy , dt = \"P1Y\" , aggregation = \"median\" , resampling = \"bilinear\" ) # Process NDVI ndvi_rast <- raster_cube ( col , v ) %>% select_bands ( c ( \"B04\" , \"B08\" )) %>% apply_pixel ( \"(B08-B04)/(B08+B04)\" , \"NDVI\" ) %>% write_tif () |> terra :: rast () # Convert terra Raster to ggplot using tidyterra ndvi_plot <- ggplot () + geom_spatraster ( data = ndvi_rast , aes ( fill = NDVI )) + scale_fill_viridis_c ( option = \"viridis\" , direction = -1 , name = \"NDVI\" ) + labs ( title = \"NDVI mean for 2023\" ) + theme_minimal () + coord_sf () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"right\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot as a high-resolution PNG file ggsave ( output_file , ndvi_plot , width = 10 , height = 8 , dpi = 600 ) # Calculate processing time end_time <- Sys.time () processing_time <- difftime ( end_time , start_time ) # Return the plot and processing time return ( list ( plot = ndvi_plot , processing_time = processing_time , raster = ndvi_rast )) } Stream NDVI: high resolution ndvi_background <- yearly_average_ndvi ( denver_redlining , dx = 0.0001 , dy = 0.0001 ) FUNCTION: Map NDVI per HOLC grade individually create_mask_and_plot <- function ( redlining_sf , background_raster = ndvi $ raster , roads = NULL , rivers = NULL ){ start_time <- Sys.time () # Start timing # Validate and prepare the redlining data redlining_sf <- redlining_sf %>% filter ( grade != \"\" ) %>% st_make_valid () bbox <- st_bbox ( redlining_sf ) # Get original bounding box expanded_bbox <- expand_bbox ( bbox , 6000 , 1000 ) # expanded_bbox_poly <- st_as_sfc ( expanded_bbox , crs = st_crs ( redlining_sf )) %>% st_make_valid () # Initialize an empty list to store masks masks <- list () # Iterate over each grade to create masks unique_grades <- unique ( redlining_sf $ grade ) for ( grade in unique_grades ) { # Filter polygons by grade grade_polygons <- redlining_sf [ redlining_sf $ grade == grade , ] # Create an \"inverted\" mask by subtracting these polygons from the background mask <- st_difference ( expanded_bbox_poly , st_union ( grade_polygons )) # Store the mask in the list with the grade as the name masks [[ grade ]] <- st_sf ( geometry = mask , grade = grade ) } # Combine all masks into a single sf object mask_sf <- do.call ( rbind , masks ) # Normalize the grades so that C.2 becomes C, but correctly handle other grades mask_sf $ grade <- ifelse ( mask_sf $ grade == \"C.2\" , \"C\" , mask_sf $ grade ) # Prepare the plot plot <- ggplot () + geom_spatraster ( data = background_raster , aes ( fill = NDVI )) + scale_fill_viridis_c ( name = \"NDVI\" , option = \"viridis\" , direction = -1 ) + geom_sf ( data = mask_sf , aes ( color = grade ), fill = \"white\" , size = 0.1 , show.legend = FALSE ) + scale_color_manual ( values = c ( \"A\" = \"white\" , \"B\" = \"white\" , \"C\" = \"white\" , \"D\" = \"white\" ), name = \"Grade\" ) + facet_wrap ( ~ grade , nrow = 1 ) + geom_sf ( data = roads , alpha = 1 , lwd = 0.1 , color = \"white\" ) + geom_sf ( data = rivers , color = \"white\" , alpha = 
0.5 , lwd = 1.1 ) + labs ( title = \"NDVI: Normalized Difference Vegetation Index\" ) + theme_minimal () + coord_sf ( xlim = c ( bbox [ \"xmin\" ], bbox [ \"xmax\" ]), ylim = c ( bbox [ \"ymin\" ], bbox [ \"ymax\" ]), expand = FALSE ) + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"bottom\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot ggsave ( \"redlining_mask_ndvi.png\" , plot , width = 10 , height = 4 , dpi = 600 ) end_time <- Sys.time () # End timing runtime <- end_time - start_time # Return the plot and runtime return ( list ( plot = plot , runtime = runtime , mask_sf = mask_sf )) } Stream NDVI: low resolution ndvi_background_low <- yearly_average_ndvi ( denver_redlining ) Map low resolution NDVI per HOLC grade ndvi <- create_mask_and_plot ( denver_redlining , background_raster = ndvi_background_low $ raster , roads = roads , rivers = rivers ) FUNCTION: Map Denver City provided data per HOLC grade process_city_inventory_data <- function ( address , inner_file , polygon_layer , output_filename , variable_label = 'Tree Density' ) { # Download and read the shapefile full_path <- glue ( \"/vsizip/vsicurl/{address}/{inner_file}\" ) shape_data <- st_read ( full_path , quiet = TRUE ) |> st_as_sf () # Process the shape data with the provided polygon layer processed_data <- process_and_plot_sf_layers ( polygon_layer , shape_data , paste0 ( output_filename , \".png\" )) # Extract trees from the processed data trees <- processed_data $ sf denver_redlining_residential <- polygon_layer |> filter ( grade != \"\" ) # Generate the density plot plot <- ggplot () + geom_sf ( data = roads , alpha = 0.05 , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.1 , lwd = 1.1 ) + geom_sf ( data = denver_redlining_residential , fill = \"grey\" , color = \"grey\" , size = 0.1 ) + facet_wrap ( ~ grade , nrow = 1 ) + stat_density_2d ( data = trees , mapping = aes ( x = map_dbl ( geometry , ~ . [ 1 ]), y = map_dbl ( geometry , ~ . [ 2 ]), fill = stat ( density )), geom = 'tile' , contour = FALSE , alpha = 0.9 ) + scale_fill_gradientn ( colors = c ( \"transparent\" , \"white\" , \"limegreen\" ), values = scales :: rescale ( c ( 0 , 0.1 , 1 )), # Adjust these based on your density range guide = \"colourbar\" ) + theme_minimal () + labs ( fill = variable_label ) + theme_tufte () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"bottom\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot ggsave ( paste0 ( output_filename , \"_density_plot.png\" ), plot , width = 10 , height = 4 , units = \"in\" , dpi = 600 ) # Return the plot and the tree layer return ( list ( plot = plot , layer = trees )) } Map tree inventory per HOLC grade result <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/tree_inventory/shape/tree_inventory.zip\" , \"tree_inventory.shp\" , denver_redlining , \"Denver_tree_inventory_2023\" ) Warning: `stat(density)` was deprecated in ggplot2 3.4.0. \u2139 Please use `after_stat(density)` instead. 
Map traffic accidents per HOLC grade result <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/traffic_accidents/shape/traffic_accidents.zip\" , \"traffic_accidents.shp\" , denver_redlining , \"Denver_traffic_accidents\" , variable_label = 'Traffic accidents density' ) Map stream sampling effort per HOLC grade instream_sampling_sites <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/instream_sampling_sites/shape/instream_sampling_sites.zip\" , \"instream_sampling_sites.shp\" , denver_redlining , \"instream_sampling_sites\" , variable_label = 'Instream sampling sites density' ) Map soil sampling effort per HOLC grade soil_samples <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/soil_samples/shape/soil_samples.zip\" , \"soil_samples.shp\" , denver_redlining , \"Soil samples\" , variable_label = 'soil samples density' ) Map public art density per HOLC grade public_art <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/public_art/shape/public_art.zip\" , \"public_art.shp\" , denver_redlining , \"Public art \" , variable_label = 'Public art density' ) Map liquor licenses density per HOLC grade liquor_licenses <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/liquor_licenses/shape/liquor_licenses.zip\" , \"liquor_licenses.shp\" , denver_redlining , \"liquor licenses \" , variable_label = 'liquor licenses density' ) Map crime density per HOLC grade Crime <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/crime/shape/crime.zip\" , \"crime.shp\" , denver_redlining , \"crime\" , variable_label = 'Crime density' ) WORD CLOUD: Types of crimes crime_cloud <- create_wordclouds_by_grade ( Crime $ layer , output_file = \"Crime_word_cloud_per_grade.png\" , title = \"Crime type where larger text is more frequent\" , max_size = 25 , col_select = \"OFFENSE_TY\" ) Warning: Using an external vector in selections was deprecated in tidyselect 1.1.0. \u2139 Please use `all_of()` or `any_of()` instead. # Was: data %>% select(col_select) # Now: data %>% select(all_of(col_select)) See . Map police shooting density per HOLC grade Denver_police_shootings <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/denver_police_officer_involved_shootings/shape/denver_police_officer_involved_shootings.zip\" , \"denver_police_officer_involved_shootings.shp\" , denver_redlining , \"Police shootings\" , variable_label = 'Police shootings density' ) Not enough data for density across all 4 WORD CLOUD: Police involved shootings Denver_police_shootings_cloud <- create_wordclouds_by_grade ( Denver_police_shootings $ layer , output_file = \"police_shootings_word_cloud_per_grade.png\" , title = \"police involved shooting per crime type where larger text is more frequent\" , max_size = 35 , col_select = \"SHOOT_ACTI\" ) Part 3: Comparative Analysis and Visualization \u00b6 Statistical Analysis \u00b6 Conduct a detailed statistical analysis to compare greenspace across different HOLC grades, using techniques like Targeted Maximum Likelihood Estimation (TMLE) to assess the association between historical redlining and current greenspace levels. Visualize the disparities in greenspace distribution using GIS tools, highlighting how redlining has shaped urban ecological landscapes. 
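Before fitting a TMLE or propensity-score model, it can help to look at the unadjusted pattern first: summarise mean NDVI within each HOLC polygon and compare the distributions across grades. The sketch below assumes the `ndvi_background$raster` SpatRaster and the `denver_redlining` object created earlier are still in memory; this simple comparison does not control for confounders the way TMLE or matching would.

```r
# Unadjusted first look: zonal mean NDVI per HOLC polygon, compared by grade.
# Assumes ndvi_background$raster (SpatRaster) and denver_redlining (sf) exist.
library(terra)
library(sf)
library(dplyr)
library(ggplot2)

graded <- denver_redlining %>% filter(grade %in% c("A", "B", "C", "D"))

# Mean NDVI within each polygon (second column holds the single NDVI band)
zonal <- terra::extract(ndvi_background$raster, terra::vect(graded),
                        fun = mean, na.rm = TRUE)
graded$mean_ndvi <- zonal[[2]]

# Distribution of polygon-level NDVI by HOLC grade
ggplot(st_drop_geometry(graded), aes(x = grade, y = mean_ndvi, fill = grade)) +
  geom_boxplot() +
  labs(x = "HOLC grade", y = "Mean NDVI (2023)") +
  theme_minimal()

# One-way comparison across grades
summary(aov(mean_ndvi ~ grade, data = st_drop_geometry(graded)))
```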
Conclusion \u00b6 This tutorial provides tools and methodologies to explore the lingering effects of historic redlining on urban greenspace, offering insights into the intersection of urban planning, environmental justice, and public health. References \u00b6 Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. (2021). Redlines and Greenspace: The Relationship between Historical Redlining and 2010 Greenspace across the United States. Environmental Health Perspectives , 129(1), 017006. DOI:10.1289/EHP7495. Available online","title":"Redlining"},{"location":"worksheet_redlining/#redlining","text":"","title":"Redlining"},{"location":"worksheet_redlining/#exploring-the-impact-of-historical-redlining-on-urban-greenspace-a-collaborative-examination-of-maps-justice-and-resilience","text":"","title":"Exploring the Impact of Historical Redlining on Urban Greenspace: A Collaborative Examination of Maps, Justice, and Resilience"},{"location":"worksheet_redlining/#introduction","text":"This group exploration delves into the long-term impacts of historical redlining on urban greenspace, emphasizing the powerful role of maps in shaping environmental and social landscapes. By drawing on the research by Nardone et al. (2021), you will collaboratively investigate how discriminatory practices encoded in maps have led to persistent disparities in urban settings. This exploration aims to uncover the resilience of communities in adapting to these entrenched injustices and to foster a deeper understanding of how mapping can serve both as a tool of exclusion and as a means for promoting social equity. )","title":"Introduction"},{"location":"worksheet_redlining/#understanding-redlining-as-a-systemic-disturbance","text":"Redlining originated in the 1930s as a discriminatory practice where the Home Owners\u2019 Loan Corporation (HOLC) systematically denied mortgages or offered unfavorable terms based on racial and ethnic compositions. This methodical exclusion, executed through maps that color-coded \u201crisky\u201d investment areas in red, marked minority-populated areas, denying them crucial investment and development opportunities and initiating a profound and lasting disturbance in the urban fabric. Maps serve as powerful tools beyond navigation; they communicate and enforce control. By defining neighborhood boundaries through redlining, HOLC maps not only mirrored societal biases but also perpetuated and embedded them into the urban landscape. This manipulation of geographic data set a trajectory that limited economic growth, dictated the allocation of services, and influenced the development or deterioration of community infrastructure. Figure 1: 1938 Map of Atlanta uses colors as grades for neighborhoods. The red swaths identify each area with large African-American populations that were deemed \u201cless safe.\u201d [![](../assets/redlining/georectified-thumbnail.png)](https://storymaps.arcgis.com/stories/0f58d49c566b486482b3e64e9e5f7ac9) ArcGIS Story Map Explore the Story Map: Click on the image above to explore the interactive story map about [subject of the story map].","title":"Understanding Redlining as a Systemic Disturbance"},{"location":"worksheet_redlining/#resilience-and-adaptation-in-urban-environments","text":"The legacy of redlining presents both a challenge and an opportunity for resilience and adaptation. Economically and socially, redlining entrenched cycles of poverty and racial segregation, creating a resilient wealth gap that has been difficult to dismantle. 
Environmentally, the neighborhoods targeted by redlining continue to face significant challenges\u2014they generally feature less greenspace, suffer from higher pollution levels, and are more vulnerable to the impacts of climate change. These factors compound the health and wellness challenges faced by residents. Despite these adversities, urban communities have continually demonstrated remarkable resilience. Adaptation strategies, such as community-led green initiatives, urban agriculture, and grassroots activism, have emerged as responses to these systemic disturbances. By enhancing green infrastructure and advocating for equitable environmental policies, these communities strive to increase their resilience against both historical inequities and environmental challenges. [![](https://img.youtube.com/vi/O5FBJyqfoLM/hqdefault.jpg)](https://youtu.be/O5FBJyqfoLM) Watch the video Video Title: Exploring the Impacts of Historical Redlining on Urban Development Description: Click on the image above to watch a video that delves into the consequences of historical redlining and its ongoing impact on urban environments. This educational piece offers insights into how such discriminatory practices have shaped cities and what can be learned from them. The following group exercise will not only uncover the impact of redlining on urban greenspace but also highlight the adaptive strategies developed in response to this enduring disturbance. Through mapping and analysis, we aim to illustrate the powerful role that geographic data can play in understanding and fostering urban resilience and social equity.","title":"Resilience and Adaptation in Urban Environments"},{"location":"worksheet_redlining/#references","text":"Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. (2021). Redlines and Greenspace: The Relationship between Historical Redlining and 2010 Greenspace across the United States. Environmental Health Perspectives , 129(1), 017006. DOI:10.1289/EHP7495. Hoffman, J. S., Shandas, V., & Pendleton, N. (2020). The Effects of Historical Housing Policies on Resident Exposure to Intra-Urban Heat: A Study of 108 US Urban Areas. Climate , 8(1), 12. DOI:10.3390/cli8010012.","title":"References"},{"location":"worksheet_redlining/#goals-of-this-group-activity","text":"The primary objectives of this tutorial are: 1. To practice coding in CyVerse. 2. To analyze the relationship between HOLC grades and the presence of urban greenspace. 3. To understand how historic policies continue to affect the spatial distribution of environmental amenities.","title":"Goals of this group activity"},{"location":"worksheet_redlining/#part-1-accessing-and-visualizing-historic-redlining-data","text":"We will begin by accessing HOLC maps from the Mapping Inequality project and overlaying this data with modern geographic datasets to visualize the historical impact on contemporary urban landscapes.","title":"Part 1: Accessing and Visualizing Historic Redlining Data"},{"location":"worksheet_redlining/#data-acquisition","text":"Download HOLC map shapefiles from the University of Richmond\u2019s Mapping Inequality Project. Utilize satellite imagery and other geospatial data to map current greenspace using the normalized difference vegetation index (NDVI).","title":"Data Acquisition"},{"location":"worksheet_redlining/#analysis-methodology","text":"Replicate the approach used by Nardone et al. to calculate NDVI values for each HOLC neighborhood, assessing greenspace as a health-promoting resource. 
Employ statistical methods such as propensity score matching to control for confounding variables and estimate the true impact of HOLC grades on urban greenspace. img { width: 100%; } details summary { color: black; background-color: white; } details[open] summary { color: black; } R libraries we use in this analysis if ( ! requireNamespace ( \"tidytext\" , quietly = TRUE )) { install.packages ( \"tidytext\" ) } library ( tidytext ) ## Warning: package 'tidytext' was built under R version 4.3.2 library ( sf ) ## Warning: package 'sf' was built under R version 4.3.2 ## Linking to GEOS 3.11.0, GDAL 3.5.3, PROJ 9.1.0; sf_use_s2() is TRUE library ( ggplot2 ) ## Warning: package 'ggplot2' was built under R version 4.3.2 library ( ggthemes ) ## Warning: package 'ggthemes' was built under R version 4.3.2 library ( dplyr ) ## ## Attaching package: 'dplyr' ## The following objects are masked from 'package:stats': ## ## filter, lag ## The following objects are masked from 'package:base': ## ## intersect, setdiff, setequal, union library ( rstac ) ## Warning: package 'rstac' was built under R version 4.3.2 library ( gdalcubes ) ## Warning: package 'gdalcubes' was built under R version 4.3.2 library ( gdalUtils ) ## Please note that rgdal will be retired during October 2023, ## plan transition to sf/stars/terra functions using GDAL and PROJ ## at your earliest convenience. ## See https://r-spatial.org/r/2023/05/15/evolution4.html and https://github.com/r-spatial/evolution ## rgdal: version: 1.6-7, (SVN revision 1203) ## Geospatial Data Abstraction Library extensions to R successfully loaded ## Loaded GDAL runtime: GDAL 3.5.3, released 2022/10/21 ## Path to GDAL shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/rgdal/gdal ## GDAL does not use iconv for recoding strings. ## GDAL binary built with GEOS: TRUE ## Loaded PROJ runtime: Rel. 9.1.0, September 1st, 2022, [PJ_VERSION: 910] ## Path to PROJ shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/gdalcubes/proj ## PROJ CDN enabled: FALSE ## Linking to sp version:1.6-1 ## To mute warnings of possible GDAL/OSR exportToProj4() degradation, ## use options(\"rgdal_show_exportToProj4_warnings\"=\"none\") before loading sp or rgdal. ## ## Attaching package: 'gdalUtils' ## The following object is masked from 'package:sf': ## ## gdal_rasterize library ( gdalcubes ) library ( colorspace ) library ( terra ) ## Warning: package 'terra' was built under R version 4.3.2 ## terra 1.7.71 ## ## Attaching package: 'terra' ## The following object is masked from 'package:colorspace': ## ## RGB ## The following objects are masked from 'package:gdalcubes': ## ## animate, crop, size library ( tidyterra ) ## ## Attaching package: 'tidyterra' ## The following object is masked from 'package:stats': ## ## filter library ( basemapR ) library ( tidytext ) library ( ggwordcloud ) library ( osmextract ) ## Data (c) OpenStreetMap contributors, ODbL 1.0. https://www.openstreetmap.org/copyright. ## Check the package website, https://docs.ropensci.org/osmextract/, for more details. 
library ( sf ) library ( ggplot2 ) library ( ggthemes ) library ( glue ) ## ## Attaching package: 'glue' ## The following object is masked from 'package:terra': ## ## trim library ( purrr ) FUNCTION: List cities where HOLC data are available # Function to get a list of unique cities and states from the redlining data get_city_state_list_from_redlining_data <- function () { # URL to the GeoJSON data url <- \"https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson\" # Read the GeoJSON file into an sf object redlining_data <- tryCatch ({ read_sf ( url ) }, error = function ( e ) { stop ( \"Error reading GeoJSON data: \" , e $ message ) }) # Check for the existence of 'city' and 'state' columns if ( ! all ( c ( \"city\" , \"state\" ) %in% names ( redlining_data ))) { stop ( \"The required columns 'city' and/or 'state' do not exist in the data.\" ) } # Extract a unique list of city and state pairs without the geometries city_state_df <- redlining_data %>% select ( city , state ) %>% st_set_geometry ( NULL ) %>% # Drop the geometry to avoid issues with invalid shapes distinct ( city , state ) %>% arrange ( state , city ) # Arrange the list alphabetically by state, then by city # Return the dataframe of unique city-state pairs return ( city_state_df ) } Stream list of available HOLC cities #Retrieve the list of cities and states city_state_list <- get_city_state_list_from_redlining_data () knitr :: kable ( city_state_list , format = \"markdown\" ) | city | state | |:---------------------------------|:------| | Birmingham | AL | | Mobile | AL | | Montgomery | AL | | Arkadelphia | AR | | Batesville | AR | | Camden | AR | | Conway | AR | | El Dorado | AR | | Fort Smith | AR | | Little Rock | AR | | Russellville | AR | | Texarkana | AR | | Phoenix | AZ | | Fresno | CA | | Los Angeles | CA | | Oakland | CA | | Sacramento | CA | | San Diego | CA | | San Francisco | CA | | San Jose | CA | | Stockton | CA | | Boulder | CO | | Colorado Springs | CO | | Denver | CO | | Fort Collins | CO | | Fort Morgan | CO | | Grand Junction | CO | | Greeley | CO | | Longmont | CO | | Pueblo | CO | | Bridgeport and Fairfield | CT | | Hartford | CT | | New Britain | CT | | New Haven | CT | | Stamford, Darien, and New Canaan | CT | | Waterbury | CT | | Crestview | FL | | Daytona Beach | FL | | DeFuniak Springs | FL | | DeLand | FL | | Jacksonville | FL | | Miami | FL | | New Smyrna | FL | | Orlando | FL | | Pensacola | FL | | St. Petersburg | FL | | Tampa | FL | | Atlanta | GA | | Augusta | GA | | Columbus | GA | | Macon | GA | | Savannah | GA | | Boone | IA | | Cedar Rapids | IA | | Council Bluffs | IA | | Davenport | IA | | Des Moines | IA | | Dubuque | IA | | Sioux City | IA | | Waterloo | IA | | Aurora | IL | | Chicago | IL | | Decatur | IL | | East St. Louis | IL | | Joliet | IL | | Peoria | IL | | Rockford | IL | | Springfield | IL | | Evansville | IN | | Fort Wayne | IN | | Indianapolis | IN | | Lake Co. 
Gary | IN | | Muncie | IN | | South Bend | IN | | Terre Haute | IN | | Atchison | KS | | Junction City | KS | | Topeka | KS | | Wichita | KS | | Covington | KY | | Lexington | KY | | Louisville | KY | | New Orleans | LA | | Shreveport | LA | | Arlington | MA | | Belmont | MA | | Boston | MA | | Braintree | MA | | Brockton | MA | | Brookline | MA | | Cambridge | MA | | Chelsea | MA | | Dedham | MA | | Everett | MA | | Fall River | MA | | Fitchburg | MA | | Haverhill | MA | | Holyoke Chicopee | MA | | Lawrence | MA | | Lexington | MA | | Lowell | MA | | Lynn | MA | | Malden | MA | | Medford | MA | | Melrose | MA | | Milton | MA | | Needham | MA | | New Bedford | MA | | Newton | MA | | Pittsfield | MA | | Quincy | MA | | Revere | MA | | Salem | MA | | Saugus | MA | | Somerville | MA | | Springfield | MA | | Waltham | MA | | Watertown | MA | | Winchester | MA | | Winthrop | MA | | Worcester | MA | | Baltimore | MD | | Augusta | ME | | Boothbay | ME | | Portland | ME | | Sanford | ME | | Waterville | ME | | Battle Creek | MI | | Bay City | MI | | Detroit | MI | | Flint | MI | | Grand Rapids | MI | | Jackson | MI | | Kalamazoo | MI | | Lansing | MI | | Muskegon | MI | | Pontiac | MI | | Saginaw | MI | | Austin | MN | | Duluth | MN | | Mankato | MN | | Minneapolis | MN | | Rochester | MN | | St. Cloud | MN | | St. Paul | MN | | Staples | MN | | Cape Girardeau | MO | | Carthage | MO | | Greater Kansas City | MO | | Joplin | MO | | Springfield | MO | | St. Joseph | MO | | St. Louis | MO | | Jackson | MS | | Asheville | NC | | Charlotte | NC | | Durham | NC | | Elizabeth City | NC | | Fayetteville | NC | | Goldsboro | NC | | Greensboro | NC | | Hendersonville | NC | | High Point | NC | | New Bern | NC | | Rocky Mount | NC | | Statesville | NC | | Winston-Salem | NC | | Fargo | ND | | Grand Forks | ND | | Minot | ND | | Williston | ND | | Lincoln | NE | | Omaha | NE | | Manchester | NH | | Atlantic City | NJ | | Bergen Co. | NJ | | Camden | NJ | | Essex Co. | NJ | | Hudson Co. | NJ | | Monmouth | NJ | | Passaic County | NJ | | Perth Amboy | NJ | | Trenton | NJ | | Union Co. | NJ | | Albany | NY | | Binghamton-Johnson City | NY | | Bronx | NY | | Brooklyn | NY | | Buffalo | NY | | Elmira | NY | | Jamestown | NY | | Lower Westchester Co. 
| NY | | Manhattan | NY | | Niagara Falls | NY | | Poughkeepsie | NY | | Queens | NY | | Rochester | NY | | Schenectady | NY | | Staten Island | NY | | Syracuse | NY | | Troy | NY | | Utica | NY | | Akron | OH | | Canton | OH | | Cleveland | OH | | Columbus | OH | | Dayton | OH | | Hamilton | OH | | Lima | OH | | Lorain | OH | | Portsmouth | OH | | Springfield | OH | | Toledo | OH | | Warren | OH | | Youngstown | OH | | Ada | OK | | Alva | OK | | Enid | OK | | Miami Ottawa County | OK | | Muskogee | OK | | Norman | OK | | Oklahoma City | OK | | South McAlester | OK | | Tulsa | OK | | Portland | OR | | Allentown | PA | | Altoona | PA | | Bethlehem | PA | | Chester | PA | | Erie | PA | | Harrisburg | PA | | Johnstown | PA | | Lancaster | PA | | McKeesport | PA | | New Castle | PA | | Philadelphia | PA | | Pittsburgh | PA | | Wilkes-Barre | PA | | York | PA | | Pawtucket & Central Falls | RI | | Providence | RI | | Woonsocket | RI | | Aiken | SC | | Charleston | SC | | Columbia | SC | | Greater Anderson | SC | | Greater Greenville | SC | | Orangeburg | SC | | Rock Hill | SC | | Spartanburg | SC | | Sumter | SC | | Aberdeen | SD | | Huron | SD | | Milbank | SD | | Mitchell | SD | | Rapid City | SD | | Sioux Falls | SD | | Vermillion | SD | | Watertown | SD | | Chattanooga | TN | | Elizabethton | TN | | Erwin | TN | | Greenville | TN | | Johnson City | TN | | Knoxville | TN | | Memphis | TN | | Nashville | TN | | Amarillo | TX | | Austin | TX | | Beaumont | TX | | Dallas | TX | | El Paso | TX | | Fort Worth | TX | | Galveston | TX | | Houston | TX | | Port Arthur | TX | | San Antonio | TX | | Waco | TX | | Wichita Falls | TX | | Ogden | UT | | Salt Lake City | UT | | Bristol | VA | | Danville | VA | | Harrisonburg | VA | | Lynchburg | VA | | Newport News | VA | | Norfolk | VA | | Petersburg | VA | | Phoebus | VA | | Richmond | VA | | Roanoke | VA | | Staunton | VA | | Bennington | VT | | Brattleboro | VT | | Burlington | VT | | Montpelier | VT | | Newport City | VT | | Poultney | VT | | Rutland | VT | | Springfield | VT | | St. Albans | VT | | St. Johnsbury | VT | | Windsor | VT | | Seattle | WA | | Spokane | WA | | Tacoma | WA | | Kenosha | WI | | Madison | WI | | Milwaukee Co. 
| WI | | Oshkosh | WI | | Racine | WI | | Charleston | WV | | Huntington | WV | | Wheeling | WV | FUNCTION: Stream HOLC data from a city # Function to load and filter redlining data by city load_city_redlining_data <- function ( city_name ) { # URL to the GeoJSON data url <- \"https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson\" # Read the GeoJSON file into an sf object redlining_data <- read_sf ( url ) # Filter the data for the specified city and non-empty grades city_redline <- redlining_data %>% filter ( city == city_name ) # Return the filtered data return ( city_redline ) } Stream HOLC data for Denver, CO # Load redlining data for Denver denver_redlining <- load_city_redlining_data ( \"Denver\" ) knitr :: kable ( head ( denver_redlining ), format = \"markdown\" ) | area_id | city | state | city_survey | cat | grade | label | res | com | ind | fill | GEOID10 | GISJOIN | calc_area | pct_tract | geometry | |--------:|:-------|:------|:------------|:-----|:------|:------|:-----|:------|:------|:---------|:------------|:---------------|-------------:|----------:|:-----------------------------| | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004104 | G0800310004104 | 1.525535e+01 | 0.00001 | MULTIPOLYGON (((-104.9125 3\u2026 | | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004201 | G0800310004201 | 3.987458e+05 | 0.20900 | MULTIPOLYGON (((-104.9246 3\u2026 | | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004304 | G0800310004304 | 1.554195e+05 | 0.05927 | MULTIPOLYGON (((-104.9125 3\u2026 | | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004202 | G0800310004202 | 1.117770e+06 | 0.57245 | MULTIPOLYGON (((-104.9125 3\u2026 | | 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \\#76a865 | 08031004302 | G0800310004302 | 3.133415e+05 | 0.28381 | MULTIPOLYGON (((-104.928 39\u2026 | | 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \\#76a865 | 08031004301 | G0800310004301 | 1.221218e+05 | 0.08622 | MULTIPOLYGON (((-104.9305 3\u2026 | FUNCTION: Get Points-of-Interest from city of interest get_places <- function ( polygon_layer , type = \"food\" ) { # Check if the input is an sf object if ( ! 
inherits ( polygon_layer , \"sf\" )) { stop ( \"The provided object is not an sf object.\" ) } # Create a bounding box from the input sf object bbox_here <- st_bbox ( polygon_layer ) |> st_as_sfc () if ( type == \"food\" ){ my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( shop IN ('supermarket', 'bodega', 'market', 'other_market', 'farm', 'garden_centre', 'doityourself', 'farm_supply', 'compost', 'mulch', 'fertilizer') OR amenity IN ('social_facility', 'market', 'restaurant', 'coffee') OR leisure = 'garden' OR landuse IN ('farm', 'farmland', 'row_crops', 'orchard_plantation', 'dairy_grazing') OR building IN ('brewery', 'winery', 'distillery') OR shop = 'greengrocer' OR amenity = 'marketplace' )\" title <- \"food\" } if ( type == \"processed_food\" ) { my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( amenity IN ('fast_food', 'cafe', 'pub') OR shop IN ('convenience', 'supermarket') OR shop = 'kiosk' )\" title <- \"Processed Food Locations\" } if ( type == \"natural_habitats\" ){ my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( boundary = 'protected_area' OR natural IN ('tree', 'wood') OR landuse = 'forest' OR leisure = 'park' )\" title <- \"Natural habitats or City owned trees\" } if ( type == \"roads\" ){ my_layer <- \"lines\" my_query <- \"SELECT * FROM lines WHERE ( highway IN ('motorway', 'trunk', 'primary', 'secondary', 'tertiary') )\" title <- \"Major roads\" } if ( type == \"rivers\" ){ my_layer <- \"lines\" my_query <- \"SELECT * FROM lines WHERE ( waterway IN ('river'))\" title <- \"Major rivers\" } if ( type == \"internet_access\" ) { my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( amenity IN ('library', 'cafe', 'community_centre', 'public_building') AND internet_access = 'yes' )\" title <- \"Internet Access Locations\" } if ( type == \"water_bodies\" ) { my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( natural IN ('water', 'lake', 'pond') OR water IN ('lake', 'pond') OR landuse = 'reservoir' )\" title <- \"Water Bodies\" } if ( type == \"government_buildings\" ) { my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( amenity IN ('townhall', 'courthouse', 'embassy', 'police', 'fire_station') OR building IN ('capitol', 'government') )\" title <- \"Government Buildings\" } # Use the bbox to get data with oe_get(), specifying the desired layer and a custom SQL query for fresh food places tryCatch ({ places <- oe_get ( place = bbox_here , layer = my_layer , # Adjusted layer; change as per actual data availability query = my_query , quiet = TRUE ) places <- st_make_valid ( places ) # Crop the data to the bounding box cropped_places <- st_crop ( places , bbox_here ) # Plotting the cropped fresh food places plot <- ggplot ( data = cropped_places ) + geom_sf ( fill = \"cornflowerblue\" , color = \"cornflowerblue\" ) + ggtitle ( title ) + theme_tufte () + theme ( legend.position = \"none\" , # Optionally hide the legend axis.text = element_blank (), # Remove axis text axis.title = element_blank (), # Remove axis titles axis.ticks = element_blank (), # Remove axis ticks plot.background = element_rect ( fill = \"white\" , color = NA ), # Set the plot background to white panel.background = element_rect ( fill = \"white\" , color = NA ), # Set the panel background to white panel.grid.major = element_blank (), # Remove major grid lines panel.grid.minor = element_blank (), ) # Save the plot as a PNG file png_filename 
<- paste0 ( title , \"_\" , Sys.Date (), \".png\" ) ggsave ( png_filename , plot , width = 10 , height = 8 , units = \"in\" ) # Return the cropped dataset return ( cropped_places ) }, error = function ( e ) { stop ( \"Failed to retrieve or plot data: \" , e $ message ) }) } FUNCTION: Plot POI over HOLC grades plot_city_redlining <- function ( redlining_data , filename = \"redlining_plot.png\" ) { # Fetch additional geographic data based on redlining data roads <- get_places ( redlining_data , type = \"roads\" ) rivers <- get_places ( redlining_data , type = \"rivers\" ) # Filter residential zones with valid grades and where city survey is TRUE residential_zones <- redlining_data %>% filter ( city_survey == TRUE & grade != \"\" ) # Colors for the grades colors <- c ( \"#76a865\" , \"#7cb5bd\" , \"#ffff00\" , \"#d9838d\" ) # Plot the data using ggplot2 plot <- ggplot () + geom_sf ( data = roads , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.5 , lwd = 1.1 ) + geom_sf ( data = residential_zones , aes ( fill = grade ), alpha = 0.5 ) + theme_tufte () + scale_fill_manual ( values = colors ) + labs ( fill = 'HOLC Categories' ) + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), panel.grid.major = element_blank (), panel.grid.minor = element_blank (), legend.position = \"right\" ) # Save the plot as a high-resolution PNG file ggsave ( filename , plot , width = 10 , height = 8 , units = \"in\" , dpi = 600 ) # Return the plot object if needed for further manipulation or checking return ( plot ) } Plot Denver Redlining denver_plot <- plot_city_redlining ( denver_redlining ) Stream amenities by category food <- get_places ( denver_redlining , type = \"food\" ) food_processed <- get_places ( denver_redlining , type = \"processed_food\" ) natural_habitats <- get_places ( denver_redlining , type = \"natural_habitats\" ) roads <- get_places ( denver_redlining , type = \"roads\" ) rivers <- get_places ( denver_redlining , type = \"rivers\" ) #water_bodies <- get_places(denver_redlining, type=\"water_bodies\") government_buildings <- get_places ( denver_redlining , type = \"government_buildings\" ) FUNCTION: Plot the HOLC grades individually split_plot <- function ( sf_data , roads , rivers ) { # Filter for grades A, B, C, and D sf_data_filtered <- sf_data %>% filter ( grade %in% c ( 'A' , 'B' , 'C' , 'D' )) # Define a color for each grade grade_colors <- c ( \"A\" = \"#76a865\" , \"B\" = \"#7cb5bd\" , \"C\" = \"#ffff00\" , \"D\" = \"#d9838d\" ) # Create the plot with panels for each grade plot <- ggplot ( data = sf_data_filtered ) + geom_sf ( data = roads , alpha = 0.1 , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.1 , lwd = 1.1 ) + geom_sf ( aes ( fill = grade )) + facet_wrap ( ~ grade , nrow = 1 ) + # Free scales for different zoom levels if needed scale_fill_manual ( values = grade_colors ) + theme_minimal () + labs ( fill = 'HOLC Grade' ) + theme_tufte () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"none\" , # Optionally hide the legend axis.text = element_blank (), # Remove axis text axis.title = element_blank (), # Remove axis titles axis.ticks = element_blank (), # Remove axis ticks panel.grid.major = element_blank (), # Remove major grid lines panel.grid.minor = element_blank ()) ggsave ( plot , filename = \"HOLC_grades_individually.png\" , width 
= 10 , height = 4 , units = \"in\" , dpi = 1200 ) return ( plot ) } Plot 4 HOLC grades individually plot_row <- split_plot ( denver_redlining , roads , rivers ) FUNCTION: Map an amenity over each grade individually process_and_plot_sf_layers <- function ( layer1 , layer2 , output_file = \"output_plot.png\" ) { # Make geometries valid layer1 <- st_make_valid ( layer1 ) layer2 <- st_make_valid ( layer2 ) # Optionally, simplify geometries to remove duplicate vertices layer1 <- st_simplify ( layer1 , preserveTopology = TRUE ) |> filter ( grade != \"\" ) # Prepare a list to store results results <- list () # Loop through each grade and perform operations for ( grade in c ( \"A\" , \"B\" , \"C\" , \"D\" )) { # Filter layer1 for current grade layer1_grade <- layer1 [ layer1 $ grade == grade , ] # Buffer the geometries of the current grade buffered_layer1_grade <- st_buffer ( layer1_grade , dist = 500 ) # Intersect with the second layer intersections <- st_intersects ( layer2 , buffered_layer1_grade , sparse = FALSE ) selected_polygons <- layer2 [ rowSums ( intersections ) > 0 , ] # Add a new column to store the grade information selected_polygons $ grade <- grade # Store the result results [[ grade ]] <- selected_polygons } # Combine all selected polygons from different grades into one sf object final_selected_polygons <- do.call ( rbind , results ) # Define colors for the grades grade_colors <- c ( \"A\" = \"grey\" , \"B\" = \"grey\" , \"C\" = \"grey\" , \"D\" = \"grey\" ) # Create the plot plot <- ggplot () + geom_sf ( data = roads , alpha = 0.05 , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.1 , lwd = 1.1 ) + geom_sf ( data = layer1 , fill = \"grey\" , color = \"grey\" , size = 0.1 ) + facet_wrap ( ~ grade , nrow = 1 ) + geom_sf ( data = final_selected_polygons , fill = \"green\" , color = \"green\" , size = 0.1 ) + facet_wrap ( ~ grade , nrow = 1 ) + #scale_fill_manual(values = grade_colors) + #scale_color_manual(values = grade_colors) + theme_minimal () + labs ( fill = 'HOLC Grade' ) + theme_tufte () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"none\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot as a high-resolution PNG file ggsave ( output_file , plot , width = 10 , height = 4 , units = \"in\" , dpi = 1200 ) # Return the plot for optional further use return ( list ( plot = plot , sf = final_selected_polygons )) } FUNCTION: Create word cloud per grade create_wordclouds_by_grade <- function ( sf_object , output_file = \"food_word_cloud_per_grade.png\" , title = \"Healthy food place names word cloud\" , max_size = 25 , col_select = \"name\" ) { # Extract relevant data and prepare text data text_data <- sf_object %>% select ( grade , col_select ) %>% filter ( ! is.na ( col_select )) %>% unnest_tokens ( output = \"word\" , input = col_select , token = \"words\" ) %>% count ( grade , word , sort = TRUE ) %>% ungroup () %>% filter ( n () > 1 ) # Filter to remove overly common or single-occurrence words # Ensure there are no NA values in the 'word' column text_data <- text_data %>% filter ( ! 
is.na ( word )) # Handle cases where text_data might be empty if ( nrow ( text_data ) == 0 ) { stop ( \"No data available for creating word clouds.\" ) } # Create a word cloud using ggplot2 and ggwordcloud p <- ggplot ( ) + geom_text_wordcloud_area ( data = text_data , aes ( label = word , size = n ), rm_outside = TRUE ) + scale_size_area ( max_size = max_size ) + facet_wrap ( ~ grade , nrow = 1 ) + scale_color_gradient ( low = \"darkred\" , high = \"red\" ) + theme_minimal () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), panel.spacing = unit ( 0.5 , \"lines\" ), plot.title = element_text ( size = 16 , face = \"bold\" ), legend.position = \"none\" ) + labs ( title = title ) # Attempt to save the plot and handle any errors tryCatch ({ ggsave ( output_file , p , width = 10 , height = 4 , units = \"in\" , dpi = 600 ) }, error = function ( e ) { cat ( \"Error in saving the plot: \" , e $ message , \"\\n\" ) }) return ( p ) } Map food over each grade individually layer1 <- denver_redlining layer2 <- food food_match <- process_and_plot_sf_layers ( layer1 , layer2 , \"food_match.png\" ) WORD CLOUD: Names of places with fresh food food_word_cloud <- create_wordclouds_by_grade ( food_match $ sf , output_file = \"food_word_cloud_per_grade.png\" ) Warning: Using an external vector in selections was deprecated in tidyselect 1.1.0. \u2139 Please use `all_of()` or `any_of()` instead. # Was: data %>% select(col_select) # Now: data %>% select(all_of(col_select)) See . Warning in wordcloud_boxes(data_points = points_valid_first, boxes = boxes, : Some words could not fit on page. They have been removed. Map processed food over each grade individually layer1 <- denver_redlining layer2 <- food_processed processed_food_match <- process_and_plot_sf_layers ( layer1 , layer2 , \"processed_food_match.png\" ) WORD CLOUD: Names of places with processed food processed_food_cloud <- create_wordclouds_by_grade ( processed_food_match $ sf , output_file = \"processed_food_word_cloud_per_grade.png\" , title = \"Processed food place names where larger text is more frequent\" , max_size = 17 )","title":"Analysis Methodology"},{"location":"worksheet_redlining/#part-2-integrating-environmental-data","text":"","title":"Part 2: Integrating Environmental Data"},{"location":"worksheet_redlining/#data-processing","text":"Use satellite data from 2010 to analyze greenspace using NDVI, an index that measures the quantity of vegetation in an area. Apply methods to adjust for potential confounders as described in the study, ensuring that comparisons of greenspace across HOLC grades are valid and not biased by historical or socio-demographic factors. 
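The tidyselect warning printed after the word-cloud call above comes from passing the column name to `select()` as a plain string. A sketch of how the first step of `create_wordclouds_by_grade()` could be updated: wrap the string in `all_of()` as the warning suggests, and use the `.data` pronoun in `filter()` so the `NA` check applies to the column's values rather than to the string itself. The rest of the function would stay as written.

```r
# Updated selection step for create_wordclouds_by_grade(): all_of() for the
# programmatic column name, .data[[...]] for the NA filter. Assumes sf_object
# and col_select as in the function definition above.
library(dplyr)

text_prep <- sf_object %>%
  select(grade, all_of(col_select)) %>%   # was: select(grade, col_select)
  filter(!is.na(.data[[col_select]]))     # was: filter(!is.na(col_select))
```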
Map natural habitats over each grade individually layer1 <- denver_redlining layer2 <- natural_habitats natural_habitats_match <- process_and_plot_sf_layers ( layer1 , layer2 , \"natural_habitats_match.png\" ) print ( natural_habitats_match $ plot ) ![](worksheet_redlining_files/figure-gfm/unnamed-chunk-18-1.png) WORD CLOUD: Name of natural habitat area natural_habitats_cloud <- create_wordclouds_by_grade ( natural_habitats_match $ sf , output_file = \"natural_habitats_word_cloud_per_grade.png\" , title = \"Natural habitats place names where larger text is more frequent\" , max_size = 35 ) FUNCTION: Stream NDVI data polygon_layer <- denver_redlining # Function to process satellite data based on an SF polygon's extent process_satellite_data <- function ( polygon_layer , start_date , end_date , assets , fps = 1 , output_file = \"anim.gif\" ) { # Record start time start_time <- Sys.time () # Calculate the bbox from the polygon layer bbox <- st_bbox ( polygon_layer ) s = stac ( \"https://earth-search.aws.element84.com/v0\" ) # Use stacR to search for Sentinel-2 images within the bbox and date range items = s |> stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( bbox [ \"xmin\" ], bbox [ \"ymin\" ], bbox [ \"xmax\" ], bbox [ \"ymax\" ]), datetime = paste ( start_date , end_date , sep = \"/\" ), limit = 500 ) %>% post_request () # Define mask for Sentinel-2 image quality #S2.mask <- image_mask(\"SCL\", values = c(3, 8, 9)) # Create a collection of images filtering by cloud cover col <- stac_image_collection ( items $ features , asset_names = assets , property_filter = function ( x ) { x [[ \"eo:cloud_cover\" ]] < 30 }) # Define a view for processing the data v <- cube_view ( srs = \"EPSG:4326\" , extent = list ( t0 = start_date , t1 = end_date , left = bbox [ \"xmin\" ], right = bbox [ \"xmax\" ], top = bbox [ \"ymax\" ], bottom = bbox [ \"ymin\" ]), dx = 0.001 , dy = 0.001 , dt = \"P1M\" , aggregation = \"median\" , resampling = \"bilinear\" ) # Calculate NDVI and create an animation ndvi_col <- function ( n ) { rev ( sequential_hcl ( n , \"Green-Yellow\" )) } #raster_cube(col, v, mask = S2.mask) %>% raster_cube ( col , v ) %>% select_bands ( c ( \"B04\" , \"B08\" )) %>% apply_pixel ( \"(B08-B04)/(B08+B04)\" , \"NDVI\" ) %>% gdalcubes :: animate ( col = ndvi_col , zlim = c ( -0.2 , 1 ), key.pos = 1 , save_as = output_file , fps = fps ) # Calculate processing time end_time <- Sys.time () processing_time <- difftime ( end_time , start_time ) # Return processing time return ( processing_time ) } Stream NDVI data: animation processing_time <- process_satellite_data ( denver_redlining , \"2022-05-31\" , \"2023-05-31\" , c ( \"B04\" , \"B08\" )) FUNCTION: Stream year average NDVI yearly_average_ndvi <- function ( polygon_layer , output_file = \"ndvi.png\" , dx = 0.01 , dy = 0.01 ) { # Record start time start_time <- Sys.time () # Calculate the bbox from the polygon layer bbox <- st_bbox ( polygon_layer ) s = stac ( \"https://earth-search.aws.element84.com/v0\" ) # Search for Sentinel-2 images within the bbox for June items <- s |> stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( bbox [ \"xmin\" ], bbox [ \"ymin\" ], bbox [ \"xmax\" ], bbox [ \"ymax\" ]), datetime = \"2023-01-01/2023-12-31\" , limit = 500 ) %>% post_request () # Create a collection of images filtering by cloud cover col <- stac_image_collection ( items $ features , asset_names = c ( \"B04\" , \"B08\" ), property_filter = function ( x ) { x [[ \"eo:cloud_cover\" ]] < 80 }) # Define a view for processing 
the data specifically for June v <- cube_view ( srs = \"EPSG:4326\" , extent = list ( t0 = \"2023-01-01\" , t1 = \"2023-12-31\" , left = bbox [ \"xmin\" ], right = bbox [ \"xmax\" ], top = bbox [ \"ymax\" ], bottom = bbox [ \"ymin\" ]), dx = dx , dy = dy , dt = \"P1Y\" , aggregation = \"median\" , resampling = \"bilinear\" ) # Process NDVI ndvi_rast <- raster_cube ( col , v ) %>% select_bands ( c ( \"B04\" , \"B08\" )) %>% apply_pixel ( \"(B08-B04)/(B08+B04)\" , \"NDVI\" ) %>% write_tif () |> terra :: rast () # Convert terra Raster to ggplot using tidyterra ndvi_plot <- ggplot () + geom_spatraster ( data = ndvi_rast , aes ( fill = NDVI )) + scale_fill_viridis_c ( option = \"viridis\" , direction = -1 , name = \"NDVI\" ) + labs ( title = \"NDVI mean for 2023\" ) + theme_minimal () + coord_sf () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"right\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot as a high-resolution PNG file ggsave ( output_file , ndvi_plot , width = 10 , height = 8 , dpi = 600 ) # Calculate processing time end_time <- Sys.time () processing_time <- difftime ( end_time , start_time ) # Return the plot and processing time return ( list ( plot = ndvi_plot , processing_time = processing_time , raster = ndvi_rast )) } Stream NDVI: high resolution ndvi_background <- yearly_average_ndvi ( denver_redlining , dx = 0.0001 , dy = 0.0001 ) FUNCTION: Map NDVI per HOLC grade individually create_mask_and_plot <- function ( redlining_sf , background_raster = ndvi $ raster , roads = NULL , rivers = NULL ){ start_time <- Sys.time () # Start timing # Validate and prepare the redlining data redlining_sf <- redlining_sf %>% filter ( grade != \"\" ) %>% st_make_valid () bbox <- st_bbox ( redlining_sf ) # Get original bounding box expanded_bbox <- expand_bbox ( bbox , 6000 , 1000 ) # expanded_bbox_poly <- st_as_sfc ( expanded_bbox , crs = st_crs ( redlining_sf )) %>% st_make_valid () # Initialize an empty list to store masks masks <- list () # Iterate over each grade to create masks unique_grades <- unique ( redlining_sf $ grade ) for ( grade in unique_grades ) { # Filter polygons by grade grade_polygons <- redlining_sf [ redlining_sf $ grade == grade , ] # Create an \"inverted\" mask by subtracting these polygons from the background mask <- st_difference ( expanded_bbox_poly , st_union ( grade_polygons )) # Store the mask in the list with the grade as the name masks [[ grade ]] <- st_sf ( geometry = mask , grade = grade ) } # Combine all masks into a single sf object mask_sf <- do.call ( rbind , masks ) # Normalize the grades so that C.2 becomes C, but correctly handle other grades mask_sf $ grade <- ifelse ( mask_sf $ grade == \"C.2\" , \"C\" , mask_sf $ grade ) # Prepare the plot plot <- ggplot () + geom_spatraster ( data = background_raster , aes ( fill = NDVI )) + scale_fill_viridis_c ( name = \"NDVI\" , option = \"viridis\" , direction = -1 ) + geom_sf ( data = mask_sf , aes ( color = grade ), fill = \"white\" , size = 0.1 , show.legend = FALSE ) + scale_color_manual ( values = c ( \"A\" = \"white\" , \"B\" = \"white\" , \"C\" = \"white\" , \"D\" = \"white\" ), name = \"Grade\" ) + facet_wrap ( ~ grade , nrow = 1 ) + geom_sf ( data = roads , alpha = 1 , lwd = 0.1 , color = \"white\" ) + geom_sf ( data = rivers , color = \"white\" , alpha = 
0.5 , lwd = 1.1 ) + labs ( title = \"NDVI: Normalized Difference Vegetation Index\" ) + theme_minimal () + coord_sf ( xlim = c ( bbox [ \"xmin\" ], bbox [ \"xmax\" ]), ylim = c ( bbox [ \"ymin\" ], bbox [ \"ymax\" ]), expand = FALSE ) + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"bottom\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot ggsave ( \"redlining_mask_ndvi.png\" , plot , width = 10 , height = 4 , dpi = 600 ) end_time <- Sys.time () # End timing runtime <- end_time - start_time # Return the plot and runtime return ( list ( plot = plot , runtime = runtime , mask_sf = mask_sf )) } Stream NDVI: low resolution ndvi_background_low <- yearly_average_ndvi ( denver_redlining ) Map low resolution NDVI per HOLC grade ndvi <- create_mask_and_plot ( denver_redlining , background_raster = ndvi_background_low $ raster , roads = roads , rivers = rivers ) FUNCTION: Map Denver City provided data per HOLC grade process_city_inventory_data <- function ( address , inner_file , polygon_layer , output_filename , variable_label = 'Tree Density' ) { # Download and read the shapefile full_path <- glue ( \"/vsizip/vsicurl/{address}/{inner_file}\" ) shape_data <- st_read ( full_path , quiet = TRUE ) |> st_as_sf () # Process the shape data with the provided polygon layer processed_data <- process_and_plot_sf_layers ( polygon_layer , shape_data , paste0 ( output_filename , \".png\" )) # Extract trees from the processed data trees <- processed_data $ sf denver_redlining_residential <- polygon_layer |> filter ( grade != \"\" ) # Generate the density plot plot <- ggplot () + geom_sf ( data = roads , alpha = 0.05 , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.1 , lwd = 1.1 ) + geom_sf ( data = denver_redlining_residential , fill = \"grey\" , color = \"grey\" , size = 0.1 ) + facet_wrap ( ~ grade , nrow = 1 ) + stat_density_2d ( data = trees , mapping = aes ( x = map_dbl ( geometry , ~ . [ 1 ]), y = map_dbl ( geometry , ~ . [ 2 ]), fill = stat ( density )), geom = 'tile' , contour = FALSE , alpha = 0.9 ) + scale_fill_gradientn ( colors = c ( \"transparent\" , \"white\" , \"limegreen\" ), values = scales :: rescale ( c ( 0 , 0.1 , 1 )), # Adjust these based on your density range guide = \"colourbar\" ) + theme_minimal () + labs ( fill = variable_label ) + theme_tufte () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"bottom\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot ggsave ( paste0 ( output_filename , \"_density_plot.png\" ), plot , width = 10 , height = 4 , units = \"in\" , dpi = 600 ) # Return the plot and the tree layer return ( list ( plot = plot , layer = trees )) } Map tree inventory per HOLC grade result <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/tree_inventory/shape/tree_inventory.zip\" , \"tree_inventory.shp\" , denver_redlining , \"Denver_tree_inventory_2023\" ) Warning: `stat(density)` was deprecated in ggplot2 3.4.0. \u2139 Please use `after_stat(density)` instead. 
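`process_city_inventory_data()` never downloads the zip archives by hand: it chains GDAL's `/vsizip/` and `/vsicurl/` virtual file systems so `st_read()` streams the shapefile directly out of the remote zip. A standalone sketch of that pattern, using the tree-inventory URL already used above:

```r
# Read a shapefile straight out of a remote zip archive via GDAL's virtual
# file systems, as process_city_inventory_data() does. URL and layer name are
# the tree-inventory inputs used above.
library(sf)
library(glue)

address    <- "https://www.denvergov.org/media/gis/DataCatalog/tree_inventory/shape/tree_inventory.zip"
inner_file <- "tree_inventory.shp"

trees <- st_read(glue("/vsizip/vsicurl/{address}/{inner_file}"), quiet = TRUE)
```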
Map traffic accidents per HOLC grade result <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/traffic_accidents/shape/traffic_accidents.zip\" , \"traffic_accidents.shp\" , denver_redlining , \"Denver_traffic_accidents\" , variable_label = 'Traffic accidents density' ) Map stream sampling effort per HOLC grade instream_sampling_sites <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/instream_sampling_sites/shape/instream_sampling_sites.zip\" , \"instream_sampling_sites.shp\" , denver_redlining , \"instream_sampling_sites\" , variable_label = 'Instream sampling sites density' ) Map soil sampling effort per HOLC grade soil_samples <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/soil_samples/shape/soil_samples.zip\" , \"soil_samples.shp\" , denver_redlining , \"Soil samples\" , variable_label = 'soil samples density' ) Map public art density per HOLC grade public_art <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/public_art/shape/public_art.zip\" , \"public_art.shp\" , denver_redlining , \"Public art \" , variable_label = 'Public art density' ) Map liquor licenses density per HOLC grade liquor_licenses <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/liquor_licenses/shape/liquor_licenses.zip\" , \"liquor_licenses.shp\" , denver_redlining , \"liquor licenses \" , variable_label = 'liquor licenses density' ) Map crime density per HOLC grade Crime <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/crime/shape/crime.zip\" , \"crime.shp\" , denver_redlining , \"crime\" , variable_label = 'Crime density' ) WORD CLOUD: Types of crimes crime_cloud <- create_wordclouds_by_grade ( Crime $ layer , output_file = \"Crime_word_cloud_per_grade.png\" , title = \"Crime type where larger text is more frequent\" , max_size = 25 , col_select = \"OFFENSE_TY\" ) Warning: Using an external vector in selections was deprecated in tidyselect 1.1.0. \u2139 Please use `all_of()` or `any_of()` instead. # Was: data %>% select(col_select) # Now: data %>% select(all_of(col_select)) See . Map police shooting density per HOLC grade Denver_police_shootings <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/denver_police_officer_involved_shootings/shape/denver_police_officer_involved_shootings.zip\" , \"denver_police_officer_involved_shootings.shp\" , denver_redlining , \"Police shootings\" , variable_label = 'Police shootings density' ) Not enough data for density across all 4 WORD CLOUD: Police involved shootings Denver_police_shootings_cloud <- create_wordclouds_by_grade ( Denver_police_shootings $ layer , output_file = \"police_shootings_word_cloud_per_grade.png\" , title = \"police involved shooting per crime type where larger text is more frequent\" , max_size = 35 , col_select = \"SHOOT_ACTI\" )","title":"Data Processing"},{"location":"worksheet_redlining/#part-3-comparative-analysis-and-visualization","text":"","title":"Part 3: Comparative Analysis and Visualization"},{"location":"worksheet_redlining/#statistical-analysis","text":"Conduct a detailed statistical analysis to compare greenspace across different HOLC grades, using techniques like Targeted Maximum Likelihood Estimation (TMLE) to assess the association between historical redlining and current greenspace levels. 
Visualize the disparities in greenspace distribution using GIS tools, highlighting how redlining has shaped urban ecological landscapes.","title":"Statistical Analysis"},{"location":"worksheet_redlining/#conclusion","text":"This tutorial provides tools and methodologies to explore the lingering effects of historic redlining on urban greenspace, offering insights into the intersection of urban planning, environmental justice, and public health.","title":"Conclusion"},{"location":"worksheet_redlining/#references_1","text":"Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. (2021). Redlines and Greenspace: The Relationship between Historical Redlining and 2010 Greenspace across the United States. Environmental Health Perspectives , 129(1), 017006. DOI:10.1289/EHP7495. Available online","title":"References"},{"location":"additional-resources/bilingualism_md/","text":"R and Python bilingualism \u00b6 Welcome to the R and Python bilingualism reference guide! If you\u2019re fluent in one of these languages but hesitant to learn the other, you\u2019re in the right place. The good news is that there are many similarities between R and Python that make it easy to switch between the two. Both R and Python are widely used in data science and are open-source, meaning that they are free to use and constantly being improved by the community. They both have extensive libraries for data analysis, visualization, and machine learning. In fact, many of the libraries in both languages have similar names and functions, such as Pandas in Python and data.table in R. While there are differences between the two languages, they can complement each other well. Python is versatile and scalable, making it ideal for large and complex projects such as web development and artificial intelligence. R, on the other hand, is known for its exceptional statistical capabilities and is often used in data analysis and modeling. Visualization is also easier in R, making it a popular choice for creating graphs and charts. By learning both R and Python, you\u2019ll be able to take advantage of the strengths of each language and create more efficient and robust data analysis workflows. Don\u2019t let the differences between the two languages intimidate you - once you become familiar with one, learning the other will be much easier. So, whether you\u2019re a Python enthusiast looking to expand your statistical analysis capabilities, or an R user interested in exploring the world of web development and artificial intelligence, this guide will help you become bilingual in R and Python. Install packages \u00b6 In R, packages can be installed from CRAN repository by using the install.packages() function: R code: # Install the dplyr package from CRAN install.packages ( \"dplyr\" ) In Python, packages can be installed from the Anaconda repository by using the conda install command: Python code: # Install the pandas package from Anaconda ! conda install pandas Loading libraries in R and Python In R, libraries can be loaded in the same way as before, using the library() function: R code: # Load the dplyr library library ( dplyr ) In Python, libraries can be loaded in the same way as before, using the import statement. Here\u2019s an example: Python code: # Load the pandas library import pandas as pd Note that the package or library must be installed from the respective repository before it can be loaded. Also, make sure you have the correct repository specified in your system before installing packages. 
By default, R uses CRAN as its primary repository, whereas Anaconda uses its own repository by default. reticulate \u00b6 The reticulate package lets you run both R and Python together in the R environment. R libraries are stored and managed in a repository called CRAN. You can download R packages with the install.packages() function install.packages ( \"reticulate\" ) You only need to install packages once, but you need to mount those packages with the library() function each time you open R. library ( reticulate ) Python libraries are stored and managed in a few different libraries and their dependencies are not regulated as strictly as R libraries are in CRAN. It\u2019s easier to publish a python package but it can also be more cumbersome for users because you need to manage dependencies yourself. You can download python packages using both R and Python code py_install ( \"laspy\" ) ## + '/Users/ty/opt/miniconda3/bin/conda' 'install' '--yes' '--prefix' '/Users/ty/opt/miniconda3/envs/earth-analytics-python' '-c' 'conda-forge' 'laspy' Now, let\u2019s create a Python list and assign it to a variable py_list: R code: py_list <- r_to_py ( list ( 1 , 2 , 3 )) We can now print out the py_list variable in Python using the py_run_string() function: R code: py_run_string ( \"print(r.py_list)\" ) This will output [1, 2, 3] in the Python console. Now, let\u2019s create an R vector and assign it to a variable r_vec: R code: r_vec <- c ( 4 , 5 , 6 ) We can now print out the r_vec variable in R using the py$ syntax to access Python variables: R code: print ( py $ py_list ) This will output [1, 2, 3] in the R console. We can also call Python functions from R using the py_call() function. For example, let\u2019s call the Python sum() function on the py_list variable and assign the result to an R variable r_sum: R code: r_sum <- py_call ( \"sum\" , args = list ( py_list )) We can now print out the r_sum variable in R: R code: print ( r_sum ) This will output 6 in the R console. Load packages and change settings \u00b6 options ( java.parameters = \"-Xmx5G\" ) library ( r5r ) library ( sf ) library ( data.table ) library ( ggplot2 ) library ( interp ) library ( dplyr ) library ( osmdata ) library ( ggthemes ) library ( sf ) library ( data.table ) library ( ggplot2 ) library ( akima ) library ( dplyr ) library ( raster ) library ( osmdata ) library ( mapview ) library ( cowplot ) library ( here ) library ( testthat ) import sys sys . argv . append ([ \"--max-memory\" , \"5G\" ]) import pandas as pd import geopandas import matplotlib.pyplot as plt import numpy as np import plotnine import contextily as cx import r5py import seaborn as sns R and Python are two popular programming languages used for data analysis, statistics, and machine learning. Although they share some similarities, there are some fundamental differences between them. Here\u2019s an example code snippet in R and Python to illustrate some of the differences: R Code: # Create a vector of numbers from 1 to 10 x <- 1 : 10 # Compute the mean of the vector mean_x <- mean ( x ) # Print the result print ( mean_x ) ## [1] 5.5 Python Code: # Import the numpy library for numerical operations import numpy as np # Create a numpy array of numbers from 1 to 10 x = np . array ([ 1 , 2 , 3 , 4 , 5 , 6 , 7 , 8 , 9 , 10 ]) # Compute the mean of the array mean_x = np . 
mean ( x ) # Print the result print ( mean_x ) ## 5.5 In this example, we can see that there are several differences between R and Python: Syntax: R uses the assignment operator \\<- while Python uses the equals sign = for variable assignment. Libraries: Python relies heavily on external libraries such as numpy, pandas, and matplotlib for data analysis, while R has built-in functions for many data analysis tasks. Data types: R is designed to work with vectors and matrices, while Python uses lists and arrays. In the example above, we used the numpy library to create a numerical array in Python. Function names: Function names in R and Python can differ significantly. In the example above, we used the mean() function in R and the np.mean() function in Python to calculate the mean of the vector/array. These are just a few of the many differences between R and Python. Ultimately, the choice between the two languages will depend on your specific needs and preferences. Load saved data \u00b6 R Code: data ( \"iris\" ) here () load ( file = here ( \"2_R_and_Py_bilingualism\" , \"data\" , \"iris_example_data.rdata\" )) objects () Python code: Save data \u00b6 R Code: save ( iris , file = here ( \"2_R_and_Py_bilingualism\" , \"data\" , \"iris_example_data.rdata\" )) write.csv ( iris , file = here ( \"2_R_and_Py_bilingualism\" , \"data\" , \"iris_example_data.csv\" )) Python code: functions \u00b6 Both R and Python are powerful languages for writing functions that can take input, perform a specific task, and return output. R Code: # Define a function that takes two arguments and returns their sum sum_r <- function ( a , b ) { return ( a + b ) } # Call the function with two arguments and print the result result_r <- sum_r ( 3 , 5 ) print ( result_r ) ## [1] 8 Python code: # Define a function that takes two arguments and returns their sum def sum_py ( a , b ): return a + b # Call the function with two arguments and print the result result_py = sum_py ( 3 , 5 ) print ( result_py ) ## 8 In both cases, we define a function that takes two arguments and returns their sum. In R, we use the function keyword to define a function, while in Python, we use the def keyword. The function body in R is enclosed in curly braces, while in Python it is indented. There are a few differences in the syntax and functionality between the two approaches: Function arguments: In R, function arguments are separated by commas, while in Python they are enclosed in parentheses. The syntax for specifying default arguments and variable-length argument lists can also differ between the two languages. Return statement: In R, we use the return keyword to specify the return value of a function, while in Python, we simply use the return statement. Function names: Function names in R and Python can differ significantly. In the example above, we used the sum_r() function in R and the sum_py() function in Python to calculate the sum of two numbers. Data Plots \u00b6 R Code: # Load the \"ggplot2\" package for plotting library ( ggplot2 ) # Generate some sample data x <- seq ( 1 , 10 , 1 ) y <- x + rnorm ( 10 ) # Create a scatter plot ggplot ( data.frame ( x , y ), aes ( x = x , y = y )) + geom_point () Python code: # Load the \"matplotlib\" library import matplotlib.pyplot as plt # Generate some sample data import numpy as np x = np . arange ( 1 , 11 ) y = x + np . random . normal ( 0 , 1 , 10 ) #clear last plot plt . clf () # Create a scatter plot plt . scatter ( x , y ) plt . 
show () In both cases, we generate some sample data and create a scatter plot to visualize the relationship between the variables. There are a few differences in the syntax and functionality between the two approaches: Library and package names: In R, we use the ggplot2 package for plotting, while in Python, we use the matplotlib library. Data format: In R, we use a data frame to store the input data, while in Python, we use numpy arrays. Plotting functions: In R, we use the ggplot() function to create a new plot object, and then use the geom_point() function to create a scatter plot layer. In Python, we use the scatter() function from the matplotlib.pyplot module to create a scatter plot directly. Linear regression \u00b6 R Code: # Load the \"ggplot2\" package for plotting library ( ggplot2 ) # Generate some sample data x <- seq ( 1 , 10 , 1 ) y <- x + rnorm ( 10 ) # Perform linear regression model_r <- lm ( y ~ x ) # Print the model summary summary ( model_r ) ## ## Call: ## lm(formula = y ~ x) ## ## Residuals: ## Min 1Q Median 3Q Max ## -1.69344 -0.42336 0.08961 0.34778 1.56728 ## ## Coefficients: ## Estimate Std. Error t value Pr(>|t|) ## (Intercept) -0.1676 0.6781 -0.247 0.811 ## x 0.9750 0.1093 8.921 1.98e-05 *** ## --- ## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1 ## ## Residual standard error: 0.9926 on 8 degrees of freedom ## Multiple R-squared: 0.9087, Adjusted R-squared: 0.8972 ## F-statistic: 79.59 on 1 and 8 DF, p-value: 1.976e-05 # Plot the data and regression line ggplot ( data.frame ( x , y ), aes ( x = x , y = y )) + geom_point () + geom_smooth ( method = \"lm\" , se = FALSE ) ## `geom_smooth()` using formula = 'y ~ x' Python code: # Load the \"matplotlib\" and \"scikit-learn\" libraries import matplotlib.pyplot as plt from sklearn.linear_model import LinearRegression # Generate some sample data import numpy as np x = np . arange ( 1 , 11 ) y = x + np . random . normal ( 0 , 1 , 10 ) # Perform linear regression model_py = LinearRegression () . fit ( x . reshape ( - 1 , 1 ), y ) # Print the model coefficients print ( \"Coefficients: \" , model_py . coef_ ) ## Coefficients: [1.15539692] print ( \"Intercept: \" , model_py . intercept_ ) #clear last plot ## Intercept: -1.1291396173221218 plt . clf () # Plot the data and regression line plt . scatter ( x , y ) plt . plot ( x , model_py . predict ( x . reshape ( - 1 , 1 )), color = 'red' ) plt . show () In both cases, we generate some sample data with a linear relationship between x and y, and then perform a simple linear regression to estimate the slope and intercept of the line. We then plot the data and regression line to visualize the fit. There are a few differences in the syntax and functionality between the two approaches: Library and package names: In R, we use the lm() function from the base package to perform linear regression, while in Python, we use the LinearRegression() class from the scikit-learn library. Additionally, we use the ggplot2 package in R for plotting, while we use the matplotlib library in Python. Data format: In R, we can specify the dependent and independent variables in the formula used for regression. In Python, we need to reshape the input data to a two-dimensional array before fitting the model. Model summary: In R, we can use the summary() function to print a summary of the model, including the estimated coefficients, standard errors, and p-values. In Python, we need to print the coefficients and intercept separately. 
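For readers who want an R-style regression summary in Python, the statsmodels library can print coefficients, standard errors, and p-values in a single table. statsmodels is not used elsewhere in this guide, so treat the following as an optional sketch that assumes the package is installed: Python code:
import numpy as np
import statsmodels.api as sm

# Generate the same kind of sample data as above
np.random.seed(1)
x = np.arange(1, 11)
y = x + np.random.normal(0, 1, 10)

# Add an intercept column and fit an ordinary least squares model
X = sm.add_constant(x)
model = sm.OLS(y, X).fit()

# Print an R-like summary table with coefficients, standard errors, and p-values
print(model.summary())
The output of model.summary() is the closest Python analogue to the output of summary(lm(y ~ x)) in R.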
Random Forest \u00b6 R Code: # Load the \"randomForest\" package library ( randomForest ) # Load the \"iris\" dataset data ( iris ) # Split the data into training and testing sets set.seed ( 123 ) train_idx <- sample ( 1 : nrow ( iris ), nrow ( iris ) * 0.7 , replace = FALSE ) train_data <- iris [ train_idx , ] test_data <- iris [ - train_idx , ] # Build a random forest model rf_model <- randomForest ( Species ~ . , data = train_data , ntree = 500 ) # Make predictions on the testing set predictions <- predict ( rf_model , test_data ) # Calculate accuracy of the model accuracy <- sum ( predictions == test_data $ Species ) / nrow ( test_data ) print ( paste ( \"Accuracy:\" , accuracy )) ## [1] \"Accuracy: 0.977777777777778\" Python code: # Load the \"pandas\", \"numpy\", and \"sklearn\" libraries import pandas as pd import numpy as np from sklearn.ensemble import RandomForestClassifier from sklearn.datasets import load_iris from sklearn.model_selection import train_test_split # Load the \"iris\" dataset iris = load_iris () # Split the data into training and testing sets X_train , X_test , y_train , y_test = train_test_split ( iris . data , iris . target , test_size = 0.3 , random_state = 123 ) # Build a random forest model rf_model = RandomForestClassifier ( n_estimators = 500 , random_state = 123 ) rf_model . fit ( X_train , y_train ) # Make predictions on the testing set ## RandomForestClassifier(n_estimators=500, random_state=123) predictions = rf_model . predict ( X_test ) # Calculate accuracy of the model accuracy = sum ( predictions == y_test ) / len ( y_test ) print ( \"Accuracy:\" , accuracy ) ## Accuracy: 0.9555555555555556 In both cases, we load the iris dataset and split it into training and testing sets. We then build a random forest model using the training data and evaluate its accuracy on the testing data. There are a few differences in the syntax and functionality between the two approaches: Library and package names: In R, we use the randomForest package to build random forest models, while in Python, we use the RandomForestClassifier class from the sklearn.ensemble module. We also use different libraries for loading and manipulating data (pandas and numpy in Python, and built-in datasets in R). Model parameters: The syntax for setting model parameters is slightly different in R and Python. For example, in R, we specify the number of trees using the ntree parameter, while in Python, we use the n_estimators parameter. Data format: In R, we use a data frame to store the input data, while in Python, we use numpy arrays. Basic streetmap from Open Street Map \u00b6 R Code: # Load the \"osmdata\" package for mapping library ( osmdata ) library ( tmap ) # Define the map location and zoom level bbox <- c ( left = -0.16 , bottom = 51.49 , right = -0.13 , top = 51.51 ) # Get the OpenStreetMap data osm_data <- opq ( bbox ) %>% add_osm_feature ( key = \"highway\" ) %>% osmdata_sf () # Plot the map using tmap tm_shape ( osm_data $ osm_lines ) + tm_lines () Python code: # Load the \"osmnx\" package for mapping import osmnx as ox # Define the map location and zoom level bbox = ( 51.49 , - 0.16 , 51.51 , - 0.13 ) # Get the OpenStreetMap data osm_data = ox . graph_from_bbox ( north = bbox [ 2 ], south = bbox [ 0 ], east = bbox [ 3 ], west = bbox [ 1 ], network_type = 'all' ) # Plot the map using osmnx ox . plot_graph ( osm_data ) ## (
, ) In both cases, we define the map location and zoom level, retrieve the OpenStreetMap data using the specified bounding box, and plot the map. The main differences between the two approaches are: Package names and syntax: In R, we use the osmdata package and its syntax to download and process the OpenStreetMap data, while in Python, we use the osmnx package and its syntax. Mapping libraries: In R, we use the tmap package to create a static map of the OpenStreetMap data, while in Python, we use the ox.plot_graph function from the osmnx package to plot the map. CNN on Raster data \u00b6 R Code: # Load the \"keras\" package for building the CNN library ( tensorflow ) library ( keras ) # Load the \"raster\" package for working with raster data library ( raster ) # Load the \"magrittr\" package for pipe operator library ( magrittr ) # Load the data as a raster brick raster_data <- brick ( \"raster_data.tif\" ) # Split the data into training and testing sets split_data <- sample ( 1 : nlayers ( raster_data ), size = nlayers ( raster_data ) * 0.8 , replace = FALSE ) train_data <- raster_data [[ split_data ]] test_data <- raster_data [[ setdiff ( 1 : nlayers ( raster_data ), split_data )]] # Define the CNN model model <- keras_model_sequential () %>% layer_conv_2d ( filters = 32 , kernel_size = c ( 3 , 3 ), activation = \"relu\" , input_shape = c ( ncol ( train_data ), nrow ( train_data ), ncell ( train_data ))) %>% layer_max_pooling_2d ( pool_size = c ( 2 , 2 )) %>% layer_dropout ( rate = 0.25 ) %>% layer_flatten () %>% layer_dense ( units = 128 , activation = \"relu\" ) %>% layer_dropout ( rate = 0.5 ) %>% layer_dense ( units = nlayers ( train_data ), activation = \"softmax\" ) # Compile the model model %>% compile ( loss = \"categorical_crossentropy\" , optimizer = \"adam\" , metrics = \"accuracy\" ) # Train the model history <- model %>% fit ( x = array ( train_data ), y = to_categorical ( 1 : nlayers ( train_data )), epochs = 10 , validation_split = 0.2 ) # Evaluate the model model %>% evaluate ( x = array ( test_data ), y = to_categorical ( 1 : nlayers ( test_data ))) # Plot the model accuracy over time plot ( history ) Piping \u00b6 Piping is a powerful feature in both R and Python that allows for more streamlined and readable code. However, the syntax for piping is different between the two languages. In R, piping is done using the %>% operator from the magrittr package, while in Python there is no dedicated pipe operator; the closest pandas equivalents are method chaining and the DataFrame.pipe() method. Let\u2019s compare and contrast piping in R and Python with some examples: Piping in R In R, we can use the %>% operator to pipe output from one function to another, which can make our code more readable and easier to follow. Here\u2019s an example: R code: library ( dplyr ) # create a data frame df <- data.frame ( x = c ( 1 , 2 , 3 ), y = c ( 4 , 5 , 6 )) # calculate the sum of column x and y df %>% mutate ( z = x + y ) %>% summarize ( sum_z = sum ( z )) ## sum_z ## 1 21 In this example, we first create a data frame df with two columns x and y. We then pipe the output of df to mutate, which adds a new column z to the data frame that is the sum of x and y. Finally, we pipe the output to summarize, which calculates the sum of z and returns the result. Piping in Python In Python, there is no built-in pipe operator. Instead of piping output from one function to another, we chain methods on a DataFrame, so that each step returns a DataFrame for the next method to operate on.
Here\u2019s an example: Python code: import pandas as pd # create a DataFrame df = pd . DataFrame ({ 'x' : [ 1 , 2 , 3 ], 'y' : [ 4 , 5 , 6 ]}) # calculate the sum of column x and y ( df . assign ( z = df [ 'x' ] + df [ 'y' ]) . agg ( sum_z = ( 'z' , 'sum' ))) ## z ## sum_z 21 In this example, we first create a DataFrame df with two columns x and y. We then use the assign() method to add a new column z to the DataFrame that is the sum of x and y. Finally, we use the agg() method to calculate the sum of z and return the result. As we can see, the syntax for piping is slightly different between R and Python, but the concept remains the same. Piping can make our code more readable and easier to follow, which is an important aspect of creating efficient and effective code. R code: library ( dplyr ) library ( ggplot2 ) iris %>% filter ( Species == \"setosa\" ) %>% group_by ( Sepal.Width ) %>% summarise ( mean.Petal.Length = mean ( Petal.Length )) %>% mutate ( Sepal.Width = as.factor ( Sepal.Width )) %>% ggplot ( aes ( x = Sepal.Width , y = mean.Petal.Length )) + geom_bar ( stat = \"identity\" , fill = \"dodgerblue\" ) + labs ( title = \"Mean Petal Length of Setosa by Sepal Width\" , x = \"Sepal Width\" , y = \"Mean Petal Length\" ) In this example, we start with the iris dataset and filter it to only include rows where the Species column is \u201csetosa\u201d. We then group the remaining rows by the Sepal.Width column and calculate the mean Petal.Length for each group. Next, we convert Sepal.Width to a factor variable to ensure that it is treated as a categorical variable in the visualization. Finally, we create a bar plot using ggplot2, with Sepal.Width on the x-axis and mean.Petal.Length on the y-axis. The resulting plot shows the mean petal length of setosa flowers for each sepal width category. Python code: import pandas as pd # Load the iris dataset and pipe it into the next function ( pd . read_csv ( \"https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data\" , header = None , names = [ 'sepal_length' , 'sepal_width' , 'petal_length' , 'petal_width' , 'class' ]) # Select columns and pivot the dataset . loc [:, [ 'sepal_length' , 'sepal_width' , 'petal_length' ]] . melt ( var_name = 'variable' , value_name = 'value' ) # Group by variable and calculate mean . groupby ( 'variable' , as_index = False ) . mean () # Filter for mean greater than 3.5 and sort by descending mean . query ( 'value > 3.5' ) . sort_values ( 'value' , ascending = False ) ) ## variable value ## 1 sepal_length 5.843333 ## 0 petal_length 3.758667 for loops \u00b6 Here is an example of a for loop in R: R code # Create a vector of numbers numbers <- c ( 1 , 2 , 3 , 4 , 5 ) # Use a for loop to print out each number in the vector for ( i in numbers ) { print ( i ) } ## [1] 1 ## [1] 2 ## [1] 3 ## [1] 4 ## [1] 5 In this example, the for loop iterates over each element in the numbers vector, assigning the current element to the variable i. The print(i) statement is then executed for each iteration, outputting the value of i. Here is the equivalent example in Python: Python code # Create a list of numbers numbers = [ 1 , 2 , 3 , 4 , 5 ] # Use a for loop to print out each number in the list for i in numbers : print ( i ) ## 1 ## 2 ## 3 ## 4 ## 5 In Python, the for loop iterates over each element in the numbers list, assigning the current element to the variable i. The print(i) statement is then executed for each iteration, outputting the value of i. 
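If you also need the position of each element while looping, the built-in enumerate() function in Python returns the index and the value together. This is a small optional sketch; the usual R counterpart is looping over seq_along(numbers): Python code:
# Create a list of numbers
numbers = [1, 2, 3, 4, 5]

# Loop over the list with both the index and the value
for i, value in enumerate(numbers):
    print(i, value)
## 0 1
## 1 2
## 2 3
## 3 4
## 4 5
Note that Python indices start at 0, while seq_along() in R starts at 1.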
Both languages also support nested for loops, which can be used to perform iterations over multiple dimensions, such as looping through a 2D array. Parallel \u00b6 Parallel computing is a technique used to execute multiple computational tasks simultaneously, which can significantly reduce the time required to complete a task. Both R and Python have built-in support for parallel computing, although the approaches are slightly different. In this section, we will compare and contrast the parallel computing capabilities of R and Python, and provide working examples in code. Parallel computing in R In R, there are several packages that support parallel computing, such as parallel, foreach, and doParallel. The parallel package provides basic functionality for parallel computing, while foreach and doParallel provide higher-level abstractions that make it easier to write parallel code. Here is an example of using the foreach package to execute a loop in parallel: R code: library ( foreach ) library ( doParallel ) # Set up a parallel backend with 4 workers cl <- makeCluster ( 4 ) registerDoParallel ( cl ) # Define a function to apply in parallel myfunc <- function ( x ) { # some computation here return ( x ^ 2 ) } # Generate some data mydata <- 1 : 1000 # Apply the function to the data in parallel result <- foreach ( i = mydata ) %dopar% { myfunc ( i ) } # Stop the cluster stopCluster ( cl ) In this example, we use the makeCluster() function to set up a cluster with 4 workers, and the registerDoParallel() function to register the cluster as the parallel backend for foreach. We then define a function myfunc() that takes an input x and returns x^2. We generate some data mydata and use foreach to apply myfunc() to each element of mydata in parallel, using the %dopar% operator. R Tidyverse parallel In R Tidyverse, we can use the furrr package for parallel computing. Here\u2019s an example of using furrr to parallelize a map function: R Tidy code: library ( tidyverse ) library ( furrr ) # Generate a list of numbers numbers <- 1 : 10 # Use the future_map function from furrr to parallelize the map function plan ( multisession ) squares <- future_map ( numbers , function ( x ) x ^ 2 ) In this example, we first load the Tidyverse and furrr libraries. We then generate a list of numbers from 1 to 10. We then use the plan function to set the parallelization strategy to \u201cmultisession\u201d, which will use multiple CPU cores to execute the code. Finally, we use the future_map function from furrr to apply the function x^2 to each number in the list in parallel. Parallel computing in Python In Python, the standard library includes the multiprocessing module, which provides basic support for parallel computing. Additionally, there are several third-party packages that provide higher-level abstractions, such as joblib and dask. Here is an example of using the multiprocessing module to execute a loop in parallel: Python code: from multiprocessing import Pool # Define the function to apply in parallel def square ( x ): return x ** 2 # Generate a list of numbers numbers = list ( range ( 1 , 11 )) # Use the map function and a pool of workers to parallelize the square function with Pool () as pool : squares = pool . map ( square , numbers ) print ( squares ) In this example, we define a function square() that takes an input x and returns x^2. We generate a list of numbers and use the Pool class from the multiprocessing module to set up a pool of worker processes, one per available CPU core by default. We then use the map() method of the Pool class to apply square() to each element of numbers in parallel.
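For a higher-level interface closer to foreach or furrr, the third-party joblib package mentioned above can run the same computation with very little boilerplate. This is a minimal sketch and assumes joblib is installed: Python code:
from joblib import Parallel, delayed

# Define the function to apply in parallel
def square(x):
    return x ** 2

# Generate a list of numbers
numbers = list(range(1, 11))

# Run square() over the list using 4 worker processes
squares = Parallel(n_jobs=4)(delayed(square)(i) for i in numbers)
print(squares)
Like foreach with %dopar%, joblib takes care of setting up and tearing down the worker processes for you.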
Comparison and contrast Both R and Python have built-in support for parallel computing, with similar basic functionality for creating and managing parallel processes. However, the higher-level abstractions differ between the two languages. In R, the foreach package provides a high-level interface that makes it easy to write parallel code, while in Python, the multiprocessing module provides a basic interface that can be extended using third-party packages like joblib and dask. Additionally, Python has better support for distributed computing using frameworks like Apache Spark, while R has better support for shared-memory parallelism using tools like data.table and ff. Data wrangling \u00b6 Data wrangling is an important part of any data analysis project, and both R and Python provide tools and libraries for performing this task. In this answer, we will compare and contrast data wrangling in R\u2019s tidyverse and Python\u2019s pandas library, with working examples in code. Data Wrangling in R Tidyverse The tidyverse is a collection of R packages designed for data science, and it includes several packages that are useful for data wrangling. One of the most popular packages is dplyr, which provides a grammar of data manipulation for data frames. Here is an example of using dplyr to filter, mutate, and summarize a data frame: R code library ( dplyr ) # Load data data ( mtcars ) # Filter for cars with more than 100 horsepower mtcars %>% filter ( hp > 100 ) %>% # Add a new column with fuel efficiency in km per liter mutate ( kmpl = 0.425 * mpg ) %>% # Group by number of cylinders and summarize group_by ( cyl ) %>% summarize ( mean_hp = mean ( hp ), mean_kmpl = mean ( kmpl )) ## # A tibble: 3 \u00d7 3 ## cyl mean_hp mean_kmpl ## ## 1 4 111 11.0 ## 2 6 122. 8.39 ## 3 8 209. 6.42 In this example, we first filter the mtcars data frame to only include cars with more than 100 horsepower. We then use mutate to create a new column with fuel efficiency in kilometers per liter. Finally, we group the data by the number of cylinders and calculate the mean horsepower and fuel efficiency. Data Wrangling in Python Pandas Pandas is a popular library for data manipulation in Python. It provides a data frame object similar to R\u2019s data frames, along with a wide range of functions for data wrangling. Here is an example of using pandas to filter, transform, and group a data frame: Python code: import pandas as pd # Load data mtcars = pd . read_csv ( 'https://raw.githubusercontent.com/mwaskom/seaborn-data/master/mtcars.csv' ) # Filter for cars with more than 100 horsepower filtered_mtcars = mtcars [ mtcars [ 'hp' ] > 100 ] # Add a new column with fuel efficiency in km per liter filtered_mtcars [ 'kmpl' ] = 0.425 * filtered_mtcars [ 'mpg' ] # Group by number of cylinders and calculate mean horsepower and fuel efficiency grouped_mtcars = filtered_mtcars . groupby ( 'cyl' ) . agg ({ 'hp' : 'mean' , 'kmpl' : 'mean' }) In this example, we first load the mtcars data from a CSV file. We then filter the data to only include cars with more than 100 horsepower, using boolean indexing. We use the assign function to create a new column with fuel efficiency in kilometers per liter. Finally, we group the data by the number of cylinders and calculate the mean horsepower and fuel efficiency. Comparison Overall, both R\u2019s tidyverse and Python\u2019s pandas provide similar functionality for data wrangling. Both allow for filtering, transforming, and aggregating data frames. 
The syntax for performing these operations is slightly different between the two languages, with R using the %>% operator for chaining operations and Python using method chaining or the apply family of functions. One key difference between the two languages is that R\u2019s tidyverse provides a consistent grammar for data manipulation across its various packages, making it easier to learn and use. However, Python\u2019s pandas library has a larger developer community and is more versatile for use in other applications, such as web development or machine learning. In conclusion, both R and Python provide powerful tools for data wrangling, and the choice between the two ultimately depends on the specific needs of the user and their familiarity Data from API \u00b6 Retrieving data from an API is a common task in both R and Python. Here are examples of how to retrieve data from an API in both languages: Python To retrieve data from an API in Python, we can use the requests library. Here\u2019s an example of how to retrieve weather data from the OpenWeatherMap API: Python code: import requests url = 'https://api.openweathermap.org/data/2.5/weather?q=London,uk&appid=API_KEY' response = requests . get ( url ) data = response . json () print ( data ) This code retrieves the current weather data for London from the OpenWeatherMap API. We first construct the API URL with the location and API key, then use the requests.get() function to make a request to the API. We then extract the JSON data from the response using the .json() method and print the resulting data. R In R, we can use the httr package to retrieve data from an API. Here\u2019s an example of how to retrieve weather data from the OpenWeatherMap API in R: R code: library ( httr ) url <- 'https://api.openweathermap.org/data/2.5/weather?q=London,uk&appid=API_KEY' response <- GET ( url ) data <- content ( response , 'text' ) print ( data ) This code is similar to the Python code above. We first load the httr library, then construct the API URL and use the GET() function to make a request to the API. We then extract the data from the response using the content() function and print the resulting data. Retrieving Data from an API in R Tidyverse In R Tidyverse, we can use the httr and jsonlite packages to retrieve and process data from an API. R code: # Load required packages library ( httr ) library ( jsonlite ) # Define API endpoint endpoint <- \"https://jsonplaceholder.typicode.com/posts\" # Retrieve data from API response <- GET ( endpoint ) # Extract content from response content <- content ( response , \"text\" ) # Convert content to JSON json <- fromJSON ( content ) # Convert JSON to a data frame df <- as.data.frame ( json ) In the above example, we use the GET() function from the httr package to retrieve data from an API endpoint, and the content() function to extract the content of the response. We then use the fromJSON() function from the jsonlite package to convert the JSON content to a list, and the as.data.frame() function to convert the list to a data frame. Retrieving Data from an API in Python In Python, we can use the requests library to retrieve data from an API, and the json library to process the JSON data. Python code: # Load required libraries import requests import json # Define API endpoint endpoint = \"https://jsonplaceholder.typicode.com/posts\" # Retrieve data from API response = requests . get ( endpoint ) # Extract content from response content = response . content # Convert content to JSON json_data = json . 
loads ( content ) # Convert JSON to a list of dictionaries data = [ dict ( row ) for row in json_data ] In the above example, we use the get() function from the requests library to retrieve data from an API endpoint, and the content attribute to extract the content of the response. We then use the loads() function from the json library to convert the JSON content to a list of dictionaries. Comparison Both R Tidyverse and Python provide powerful tools for retrieving and processing data from an API. In terms of syntax, the two languages are somewhat similar. In both cases, we use a library to retrieve data from the API, extract the content of the response, and then process the JSON data. However, there are some differences in the specific functions and methods used. For example, in R Tidyverse, we use the content() function to extract the content of the response, whereas in Python, we use the content attribute. Additionally, in R Tidyverse, we use the fromJSON() function to convert the JSON data to a list, whereas in Python, we use the loads() function. Census data \u00b6 Retrieving USA census data in R, R Tidy, and Python can be done using different packages and libraries. Here are some working examples in code for each language: R: To retrieve census data in R, we can use the tidycensus package. Here\u2019s an example of how to retrieve the total population for the state of California: R code: library ( tidycensus ) library ( tidyverse ) # Set your Census API key census_api_key ( \"your_api_key\" ) # Get the total population for the state of California ca_pop <- get_acs ( geography = \"state\" , variables = \"B01003_001\" , state = \"CA\" ) %>% rename ( total_population = estimate ) %>% select ( total_population ) # View the result ca_pop R Tidy: To retrieve census data in R Tidy, we can also use the tidycensus package. Here\u2019s an example of how to retrieve the total population for the state of California using pipes and dplyr functions: R tidy code: library ( tidycensus ) library ( tidyverse ) # Set your Census API key census_api_key ( \"your_api_key\" ) # Get the total population for the state of California ca_pop <- get_acs ( geography = \"state\" , variables = \"B01003_001\" , state = \"CA\" ) %>% rename ( total_population = estimate ) %>% select ( total_population ) # View the result ca_pop Python: To retrieve census data in Python, we can use the census library. Here\u2019s an example of how to retrieve the total population for the state of California: Python code: from census import Census from us import states import pandas as pd # Set your Census API key c = Census ( \"your_api_key\" ) # Get the total population for the state of California ca_pop = c . acs5 . state (( \"B01003_001\" ), states . CA . fips , year = 2019 ) # Convert the result to a Pandas DataFrame ca_pop_df = pd . DataFrame ( ca_pop ) # Rename the column ca_pop_df = ca_pop_df . rename ( columns = { \"B01003_001E\" : \"total_population\" }) # Select only the total population column ca_pop_df = ca_pop_df [[ \"total_population\" ]] # View the result ca_pop_df Lidar data \u00b6 To find Lidar data in R and Python, you typically need to start by identifying sources of Lidar data and then accessing them using appropriate packages and functions. Here are some examples of how to find Lidar data in R and Python: R: Identify sources of Lidar data: The USGS National Map Viewer provides access to Lidar data for the United States. 
You can also find Lidar data on state and local government websites, as well as on commercial data providers\u2019 websites. Access the data: You can use the lidR package in R to download and read Lidar data in the LAS format. For example, the following code downloads and reads Lidar data for a specific area: R code: library ( lidR ) # Download Lidar data LASfile <- system.file ( \"extdata\" , \"Megaplot.laz\" , package = \"lidR\" ) lidar <- readLAS ( LASfile ) # Visualize the data plot ( lidar ) Python: Identify sources of Lidar data: The USGS 3DEP program provides access to Lidar data for the United States. You can also find Lidar data on state and local government websites, as well as on commercial data providers\u2019 websites. Access the data: You can use the pylastools package in Python to download and read Lidar data in the LAS format. For example, the following code downloads and reads Lidar data for a specific area: Python code: py_install ( \"requests\" ) py_install ( \"pylas\" ) py_install ( \"laspy\" ) import requests from pylas import read import laspy import numpy as np # Download Lidar data url = \"https://s3-us-west-2.amazonaws.com/usgs-lidar-public/USGS_LPC_CA_SanFrancisco_2016_LAS_2018.zip\" lasfile = \"USGS_LPC_CA_SanFrancisco_2016_LAS_2018.las\" r = requests . get ( url , allow_redirects = True ) open ( lasfile , 'wb' ) . write ( r . content ) # Read the data lidar = read ( lasfile ) # Visualize the data laspy . plot . plot ( lidar ) Data for black lives \u00b6 Data for Black Lives ( https://d4bl.org/ ) is a movement that uses data science to create measurable change in the lives of Black people. While the Data for Black Lives website provides resources, reports, articles, and datasets related to racial equity, it doesn\u2019t provide a direct API for downloading data. Instead, you can access the Data for Black Lives GitHub repository ( https://github.com/Data4BlackLives ) to find datasets and resources to work with. In this example, we\u2019ll use a sample dataset available at https://github.com/Data4BlackLives/covid-19/tree/master/data . The dataset \u201cCOVID19_race_data.csv\u201d contains COVID-19 race-related data. R: In R, we\u2019ll use the \u2018readr\u2019 and \u2018dplyr\u2019 packages to read, process, and analyze the dataset. R code: # Install and load necessary libraries library ( readr ) library ( dplyr ) # Read the CSV file url <- \"https://raw.githubusercontent.com/Data4BlackLives/covid-19/master/data/COVID19_race_data.csv\" data <- read_csv ( url ) # Basic information about the dataset print ( dim ( data )) print ( head ( data )) # Example analysis: calculate the mean of 'cases_total' by 'state' data %>% group_by ( state ) %>% summarize ( mean_cases_total = mean ( cases_total , na.rm = TRUE )) %>% arrange ( desc ( mean_cases_total )) Python: In Python, we\u2019ll use the \u2018pandas\u2019 library to read, process, and analyze the dataset. Python code: import pandas as pd # Read the CSV file url = \"https://raw.githubusercontent.com/Data4BlackLives/covid-19/master/data/COVID19_race_data.csv\" data = pd . read_csv ( url ) # Basic information about the dataset print ( data . shape ) print ( data . head ()) # Example analysis: calculate the mean of 'cases_total' by 'state' mean_cases_total = data . groupby ( \"state\" )[ \"cases_total\" ] . mean () . 
sort_values ( ascending = False ) print ( mean_cases_total ) In conclusion, both R and Python provide powerful libraries and tools for downloading, processing, and analyzing datasets, such as those found in the Data for Black Lives repository. The \u2018readr\u2019 and \u2018dplyr\u2019 libraries in R offer a simple and intuitive way to read and manipulate data, while the \u2018pandas\u2019 library in Python offers similar functionality with a different syntax. Depending on your preferred programming language and environment, both options can be effective in working with social justice datasets. Propublica Congress API \u00b6 The ProPublica Congress API provides information about the U.S. Congress members and their voting records. In this example, we\u2019ll fetch data about the current Senate members and calculate the number of members in each party. R: In R, we\u2019ll use the \u2018httr\u2019 and \u2018jsonlite\u2019 packages to fetch and process data from the ProPublica Congress API. R code: # load necessary libraries library ( httr ) library ( jsonlite ) # Replace 'your_api_key' with your ProPublica API key # # Fetch data about the current Senate members url <- \"https://api.propublica.org/congress/v1/117/senate/members.json\" response <- GET ( url , add_headers ( `X-API-Key` = api_key )) # Check if the request was successful if ( http_status ( response ) $ category == \"Success\" ) { data <- content ( response , \"parsed\" ) members <- data $ results [[ 1 ]] $ members # Calculate the number of members in each party party_counts <- table ( sapply ( members , function ( x ) x $ party )) print ( party_counts ) } else { print ( http_status ( response ) $ message ) } ## ## D I ID R ## 49 1 2 51 Python: In Python, we\u2019ll use the \u2018requests\u2019 library to fetch data from the ProPublica Congress API and \u2018pandas\u2019 library to process the data. python code: # Install necessary libraries import requests import pandas as pd # Replace 'your_api_key' with your ProPublica API key api_key = \"your_api_key\" headers = { \"X-API-Key\" : api_key } # Fetch data about the current Senate members url = \"https://api.propublica.org/congress/v1/117/senate/members.json\" response = requests . get ( url , headers = headers ) # Check if the request was successful if response . status_code == 200 : data = response . json () members = data [ \"results\" ][ 0 ][ \"members\" ] # Calculate the number of members in each party party_counts = pd . DataFrame ( members )[ \"party\" ] . value_counts () print ( party_counts ) else : print ( f \"Error: { response . status_code } \" ) In conclusion, both R and Python offer efficient ways to fetch and process data from APIs like the ProPublica Congress API. The \u2018httr\u2019 and \u2018jsonlite\u2019 libraries in R provide a straightforward way to make HTTP requests and parse JSON data, while the \u2018requests\u2019 library in Python offers similar functionality. The \u2018pandas\u2019 library in Python can be used for data manipulation and analysis, and R provides built-in functions like table() for aggregating data. Depending on your preferred programming language and environment, both options can be effective for working with the ProPublica Congress API. Nonprofit Explorer API by ProPublica \u00b6 The Nonprofit Explorer API by ProPublica provides data on tax-exempt organizations in the United States. In this example, we\u2019ll search for organizations with the keyword \u201ceducation\u201d and analyze the results. 
R: In R, we\u2019ll use the \u2018httr\u2019 and \u2018jsonlite\u2019 packages to fetch and process data from the Nonprofit Explorer API. R code: # Install and load necessary libraries library ( httr ) library ( jsonlite ) # Fetch data for organizations with the keyword \"education\" url <- \"https://projects.propublica.org/nonprofits/api/v2/search.json?q=education\" response <- GET ( url ) # Check if the request was successful if ( http_status ( response ) $ category == \"Success\" ) { data <- content ( response , \"parsed\" ) organizations <- data $ organizations # Count the number of organizations per state state_counts <- table ( sapply ( organizations , function ( x ) x $ state )) print ( state_counts ) } else { print ( http_status ( response ) $ message ) } ## ## AZ CA CO DC FL GA HI IL Indiana LA ## 3 22 6 5 3 2 1 2 1 1 ## MD MI MN MO MP MS NC NE NJ NM ## 1 2 5 3 1 1 2 2 2 1 ## NY OH OK Oregon PA TX UT VA WA WV ## 1 5 1 2 2 12 1 4 3 1 ## ZZ ## 2 Python: In Python, we\u2019ll use the \u2018requests\u2019 library to fetch data from the Nonprofit Explorer API and \u2018pandas\u2019 library to process the data. Python code: # Install necessary libraries import requests import pandas as pd # Fetch data for organizations with the keyword \"education\" url = \"https://projects.propublica.org/nonprofits/api/v2/search.json?q=education\" response = requests . get ( url ) # Check if the request was successful if response . status_code == 200 : data = response . json () organizations = data [ \"organizations\" ] # Count the number of organizations per state state_counts = pd . DataFrame ( organizations )[ \"state\" ] . value_counts () print ( state_counts ) else : print ( f \"Error: { response . status_code } \" ) ## CA 22 ## TX 12 ## CO 6 ## MN 5 ## OH 5 ## DC 5 ## VA 4 ## AZ 3 ## WA 3 ## MO 3 ## FL 3 ## IL 2 ## GA 2 ## NC 2 ## MI 2 ## Oregon 2 ## NE 2 ## ZZ 2 ## PA 2 ## NJ 2 ## HI 1 ## MS 1 ## NY 1 ## Indiana 1 ## NM 1 ## LA 1 ## UT 1 ## MD 1 ## MP 1 ## WV 1 ## OK 1 ## Name: state, dtype: int64 In conclusion, both R and Python offer efficient ways to fetch and process data from APIs like the Nonprofit Explorer API. The \u2018httr\u2019 and \u2018jsonlite\u2019 libraries in R provide a straightforward way to make HTTP requests and parse JSON data, while the \u2018requests\u2019 library in Python offers similar functionality. The \u2018pandas\u2019 library in Python can be used for data manipulation and analysis, and R provides built-in functions like table() for aggregating data. Depending on your preferred programming language and environment, both options can be effective for working with the Nonprofit Explorer API. Campaign Finance API by ProPublica \u00b6 The Campaign Finance API by the Federal Election Commission (FEC) provides data on campaign finance in U.S. federal elections. In this example, we\u2019ll fetch data about individual contributions for the 2020 election cycle and analyze the results. R: In R, we\u2019ll use the \u2018httr\u2019 and \u2018jsonlite\u2019 packages to fetch and process data from the Campaign Finance API. 
R code: # Install and load necessary libraries library ( httr ) library ( jsonlite ) # Fetch data about individual contributions for the 2020 election cycle url <- \"https://api.open.fec.gov/v1/schedules/schedule_a/?api_key='OGwpkX7tH5Jihs1qQcisKfVAMddJzmzouWKtKoby'&two_year_transaction_period=2020&sort_hide_null=false&sort_null_only=false&per_page=20&page=1\" response <- GET ( url ) # Check if the request was successful if ( http_status ( response ) $ category == \"Success\" ) { data <- content ( response , \"parsed\" ) contributions <- data $ results # Calculate the total contributions per state state_totals <- aggregate ( contributions $ contributor_state , by = list ( contributions $ contributor_state ), FUN = sum ) colnames ( state_totals ) <- c ( \"State\" , \"Total_Contributions\" ) print ( state_totals ) } else { print ( http_status ( response ) $ message ) } ## [1] \"Client error: (403) Forbidden\" Python: In Python, we\u2019ll use the \u2018requests\u2019 library to fetch data from the Campaign Finance API and \u2018pandas\u2019 library to process the data. Python code: # Install necessary libraries import requests import pandas as pd # Fetch data about individual contributions for the 2020 election cycle url = \"https://api.open.fec.gov/v1/schedules/schedule_a/?api_key=your_api_key&two_year_transaction_period=2020&sort_hide_null=false&sort_null_only=false&per_page=20&page=1\" response = requests . get ( url ) # Check if the request was successful if response . status_code == 200 : data = response . json () contributions = data [ \"results\" ] # Calculate the total contributions per state df = pd . DataFrame ( contributions ) state_totals = df . groupby ( \"contributor_state\" )[ \"contribution_receipt_amount\" ] . sum () print ( state_totals ) else : print ( f \"Error: { response . status_code } \" ) ## Error: 403 In conclusion, both R and Python offer efficient ways to fetch and process data from APIs like the Campaign Finance API. The \u2018httr\u2019 and \u2018jsonlite\u2019 libraries in R provide a straightforward way to make HTTP requests and parse JSON data, while the \u2018requests\u2019 library in Python offers similar functionality. The \u2018pandas\u2019 library in Python can be used for data manipulation and analysis, and R provides built-in functions like aggregate() for aggregating data. Depending on your preferred programming language and environment, both options can be effective for working with the Campaign Finance API. Note: Remember to replace your_api_key with your actual FEC API key in the code examples above. Historic Redlining \u00b6 Historic redlining data refers to data from the Home Owners\u2019 Loan Corporation (HOLC) that created residential security maps in the 1930s, which contributed to racial segregation and disinvestment in minority neighborhoods. One popular source for this data is the Mapping Inequality project ( https://dsl.richmond.edu/panorama/redlining/ ). In this example, we\u2019ll download historic redlining data for Philadelphia in the form of a GeoJSON file and analyze the data in R and Python. R: In R, we\u2019ll use the \u2018sf\u2019 and \u2018dplyr\u2019 packages to read and process the GeoJSON data. 
R code: # Install and load necessary libraries library ( sf ) library ( dplyr ) # Download historic redlining data for Philadelphia url <- \"https://dsl.richmond.edu/panorama/redlining/static/downloads/geojson/PAPhiladelphia1937.geojson\" philly_geojson <- read_sf ( url ) # Count the number of areas per HOLC grade grade_counts <- philly_geojson %>% group_by ( holc_grade ) %>% summarize ( count = n ()) plot ( grade_counts ) Python: In Python, we\u2019ll use the \u2018geopandas\u2019 library to read and process the GeoJSON data. Python code: # Install necessary libraries import geopandas as gpd # Download historic redlining data for Philadelphia url = \"https://dsl.richmond.edu/panorama/redlining/static/downloads/geojson/PAPhiladelphia1937.geojson\" philly_geojson = gpd . read_file ( url ) # Count the number of areas per HOLC grade grade_counts = philly_geojson [ \"holc_grade\" ] . value_counts () print ( grade_counts ) ## B 28 ## D 26 ## C 18 ## A 10 ## Name: holc_grade, dtype: int64 In conclusion, both R and Python offer efficient ways to download and process historic redlining data in the form of GeoJSON files. The \u2018sf\u2019 package in R provides a simple way to read and manipulate spatial data, while the \u2018geopandas\u2019 library in Python offers similar functionality. The \u2018dplyr\u2019 package in R can be used for data manipulation and analysis, and Python\u2019s built-in functions like value_counts() can be used for aggregating data. Depending on your preferred programming language and environment, both options can be effective for working with historic redlining data. American Indian and Alaska Native Areas (AIANNH) \u00b6 In this example, we\u2019ll download and analyze the American Indian and Alaska Native Areas (AIANNH) TIGER/Line Shapefile from the U.S. Census Bureau. We\u2019ll download the data for the year 2020, and analyze the number of AIANNH per congressional district R: In R, we\u2019ll use the \u2018sf\u2019 and \u2018dplyr\u2019 packages to read and process the Shapefile data. 
R code: # Install and load necessary libraries library ( sf ) library ( dplyr ) # Download historic redlining data for Philadelphia url <- \"https://www2.census.gov/geo/tiger/TIGER2020/AIANNH/tl_2020_us_aiannh.zip\" temp_file <- tempfile ( fileext = \".zip\" ) download.file ( url , temp_file , mode = \"wb\" ) unzip ( temp_file , exdir = tempdir ()) # Read the Shapefile shapefile_path <- file.path ( tempdir (), \"tl_2020_us_aiannh.shp\" ) aiannh <- read_sf ( shapefile_path ) # Count the number of AIANNH per congressional district state_counts <- aiannh %>% group_by ( LSAD ) %>% summarize ( count = n ()) print ( state_counts [ order ( - state_counts $ count ),]) ## Simple feature collection with 26 features and 2 fields ## Geometry type: GEOMETRY ## Dimension: XY ## Bounding box: xmin: -174.236 ymin: 18.91069 xmax: -67.03552 ymax: 71.34019 ## Geodetic CRS: NAD83 ## # A tibble: 26 \u00d7 3 ## LSAD count geometry ## ## 1 79 221 (((-166.5331 65.33918, -166.5331 65.33906, -166.533 65.33699, -1\u2026 ## 2 86 206 (((-83.38811 35.46645, -83.38342 35.46596, -83.38316 35.46593, -\u2026 ## 3 OT 155 (((-92.32972 47.81374, -92.3297 47.81305, -92.32967 47.81196, -9\u2026 ## 4 78 75 (((-155.729 20.02457, -155.7288 20.02428, -155.7288 20.02427, -1\u2026 ## 5 85 46 (((-122.3355 37.95215, -122.3354 37.95206, -122.3352 37.95199, -\u2026 ## 6 92 35 (((-93.01356 31.56287, -93.01354 31.56251, -93.01316 31.56019, -\u2026 ## 7 88 25 (((-97.35299 36.908, -97.35291 36.90801, -97.35287 36.908, -97.3\u2026 ## 8 96 19 (((-116.48 32.63814, -116.48 32.63718, -116.4794 32.63716, -116.\u2026 ## 9 84 16 (((-105.5937 36.40379, -105.5937 36.40324, -105.5937 36.40251, -\u2026 ## 10 89 11 (((-95.91705 41.28037, -95.91653 41.28036, -95.91653 41.28125, -\u2026 ## # \u2139 16 more rows Python: In Python, we\u2019ll use the \u2018geopandas\u2019 library to read and process the Shapefile data. Python code: import geopandas as gpd import pandas as pd import requests import zipfile import os from io import BytesIO # Download historic redlining data for Philadelphia url = \"https://www2.census.gov/geo/tiger/TIGER2020/AIANNH/tl_2020_us_aiannh.zip\" response = requests . get ( url ) zip_file = zipfile . ZipFile ( BytesIO ( response . content )) # Extract Shapefile temp_dir = \"temp\" if not os . path . exists ( temp_dir ): os . makedirs ( temp_dir ) zip_file . extractall ( path = temp_dir ) shapefile_path = os . path . join ( temp_dir , \"tl_2020_us_aiannh.shp\" ) # Read the Shapefile aiannh = gpd . read_file ( shapefile_path ) # Count the number of AIANNH per congressional district state_counts = aiannh . groupby ( \"LSAD\" ) . size () . reset_index ( name = \"count\" ) # Sort by descending count state_counts_sorted = state_counts . sort_values ( by = \"count\" , ascending = False ) print ( state_counts_sorted ) ## LSAD count ## 2 79 221 ## 9 86 206 ## 25 OT 155 ## 1 78 75 ## 8 85 46 ## 15 92 35 ## 11 88 25 ## 19 96 19 ## 7 84 16 ## 12 89 11 ## 5 82 8 ## 3 80 7 ## 4 81 6 ## 21 98 5 ## 20 97 5 ## 13 90 4 ## 18 95 3 ## 6 83 3 ## 17 94 2 ## 16 93 1 ## 14 91 1 ## 10 87 1 ## 22 99 1 ## 23 9C 1 ## 24 9D 1 ## 0 00 1 In conclusion, both R and Python offer efficient ways to download and process AIANNH TIGER/Line Shapefile data from the U.S. Census Bureau. The \u2018sf\u2019 package in R provides a simple way to read and manipulate spatial data, while the \u2018geopandas\u2019 library in Python offers similar functionality. 
The \u2018dplyr\u2019 package in R can be used for data manipulation and analysis, and Python\u2019s built-in functions like value_counts() can be used for aggregating data. Depending on your preferred programming language and environment, both options can be effective for working with AIANNH data. Indian Entities Recognized and Eligible To Receive Services by BIA \u00b6 The Bureau of Indian Affairs (BIA) provides a PDF document containing a list of Indian Entities Recognized and Eligible To Receive Services. To analyze the data, we\u2019ll first need to extract the information from the PDF. In this example, we\u2019ll extract the names of the recognized tribes and count the number of tribes per state. R: In R, we\u2019ll use the \u2018pdftools\u2019 package to extract text from the PDF and the \u2018stringr\u2019 package to process the text data. R code: # Install and load necessary libraries library ( pdftools ) library ( stringr ) library ( dplyr ) # Download the BIA PDF url <- \"https://www.govinfo.gov/content/pkg/FR-2022-01-28/pdf/2022-01789.pdf\" temp_file <- tempfile ( fileext = \".pdf\" ) download.file ( url , temp_file , mode = \"wb\" ) # Extract text from the PDF pdf_text <- pdf_text ( temp_file ) tribe_text <- pdf_text [ 4 : length ( pdf_text )] # Define helper functions tribe_state_extractor <- function ( text_line ) { regex_pattern <- \"(.*),\\\\s+([A-Z]{2})$\" tribe_state <- str_match ( text_line , regex_pattern ) return ( tribe_state ) } is_valid_tribe_line <- function ( text_line ) { regex_pattern <- \"^\\\\d+\\\\s+\" return ( ! is.na ( str_match ( text_line , regex_pattern ))) } # Process text data to extract tribes and states tribe_states <- sapply ( tribe_text , tribe_state_extractor ) valid_lines <- sapply ( tribe_text , is_valid_tribe_line ) tribe_states <- tribe_states [ valid_lines , 2 : 3 ] # Count the number of tribes per state tribe_data <- as.data.frame ( tribe_states ) colnames ( tribe_data ) <- c ( \"Tribe\" , \"State\" ) state_counts <- tribe_data %>% group_by ( State ) %>% summarise ( Count = n ()) print ( state_counts ) ## # A tibble: 0 \u00d7 2 ## # \u2139 2 variables: State , Count Python: In Python, we\u2019ll use the \u2018PyPDF2\u2019 library to extract text from the PDF and the \u2018re\u2019 module to process the text data. Python code: # Install necessary libraries import requests import PyPDF2 import io import re from collections import Counter # Download the BIA PDF url = \"https://www.bia.gov/sites/bia.gov/files/assets/public/raca/online-tribal-leaders-directory/tribal_leaders_2021-12-27.pdf\" response = requests . get ( url ) # Extract text from the PDF pdf_reader = PyPDF2 . PdfFileReader ( io . BytesIO ( response . content )) tribe_text = [ pdf_reader . getPage ( i ) . extractText () for i in range ( 3 , pdf_reader . numPages )] # Process text data to extract tribes and states tribes = [ re . findall ( r '^\\d+\\s+(.+),\\s+([A-Z] {2} )' , line ) for text in tribe_text for line in text . split ( ' \\n ' ) if line ] tribe_states = [ state for tribe , state in tribes ] # Count the number of tribes per state state_counts = Counter ( tribe_states ) print ( state_counts ) In conclusion, both R and Python offer efficient ways to download and process the list of Indian Entities Recognized and Eligible To Receive Services from the BIA. The \u2018pdftools\u2019 package in R provides a simple way to extract text from PDF files, while the \u2018PyPDF2\u2019 library in Python offers similar functionality. 
The \u2018stringr\u2019 package in R and the \u2018re\u2019 module in Python can be used to process and analyze text data. Depending on your preferred programming language and environment, both options can be effective for working with BIA data. National Atlas - Indian Lands of the United States dataset \u00b6 In this example, we will download and analyze the National Atlas - Indian Lands of the United States dataset in both R and Python. We will read the dataset and count the number of Indian lands per state. R: In R, we\u2019ll use the \u2018sf\u2019 package to read the Shapefile and the \u2018dplyr\u2019 package to process the data. R code: # Install and load necessary libraries library ( sf ) library ( dplyr ) # Download the Indian Lands dataset url <- \"https://prd-tnm.s3.amazonaws.com/StagedProducts/Small-scale/data/Boundaries/indlanp010g.shp_nt00968.tar.gz\" temp_file <- tempfile ( fileext = \".tar.gz\" ) download.file ( url , temp_file , mode = \"wb\" ) untar ( temp_file , exdir = tempdir ()) # Read the Shapefile shapefile_path <- file.path ( tempdir (), \"indlanp010g.shp\" ) indian_lands <- read_sf ( shapefile_path ) # Count the number of Indian lands per state # state_counts <- indian_lands %>% # group_by(STATE) %>% # summarize(count = n()) plot ( indian_lands ) ## Warning: plotting the first 9 out of 23 attributes; use max.plot = 23 to plot ## all Python: In Python, we\u2019ll use the \u2018geopandas\u2019 and \u2018pandas\u2019 libraries to read the Shapefile and process the data. Python code: import geopandas as gpd import pandas as pd import requests import tarfile import os from io import BytesIO # Download the Indian Lands dataset url = \"https://prd-tnm.s3.amazonaws.com/StagedProducts/Small-scale/data/Boundaries/indlanp010g.shp_nt00966.tar.gz\" response = requests . get ( url ) tar_file = tarfile . open ( fileobj = BytesIO ( response . content ), mode = 'r:gz' ) # Extract Shapefile temp_dir = \"temp\" if not os . path . exists ( temp_dir ): os . makedirs ( temp_dir ) tar_file . extractall ( path = temp_dir ) shapefile_path = os . path . join ( temp_dir , \"indlanp010g.shp\" ) # Read the Shapefile indian_lands = gpd . read_file ( shapefile_path ) # Count the number of Indian lands per state state_counts = indian_lands . groupby ( \"STATE\" ) . size () . reset_index ( name = \"count\" ) print ( state_counts ) Both R and Python codes download the dataset and read the Shapefile using the respective packages. They then group the data by the \u2018STATE\u2019 attribute and calculate the count of Indian lands per state.","title":"R and Python bilingualism"},{"location":"additional-resources/bilingualism_md/#r-and-python-bilingualism","text":"Welcome to the R and Python bilingualism reference guide! If you\u2019re fluent in one of these languages but hesitant to learn the other, you\u2019re in the right place. The good news is that there are many similarities between R and Python that make it easy to switch between the two. Both R and Python are widely used in data science and are open-source, meaning that they are free to use and constantly being improved by the community. They both have extensive libraries for data analysis, visualization, and machine learning. In fact, many of the libraries in both languages have similar names and functions, such as Pandas in Python and data.table in R. While there are differences between the two languages, they can complement each other well. 
Python is versatile and scalable, making it ideal for large and complex projects such as web development and artificial intelligence. R, on the other hand, is known for its exceptional statistical capabilities and is often used in data analysis and modeling. Visualization is also easier in R, making it a popular choice for creating graphs and charts. By learning both R and Python, you\u2019ll be able to take advantage of the strengths of each language and create more efficient and robust data analysis workflows. Don\u2019t let the differences between the two languages intimidate you - once you become familiar with one, learning the other will be much easier. So, whether you\u2019re a Python enthusiast looking to expand your statistical analysis capabilities, or an R user interested in exploring the world of web development and artificial intelligence, this guide will help you become bilingual in R and Python.","title":"R and Python bilingualism"},{"location":"additional-resources/bilingualism_md/#install-packages","text":"In R, packages can be installed from CRAN repository by using the install.packages() function: R code: # Install the dplyr package from CRAN install.packages ( \"dplyr\" ) In Python, packages can be installed from the Anaconda repository by using the conda install command: Python code: # Install the pandas package from Anaconda ! conda install pandas Loading libraries in R and Python In R, libraries can be loaded in the same way as before, using the library() function: R code: # Load the dplyr library library ( dplyr ) In Python, libraries can be loaded in the same way as before, using the import statement. Here\u2019s an example: Python code: # Load the pandas library import pandas as pd Note that the package or library must be installed from the respective repository before it can be loaded. Also, make sure you have the correct repository specified in your system before installing packages. By default, R uses CRAN as its primary repository, whereas Anaconda uses its own repository by default.","title":"Install packages"},{"location":"additional-resources/bilingualism_md/#reticulate","text":"The reticulate package lets you run both R and Python together in the R environment. R libraries are stored and managed in a repository called CRAN. You can download R packages with the install.packages() function install.packages ( \"reticulate\" ) You only need to install packages once, but you need to mount those packages with the library() function each time you open R. library ( reticulate ) Python libraries are stored and managed in a few different libraries and their dependencies are not regulated as strictly as R libraries are in CRAN. It\u2019s easier to publish a python package but it can also be more cumbersome for users because you need to manage dependencies yourself. You can download python packages using both R and Python code py_install ( \"laspy\" ) ## + '/Users/ty/opt/miniconda3/bin/conda' 'install' '--yes' '--prefix' '/Users/ty/opt/miniconda3/envs/earth-analytics-python' '-c' 'conda-forge' 'laspy' Now, let\u2019s create a Python list and assign it to a variable py_list: R code: py_list <- r_to_py ( list ( 1 , 2 , 3 )) We can now print out the py_list variable in Python using the py_run_string() function: R code: py_run_string ( \"print(r.py_list)\" ) This will output [1, 2, 3] in the Python console. 
Now, let\u2019s create an R vector and assign it to a variable r_vec: R code: r_vec <- c ( 4 , 5 , 6 ) We can now print out the r_vec variable in R using the py$ syntax to access Python variables: R code: print ( py $ py_list ) This will output [1, 2, 3] in the R console. We can also call Python functions from R using the py_call() function. For example, let\u2019s call the Python sum() function on the py_list variable and assign the result to an R variable r_sum: R code: r_sum <- py_call ( \"sum\" , args = list ( py_list )) We can now print out the r_sum variable in R: R code: print ( r_sum ) This will output 6 in the R console.","title":"reticulate"},{"location":"additional-resources/bilingualism_md/#load-packages-and-change-settings","text":"options ( java.parameters = \"-Xmx5G\" ) library ( r5r ) library ( sf ) library ( data.table ) library ( ggplot2 ) library ( interp ) library ( dplyr ) library ( osmdata ) library ( ggthemes ) library ( sf ) library ( data.table ) library ( ggplot2 ) library ( akima ) library ( dplyr ) library ( raster ) library ( osmdata ) library ( mapview ) library ( cowplot ) library ( here ) library ( testthat ) import sys sys . argv . append ([ \"--max-memory\" , \"5G\" ]) import pandas as pd import geopandas import matplotlib.pyplot as plt import numpy as np import plotnine import contextily as cx import r5py import seaborn as sns R and Python are two popular programming languages used for data analysis, statistics, and machine learning. Although they share some similarities, there are some fundamental differences between them. Here\u2019s an example code snippet in R and Python to illustrate some of the differences: R Code: # Create a vector of numbers from 1 to 10 x <- 1 : 10 # Compute the mean of the vector mean_x <- mean ( x ) # Print the result print ( mean_x ) ## [1] 5.5 Python Code: # Import the numpy library for numerical operations import numpy as np # Create a numpy array of numbers from 1 to 10 x = np . array ([ 1 , 2 , 3 , 4 , 5 , 6 , 7 , 8 , 9 , 10 ]) # Compute the mean of the array mean_x = np . mean ( x ) # Print the result print ( mean_x ) ## 5.5 In this example, we can see that there are several differences between R and Python: Syntax: R uses the assignment operator \\<- while Python uses the equals sign = for variable assignment. Libraries: Python relies heavily on external libraries such as numpy, pandas, and matplotlib for data analysis, while R has built-in functions for many data analysis tasks. Data types: R is designed to work with vectors and matrices, while Python uses lists and arrays. In the example above, we used the numpy library to create a numerical array in Python. Function names: Function names in R and Python can differ significantly. In the example above, we used the mean() function in R and the np.mean() function in Python to calculate the mean of the vector/array. These are just a few of the many differences between R and Python. 
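One nuance on the libraries point: for a calculation this simple, Python's standard library is enough, so reaching for numpy is a choice rather than a requirement. A minimal sketch using only the standard library (not part of the original example):

```python
# Standard-library version of the same mean calculation (no numpy required)
from statistics import mean

x = list(range(1, 11))  # the numbers 1 to 10
print(mean(x))          # 5.5
```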
Ultimately, the choice between the two languages will depend on your specific needs and preferences.","title":"Load packages and change settings"},{"location":"additional-resources/bilingualism_md/#load-saved-data","text":"R Code: data ( \"iris\" ) here () load ( file = here ( \"2_R_and_Py_bilingualism\" , \"data\" , \"iris_example_data.rdata\" )) objects () Python code:","title":"Load saved data"},{"location":"additional-resources/bilingualism_md/#save-data","text":"R Code: save ( iris , file = here ( \"2_R_and_Py_bilingualism\" , \"data\" , \"iris_example_data.rdata\" )) write.csv ( iris , file = here ( \"2_R_and_Py_bilingualism\" , \"data\" , \"iris_example_data.csv\" )) Python code:","title":"Save data"},{"location":"additional-resources/bilingualism_md/#functions","text":"Both R and Python are powerful languages for writing functions that can take input, perform a specific task, and return output. R Code: # Define a function that takes two arguments and returns their sum sum_r <- function ( a , b ) { return ( a + b ) } # Call the function with two arguments and print the result result_r <- sum_r ( 3 , 5 ) print ( result_r ) ## [1] 8 Python code: # Define a function that takes two arguments and returns their sum def sum_py ( a , b ): return a + b # Call the function with two arguments and print the result result_py = sum_py ( 3 , 5 ) print ( result_py ) ## 8 In both cases, we define a function that takes two arguments and returns their sum. In R, we use the function keyword to define a function, while in Python, we use the def keyword. The function body in R is enclosed in curly braces, while in Python it is indented. There are a few differences in the syntax and functionality between the two approaches: Function arguments: In R, function arguments are separated by commas, while in Python they are enclosed in parentheses. The syntax for specifying default arguments and variable-length argument lists can also differ between the two languages. Return statement: In R, we use the return keyword to specify the return value of a function, while in Python, we simply use the return statement. Function names: Function names in R and Python can differ significantly. In the example above, we used the sum_r() function in R and the sum_py() function in Python to calculate the sum of two numbers.","title":"functions"},{"location":"additional-resources/bilingualism_md/#data-plots","text":"R Code: # Load the \"ggplot2\" package for plotting library ( ggplot2 ) # Generate some sample data x <- seq ( 1 , 10 , 1 ) y <- x + rnorm ( 10 ) # Create a scatter plot ggplot ( data.frame ( x , y ), aes ( x = x , y = y )) + geom_point () Python code: # Load the \"matplotlib\" library import matplotlib.pyplot as plt # Generate some sample data import numpy as np x = np . arange ( 1 , 11 ) y = x + np . random . normal ( 0 , 1 , 10 ) #clear last plot plt . clf () # Create a scatter plot plt . scatter ( x , y ) plt . show () In both cases, we generate some sample data and create a scatter plot to visualize the relationship between the variables. There are a few differences in the syntax and functionality between the two approaches: Library and package names: In R, we use the ggplot2 package for plotting, while in Python, we use the matplotlib library. Data format: In R, we use a data frame to store the input data, while in Python, we use numpy arrays. Plotting functions: In R, we use the ggplot() function to create a new plot object, and then use the geom_point() function to create a scatter plot layer. 
In Python, we use the scatter() function from the matplotlib.pyplot module to create a scatter plot directly.","title":"Data Plots"},{"location":"additional-resources/bilingualism_md/#linear-regression","text":"R Code: # Load the \"ggplot2\" package for plotting library ( ggplot2 ) # Generate some sample data x <- seq ( 1 , 10 , 1 ) y <- x + rnorm ( 10 ) # Perform linear regression model_r <- lm ( y ~ x ) # Print the model summary summary ( model_r ) ## ## Call: ## lm(formula = y ~ x) ## ## Residuals: ## Min 1Q Median 3Q Max ## -1.69344 -0.42336 0.08961 0.34778 1.56728 ## ## Coefficients: ## Estimate Std. Error t value Pr(>|t|) ## (Intercept) -0.1676 0.6781 -0.247 0.811 ## x 0.9750 0.1093 8.921 1.98e-05 *** ## --- ## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1 ## ## Residual standard error: 0.9926 on 8 degrees of freedom ## Multiple R-squared: 0.9087, Adjusted R-squared: 0.8972 ## F-statistic: 79.59 on 1 and 8 DF, p-value: 1.976e-05 # Plot the data and regression line ggplot ( data.frame ( x , y ), aes ( x = x , y = y )) + geom_point () + geom_smooth ( method = \"lm\" , se = FALSE ) ## `geom_smooth()` using formula = 'y ~ x' Python code: # Load the \"matplotlib\" and \"scikit-learn\" libraries import matplotlib.pyplot as plt from sklearn.linear_model import LinearRegression # Generate some sample data import numpy as np x = np . arange ( 1 , 11 ) y = x + np . random . normal ( 0 , 1 , 10 ) # Perform linear regression model_py = LinearRegression () . fit ( x . reshape ( - 1 , 1 ), y ) # Print the model coefficients print ( \"Coefficients: \" , model_py . coef_ ) ## Coefficients: [1.15539692] print ( \"Intercept: \" , model_py . intercept_ ) #clear last plot ## Intercept: -1.1291396173221218 plt . clf () # Plot the data and regression line plt . scatter ( x , y ) plt . plot ( x , model_py . predict ( x . reshape ( - 1 , 1 )), color = 'red' ) plt . show () In both cases, we generate some sample data with a linear relationship between x and y, and then perform a simple linear regression to estimate the slope and intercept of the line. We then plot the data and regression line to visualize the fit. There are a few differences in the syntax and functionality between the two approaches: Library and package names: In R, we use the lm() function from the base package to perform linear regression, while in Python, we use the LinearRegression() class from the scikit-learn library. Additionally, we use the ggplot2 package in R for plotting, while we use the matplotlib library in Python. Data format: In R, we can specify the dependent and independent variables in the formula used for regression. In Python, we need to reshape the input data to a two-dimensional array before fitting the model. Model summary: In R, we can use the summary() function to print a summary of the model, including the estimated coefficients, standard errors, and p-values. In Python, we need to print the coefficients and intercept separately.","title":"Linear regression"},{"location":"additional-resources/bilingualism_md/#random-forest","text":"R Code: # Load the \"randomForest\" package library ( randomForest ) # Load the \"iris\" dataset data ( iris ) # Split the data into training and testing sets set.seed ( 123 ) train_idx <- sample ( 1 : nrow ( iris ), nrow ( iris ) * 0.7 , replace = FALSE ) train_data <- iris [ train_idx , ] test_data <- iris [ - train_idx , ] # Build a random forest model rf_model <- randomForest ( Species ~ . 
, data = train_data , ntree = 500 ) # Make predictions on the testing set predictions <- predict ( rf_model , test_data ) # Calculate accuracy of the model accuracy <- sum ( predictions == test_data $ Species ) / nrow ( test_data ) print ( paste ( \"Accuracy:\" , accuracy )) ## [1] \"Accuracy: 0.977777777777778\" Python code: # Load the \"pandas\", \"numpy\", and \"sklearn\" libraries import pandas as pd import numpy as np from sklearn.ensemble import RandomForestClassifier from sklearn.datasets import load_iris from sklearn.model_selection import train_test_split # Load the \"iris\" dataset iris = load_iris () # Split the data into training and testing sets X_train , X_test , y_train , y_test = train_test_split ( iris . data , iris . target , test_size = 0.3 , random_state = 123 ) # Build a random forest model rf_model = RandomForestClassifier ( n_estimators = 500 , random_state = 123 ) rf_model . fit ( X_train , y_train ) # Make predictions on the testing set ## RandomForestClassifier(n_estimators=500, random_state=123) predictions = rf_model . predict ( X_test ) # Calculate accuracy of the model accuracy = sum ( predictions == y_test ) / len ( y_test ) print ( \"Accuracy:\" , accuracy ) ## Accuracy: 0.9555555555555556 In both cases, we load the iris dataset and split it into training and testing sets. We then build a random forest model using the training data and evaluate its accuracy on the testing data. There are a few differences in the syntax and functionality between the two approaches: Library and package names: In R, we use the randomForest package to build random forest models, while in Python, we use the RandomForestClassifier class from the sklearn.ensemble module. We also use different libraries for loading and manipulating data (pandas and numpy in Python, and built-in datasets in R). Model parameters: The syntax for setting model parameters is slightly different in R and Python. For example, in R, we specify the number of trees using the ntree parameter, while in Python, we use the n_estimators parameter. Data format: In R, we use a data frame to store the input data, while in Python, we use numpy arrays.","title":"Random Forest"},{"location":"additional-resources/bilingualism_md/#basic-streetmap-from-open-street-map","text":"R Code: # Load the \"osmdata\" package for mapping library ( osmdata ) library ( tmap ) # Define the map location and zoom level bbox <- c ( left = -0.16 , bottom = 51.49 , right = -0.13 , top = 51.51 ) # Get the OpenStreetMap data osm_data <- opq ( bbox ) %>% add_osm_feature ( key = \"highway\" ) %>% osmdata_sf () # Plot the map using tmap tm_shape ( osm_data $ osm_lines ) + tm_lines () Python code: # Load the \"osmnx\" package for mapping import osmnx as ox # Define the map location and zoom level bbox = ( 51.49 , - 0.16 , 51.51 , - 0.13 ) # Get the OpenStreetMap data osm_data = ox . graph_from_bbox ( north = bbox [ 2 ], south = bbox [ 0 ], east = bbox [ 3 ], west = bbox [ 1 ], network_type = 'all' ) # Plot the map using osmnx ox . plot_graph ( osm_data ) ## (
    , ) In both cases, we define the map location and zoom level, retrieve the OpenStreetMap data using the specified bounding box, and plot the map. The main differences between the two approaches are: Package names and syntax: In R, we use the osmdata package and its syntax to download and process the OpenStreetMap data, while in Python, we use the osmnx package and its syntax. Mapping libraries: In R, we use the tmap package to create a static map of the OpenStreetMap data, while in Python, we use the built-in ox.plot_graph function from the osmnx package to plot the map.","title":"Basic streetmap from Open Street Map"},{"location":"additional-resources/bilingualism_md/#cnn-on-raster-data","text":"R Code: # Load the \"keras\" package for building the CNN library ( tensorflow ) library ( keras ) # Load the \"raster\" package for working with raster data library ( raster ) # Load the \"magrittr\" package for pipe operator library ( magrittr ) # Load the data as a raster brick raster_data <- brick ( \"raster_data.tif\" ) # Split the data into training and testing sets split_data <- sample ( 1 : nlayers ( raster_data ), size = nlayers ( raster_data ) * 0.8 , replace = FALSE ) train_data <- raster_data [[ split_data ]] test_data <- raster_data [[ setdiff ( 1 : nlayers ( raster_data ), split_data )]] # Define the CNN model model <- keras_model_sequential () %>% layer_conv_2d ( filters = 32 , kernel_size = c ( 3 , 3 ), activation = \"relu\" , input_shape = c ( ncol ( train_data ), nrow ( train_data ), ncell ( train_data ))) %>% layer_max_pooling_2d ( pool_size = c ( 2 , 2 )) %>% layer_dropout ( rate = 0.25 ) %>% layer_flatten () %>% layer_dense ( units = 128 , activation = \"relu\" ) %>% layer_dropout ( rate = 0.5 ) %>% layer_dense ( units = nlayers ( train_data ), activation = \"softmax\" ) # Compile the model model %>% compile ( loss = \"categorical_crossentropy\" , optimizer = \"adam\" , metrics = \"accuracy\" ) # Train the model history <- model %>% fit ( x = array ( train_data ), y = to_categorical ( 1 : nlayers ( train_data )), epochs = 10 , validation_split = 0.2 ) # Evaluate the model model %>% evaluate ( x = array ( test_data ), y = to_categorical ( 1 : nlayers ( test_data ))) # Plot the model accuracy over time plot ( history )","title":"CNN on Raster data"},{"location":"additional-resources/bilingualism_md/#piping","text":"Piping is a powerful feature in both R and Python that allows for a more streamlined and readable code. However, the syntax for piping is slightly different between the two languages. In R, piping is done using the %>% operator from the magrittr package, while in Python, it is done using the | operator from the pandas package. Let\u2019s compare and contrast piping in R and Python with some examples: Piping in R In R, we can use the %>% operator to pipe output from one function to another, which can make our code more readable and easier to follow. Here\u2019s an example: R code: library ( dplyr ) # create a data frame df <- data.frame ( x = c ( 1 , 2 , 3 ), y = c ( 4 , 5 , 6 )) # calculate the sum of column x and y df %>% mutate ( z = x + y ) %>% summarize ( sum_z = sum ( z )) ## sum_z ## 1 21 In this example, we first create a data frame df with two columns x and y. We then pipe the output of df to mutate, which adds a new column z to the data frame that is the sum of x and y. Finally, we pipe the output to summarize, which calculates the sum of z and returns the result. 
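The Python counterpart is covered next; as a preview, note that pandas does not have a pipe operator, but its DataFrame.pipe() method plays a role similar to %>% when chaining user-defined functions. A minimal sketch of the same x + y summary (the helper names add_z and summarize_z are illustrative, not from the original example):

```python
import pandas as pd

def add_z(df):
    # mirrors mutate(z = x + y)
    return df.assign(z=df["x"] + df["y"])

def summarize_z(df):
    # mirrors summarize(sum_z = sum(z))
    return pd.DataFrame({"sum_z": [df["z"].sum()]})

df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]})
print(df.pipe(add_z).pipe(summarize_z))  # sum_z = 21
```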
Piping in Python In Python, pandas does not provide a dedicated pipe operator; instead of piping output from one function to another, we pipe a DataFrame through its own methods (method chaining) or pass it to a function with the DataFrame.pipe() method shown above. Here\u2019s an example using method chaining: Python code: import pandas as pd # create a DataFrame df = pd . DataFrame ({ 'x' : [ 1 , 2 , 3 ], 'y' : [ 4 , 5 , 6 ]}) # calculate the sum of column x and y ( df . assign ( z = df [ 'x' ] + df [ 'y' ]) . agg ( sum_z = ( 'z' , 'sum' ))) ## z ## sum_z 21 In this example, we first create a DataFrame df with two columns x and y. We then use the assign() method to add a new column z to the DataFrame that is the sum of x and y. Finally, we use the agg() method to calculate the sum of z and return the result. As we can see, the syntax for piping is different between R and Python, but the concept remains the same. Piping can make our code more readable and easier to follow, which is an important aspect of creating efficient and effective code. R code: library ( dplyr ) library ( ggplot2 ) iris %>% filter ( Species == \"setosa\" ) %>% group_by ( Sepal.Width ) %>% summarise ( mean.Petal.Length = mean ( Petal.Length )) %>% mutate ( Sepal.Width = as.factor ( Sepal.Width )) %>% ggplot ( aes ( x = Sepal.Width , y = mean.Petal.Length )) + geom_bar ( stat = \"identity\" , fill = \"dodgerblue\" ) + labs ( title = \"Mean Petal Length of Setosa by Sepal Width\" , x = \"Sepal Width\" , y = \"Mean Petal Length\" ) In this example, we start with the iris dataset and filter it to only include rows where the Species column is \u201csetosa\u201d. We then group the remaining rows by the Sepal.Width column and calculate the mean Petal.Length for each group. Next, we convert Sepal.Width to a factor variable to ensure that it is treated as a categorical variable in the visualization. Finally, we create a bar plot using ggplot2, with Sepal.Width on the x-axis and mean.Petal.Length on the y-axis. The resulting plot shows the mean petal length of setosa flowers for each sepal width category. Python code: import pandas as pd # Load the iris dataset and pipe it into the next function ( pd . read_csv ( \"https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data\" , header = None , names = [ 'sepal_length' , 'sepal_width' , 'petal_length' , 'petal_width' , 'class' ]) # Select columns and pivot the dataset . loc [:, [ 'sepal_length' , 'sepal_width' , 'petal_length' ]] . melt ( var_name = 'variable' , value_name = 'value' ) # Group by variable and calculate mean . groupby ( 'variable' , as_index = False ) . mean () # Filter for mean greater than 3.5 and sort by descending mean . query ( 'value > 3.5' ) . sort_values ( 'value' , ascending = False ) ) ## variable value ## 1 sepal_length 5.843333 ## 0 petal_length 3.758667","title":"Piping"},{"location":"additional-resources/bilingualism_md/#for-loops","text":"Here is an example of a for loop in R: R code # Create a vector of numbers numbers <- c ( 1 , 2 , 3 , 4 , 5 ) # Use a for loop to print out each number in the vector for ( i in numbers ) { print ( i ) } ## [1] 1 ## [1] 2 ## [1] 3 ## [1] 4 ## [1] 5 In this example, the for loop iterates over each element in the numbers vector, assigning the current element to the variable i. The print(i) statement is then executed for each iteration, outputting the value of i.
Here is the equivalent example in Python: Python code # Create a list of numbers numbers = [ 1 , 2 , 3 , 4 , 5 ] # Use a for loop to print out each number in the list for i in numbers : print ( i ) ## 1 ## 2 ## 3 ## 4 ## 5 In Python, the for loop iterates over each element in the numbers list, assigning the current element to the variable i. The print(i) statement is then executed for each iteration, outputting the value of i. Both languages also support nested for loops, which can be used to perform iterations over multiple dimensions, such as looping through a 2D array.","title":"for loops"},{"location":"additional-resources/bilingualism_md/#parallel","text":"Parallel computing is a technique used to execute multiple computational tasks simultaneously, which can significantly reduce the time required to complete a task. Both R and Python have built-in support for parallel computing, although the approaches are slightly different. In this answer, we will compare and contrast the parallel computing capabilities of R and Python, and provide working examples in code. Parallel computing in R In R, there are several packages that support parallel computing, such as parallel, foreach, and doParallel. The parallel package provides basic functionality for parallel computing, while foreach and doParallel provide higher-level abstractions that make it easier to write parallel code. Here is an example of using the foreach package to execute a loop in parallel: R code: library ( foreach ) library ( doParallel ) # Set up a parallel backend with 4 workers cl <- makeCluster ( 4 ) registerDoParallel ( cl ) # Define a function to apply in parallel myfunc <- function ( x ) { # some computation here return ( x ^ 2 ) } # Generate some data mydata <- 1 : 1000 # Apply the function to the data in parallel result <- foreach ( i = mydata ) %dopar% { myfunc ( i ) } # Stop the cluster stopCluster ( cl ) In this example, we use the makeCluster() function to set up a cluster with 4 workers, and the registerDoParallel() function to register the cluster as the parallel backend for foreach. We then define a function myfunc() that takes an input x and returns x^2. We generate some data mydata and use foreach to apply myfunc() to each element of mydata in parallel, using the %dopar% operator. R Tidyverse parallel In R Tidyverse, we can use the furrr package for parallel computing. Here\u2019s an example of using furrr to parallelize a map function: R Tidy code: library ( tidyverse ) library ( furrr ) # Generate a list of numbers numbers <- 1 : 10 # Use the future_map function from furrr to parallelize the map function plan ( multisession ) squares <- future_map ( numbers , function ( x ) x ^ 2 ) In this example, we first load the Tidyverse and furrr libraries. We then generate a list of numbers from 1 to 10. We then use the plan function to set the parallelization strategy to \u201cmultisession\u201d, which will use multiple CPU cores to execute the code. Finally, we use the future_map function from furrr to apply the function x^2 to each number in the list in parallel. Parallel computing in Python In Python, the standard library includes the multiprocessing module, which provides basic support for parallel computing. Additionally, there are several third-party packages that provide higher-level abstractions, such as joblib and dask. 
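As an illustration of those higher-level abstractions, a simple task such as squaring a list of numbers can be parallelized with joblib in a couple of lines. This is a minimal sketch and assumes joblib is installed (for example via pip install joblib):

```python
from joblib import Parallel, delayed

def square(x):
    return x ** 2

numbers = list(range(1, 11))

# Run square() over the list on 4 worker processes
squares = Parallel(n_jobs=4)(delayed(square)(i) for i in numbers)
print(squares)
```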
Here is an example of using the multiprocessing module to execute a loop in parallel: Python code: def square ( x ): return x ** 2 from multiprocessing import Pool # Generate a list of numbers numbers = list ( range ( 1 , 11 )) # Use the map function and a pool of workers to parallelize the square function with Pool () as pool : squares = pool . map ( square , numbers ) print ( squares ) In this example, we define a function myfunc() that takes an input x and returns x^2. We generate some data mydata and use the Pool class from the multiprocessing module to set up a pool of 4 workers. We then use the map() method of the Pool class to apply myfunc() to each element of mydata in parallel. Comparison and contrast Both R and Python have built-in support for parallel computing, with similar basic functionality for creating and managing parallel processes. However, the higher-level abstractions differ between the two languages. In R, the foreach package provides a high-level interface that makes it easy to write parallel code, while in Python, the multiprocessing module provides a basic interface that can be extended using third-party packages like joblib and dask. Additionally, Python has better support for distributed computing using frameworks like Apache Spark, while R has better support for shared-memory parallelism using tools like data.table and ff.","title":"Parallel"},{"location":"additional-resources/bilingualism_md/#data-wrangling","text":"Data wrangling is an important part of any data analysis project, and both R and Python provide tools and libraries for performing this task. In this answer, we will compare and contrast data wrangling in R\u2019s tidyverse and Python\u2019s pandas library, with working examples in code. Data Wrangling in R Tidyverse The tidyverse is a collection of R packages designed for data science, and it includes several packages that are useful for data wrangling. One of the most popular packages is dplyr, which provides a grammar of data manipulation for data frames. Here is an example of using dplyr to filter, mutate, and summarize a data frame: R code library ( dplyr ) # Load data data ( mtcars ) # Filter for cars with more than 100 horsepower mtcars %>% filter ( hp > 100 ) %>% # Add a new column with fuel efficiency in km per liter mutate ( kmpl = 0.425 * mpg ) %>% # Group by number of cylinders and summarize group_by ( cyl ) %>% summarize ( mean_hp = mean ( hp ), mean_kmpl = mean ( kmpl )) ## # A tibble: 3 \u00d7 3 ## cyl mean_hp mean_kmpl ## ## 1 4 111 11.0 ## 2 6 122. 8.39 ## 3 8 209. 6.42 In this example, we first filter the mtcars data frame to only include cars with more than 100 horsepower. We then use mutate to create a new column with fuel efficiency in kilometers per liter. Finally, we group the data by the number of cylinders and calculate the mean horsepower and fuel efficiency. Data Wrangling in Python Pandas Pandas is a popular library for data manipulation in Python. It provides a data frame object similar to R\u2019s data frames, along with a wide range of functions for data wrangling. Here is an example of using pandas to filter, transform, and group a data frame: Python code: import pandas as pd # Load data mtcars = pd . 
read_csv ( 'https://raw.githubusercontent.com/mwaskom/seaborn-data/master/mtcars.csv' ) # Filter for cars with more than 100 horsepower filtered_mtcars = mtcars [ mtcars [ 'hp' ] > 100 ] # Add a new column with fuel efficiency in km per liter filtered_mtcars [ 'kmpl' ] = 0.425 * filtered_mtcars [ 'mpg' ] # Group by number of cylinders and calculate mean horsepower and fuel efficiency grouped_mtcars = filtered_mtcars . groupby ( 'cyl' ) . agg ({ 'hp' : 'mean' , 'kmpl' : 'mean' }) In this example, we first load the mtcars data from a CSV file. We then filter the data to only include cars with more than 100 horsepower, using boolean indexing. We use the assign function to create a new column with fuel efficiency in kilometers per liter. Finally, we group the data by the number of cylinders and calculate the mean horsepower and fuel efficiency. Comparison Overall, both R\u2019s tidyverse and Python\u2019s pandas provide similar functionality for data wrangling. Both allow for filtering, transforming, and aggregating data frames. The syntax for performing these operations is slightly different between the two languages, with R using the %>% operator for chaining operations and Python using method chaining or the apply family of functions. One key difference between the two languages is that R\u2019s tidyverse provides a consistent grammar for data manipulation across its various packages, making it easier to learn and use. However, Python\u2019s pandas library has a larger developer community and is more versatile for use in other applications, such as web development or machine learning. In conclusion, both R and Python provide powerful tools for data wrangling, and the choice between the two ultimately depends on the specific needs of the user and their familiarity","title":"Data wrangling"},{"location":"additional-resources/bilingualism_md/#data-from-api","text":"Retrieving data from an API is a common task in both R and Python. Here are examples of how to retrieve data from an API in both languages: Python To retrieve data from an API in Python, we can use the requests library. Here\u2019s an example of how to retrieve weather data from the OpenWeatherMap API: Python code: import requests url = 'https://api.openweathermap.org/data/2.5/weather?q=London,uk&appid=API_KEY' response = requests . get ( url ) data = response . json () print ( data ) This code retrieves the current weather data for London from the OpenWeatherMap API. We first construct the API URL with the location and API key, then use the requests.get() function to make a request to the API. We then extract the JSON data from the response using the .json() method and print the resulting data. R In R, we can use the httr package to retrieve data from an API. Here\u2019s an example of how to retrieve weather data from the OpenWeatherMap API in R: R code: library ( httr ) url <- 'https://api.openweathermap.org/data/2.5/weather?q=London,uk&appid=API_KEY' response <- GET ( url ) data <- content ( response , 'text' ) print ( data ) This code is similar to the Python code above. We first load the httr library, then construct the API URL and use the GET() function to make a request to the API. We then extract the data from the response using the content() function and print the resulting data. Retrieving Data from an API in R Tidyverse In R Tidyverse, we can use the httr and jsonlite packages to retrieve and process data from an API. 
R code: # Load required packages library ( httr ) library ( jsonlite ) # Define API endpoint endpoint <- \"https://jsonplaceholder.typicode.com/posts\" # Retrieve data from API response <- GET ( endpoint ) # Extract content from response content <- content ( response , \"text\" ) # Convert content to JSON json <- fromJSON ( content ) # Convert JSON to a data frame df <- as.data.frame ( json ) In the above example, we use the GET() function from the httr package to retrieve data from an API endpoint, and the content() function to extract the content of the response. We then use the fromJSON() function from the jsonlite package to convert the JSON content to a list, and the as.data.frame() function to convert the list to a data frame. Retrieving Data from an API in Python In Python, we can use the requests library to retrieve data from an API, and the json library to process the JSON data. Python code: # Load required libraries import requests import json # Define API endpoint endpoint = \"https://jsonplaceholder.typicode.com/posts\" # Retrieve data from API response = requests . get ( endpoint ) # Extract content from response content = response . content # Convert content to JSON json_data = json . loads ( content ) # Convert JSON to a list of dictionaries data = [ dict ( row ) for row in json_data ] In the above example, we use the get() function from the requests library to retrieve data from an API endpoint, and the content attribute to extract the content of the response. We then use the loads() function from the json library to convert the JSON content to a list of dictionaries. Comparison Both R Tidyverse and Python provide powerful tools for retrieving and processing data from an API. In terms of syntax, the two languages are somewhat similar. In both cases, we use a library to retrieve data from the API, extract the content of the response, and then process the JSON data. However, there are some differences in the specific functions and methods used. For example, in R Tidyverse, we use the content() function to extract the content of the response, whereas in Python, we use the content attribute. Additionally, in R Tidyverse, we use the fromJSON() function to convert the JSON data to a list, whereas in Python, we use the loads() function.","title":"Data from API"},{"location":"additional-resources/bilingualism_md/#census-data","text":"Retrieving USA census data in R, R Tidy, and Python can be done using different packages and libraries. Here are some working examples in code for each language: R: To retrieve census data in R, we can use the tidycensus package. Here\u2019s an example of how to retrieve the total population for the state of California: R code: library ( tidycensus ) library ( tidyverse ) # Set your Census API key census_api_key ( \"your_api_key\" ) # Get the total population for the state of California ca_pop <- get_acs ( geography = \"state\" , variables = \"B01003_001\" , state = \"CA\" ) %>% rename ( total_population = estimate ) %>% select ( total_population ) # View the result ca_pop R Tidy: To retrieve census data in R Tidy, we can also use the tidycensus package. 
Here\u2019s an example of how to retrieve the total population for the state of California using pipes and dplyr functions: R tidy code: library ( tidycensus ) library ( tidyverse ) # Set your Census API key census_api_key ( \"your_api_key\" ) # Get the total population for the state of California ca_pop <- get_acs ( geography = \"state\" , variables = \"B01003_001\" , state = \"CA\" ) %>% rename ( total_population = estimate ) %>% select ( total_population ) # View the result ca_pop Python: To retrieve census data in Python, we can use the census library. Here\u2019s an example of how to retrieve the total population for the state of California: Python code: from census import Census from us import states import pandas as pd # Set your Census API key c = Census ( \"your_api_key\" ) # Get the total population for the state of California ca_pop = c . acs5 . state (( \"B01003_001\" ), states . CA . fips , year = 2019 ) # Convert the result to a Pandas DataFrame ca_pop_df = pd . DataFrame ( ca_pop ) # Rename the column ca_pop_df = ca_pop_df . rename ( columns = { \"B01003_001E\" : \"total_population\" }) # Select only the total population column ca_pop_df = ca_pop_df [[ \"total_population\" ]] # View the result ca_pop_df","title":"Census data"},{"location":"additional-resources/bilingualism_md/#lidar-data","text":"To find Lidar data in R and Python, you typically need to start by identifying sources of Lidar data and then accessing them using appropriate packages and functions. Here are some examples of how to find Lidar data in R and Python: R: Identify sources of Lidar data: The USGS National Map Viewer provides access to Lidar data for the United States. You can also find Lidar data on state and local government websites, as well as on commercial data providers\u2019 websites. Access the data: You can use the lidR package in R to download and read Lidar data in the LAS format. For example, the following code downloads and reads Lidar data for a specific area: R code: library ( lidR ) # Download Lidar data LASfile <- system.file ( \"extdata\" , \"Megaplot.laz\" , package = \"lidR\" ) lidar <- readLAS ( LASfile ) # Visualize the data plot ( lidar ) Python: Identify sources of Lidar data: The USGS 3DEP program provides access to Lidar data for the United States. You can also find Lidar data on state and local government websites, as well as on commercial data providers\u2019 websites. Access the data: You can use the pylastools package in Python to download and read Lidar data in the LAS format. For example, the following code downloads and reads Lidar data for a specific area: Python code: py_install ( \"requests\" ) py_install ( \"pylas\" ) py_install ( \"laspy\" ) import requests from pylas import read import laspy import numpy as np # Download Lidar data url = \"https://s3-us-west-2.amazonaws.com/usgs-lidar-public/USGS_LPC_CA_SanFrancisco_2016_LAS_2018.zip\" lasfile = \"USGS_LPC_CA_SanFrancisco_2016_LAS_2018.las\" r = requests . get ( url , allow_redirects = True ) open ( lasfile , 'wb' ) . write ( r . content ) # Read the data lidar = read ( lasfile ) # Visualize the data laspy . plot . plot ( lidar )","title":"Lidar data"},{"location":"additional-resources/bilingualism_md/#data-for-black-lives","text":"Data for Black Lives ( https://d4bl.org/ ) is a movement that uses data science to create measurable change in the lives of Black people. 
While the Data for Black Lives website provides resources, reports, articles, and datasets related to racial equity, it doesn\u2019t provide a direct API for downloading data. Instead, you can access the Data for Black Lives GitHub repository ( https://github.com/Data4BlackLives ) to find datasets and resources to work with. In this example, we\u2019ll use a sample dataset available at https://github.com/Data4BlackLives/covid-19/tree/master/data . The dataset \u201cCOVID19_race_data.csv\u201d contains COVID-19 race-related data. R: In R, we\u2019ll use the \u2018readr\u2019 and \u2018dplyr\u2019 packages to read, process, and analyze the dataset. R code: # Install and load necessary libraries library ( readr ) library ( dplyr ) # Read the CSV file url <- \"https://raw.githubusercontent.com/Data4BlackLives/covid-19/master/data/COVID19_race_data.csv\" data <- read_csv ( url ) # Basic information about the dataset print ( dim ( data )) print ( head ( data )) # Example analysis: calculate the mean of 'cases_total' by 'state' data %>% group_by ( state ) %>% summarize ( mean_cases_total = mean ( cases_total , na.rm = TRUE )) %>% arrange ( desc ( mean_cases_total )) Python: In Python, we\u2019ll use the \u2018pandas\u2019 library to read, process, and analyze the dataset. Python code: import pandas as pd # Read the CSV file url = \"https://raw.githubusercontent.com/Data4BlackLives/covid-19/master/data/COVID19_race_data.csv\" data = pd . read_csv ( url ) # Basic information about the dataset print ( data . shape ) print ( data . head ()) # Example analysis: calculate the mean of 'cases_total' by 'state' mean_cases_total = data . groupby ( \"state\" )[ \"cases_total\" ] . mean () . sort_values ( ascending = False ) print ( mean_cases_total ) In conclusion, both R and Python provide powerful libraries and tools for downloading, processing, and analyzing datasets, such as those found in the Data for Black Lives repository. The \u2018readr\u2019 and \u2018dplyr\u2019 libraries in R offer a simple and intuitive way to read and manipulate data, while the \u2018pandas\u2019 library in Python offers similar functionality with a different syntax. Depending on your preferred programming language and environment, both options can be effective in working with social justice datasets.","title":"Data for black lives"},{"location":"additional-resources/bilingualism_md/#propublica-congress-api","text":"The ProPublica Congress API provides information about the U.S. Congress members and their voting records. In this example, we\u2019ll fetch data about the current Senate members and calculate the number of members in each party. R: In R, we\u2019ll use the \u2018httr\u2019 and \u2018jsonlite\u2019 packages to fetch and process data from the ProPublica Congress API. 
R code: # load necessary libraries library ( httr ) library ( jsonlite ) # Replace 'your_api_key' with your ProPublica API key # # Fetch data about the current Senate members url <- \"https://api.propublica.org/congress/v1/117/senate/members.json\" response <- GET ( url , add_headers ( `X-API-Key` = api_key )) # Check if the request was successful if ( http_status ( response ) $ category == \"Success\" ) { data <- content ( response , \"parsed\" ) members <- data $ results [[ 1 ]] $ members # Calculate the number of members in each party party_counts <- table ( sapply ( members , function ( x ) x $ party )) print ( party_counts ) } else { print ( http_status ( response ) $ message ) } ## ## D I ID R ## 49 1 2 51 Python: In Python, we\u2019ll use the \u2018requests\u2019 library to fetch data from the ProPublica Congress API and \u2018pandas\u2019 library to process the data. python code: # Install necessary libraries import requests import pandas as pd # Replace 'your_api_key' with your ProPublica API key api_key = \"your_api_key\" headers = { \"X-API-Key\" : api_key } # Fetch data about the current Senate members url = \"https://api.propublica.org/congress/v1/117/senate/members.json\" response = requests . get ( url , headers = headers ) # Check if the request was successful if response . status_code == 200 : data = response . json () members = data [ \"results\" ][ 0 ][ \"members\" ] # Calculate the number of members in each party party_counts = pd . DataFrame ( members )[ \"party\" ] . value_counts () print ( party_counts ) else : print ( f \"Error: { response . status_code } \" ) In conclusion, both R and Python offer efficient ways to fetch and process data from APIs like the ProPublica Congress API. The \u2018httr\u2019 and \u2018jsonlite\u2019 libraries in R provide a straightforward way to make HTTP requests and parse JSON data, while the \u2018requests\u2019 library in Python offers similar functionality. The \u2018pandas\u2019 library in Python can be used for data manipulation and analysis, and R provides built-in functions like table() for aggregating data. Depending on your preferred programming language and environment, both options can be effective for working with the ProPublica Congress API.","title":"Propublica Congress API"},{"location":"additional-resources/bilingualism_md/#nonprofit-explorer-api-by-propublica","text":"The Nonprofit Explorer API by ProPublica provides data on tax-exempt organizations in the United States. In this example, we\u2019ll search for organizations with the keyword \u201ceducation\u201d and analyze the results. R: In R, we\u2019ll use the \u2018httr\u2019 and \u2018jsonlite\u2019 packages to fetch and process data from the Nonprofit Explorer API. 
R code: # Install and load necessary libraries library ( httr ) library ( jsonlite ) # Fetch data for organizations with the keyword \"education\" url <- \"https://projects.propublica.org/nonprofits/api/v2/search.json?q=education\" response <- GET ( url ) # Check if the request was successful if ( http_status ( response ) $ category == \"Success\" ) { data <- content ( response , \"parsed\" ) organizations <- data $ organizations # Count the number of organizations per state state_counts <- table ( sapply ( organizations , function ( x ) x $ state )) print ( state_counts ) } else { print ( http_status ( response ) $ message ) } ## ## AZ CA CO DC FL GA HI IL Indiana LA ## 3 22 6 5 3 2 1 2 1 1 ## MD MI MN MO MP MS NC NE NJ NM ## 1 2 5 3 1 1 2 2 2 1 ## NY OH OK Oregon PA TX UT VA WA WV ## 1 5 1 2 2 12 1 4 3 1 ## ZZ ## 2 Python: In Python, we\u2019ll use the \u2018requests\u2019 library to fetch data from the Nonprofit Explorer API and \u2018pandas\u2019 library to process the data. Python code: # Install necessary libraries import requests import pandas as pd # Fetch data for organizations with the keyword \"education\" url = \"https://projects.propublica.org/nonprofits/api/v2/search.json?q=education\" response = requests . get ( url ) # Check if the request was successful if response . status_code == 200 : data = response . json () organizations = data [ \"organizations\" ] # Count the number of organizations per state state_counts = pd . DataFrame ( organizations )[ \"state\" ] . value_counts () print ( state_counts ) else : print ( f \"Error: { response . status_code } \" ) ## CA 22 ## TX 12 ## CO 6 ## MN 5 ## OH 5 ## DC 5 ## VA 4 ## AZ 3 ## WA 3 ## MO 3 ## FL 3 ## IL 2 ## GA 2 ## NC 2 ## MI 2 ## Oregon 2 ## NE 2 ## ZZ 2 ## PA 2 ## NJ 2 ## HI 1 ## MS 1 ## NY 1 ## Indiana 1 ## NM 1 ## LA 1 ## UT 1 ## MD 1 ## MP 1 ## WV 1 ## OK 1 ## Name: state, dtype: int64 In conclusion, both R and Python offer efficient ways to fetch and process data from APIs like the Nonprofit Explorer API. The \u2018httr\u2019 and \u2018jsonlite\u2019 libraries in R provide a straightforward way to make HTTP requests and parse JSON data, while the \u2018requests\u2019 library in Python offers similar functionality. The \u2018pandas\u2019 library in Python can be used for data manipulation and analysis, and R provides built-in functions like table() for aggregating data. Depending on your preferred programming language and environment, both options can be effective for working with the Nonprofit Explorer API.","title":"Nonprofit Explorer API by ProPublica"},{"location":"additional-resources/bilingualism_md/#campaign-finance-api-by-propublica","text":"The Campaign Finance API by the Federal Election Commission (FEC) provides data on campaign finance in U.S. federal elections. In this example, we\u2019ll fetch data about individual contributions for the 2020 election cycle and analyze the results. R: In R, we\u2019ll use the \u2018httr\u2019 and \u2018jsonlite\u2019 packages to fetch and process data from the Campaign Finance API. 
R code: # Install and load necessary libraries library ( httr ) library ( jsonlite ) # Fetch data about individual contributions for the 2020 election cycle url <- \"https://api.open.fec.gov/v1/schedules/schedule_a/?api_key='OGwpkX7tH5Jihs1qQcisKfVAMddJzmzouWKtKoby'&two_year_transaction_period=2020&sort_hide_null=false&sort_null_only=false&per_page=20&page=1\" response <- GET ( url ) # Check if the request was successful if ( http_status ( response ) $ category == \"Success\" ) { data <- content ( response , \"parsed\" ) contributions <- data $ results # Calculate the total contributions per state state_totals <- aggregate ( contributions $ contributor_state , by = list ( contributions $ contributor_state ), FUN = sum ) colnames ( state_totals ) <- c ( \"State\" , \"Total_Contributions\" ) print ( state_totals ) } else { print ( http_status ( response ) $ message ) } ## [1] \"Client error: (403) Forbidden\" Python: In Python, we\u2019ll use the \u2018requests\u2019 library to fetch data from the Campaign Finance API and \u2018pandas\u2019 library to process the data. Python code: # Install necessary libraries import requests import pandas as pd # Fetch data about individual contributions for the 2020 election cycle url = \"https://api.open.fec.gov/v1/schedules/schedule_a/?api_key=your_api_key&two_year_transaction_period=2020&sort_hide_null=false&sort_null_only=false&per_page=20&page=1\" response = requests . get ( url ) # Check if the request was successful if response . status_code == 200 : data = response . json () contributions = data [ \"results\" ] # Calculate the total contributions per state df = pd . DataFrame ( contributions ) state_totals = df . groupby ( \"contributor_state\" )[ \"contribution_receipt_amount\" ] . sum () print ( state_totals ) else : print ( f \"Error: { response . status_code } \" ) ## Error: 403 In conclusion, both R and Python offer efficient ways to fetch and process data from APIs like the Campaign Finance API. The \u2018httr\u2019 and \u2018jsonlite\u2019 libraries in R provide a straightforward way to make HTTP requests and parse JSON data, while the \u2018requests\u2019 library in Python offers similar functionality. The \u2018pandas\u2019 library in Python can be used for data manipulation and analysis, and R provides built-in functions like aggregate() for aggregating data. Depending on your preferred programming language and environment, both options can be effective for working with the Campaign Finance API. Note: Remember to replace your_api_key with your actual FEC API key in the code examples above.","title":"Campaign Finance API by ProPublica"},{"location":"additional-resources/bilingualism_md/#historic-redlining","text":"Historic redlining data refers to data from the Home Owners\u2019 Loan Corporation (HOLC) that created residential security maps in the 1930s, which contributed to racial segregation and disinvestment in minority neighborhoods. One popular source for this data is the Mapping Inequality project ( https://dsl.richmond.edu/panorama/redlining/ ). In this example, we\u2019ll download historic redlining data for Philadelphia in the form of a GeoJSON file and analyze the data in R and Python. R: In R, we\u2019ll use the \u2018sf\u2019 and \u2018dplyr\u2019 packages to read and process the GeoJSON data. 
R code: # Install and load necessary libraries library ( sf ) library ( dplyr ) # Download historic redlining data for Philadelphia url <- \"https://dsl.richmond.edu/panorama/redlining/static/downloads/geojson/PAPhiladelphia1937.geojson\" philly_geojson <- read_sf ( url ) # Count the number of areas per HOLC grade grade_counts <- philly_geojson %>% group_by ( holc_grade ) %>% summarize ( count = n ()) plot ( grade_counts ) Python: In Python, we\u2019ll use the \u2018geopandas\u2019 library to read and process the GeoJSON data. Python code: # Install necessary libraries import geopandas as gpd # Download historic redlining data for Philadelphia url = \"https://dsl.richmond.edu/panorama/redlining/static/downloads/geojson/PAPhiladelphia1937.geojson\" philly_geojson = gpd . read_file ( url ) # Count the number of areas per HOLC grade grade_counts = philly_geojson [ \"holc_grade\" ] . value_counts () print ( grade_counts ) ## B 28 ## D 26 ## C 18 ## A 10 ## Name: holc_grade, dtype: int64 In conclusion, both R and Python offer efficient ways to download and process historic redlining data in the form of GeoJSON files. The \u2018sf\u2019 package in R provides a simple way to read and manipulate spatial data, while the \u2018geopandas\u2019 library in Python offers similar functionality. The \u2018dplyr\u2019 package in R can be used for data manipulation and analysis, and Python\u2019s built-in functions like value_counts() can be used for aggregating data. Depending on your preferred programming language and environment, both options can be effective for working with historic redlining data.","title":"Historic Redlining"},{"location":"additional-resources/bilingualism_md/#american-indian-and-alaska-native-areas-aiannh","text":"In this example, we\u2019ll download and analyze the American Indian and Alaska Native Areas (AIANNH) TIGER/Line Shapefile from the U.S. Census Bureau. We\u2019ll download the data for the year 2020, and analyze the number of AIANNH per congressional district R: In R, we\u2019ll use the \u2018sf\u2019 and \u2018dplyr\u2019 packages to read and process the Shapefile data. 
R code: # Install and load necessary libraries library ( sf ) library ( dplyr ) # Download historic redlining data for Philadelphia url <- \"https://www2.census.gov/geo/tiger/TIGER2020/AIANNH/tl_2020_us_aiannh.zip\" temp_file <- tempfile ( fileext = \".zip\" ) download.file ( url , temp_file , mode = \"wb\" ) unzip ( temp_file , exdir = tempdir ()) # Read the Shapefile shapefile_path <- file.path ( tempdir (), \"tl_2020_us_aiannh.shp\" ) aiannh <- read_sf ( shapefile_path ) # Count the number of AIANNH per congressional district state_counts <- aiannh %>% group_by ( LSAD ) %>% summarize ( count = n ()) print ( state_counts [ order ( - state_counts $ count ),]) ## Simple feature collection with 26 features and 2 fields ## Geometry type: GEOMETRY ## Dimension: XY ## Bounding box: xmin: -174.236 ymin: 18.91069 xmax: -67.03552 ymax: 71.34019 ## Geodetic CRS: NAD83 ## # A tibble: 26 \u00d7 3 ## LSAD count geometry ## ## 1 79 221 (((-166.5331 65.33918, -166.5331 65.33906, -166.533 65.33699, -1\u2026 ## 2 86 206 (((-83.38811 35.46645, -83.38342 35.46596, -83.38316 35.46593, -\u2026 ## 3 OT 155 (((-92.32972 47.81374, -92.3297 47.81305, -92.32967 47.81196, -9\u2026 ## 4 78 75 (((-155.729 20.02457, -155.7288 20.02428, -155.7288 20.02427, -1\u2026 ## 5 85 46 (((-122.3355 37.95215, -122.3354 37.95206, -122.3352 37.95199, -\u2026 ## 6 92 35 (((-93.01356 31.56287, -93.01354 31.56251, -93.01316 31.56019, -\u2026 ## 7 88 25 (((-97.35299 36.908, -97.35291 36.90801, -97.35287 36.908, -97.3\u2026 ## 8 96 19 (((-116.48 32.63814, -116.48 32.63718, -116.4794 32.63716, -116.\u2026 ## 9 84 16 (((-105.5937 36.40379, -105.5937 36.40324, -105.5937 36.40251, -\u2026 ## 10 89 11 (((-95.91705 41.28037, -95.91653 41.28036, -95.91653 41.28125, -\u2026 ## # \u2139 16 more rows Python: In Python, we\u2019ll use the \u2018geopandas\u2019 library to read and process the Shapefile data. Python code: import geopandas as gpd import pandas as pd import requests import zipfile import os from io import BytesIO # Download historic redlining data for Philadelphia url = \"https://www2.census.gov/geo/tiger/TIGER2020/AIANNH/tl_2020_us_aiannh.zip\" response = requests . get ( url ) zip_file = zipfile . ZipFile ( BytesIO ( response . content )) # Extract Shapefile temp_dir = \"temp\" if not os . path . exists ( temp_dir ): os . makedirs ( temp_dir ) zip_file . extractall ( path = temp_dir ) shapefile_path = os . path . join ( temp_dir , \"tl_2020_us_aiannh.shp\" ) # Read the Shapefile aiannh = gpd . read_file ( shapefile_path ) # Count the number of AIANNH per congressional district state_counts = aiannh . groupby ( \"LSAD\" ) . size () . reset_index ( name = \"count\" ) # Sort by descending count state_counts_sorted = state_counts . sort_values ( by = \"count\" , ascending = False ) print ( state_counts_sorted ) ## LSAD count ## 2 79 221 ## 9 86 206 ## 25 OT 155 ## 1 78 75 ## 8 85 46 ## 15 92 35 ## 11 88 25 ## 19 96 19 ## 7 84 16 ## 12 89 11 ## 5 82 8 ## 3 80 7 ## 4 81 6 ## 21 98 5 ## 20 97 5 ## 13 90 4 ## 18 95 3 ## 6 83 3 ## 17 94 2 ## 16 93 1 ## 14 91 1 ## 10 87 1 ## 22 99 1 ## 23 9C 1 ## 24 9D 1 ## 0 00 1 In conclusion, both R and Python offer efficient ways to download and process AIANNH TIGER/Line Shapefile data from the U.S. Census Bureau. The \u2018sf\u2019 package in R provides a simple way to read and manipulate spatial data, while the \u2018geopandas\u2019 library in Python offers similar functionality. 
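As a side note, the groupby/size step in the Python example above can also be written with value_counts(), which the closing paragraph below mentions. A minimal sketch, assuming the Shapefile has already been extracted to the temp directory used above:

```python
import geopandas as gpd

# Same extracted path as in the example above
shapefile_path = "temp/tl_2020_us_aiannh.shp"
aiannh = gpd.read_file(shapefile_path)

# One-liner equivalent of the groupby("LSAD").size() count, sorted descending
print(aiannh["LSAD"].value_counts())
```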
The \u2018dplyr\u2019 package in R can be used for data manipulation and analysis, and Python\u2019s built-in functions like value_counts() can be used for aggregating data. Depending on your preferred programming language and environment, both options can be effective for working with AIANNH data.","title":"American Indian and Alaska Native Areas (AIANNH)"},{"location":"additional-resources/bilingualism_md/#indian-entities-recognized-and-eligible-to-receive-services-by-bia","text":"The Bureau of Indian Affairs (BIA) provides a PDF document containing a list of Indian Entities Recognized and Eligible To Receive Services. To analyze the data, we\u2019ll first need to extract the information from the PDF. In this example, we\u2019ll extract the names of the recognized tribes and count the number of tribes per state. R: In R, we\u2019ll use the \u2018pdftools\u2019 package to extract text from the PDF and the \u2018stringr\u2019 package to process the text data. R code: # Install and load necessary libraries library ( pdftools ) library ( stringr ) library ( dplyr ) # Download the BIA PDF url <- \"https://www.govinfo.gov/content/pkg/FR-2022-01-28/pdf/2022-01789.pdf\" temp_file <- tempfile ( fileext = \".pdf\" ) download.file ( url , temp_file , mode = \"wb\" ) # Extract text from the PDF pdf_text <- pdf_text ( temp_file ) tribe_text <- pdf_text [ 4 : length ( pdf_text )] # Define helper functions tribe_state_extractor <- function ( text_line ) { regex_pattern <- \"(.*),\\\\s+([A-Z]{2})$\" tribe_state <- str_match ( text_line , regex_pattern ) return ( tribe_state ) } is_valid_tribe_line <- function ( text_line ) { regex_pattern <- \"^\\\\d+\\\\s+\" return ( ! is.na ( str_match ( text_line , regex_pattern ))) } # Process text data to extract tribes and states tribe_states <- sapply ( tribe_text , tribe_state_extractor ) valid_lines <- sapply ( tribe_text , is_valid_tribe_line ) tribe_states <- tribe_states [ valid_lines , 2 : 3 ] # Count the number of tribes per state tribe_data <- as.data.frame ( tribe_states ) colnames ( tribe_data ) <- c ( \"Tribe\" , \"State\" ) state_counts <- tribe_data %>% group_by ( State ) %>% summarise ( Count = n ()) print ( state_counts ) ## # A tibble: 0 \u00d7 2 ## # \u2139 2 variables: State , Count Python: In Python, we\u2019ll use the \u2018PyPDF2\u2019 library to extract text from the PDF and the \u2018re\u2019 module to process the text data. Python code: # Install necessary libraries import requests import PyPDF2 import io import re from collections import Counter # Download the BIA PDF url = \"https://www.bia.gov/sites/bia.gov/files/assets/public/raca/online-tribal-leaders-directory/tribal_leaders_2021-12-27.pdf\" response = requests . get ( url ) # Extract text from the PDF pdf_reader = PyPDF2 . PdfFileReader ( io . BytesIO ( response . content )) tribe_text = [ pdf_reader . getPage ( i ) . extractText () for i in range ( 3 , pdf_reader . numPages )] # Process text data to extract tribes and states tribes = [ re . findall ( r '^\\d+\\s+(.+),\\s+([A-Z] {2} )' , line ) for text in tribe_text for line in text . split ( ' \\n ' ) if line ] tribe_states = [ state for tribe , state in tribes ] # Count the number of tribes per state state_counts = Counter ( tribe_states ) print ( state_counts ) In conclusion, both R and Python offer efficient ways to download and process the list of Indian Entities Recognized and Eligible To Receive Services from the BIA. 
The \u2018pdftools\u2019 package in R provides a simple way to extract text from PDF files, while the \u2018PyPDF2\u2019 library in Python offers similar functionality. The \u2018stringr\u2019 package in R and the \u2018re\u2019 module in Python can be used to process and analyze text data. Depending on your preferred programming language and environment, both options can be effective for working with BIA data.","title":"Indian Entities Recognized and Eligible To Receive Services by BIA"},{"location":"additional-resources/bilingualism_md/#national-atlas-indian-lands-of-the-united-states-dataset","text":"In this example, we will download and analyze the National Atlas - Indian Lands of the United States dataset in both R and Python. We will read the dataset and count the number of Indian lands per state. R: In R, we\u2019ll use the \u2018sf\u2019 package to read the Shapefile and the \u2018dplyr\u2019 package to process the data. R code: # Install and load necessary libraries library ( sf ) library ( dplyr ) # Download the Indian Lands dataset url <- \"https://prd-tnm.s3.amazonaws.com/StagedProducts/Small-scale/data/Boundaries/indlanp010g.shp_nt00968.tar.gz\" temp_file <- tempfile ( fileext = \".tar.gz\" ) download.file ( url , temp_file , mode = \"wb\" ) untar ( temp_file , exdir = tempdir ()) # Read the Shapefile shapefile_path <- file.path ( tempdir (), \"indlanp010g.shp\" ) indian_lands <- read_sf ( shapefile_path ) # Count the number of Indian lands per state # state_counts <- indian_lands %>% # group_by(STATE) %>% # summarize(count = n()) plot ( indian_lands ) ## Warning: plotting the first 9 out of 23 attributes; use max.plot = 23 to plot ## all Python: In Python, we\u2019ll use the \u2018geopandas\u2019 and \u2018pandas\u2019 libraries to read the Shapefile and process the data. Python code: import geopandas as gpd import pandas as pd import requests import tarfile import os from io import BytesIO # Download the Indian Lands dataset url = \"https://prd-tnm.s3.amazonaws.com/StagedProducts/Small-scale/data/Boundaries/indlanp010g.shp_nt00966.tar.gz\" response = requests . get ( url ) tar_file = tarfile . open ( fileobj = BytesIO ( response . content ), mode = 'r:gz' ) # Extract Shapefile temp_dir = \"temp\" if not os . path . exists ( temp_dir ): os . makedirs ( temp_dir ) tar_file . extractall ( path = temp_dir ) shapefile_path = os . path . join ( temp_dir , \"indlanp010g.shp\" ) # Read the Shapefile indian_lands = gpd . read_file ( shapefile_path ) # Count the number of Indian lands per state state_counts = indian_lands . groupby ( \"STATE\" ) . size () . reset_index ( name = \"count\" ) print ( state_counts ) Both R and Python codes download the dataset and read the Shapefile using the respective packages. They then group the data by the \u2018STATE\u2019 attribute and calculate the count of Indian lands per state.","title":"National Atlas - Indian Lands of the United States dataset"},{"location":"additional-resources/code-of-conduct/","text":"Code of Conduct and Respectful Inclusive Collaboration Guidelines \u00b6 Environmental Data Science Innovation & Inclusion Lab (ESIIL) is committed to building, maintaining, and fostering an inclusive, kind, collaborative, and diverse transdisciplinary environmental data science community, whose members feel welcome, supported, and safe to contribute ideas and knowledge. The 2024 ESIIL Innovation Summit will follow all aspects of the ESIIL Code of Conduct (below). 
All community members are responsible for creating this culture, embodying our values, welcoming diverse perspectives and ways of knowing, creating safe inclusive spaces, and conducting ethical science as guided by FAIR (Findable, Accessible, Interoperable, Reusable) and CARE (Collective Benefit, Authority to Control, Responsibility, and Ethics) principles for scientific and Indigenous data management, governance, and stewardship. Our values \u00b6 ESIIL\u2019s vision is grounded in the conviction that innovation and breakthroughs in environmental data science will be precipitated by a diverse, collaborative, curious, and inclusive research community empowered by open data and infrastructure, cross-sector and community partnerships, team science, and engaged learning. As such, our core values center people through inclusion, kindness, respect, collaboration, and genuine relationships. They also center innovation, driven by collaborative, cross-sector science and synthesis, open, accessible data and tools, and fun, diverse teams. Finally, they center learning, propelled by curiosity and accessible, inclusive training, and education opportunities. When and how to use these guidelines \u00b6 These guidelines outline behavior expectations for ESIIL community members. Your participation in the ESIIL network is contingent upon following these guidelines in all ESIIL activities, including, but not limited to, participating in meetings, webinars, hackathons, working groups, hosted or funded by ESIIL, as well as email lists and online forums such as GitHub, Slack, and Twitter. These guidelines have been adapted from those of the International Arctic Research Policy Committee, the Geological Society of America, the American Geophysical Union, the University Corporation for Atmospheric Research, The Carpentries, and others. We encourage other organizations to adapt these guidelines for use in their own meetings. Note: Working groups and hackathon/codefest teams are encouraged to discuss these guidelines and what they mean to them, and will have the opportunity to add to them to specifically support and empower their team. Collaborative and behavior commitments complement data use, management, authorship, and access plans that commit to CARE and FAIR principles. Behavior Agreements \u00b6 ESIIL community members are expected to act professionally and respectfully in all activities, such that each person, regardless of gender, gender identity or expression, sexual orientation, disability, physical appearance, age, body size, race, religion, national origin, ethnicity, level of experience, language fluency, political affiliation, veteran status, pregnancy, country of origin, and any other characteristic protected under state or federal law, feels safe and welcome in our activities and community. We gain strength from diversity and actively seek participation from those who enhance it. In order to garner the benefits of a diverse community and to reach the full potential of our mission and charge, ESIIL participants must be allowed to develop a sense of belonging and trust within a respectful, inclusive, and collaborative culture. Guiding behaviors that contribute to this culture include, but are not limited to: Showing Respect \u00b6 Listen carefully \u2013 we each bring our own styles of communication, language, and ideas, and we must do our best to accept and accommodate differences. Do not interrupt when someone is speaking and maintain an open mind when others have different ideas than yours.
Be present \u2013 when engaging with others, give them your full attention. If you need to respond to outside needs, please step away from the group quietly. Be kind \u2013 offer positive, supportive comments and constructive feedback. Critique ideas, not people. Harassment, discrimination, bullying, aggression, including offensive comments, jokes, and imagery, are unacceptable, regardless of intent, and will not be tolerated. Be punctual - adhere to the schedule provided by the organizers and avoid disruptive behavior during presentations, trainings, or working sessions. Respect privacy - be mindful of the confidentiality of others. Always obtain explicit consent before recording, sharing, or using someone else\u2019s personal information, photos, or recordings. Practice good digital etiquette (netiquette) when communicating online, whether in emails, messages, or social media - think before posting online and consider the potential impact on others. Do not share or distribute content generated by or involving others without their explicit consent. Being Inclusive \u00b6 Create space for everyone to participate \u2013 be thoughtful about who is at the table; openly address accessibility needs, and provide multiple ways to contribute. Be welcoming \u2013 ESIIL participants come from a wide range of skill levels and career stages, backgrounds, and cultures. Demonstrate that you value these different perspectives and identities through your words and actions, including through correct use of names, titles, and pronouns. Be self-aware \u2013 recognize that positionality, identity, unconscious biases, and upbringing can all affect how words and behaviors are perceived. Ensure that your words and behavior make others feel welcome. Commit to ongoing learning \u2013 the move toward inclusive, equitable, and just environmental data science is a collective journey. Continue to learn about and apply practices of inclusion, anti-racism, bystander intervention, and cultural sensitivity. None of us is perfect; all of us will, from time to time, fail to live up to our own high standards. Being perfect is not what matters; owning our mistakes and committing to clear and persistent efforts to grow and improve is. Being Curious \u00b6 Check your presumptions \u2013 we each bring our own ideas and assumptions about how the world should and does work \u2013 what are yours, and how do they affect how you interact with others? How do they shape your perception of new ideas? Ask questions \u2013 one of the strengths of interdisciplinary and diverse teams is that we all bring different knowledge and viewpoints; no one person is expected to know everything. So don\u2019t be afraid to ask, to learn, and to share. Be bold \u2013 significant innovations don\u2019t come from incremental efforts. Be brave in proposing and testing new ideas. When things don\u2019t work, learn from the experience. Invite feedback \u2013 new ideas and improvements can emerge from many places when we\u2019re open to hearing them. Check your defensiveness and listen; accept feedback as a gift toward improving our work and ourselves. Being Collaborative \u00b6 Recognize that everyone is bringing something different to the table \u2013 take the time to get to know each other. Keep an open mind, encourage ideas that are different from yours, and learn from each other\u2019s expertise and experience. Be accountable - great team science depends on trust, communication, respect, and delivering on your commitments. 
Be clear about your needs, as both a requester and a responder, realistic about your time and capacity commitments, and communicate timelines and standards in advance. Make assumptions explicit and provide context wherever possible - misunderstandings are common on transdisciplinary and cross-cultural teams and can best be managed with intentionality. Check in about assumptions, and be willing to share and correct misunderstandings or mistakes when they happen. Make use of collaboration agreements, communicate clearly and avoid jargon wherever possible. Respect intellectual property and Indigenous data sovereignty \u2013 ESIIL recognizes the extractive and abusive history of scientific engagement with Native peoples, and is committed to doing better. Indigenous knowledge holders are under no obligation to share their data, stories or knowledge. Their work should always be credited, and only shared with permission. Follow guidelines for authorship, Indigenous data sovereignty, and CARE principles. Acknowledge and credit the ideas and work of others. Use the resources that we provide - take advantage of the cyberinfrastructure and data cube at your disposal, but do not use them for unrelated tasks, as it could disrupt the event, introduce security risks, undermine the spirit of collaboration and fair play, and erode trust within the event community. Be safe - never share sensitive personal information; use strong passwords for your Cyverse and GitHub accounts and do not share them with other participants; be cautious of unsolicited emails, messages, or links; and verify online contacts. If you encounter any illegal or harmful activities online related to this event, report them to Tyler McIntosh or Susan Sullivan. Finally, speak up if you experience or notice a dangerous situation, or someone in distress! Code of Conduct: Unacceptable behaviors \u00b6 We adopt the full Code of Conduct of our home institution, the University of Colorado, details of which are found here . To summarize, examples of unacceptable and reportable behaviors include, but are not limited to: Harassment, intimidation, or discrimination in any form Physical or verbal abuse by anyone to anyone, including but not limited to a participant, member of the public, guest, member of any institution or sponsor Unwelcome sexual attention or advances Personal attacks directed at other guests, members, participants, etc. Alarming, intimidating, threatening, or hostile comments or conduct Inappropriate use of nudity and/or sexual images in public spaces or in presentations Threatening or stalking anyone Unauthorized use or sharing of personal or confidential information or private communication Continuing interactions, including but not limited to conversations, photographs, recordings, instant messages, and emails, after being asked to stop Ethical and scientific misconduct, including failing to credit contributions or respect intellectual property Engaging in any illegal activities, including hacking, cheating, or unauthorized access to systems or data Using the cyberinfrastructure provided by the organizers for activities unrelated to this event. Other conduct which could reasonably be considered inappropriate in a professional setting.
The University of Colorado recognizes all Federal and State protected classes, which include the following: race, color, national origin, sex, pregnancy, age, marital status, disability, creed, religion, sexual orientation, gender identity, gender expression, veteran status, political affiliation or political philosophy. Mistreatment or harassment not related to protected class also has a negative impact and will be addressed by the ESIIL team. Anyone requested to stop unacceptable behavior is expected to comply immediately. If there is a clear violation of the code of conduct during an ESIIL event\u2014for example, a meeting is Zoom bombed or a team member is verbally abusing another participant during a workshop\u2014 ESIIL leaders, facilitators (or their designee) or campus/local police may take any action deemed necessary and appropriate, including expelling the violator, or immediate removal of the violator from any online or in-person event or platform without warning or refund. If such actions are necessary, there will be follow up with the ESIIL Diversity Equity and Inclusion (DEI) team to determine what further action is needed (see Reporting Process and Consequences below). Addressing Behavior Directly \u00b6 For smaller incidents that might be settled with a brief conversation, you may choose to contact the person in question or set up a (video) conversation to discuss how the behavior affected you. Please use this approach only if you feel comfortable; you do not have to carry the weight of addressing these issues yourself. If you are interested in this option but unsure how to go about it, please contact the ESIIL DEI lead, Susan Sullivan, first\u2014she will have advice on how to make the conversation happen and is available to join you in a conversation as requested. Reporting Process and Consequences \u00b6 We take any reports of Code of Conduct violations seriously, and aim to support those who are impacted and ensure that problematic behavior doesn\u2019t happen again. Making a Report \u00b6 If you believe you\u2019re experiencing or have experienced unacceptable behavior that is counter to this code of conduct, or you are witness to this behavior happening to someone else, we encourage you to contact our DEI lead: Susan Sullivan, CIRES Email: susan.sullivan@colorado.edu You may also choose to anonymously report behavior to ESIIL using this form . The DEI team will keep reports as confidential as possible. However, as mandatory reporters, we have an obligation to report alleged protected class violations to our home institution or to law enforcement. Specifically: \u00b6 Cases of potential protected-class harassment will be reported to the CU Office of Institutional Equity and Compliance. If the violation is made by a member of another institution, that information may also be shared with that member\u2019s home institution by the CU Office of Institutional Equity and Compliance under Title IX. In some instances, harassment information may be shared with the National Science Foundation, who are the funding organization of ESIIL. When we discuss incidents with people who are accused of misconduct (the respondent), we will anonymize details as much as possible to protect the privacy of the reporter and the person who was impacted (the complainant). In some cases, even when the details are anonymized, the respondent may guess at the identities of the reporter and complainants. 
If you have concerns about retaliation or your personal safety, please let us know (or note that in your report). We encourage you to report in any case, so that we can support you while keeping ESIIL members safe. In some cases, we are able to compile several anonymized reports into a pattern of behavior, and take action based on that pattern. If you prefer to speak with someone who is not on the ESIIL leadership team, or who can maintain confidentiality, you may contact: CU Ombuds Phone: 303-492-5077 (for guidance and support navigating difficult conversations) CU Office of Victim Assistance Phone: 303-492-8855 If you want more information about when to report, or how to help someone who needs to report, please review the resources at Don\u2019t Ignore It . Note: The reporting party does not need to be directly involved in a code of conduct violation incident. Please make a bystander report if you observe a potentially dangerous situation, someone in distress, or violations of these guidelines, even if the situation is not happening to you. What Happens After a Report Is Filed \u00b6 After a member of the ESIIL DEI team takes your report, they will (if necessary) consult with the appropriate support people at CU. The ESIIL DEI team will respond with a status update within 5 business days. During this time, they, or members of the CU Office of Institutional Equity and Compliance, will: Meet with you or review report documentation to determine what happened Consult documentation of past incidents for patterns of behavior Discuss appropriate response(s) to the incident Connect with the appropriate offices and/or make those response(s) Determine the follow-up actions for any impacted people and/or the reporter Follow up with the impacted people, including connecting them with support and resources. As a result of this process, in minor cases ESIIL DEI may communicate with the respondent to: \u00b6 Explain what happened and the impact of their behavior Offer concrete examples of how to improve their behavior Explain consequences of their behavior, or future consequences if the behavior is repeated. For significant infractions, follow up to the report may be turned over to the CU Office of Institutional Equity and Compliance and/or campus police. Possible Consequences to Code of Conduct Violations \u00b6 What follows are examples of possible responses to an incident report. This list is not inclusive, and ESIIL reserves the right to take any action it deems necessary. Generally speaking, the strongest response ESIIL may take is to completely ban a user from further engagement with ESIIL activities and, as is required, report a person to the CU Office of Institutional Equity and Compliance and/or their home institution and NSF. If law enforcement should be involved, they will recommend that the complainant make that contact. Employees of CU Boulder may also be subject to consequences as determined by the institution. 
In addition to the responses above, ESIIL responses may include but are not limited to the following: A verbal discussion in person or via phone/Zoom followed by documentation of the conversation via email Not publishing the video or slides of a talk that violated the code of conduct Not allowing a speaker who violated the code of conduct to give (further) talks Immediately ending any team leadership, membership, or other responsibilities and privileges that a person holds Temporarily banning a person from ESIIL activities Permanently banning a person from ESIIL activities Nothing, if the behavior is determined to not be a code of conduct violation Do you need more resources? Please don\u2019t hesitate to contact the ESIIL DEI lead, Susan Sullivan, if you have questions or concerns. The CU Office of Institutional Equity and Compliance is a resource for all of us in navigating this space. They also offer resource materials that can assist you in exploring various topics and skills here. If you have questions about what, when or how to report, or how to help someone else with concerns, Don\u2019t Ignore It. CU Ombud\u2019s Office: Confidential support to navigate university situations. (Most universities have these resources) The CU Office of Victims Assistance (counseling limited to CU students/staff/faculty, though advocacy is open to everyone engaged with a CU-sponsored activity. Please look for a similar resource on your campus if you are from another institution). National Crisis Hotlines How are we doing? Despite our best intentions, in some cases we may not be living up to our ideals of a positive, supportive, inclusive, respectful and collaborative community. If you feel we could do better, we welcome your feedback. Comments, suggestions and praise are also very welcome! Acknowledgment By participating in this event, you agree to abide by this code of conduct and understand the consequences of violating it. We believe that a respectful and inclusive environment benefits all participants and leads to more creative and successful outcomes. Thank you for your cooperation in making this event a welcoming event for all. Have fun!","title":"Code of Conduct"},{"location":"additional-resources/code-of-conduct/#code-of-conduct-and-respectful-inclusive-collaboration-guidelines","text":"Environmental Data Science Innovation & Inclusion Lab (ESIIL) is committed to building, maintaining, and fostering an inclusive, kind, collaborative, and diverse transdisciplinary environmental data science community, whose members feel welcome, supported, and safe to contribute ideas and knowledge. The 2024 ESIIL Innovation Summit will follow all aspects of the ESIIL Code of Conduct (below).
All community members are responsible for creating this culture, embodying our values, welcoming diverse perspectives and ways of knowing, creating safe inclusive spaces, and conducting ethical science as guided by FAIR (Findable, Accessible, Interoperable, Reusable) and CARE (Collective Benefit, Authority to Control, Responsibility, and Ethics) principles for scientific and Indigenous data management, governance, and stewardship.","title":"Code of Conduct and Respectful Inclusive Collaboration Guidelines"},{"location":"additional-resources/code-of-conduct/#our-values","text":"ESIIL\u2019s vision is grounded in the conviction that innovation and breakthroughs in environmental data science will be precipitated by a diverse, collaborative, curious, and inclusive research community empowered by open data and infrastructure, cross-sector and community partnerships, team science, and engaged learning. As such, our core values center people through inclusion, kindness, respect, collaboration, and genuine relationships. They also center innovation, driven by collaborative, cross-sector science and synthesis, open, accessible data and tools, and fun, diverse teams. Finally, they center learning, propelled by curiosity and accessible, inclusive training, and education opportunities.","title":"Our values"},{"location":"additional-resources/code-of-conduct/#when-and-how-to-use-these-guidelines","text":"These guidelines outline behavior expectations for ESIIL community members. Your participation in the ESIIL network is contingent upon following these guidelines in all ESIIL activities, including, but not limited to, participating in meetings, webinars, hackathons, working groups, hosted or funded by ESIIL, as well as email lists and online forums such as GutHub, Slack, and Twitter. These guidelines have been adapted from those of the International Arctic Research Policy Committee, the Geological Society of America, the American Geophysical Union, the University Corporation for Atmospheric Research, The Carpentries, and others. We encourage other organizations to adapt these guidelines for use in their own meetings. Note: Working groups and hackathon/codefest teams are encouraged to discuss these guidelines and what they mean to them, and will have the opportunity to add to them to specifically support and empower their team. Collaborative and behavior commitments complement data use, management, authorship, and access plans that commit to CARE and FAIR principles.","title":"When and how to use these guidelines"},{"location":"additional-resources/code-of-conduct/#behavior-agreements","text":"ESIIL community members are expected to act professionally and respectfully in all activities, such that each person, regardless of gender, gender identity or expression, sexual orientation, disability, physical appearance, age, body size, race, religion, national origin, ethnicity, level of experience, language fluency, political affiliation, veteran status, pregnancy, country of origin, and any other characteristic protected under state or federal law, feels safe and welcome in our activities and community. We gain strength from diversity and actively seek participation from those who enhance it. In order to garner the benefits of a diverse community and to reach the full potential of our mission and charge, ESIIL participants must be allowed to develop a sense of belonging and trust within a respectful, inclusive, and collaborative culture. 
Guiding behaviors that contribute to this culture include, but are not limited to:","title":"Behavior Agreements"},{"location":"additional-resources/code-of-conduct/#showing-respect","text":"Listen carefully \u2013 we each bring our own styles of communication, language, and ideas, and we must do our best to accept and accommodate differences. Do not interrupt when someone is speaking and maintain an open mind when others have different ideas than yours. Be present \u2013 when engaging with others, give them your full attention. If you need to respond to outside needs, please step away from the group quietly. Be kind \u2013 offer positive, supportive comments and constructive feedback. Critique ideas, not people. Harassment, discrimination, bullying, aggression, including offensive comments, jokes, and imagery, are unacceptable, regardless of intent, and will not be tolerated. Be punctual - adhere to the schedule provided by the organizers and avoid disruptive behavior during presentations, trainings, or working sessions. Respect privacy - be mindful of the confidentiality of others. Always obtain explicit consent before recording, sharing, or using someone else\u2019s personal information, photos, or recordings. Practice good digital etiquette (netiquette) when communicating online, whether in emails, messages, or social media - think before posting online and consider the potential impact on others. Do not share or distribute content generated by or involving others without their explicit consent.","title":"Showing Respect"},{"location":"additional-resources/code-of-conduct/#being-inclusive","text":"Create space for everyone to participate \u2013 be thoughtful about who is at the table; openly address accessibility needs, and provide multiple ways to contribute. Be welcoming \u2013 ESIIL participants come from a wide range of skill levels and career stages, backgrounds, and cultures. Demonstrate that you value these different perspectives and identities through your words and actions, including through correct use of names, titles, and pronouns. Be self-aware \u2013 recognize that positionality, identity, unconscious biases, and upbringing can all affect how words and behaviors are perceived. Ensure that your words and behavior make others feel welcome. Commit to ongoing learning \u2013 the move toward inclusive, equitable, and just environmental data science is a collective journey. Continue to learn about and apply practices of inclusion, anti-racism, bystander intervention, and cultural sensitivity. None of us is perfect; all of us will, from time to time, fail to live up to our own high standards. Being perfect is not what matters; owning our mistakes and committing to clear and persistent efforts to grow and improve is.","title":"Being Inclusive"},{"location":"additional-resources/code-of-conduct/#being-curious","text":"Check your presumptions \u2013 we each bring our own ideas and assumptions about how the world should and does work \u2013 what are yours, and how do they affect how you interact with others? How do they shape your perception of new ideas? Ask questions \u2013 one of the strengths of interdisciplinary and diverse teams is that we all bring different knowledge and viewpoints; no one person is expected to know everything. So don\u2019t be afraid to ask, to learn, and to share. Be bold \u2013 significant innovations don\u2019t come from incremental efforts. Be brave in proposing and testing new ideas. When things don\u2019t work, learn from the experience. 
Invite feedback \u2013 new ideas and improvements can emerge from many places when we\u2019re open to hearing them. Check your defensiveness and listen; accept feedback as a gift toward improving our work and ourselves.","title":"Being Curious"},{"location":"additional-resources/code-of-conduct/#being-collaborative","text":"Recognize that everyone is bringing something different to the table \u2013 take the time to get to know each other. Keep an open mind, encourage ideas that are different from yours, and learn from each other\u2019s expertise and experience. Be accountable - great team science depends on trust, communication, respect, and delivering on your commitments. Be clear about your needs, as both a requester and a responder, realistic about your time and capacity commitments, and communicate timelines and standards in advance. Make assumptions explicit and provide context wherever possible - misunderstandings are common on transdisciplinary and cross-cultural teams and can best be managed with intentionality. Check in about assumptions, and be willing to share and correct misunderstandings or mistakes when they happen. Make use of collaboration agreements, communicate clearly and avoid jargon wherever possible. Respect intellectual property and Indigenous data sovereignty \u2013 ESIIL recognizes the extractive and abusive history of scientific engagement with Native peoples, and is committed to doing better. Indigenous knowledge holders are under no obligation to share their data, stories or knowledge. Their work should always be credited, and only shared with permission. Follow guidelines for authorship, Indigenous data sovereignty, and CARE principles. Acknowledge and credit the ideas and work of others. Use the resources that we provide - take advantage of the cyberinfrastructure and data cube at your disposal, but do not use them for unrelated tasks, as it could disrupt the event, introduce security risks, undermine the spirit of collaboration and fair play, and erode trust within the event community. Be safe - never share sensitive personal information; use strong passwords for your Cyverse and GitHub accounts and do not share them with other participants; be cautious of unsolicited emails, messages, or links; and verify online contacts. If you encounter any illegal or harmful activities online related to this event, report them to Tyler McIntosh or Susan Sullivan. Finally, speak up if you experience or notice a dangerous situation, or someone in distress!","title":"Being Collaborative"},{"location":"additional-resources/code-of-conduct/#code-of-conduct-unacceptable-behaviors","text":"We adopt the full Code of Conduct of our home institution, the University of Colorado, details of which are found here . To summarize, examples of unacceptable and reportable behaviors include, but are not limited to: Harassment, intimidation, or discrimination in any form Physical or verbal abuse by anyone to anyone, including but not limited to a participant, member of the public, guest, member of any institution or sponsor Unwelcome sexual attention or advances Personal attacks directed at other guests, members, participants, etc. 
Alarming, intimidating, threatening, or hostile comments or conduct Inappropriate use of nudity and/or sexual images in public spaces or in presentations Threatening or stalking anyone Unauthorized use or sharing of personal or confidential information or private communication Continuing interactions, including but not limited to conversations, photographies, recordings, instant messages, and emails, after being asked to stop Ethical and scientific misconduct, including failing to credit contributions or respect intellectual property Engaging in any illegal activities, including hacking, cheating, or unauthorized access to systems or data Using the cyberinfrastructure provided by the organizers for activities unrelated to this event. Other conduct which could reasonably be considered inappropriate in a professional setting. The University of Colorado recognizes all Federal and State protected classes, which include the following: race, color, national origin, sex, pregnancy, age, marital status, disability, creed, religion, sexual orientation, gender identity, gender expression, veteran status, political affiliation or political philosophy. Mistreatment or harassment not related to protected class also has a negative impact and will be addressed by the ESIIL team. Anyone requested to stop unacceptable behavior is expected to comply immediately. If there is a clear violation of the code of conduct during an ESIIL event\u2014for example, a meeting is Zoom bombed or a team member is verbally abusing another participant during a workshop\u2014 ESIIL leaders, facilitators (or their designee) or campus/local police may take any action deemed necessary and appropriate, including expelling the violator, or immediate removal of the violator from any online or in-person event or platform without warning or refund. If such actions are necessary, there will be follow up with the ESIIL Diversity Equity and Inclusion (DEI) team to determine what further action is needed (see Reporting Process and Consequences below).","title":"Code of Conduct: Unacceptable behaviors"},{"location":"additional-resources/code-of-conduct/#addressing-behavior-directly","text":"For smaller incidents that might be settled with a brief conversation, you may choose to contact the person in question or set up a (video) conversation to discuss how the behavior affected you. Please use this approach only if you feel comfortable; you do not have to carry the weight of addressing these issues yourself. If you are interested in this option but unsure how to go about it, please contact the ESIIL DEI lead, Susan Sullivan, first\u2014she will have advice on how to make the conversation happen and is available to join you in a conversation as requested.","title":"Addressing Behavior Directly"},{"location":"additional-resources/code-of-conduct/#reporting-process-and-consequences","text":"We take any reports of Code of Conduct violations seriously, and aim to support those who are impacted and ensure that problematic behavior doesn\u2019t happen again.","title":"Reporting Process and Consequences"},{"location":"additional-resources/code-of-conduct/#making-a-report","text":"If you believe you\u2019re experiencing or have experienced unacceptable behavior that is counter to this code of conduct, or you are witness to this behavior happening to someone else, we encourage you to contact our DEI lead: Susan Sullivan, CIRES Email: susan.sullivan@colorado.edu You may also choose to anonymously report behavior to ESIIL using this form . 
The DEI team will keep reports as confidential as possible. However, as mandatory reporters, we have an obligation to report alleged protected class violations to our home institution or to law enforcement.","title":"Making a Report"},{"location":"additional-resources/code-of-conduct/#specifically","text":"Cases of potential protected-class harassment will be reported to the CU Office of Institutional Equity and Compliance. If the violation is made by a member of another institution, that information may also be shared with that member\u2019s home institution by the CU Office of Institutional Equity and Compliance under Title IX. In some instances, harassment information may be shared with the National Science Foundation, who are the funding organization of ESIIL. When we discuss incidents with people who are accused of misconduct (the respondent), we will anonymize details as much as possible to protect the privacy of the reporter and the person who was impacted (the complainant). In some cases, even when the details are anonymized, the respondent may guess at the identities of the reporter and complainants. If you have concerns about retaliation or your personal safety, please let us know (or note that in your report). We encourage you to report in any case, so that we can support you while keeping ESIIL members safe. In some cases, we are able to compile several anonymized reports into a pattern of behavior, and take action based on that pattern. If you prefer to speak with someone who is not on the ESIIL leadership team, or who can maintain confidentiality, you may contact: CU Ombuds Phone: 303-492-5077 (for guidance and support navigating difficult conversations) CU Office of Victim Assistance Phone: 303-492-8855 If you want more information about when to report, or how to help someone who needs to report, please review the resources at Don\u2019t Ignore It . Note: The reporting party does not need to be directly involved in a code of conduct violation incident. Please make a bystander report if you observe a potentially dangerous situation, someone in distress, or violations of these guidelines, even if the situation is not happening to you.","title":"Specifically:"},{"location":"additional-resources/code-of-conduct/#what-happens-after-a-report-is-filed","text":"After a member of the ESIIL DEI team takes your report, they will (if necessary) consult with the appropriate support people at CU. The ESIIL DEI team will respond with a status update within 5 business days. During this time, they, or members of the CU Office of Institutional Equity and Compliance, will: Meet with you or review report documentation to determine what happened Consult documentation of past incidents for patterns of behavior Discuss appropriate response(s) to the incident Connect with the appropriate offices and/or make those response(s) Determine the follow-up actions for any impacted people and/or the reporter Follow up with the impacted people, including connecting them with support and resources.","title":"What Happens After a Report Is Filed"},{"location":"additional-resources/code-of-conduct/#as-a-result-of-this-process-in-minor-cases-esiil-dei-may-communicate-with-the-respondent-to","text":"Explain what happened and the impact of their behavior Offer concrete examples of how to improve their behavior Explain consequences of their behavior, or future consequences if the behavior is repeated. 
For significant infractions, follow up to the report may be turned over to the CU Office of Institutional Equity and Compliance and/or campus police.","title":"As a result of this process, in minor cases ESIIL DEI may communicate with the respondent to:"},{"location":"additional-resources/code-of-conduct/#possible-consequences-to-code-of-conduct-violations","text":"What follows are examples of possible responses to an incident report. This list is not inclusive, and ESIIL reserves the right to take any action it deems necessary. Generally speaking, the strongest response ESIIL may take is to completely ban a user from further engagement with ESIIL activities and, as is required, report a person to the CU Office of Institutional Equity and Compliance and/or their home institution and NSF. If law enforcement should be involved, they will recommend that the complainant make that contact. Employees of CU Boulder may also be subject to consequences as determined by the institution. In addition to the responses above, ESIIL responses may include but are not limited to the following: A verbal discussion in person or via phone/Zoom followed by documentation of the conversation via email Not publishing the video or slides of a talk that violated the code of conduct Not allowing a speaker who violated the code of conduct to give (further) talks Immediately ending any team leadership, membership, or other responsibilities and privileges that a person holds Temporarily banning a person from ESIIL activities Permanently banning a person from ESIIL activities Nothing, if the behavior is determined to not be a code of conduct violation Do you need more resources? Please don\u2019t hesitate to contact the ESIIL DEI lead, Susan Sullivan, if you have questions or concerns. The CU Office of Institutional Equity and Compliance is a resource for all of us in navigating this space. They also offer resource materials that can assist you in exploring various topics and skills here. If you have questions about what, when or how to report, or how to help someone else with concerns, Don\u2019t Ignore It. CU Ombud\u2019s Office: Confidential support to navigate university situations. (Most universities have these resources) The CU Office of Victims Assistance (counseling limited to CU students/staff/faculty, though advocacy is open to everyone engaged with a CU-sponsored activity. Please look for a similar resource on your campus if you are from another institution). National Crisis Hotlines How are we doing? Despite our best intentions, in some cases we may not be living up to our ideals of a positive, supportive, inclusive, respectful and collaborative community. If you feel we could do better, we welcome your feedback. Comments, suggestions and praise are also very welcome! Acknowledgment By participating in this event, you agree to abide by this code of conduct and understand the consequences of violating it. We believe that a respectful and inclusive environment benefits all participants and leads to more creative and successful outcomes. Thank you for your cooperation in making the this event a welcoming event for all. Have fun!","title":"Possible Consequences to Code of Conduct Violations"},{"location":"additional-resources/cyverse_hacks/","text":"Transitioning Workflows to CyVerse: Tips & Tricks \u00b6 Forest Carbon Codefest Data Storage \u00b6 Path: ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/ Your team has a subdirectory within the Team_outputs directory. 
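For example, once GoCommands has been installed and initialized (see Data Transfer to CyVerse below), you can browse the shared event data from a terminal in your CyVerse analysis and copy results into your team's folder. This is a minimal sketch only; TEAM_NAME and results.csv are hypothetical placeholders:
ls ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Team_outputs/
./gocmd put results.csv /iplant/home/shared/earthlab/forest_carbon_codefest/Team_outputs/TEAM_NAME/
./gocmd get /iplant/home/shared/earthlab/forest_carbon_codefest/Team_outputs/TEAM_NAME/results.csv .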
Setup \u00b6 CyVerse Account: Create an account if you do not already have one. Contact Tyson for account upgrades after maximizing current limits. GitHub Connection \u00b6 Follow the guide for connecting GitHub to CyVerse Select \u201cJupyterLab ESIIL\u201d and choose \u201cmacrosystems\u201d in the version dropdown. Clone into /home/jovyan/data-store . Clone innovation-summit-utils for SSH connection to GitHub. Run conda install -c conda-forge openssh in the terminal if encountering errors. GitHub authentication is session-specific. RStudio in Discovery Environment \u00b6 Copy your instance ID. It can be found in your analysis URL, in the form https://{INSTANCE_ID}.cyverse.run/lab. Use your ID in these links and run them each, in sequence, in the same browser window: https://{INSTANCE_ID}.cyverse.run/rstudio/auth-sign-in https://{INSTANCE_ID}.cyverse.run/rstudio/ Data Transfer to CyVerse \u00b6 Use GoCommands for HPC/CyVerse transfers. Installation: Linux: GOCMD_VER=$(curl -L -s https://raw.githubusercontent.com/cyverse/gocommands/main/VERSION.txt); curl -L -s https://github.com/cyverse/gocommands/releases/download/${GOCMD_VER}/gocmd-${GOCMD_VER}-linux-amd64.tar.gz | tar zxvf - Windows Powershell: curl -o gocmdv.txt https://raw.githubusercontent.com/cyverse/gocommands/main/VERSION.txt ; $env:GOCMD_VER = (Get-Content gocmdv.txt) ; curl -o gocmd.zip https://github.com/cyverse/gocommands/releases/download/$env:GOCMD_VER/gocmd-$env:GOCMD_VER-windows-amd64.zip ; tar zxvf gocmd.zip ; del gocmd.zip ; del gocmdv.txt Usage: ./gocmd init Hit enter until you are asked for your iRODS Username (which is your CyVerse username) Use put for upload and get for download. Ensure correct CyVerse directory path. Note that the CyVerse directory path should start from \u201c/iplant/home/\u2026\u201d (i.e. if you start from \u2018/home/jovyan/\u2026\u2019 GoCommands will not find the directory and throw an error)","title":"Cyverse hacks"},{"location":"additional-resources/cyverse_hacks/#transitioning-workflows-to-cyverse-tips-tricks","text":"","title":"Transitioning Workflows to CyVerse: Tips & Tricks"},{"location":"additional-resources/cyverse_hacks/#forest-carbon-codefest-data-storage","text":"Path: ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/ Your team has a subdirectory within the Team_outputs directory.","title":"Forest Carbon Codefest Data Storage"},{"location":"additional-resources/cyverse_hacks/#setup","text":"CyVerse Account: Create an account if you do not already have one. Contact Tyson for account upgrades after maximizing current limits.","title":"Setup"},{"location":"additional-resources/cyverse_hacks/#github-connection","text":"Follow the guide for connecting GitHub to CyVerse Select \u201cJupyterLab ESIIL\u201d and choose \u201cmacrosystems\u201d in the version dropdown. Clone into /home/jovyan/data-store . Clone innovation-summit-utils for SSH connection to GitHub. Run conda install -c conda-forge openssh in the terminal if encountering errors. GitHub authentication is session-specific.","title":"GitHub Connection"},{"location":"additional-resources/cyverse_hacks/#rstudio-in-discovery-environment","text":"Copy your instance ID. It can be found in your analysis URL, in the form https://{INSTANCE_ID}.cyverse.run/lab. Use your ID in these links and run them each, in sequence, in the same browser window: https://{INSTANCE_ID}.cyverse.run/rstudio/auth-sign-in https://{INSTANCE_ID}.cyverse.run/rstudio/","title":"RStudio in Discovery Environment"},{"location":"additional-resources/cyverse_hacks/#data-transfer-to-cyverse","text":"Use GoCommands for HPC/CyVerse transfers.
Installation: Linux: GOCMD_VER=$(curl -L -s https://raw.githubusercontent.com/cyverse/gocommands/main/VERSION.txt); curl -L -s https://github.com/cyverse/gocommands/releases/download/${GOCMD_VER}/gocmd-${GOCMD_VER}-linux-amd64.tar.gz | tar zxvf - Windows Powershell: curl -o gocmdv.txt https://raw.githubusercontent.com/cyverse/gocommands/main/VERSION.txt ; $env:GOCMD_VER = (Get-Content gocmdv.txt) ; curl -o gocmd.zip https://github.com/cyverse/gocommands/releases/download/$env:GOCMD_VER/gocmd-$env:GOCMD_VER-windows-amd64.zip ; tar zxvf gocmd.zip ; del gocmd.zip ; del gocmdv.txt Usage: ./gocmd init Hit enter until you are asked for your iRODS Username (which is your CyVerse username) Use put for upload and get for download. Ensure correct CyVerse directory path. Note that the CyVerse directory path should start from \u201c/iplant/home/\u2026\u201d (i.e. if you start from \u2018/home/jovyan/\u2026\u2019 GoCommands will not find the directory and throw an error)","title":"Data Transfer to CyVerse"},{"location":"additional-resources/participant_agreement/","text":"Participant Agreement \u00b6 This Participant Agreement (\u201cAgreement\u201d) is a contract between you (\u201cYou/Your\u201d or \u201cParticipant\u201d) and THE REGENTS OF THE UNIVERSITY OF COLORADO, a body corporate, acting on behalf of the University of Colorado Boulder, a public institution of higher education created under the Constitution and the Law of the State of Colorado (the \u201cUniversity\u201d), having offices located at 3100 Marine Street, Boulder, CO 80309. In consideration of Your participation in the 2024 ESIIL Innovation Summit, the sufficiency of which is hereby acknowledged, You agree as follows: Environmental Data Science Innovation & Inclusion Lab (\u201cESIIL\u201d) is a National Science Foundation (\u201cNSF\u201d) funded data synthesis center led by the University. Earth Lab is part of the Cooperative Institute for Research in Environmental Sciences (CIRES) specializing in data-intensive open, reproducible environmental science. ESIIL will host the Summit in person from May 13 through May 16, 2024. Innovation Summit Description \u00b6 ESIIL's 2024 Innovation Summit will offer an opportunity to use big data to understand resilience across genes, species, ecosystems and societies, advance ecological forecasting with solutions in mind, and inform adaptive management and natural climate solutions. The Summit will support attendees to advance data-informed courses of action for resilience and adaptation in the face of our changing environment. It will be an in-person \u2018unconference\u2019, enabling participants to dynamically work on themes that most inspire them, with inclusive physical and intellectual spaces for working together. Over two and a half days participants will work in teams to explore research questions using open science approaches, including: data infrastructure, artificial intelligence (AI) and novel analytics, and cloud computing. Participants will be encouraged to work across and respect different perspectives, with the aim of co-developing resilience solutions. ESIIL will provide participants with opportunities to learn more about cultural intelligence, ethical and open science practices, and leadership in the rapidly evolving field of environmental data science. Overall, the Summit will capitalize on the combination of open data and analytics opportunities to develop innovative or impactful approaches that improve environmental resilience and adaptation.
How to Participate \u00b6 You will join a team of environmental scientists, data experts, and coders to explore curated data, consider the objectivity of the data, propose a scientific question that can be addressed with all or some of the data sets, and analyze the data in an attempt to answer your scientific question. You will present your Work to the event community. ESIIL will provide environmental data, cyberinfrastructure, cyberinfrastructure and data analytics training, and technical support. Representations and Warranties \u00b6 By and through Your participation in the Summit, You represent and warrant the following: You have read, understand, and agree to abide by the Code of Conduct and Respectful Inclusive Collaboration Guidelines for the 2024 ESIIL Innovation Summit (\u201cCode of Conduct\u201d). Any decisions concerning the Code of Conduct, Official Rules, or any other matter relating to this Summit by the University is final and binding on all Participants. Summit Assets \u00b6 5.1 Access and Use \u00b6 By participating in the Innovation Summit, You may receive access to certain datasets, webinars, and/or other copyrighted materials (collectively, the \u201cSummit Assets\u201d). You agree to follow all licenses, restrictions, and other instructions provided to You with the Summit Assets. 5.2 Disclaimer \u00b6 The Summit Assets are provided \u201cas is\u201d without warranty of any kind, either express or implied, including, without limitation, any implied warranties of merchantability and fitness for a particular purpose. Without limiting the foregoing, the University does not warrant that the Materials will be suitable for Your Solution or that the operation or supply of the Summit Assets will be uninterrupted or error free. 5.3 Restrictions \u00b6 You agree not to access or use the Summit Assets in a manner that may interfere with any other participants\u2019 or users\u2019 use of such assets, unless provided with express written consent by the University. Your access to and use of the Summit Assets may be limited, throttled, or terminated at any time at the sole discretion of the University. 5.4 Originality and Third-Party Materials \u00b6 You represent that Your Work is Your original creation. If you obtain permission to include third-party materials, You represent that Your Work includes complete details of any third-party license or other restriction (including, but not limited to, related patents and trademarks) of which You are aware and which are associated with any part of Your Work. You represent and warrant that You will not submit any materials to the University that You know or believe to have components that are malicious or harmful. You represent that You will perform a reasonable amount of due diligence in order to be properly informed of third-party licenses, infringing materials, or harmful content associated with any part of Your Work. 5.5 Work Publication \u00b6 You agree to make Your Work publicly available in GitHub under the MIT open-source license within five (5) months from the end of the Summit. Limitation of Liability \u00b6 TO THE EXTENT ALLOWED BY LAW, IN NO EVENT SHALL THE UNIVERSITY, ITS PARTNERS, LICENSORS, SERVICE PROVIDERS, OR ANY OF THEIR RESPECTIVE OFFICERS, DIRECTORS, AGENTS, EMPLOYEES OR REPRESENTATIVES, BE LIABLE FOR DIRECT, INCIDENTAL, CONSEQUENTIAL, EXEMPLARY OR PUNITIVE DAMAGES ARISING OUT OF OR IN CONNECTION WITH THE SUMMIT OR THIS AGREEMENT (HOWEVER ARISING, INCLUDING NEGLIGENCE). 
IF YOU HAVE A DISPUTE WITH ANY PARTICIPANT OR ANY OTHER THIRD PARTY, YOU RELEASE THE UNIVERSITY, ITS, PARTNERS, LICENSORS, AND SERVICE PROVIDERS, AND EACH OF THEIR RESPECTIVE OFFICERS, DIRECTORS, AGENTS, EMPLOYEES AND REPRESENTATIVES FROM ANY AND ALL CLAIMS, DEMANDS AND DAMAGES (ACTUAL AND CONSEQUENTIAL) OF EVERY KIND AND NATURE ARISING OUT OF OR IN ANY WAY CONNECTED WITH SUCH DISPUTES. YOU AGREE THAT ANY CLAIMS AGAINST UNIVERSITY ARISING OUT OF THE SUMMIT OR THIS AGREEMENT MUST BE FILED WITHIN ONE YEAR AFTER SUCH CLAIM AROSE; OTHERWISE, YOUR CLAIM IS PERMANENTLY BARRED. Not an Offer or Contract of Employment \u00b6 Under no circumstances will Your participation in the Summit or anything in this Agreement be construed as an offer or contract of employment with the University. Additional Terms \u00b6 You must be at least eighteen (18) years of age to participate in the Summit. The Summit is subject to applicable federal, state, and local laws. The University reserves the right to permanently disqualify any person from the Summit that it reasonably believes has violated this Agreement, the Code of Conduct, and/or the Official Rules. Any attempt to deliberately damage the Summit or the operation thereof is unlawful and subject to legal action by the University, which may seek damages to the fullest extent permitted by law. The University assumes no responsibility for any injury or damage to Your or any other person\u2019s computer relating to or resulting from entering or downloading materials or software in connection with the Summit. The University is not responsible for telecommunications, network, electronic, technical, or computer failures of any kind; for inaccurate transcription of entry information; for any human or electronic error; or for Solutions that are stolen, misdirected, garbled, delayed, lost, late, damaged, or returned. The University reserves the right to cancel, modify, or suspend the Summit or any element thereof (including, without limitation, this Agreement) without notice in any manner and for any reason (including, without limitation, in the event of any unanticipated occurrence that is not fully addressed in this Agreement). The University may prohibit any person from participating in the Summit, if such person shows a disregard for this Agreement; acts with an intent to annoy, abuse, threaten, or harass any other entrant or any agents or representatives of the University (or any associated, partners, licensors, or service providers for the University); or behaves in any other disruptive manner (as determined by the University in its sole discretion). Nothing contained in this Agreement shall be construed as an express or implied waiver by University of its governmental immunity or of the governmental immunity of the State of Colorado. Your Work shall not contain any item(s) that are either export-controlled under the International Traffic in Arms Regulations, or that appear on the Commerce Control List (except as EAR99) of the Export Administration Regulations. Dispute Resolution \u00b6 This Agreement and the Summit shall be governed and construed in accordance with and governed by the laws of the state of Colorado without giving effect to conflict of law provisions. Entire Agreement \u00b6 This Agreement and the Event Code of Conduct, constitutes the entire agreement between the University and You with respect to the Summit and supersedes all previous or contemporaneous oral or written agreements concerning the Summit. 
In the event of a conflict between this Agreement and/or the Event Code of Conduct, the conflict shall be resolved with the following order of precedence: This Agreement The Event Code of Conduct Severability \u00b6 The invalidity, illegality, or unenforceability of any one or more phrases, sentences, clauses, or sections in this Agreement does not affect the remaining portions of this Agreement. If you have questions about the Summit, please contact ESIIL at esiil@colorado.edu . Guidelines for Intellectual Contributions and Credit \u00b6 ESIIL Guidelines for Intellectual Contributions and Credit","title":"Participant Agreement"},{"location":"additional-resources/participant_agreement/#participant-agreement","text":"This Participant Agreement (\u201cAgreement\u201d) is a contract between you (\u201cYou/Your\u201d or \u201cParticipant\u201d) and THE REGENTS OF THE UNIVERSITY OF COLORADO, a body corporate, acting on behalf of the University of Colorado Boulder, a public institution of higher education created under the Constitution and the Law of the State of Colorado (the \u201cUniversity\u201d), having offices located at 3100 Marine Street, Boulder, CO 80309. In consideration of Your participation in the 2024 ESIIL Innovation Summit, the sufficiency of which is hereby acknowledged, You agree as follows: Environmental Data Science Innovation & Inclusion Lab (\u201cESIIL\u201d) is a National Science Foundation (\u201cNSF\u201d) funded data synthesis center led by the University. Earth Lab is part of the Cooperative Institute for Research in Environmental Sciences (CIRES) specializing in data-intensive open, reproducible environmental science. ESIIL will host the Summit in person from May 13 through May 16, 2024.","title":"Participant Agreement"},{"location":"additional-resources/participant_agreement/#innovation-summit-description","text":"ESIIL's 2024 Innovation Summit will offer an opportunity to use big data to understand resilience across genes, species, ecosystems and societies, advance ecological forecasting with solutions in mind, and inform adaptive management and natural climate solutions. The Summit will support attendees to advance data-informed courses of action for resilience and adaptation in the face of our changing environment. It will be an in-person \u2018unconference\u2019, enabling participants to dynamically work on themes that most inspire them, with inclusive physical and intellectual spaces for working together. Over two and a half days participants will work in teams to explore research questions using open science approaches, including: data infrastructure, artificial intelligence (AI) and novel analytics, and cloud computing. Participants will be encouraged to work across and respect different perspectives, with the aim of co-developing resilience solutions. ESIIL will provide participants with opportunities to learn more about cultural intelligence, ethical and open science practices, and leadership in the rapidly evolving field of environmental data science. 
Overall, the Summit will capitalize on the combination of open data and analytics opportunities to develop innovative or impactful approaches that improve environmental resilience and adaptation.","title":"Innovation Summit Description"},{"location":"additional-resources/participant_agreement/#how-to-participate","text":"You will join a team of environmental scientists, data experts, and coders to explore curated data, consider the objectivity of the data, propose a scientific question that can be addressed with all or some of the data sets, and analyze the data in an attempt to answer your scientific question. You will present your Work to the event community. ESIIL will provide environmental data, cyberinfrastructure, cyberinfrastructure and data analytics training, and technical support.","title":"How to Participate"},{"location":"additional-resources/participant_agreement/#representations-and-warranties","text":"By and through Your participation in the Summit, You represent and warrant the following: You have read, understand, and agree to abide by the Code of Conduct and Respectful Inclusive Collaboration Guidelines for the 2024 ESIIL Innovation Summit (\u201cCode of Conduct\u201d). Any decisions concerning the Code of Conduct, Official Rules, or any other matter relating to this Summit by the University is final and binding on all Participants.","title":"Representations and Warranties"},{"location":"additional-resources/participant_agreement/#summit-assets","text":"","title":"Summit Assets"},{"location":"additional-resources/participant_agreement/#51-access-and-use","text":"By participating in the Innovation Summit, You may receive access to certain datasets, webinars, and/or other copyrighted materials (collectively, the \u201cSummit Assets\u201d). You agree to follow all licenses, restrictions, and other instructions provided to You with the Summit Assets.","title":"5.1 Access and Use"},{"location":"additional-resources/participant_agreement/#52-disclaimer","text":"The Summit Assets are provided \u201cas is\u201d without warranty of any kind, either express or implied, including, without limitation, any implied warranties of merchantability and fitness for a particular purpose. Without limiting the foregoing, the University does not warrant that the Materials will be suitable for Your Solution or that the operation or supply of the Summit Assets will be uninterrupted or error free.","title":"5.2 Disclaimer"},{"location":"additional-resources/participant_agreement/#53-restrictions","text":"You agree not to access or use the Summit Assets in a manner that may interfere with any other participants\u2019 or users\u2019 use of such assets, unless provided with express written consent by the University. Your access to and use of the Summit Assets may be limited, throttled, or terminated at any time at the sole discretion of the University.","title":"5.3 Restrictions"},{"location":"additional-resources/participant_agreement/#54-originality-and-third-party-materials","text":"You represent that Your Work is Your original creation. If you obtain permission to include third-party materials, You represent that Your Work includes complete details of any third-party license or other restriction (including, but not limited to, related patents and trademarks) of which You are aware and which are associated with any part of Your Work. You represent and warrant that You will not submit any materials to the University that You know or believe to have components that are malicious or harmful. 
You represent that You will perform a reasonable amount of due diligence in order to be properly informed of third-party licenses, infringing materials, or harmful content associated with any part of Your Work.","title":"5.4 Originality and Third-Party Materials"},{"location":"additional-resources/participant_agreement/#55-work-publication","text":"You agree to make Your Work publicly available in GitHub under the MIT open-source license within five (5) months from the end of the Summit.","title":"5.5 Work Publication"},{"location":"additional-resources/participant_agreement/#limitation-of-liability","text":"TO THE EXTENT ALLOWED BY LAW, IN NO EVENT SHALL THE UNIVERSITY, ITS PARTNERS, LICENSORS, SERVICE PROVIDERS, OR ANY OF THEIR RESPECTIVE OFFICERS, DIRECTORS, AGENTS, EMPLOYEES OR REPRESENTATIVES, BE LIABLE FOR DIRECT, INCIDENTAL, CONSEQUENTIAL, EXEMPLARY OR PUNITIVE DAMAGES ARISING OUT OF OR IN CONNECTION WITH THE SUMMIT OR THIS AGREEMENT (HOWEVER ARISING, INCLUDING NEGLIGENCE). IF YOU HAVE A DISPUTE WITH ANY PARTICIPANT OR ANY OTHER THIRD PARTY, YOU RELEASE THE UNIVERSITY, ITS, PARTNERS, LICENSORS, AND SERVICE PROVIDERS, AND EACH OF THEIR RESPECTIVE OFFICERS, DIRECTORS, AGENTS, EMPLOYEES AND REPRESENTATIVES FROM ANY AND ALL CLAIMS, DEMANDS AND DAMAGES (ACTUAL AND CONSEQUENTIAL) OF EVERY KIND AND NATURE ARISING OUT OF OR IN ANY WAY CONNECTED WITH SUCH DISPUTES. YOU AGREE THAT ANY CLAIMS AGAINST UNIVERSITY ARISING OUT OF THE SUMMIT OR THIS AGREEMENT MUST BE FILED WITHIN ONE YEAR AFTER SUCH CLAIM AROSE; OTHERWISE, YOUR CLAIM IS PERMANENTLY BARRED.","title":"Limitation of Liability"},{"location":"additional-resources/participant_agreement/#not-an-offer-or-contract-of-employment","text":"Under no circumstances will Your participation in the Summit or anything in this Agreement be construed as an offer or contract of employment with the University.","title":"Not an Offer or Contract of Employment"},{"location":"additional-resources/participant_agreement/#additional-terms","text":"You must be at least eighteen (18) years of age to participate in the Summit. The Summit is subject to applicable federal, state, and local laws. The University reserves the right to permanently disqualify any person from the Summit that it reasonably believes has violated this Agreement, the Code of Conduct, and/or the Official Rules. Any attempt to deliberately damage the Summit or the operation thereof is unlawful and subject to legal action by the University, which may seek damages to the fullest extent permitted by law. The University assumes no responsibility for any injury or damage to Your or any other person\u2019s computer relating to or resulting from entering or downloading materials or software in connection with the Summit. The University is not responsible for telecommunications, network, electronic, technical, or computer failures of any kind; for inaccurate transcription of entry information; for any human or electronic error; or for Solutions that are stolen, misdirected, garbled, delayed, lost, late, damaged, or returned. The University reserves the right to cancel, modify, or suspend the Summit or any element thereof (including, without limitation, this Agreement) without notice in any manner and for any reason (including, without limitation, in the event of any unanticipated occurrence that is not fully addressed in this Agreement). 
The University may prohibit any person from participating in the Summit, if such person shows a disregard for this Agreement; acts with an intent to annoy, abuse, threaten, or harass any other entrant or any agents or representatives of the University (or any associated, partners, licensors, or service providers for the University); or behaves in any other disruptive manner (as determined by the University in its sole discretion). Nothing contained in this Agreement shall be construed as an express or implied waiver by University of its governmental immunity or of the governmental immunity of the State of Colorado. Your Work shall not contain any item(s) that are either export-controlled under the International Traffic in Arms Regulations, or that appear on the Commerce Control List (except as EAR99) of the Export Administration Regulations.","title":"Additional Terms"},{"location":"additional-resources/participant_agreement/#dispute-resolution","text":"This Agreement and the Summit shall be governed and construed in accordance with and governed by the laws of the state of Colorado without giving effect to conflict of law provisions.","title":"Dispute Resolution"},{"location":"additional-resources/participant_agreement/#entire-agreement","text":"This Agreement and the Event Code of Conduct, constitutes the entire agreement between the University and You with respect to the Summit and supersedes all previous or contemporaneous oral or written agreements concerning the Summit. In the event of a conflict between this Agreement and/or the Event Code of Conduct, the conflict shall be resolved with the following order of precedence: This Agreement The Event Code of Conduct","title":"Entire Agreement"},{"location":"additional-resources/participant_agreement/#severability","text":"The invalidity, illegality, or unenforceability of any one or more phrases, sentences, clauses, or sections in this Agreement does not affect the remaining portions of this Agreement. If you have questions about the Summit, please contact ESIIL at esiil@colorado.edu .","title":"Severability"},{"location":"additional-resources/participant_agreement/#guidelines-for-intellectual-contributions-and-credit","text":"ESIIL Guidelines for Intellectual Contributions and Credit","title":"Guidelines for Intellectual Contributions and Credit"},{"location":"additional-resources/useful_links/","text":"Useful links \u00b6 CyVerse User Portal GitHub ESIIL Website 2024 Summit Slack","title":"Useful links"},{"location":"additional-resources/useful_links/#useful-links","text":"CyVerse User Portal GitHub ESIIL Website 2024 Summit Slack","title":"Useful links"},{"location":"collaborating-on-the-cloud/cyverse-instructions/","text":"Connecting to Cyverse and GitHub \u00b6 Log in to Cyverse \u00b6 Go to the Cyverse user account website https://user.cyverse.org/ Click Sign up (if you do not already have an account). When you make this account, please use the email that you have been using to communicate with our team regarding the event. That email is attached to our CyVerse workshop. Log in to Cyverse https://user.cyverse.org/ with your new account. From your account, go to the navigation bar at left and select 'Workshops' From the workshop page, find the workshop titled \"Forest Carbon Codefest\". It should look like this: Click on the tile, and then on the page for the workshop, click, \"Enroll\" at upper right. You should be enrolled automatically if you are using the email you have given our team. 
Head over to the Cyverse Discovery Environment by clicking on 'Services' at the upper right and then 'Discovery Environment' under 'My Services'. You should now see the Discovery Environment: Open up an analysis with the hackathon environment (Jupyter Lab) \u00b6 From the Cyverse Discovery Environment, click on Apps in the left menu Select JupyterLab ESIIL Configure and launch your analysis - the defaults are fine for now: Click Go to analysis : Now you should see Jupyter Lab! Set up your GitHub credentials \u00b6 If you would prefer to follow a video instead of a written outline, we have prepared a video here: \u00b6 From Jupyter Lab, click on the GitHub icon on the left menu: Click Clone a Repository : Paste the link to the innovation-summit-utils https://github.com/CU-ESIIL/innovation-summit-utils.git and click Clone : You should now see the innovation-summit-utils folder in your directory tree (provided you haven't changed directories from the default /home/jovyan/data-store Go into the innovation-summit-utils folder: open up the create_github_keypair.ipynb notebook by double-clicking: Select the default kernel Now you should see the notebook open. Click the play button at the top. You will be prompted to enter your GitHub username and email: You should now see your Public Key. Copy the WHOLE LINE including ssh-ed25519 at the beginning and the jovyan@... at the end Go to your GitHub settings page (you may need to log in to GitHub first): Select SSH and GPG keys Select New SSH key Give your key a descriptive name, paste your ENTIRE public key in the Key input box, and click Add SSH Key . You may need to re-authenticate with your password or two-factor authentication.: You should now see your new SSH key in your Authentication Keys list! Now you will be able to clone private repositories and push changes to GitHub from your Cyverse analysis! NOTE! Your GitHub authentication is ONLY for the analysis you're working with right now. You will be able to use it as long as you want there, but once you start a new analysis you will need to go through this process again. Feel free to delete keys from old analyses that have been shut down.","title":"Connecting to Cyverse and GitHub"},{"location":"collaborating-on-the-cloud/cyverse-instructions/#connecting-to-cyverse-and-github","text":"","title":"Connecting to Cyverse and GitHub"},{"location":"collaborating-on-the-cloud/cyverse-instructions/#log-in-to-cyverse","text":"Go to the Cyverse user account website https://user.cyverse.org/ Click Sign up (if you do not already have an account). When you make this account, please use the email that you have been using to communicate with our team regarding the event. That email is attached to our CyVerse workshop. Log in to Cyverse https://user.cyverse.org/ with your new account. From your account, go to the navigation bar at left and select 'Workshops' From the workshop page, find the workshop titled \"Forest Carbon Codefest\". It should look like this: Click on the tile, and then on the page for the workshop, click, \"Enroll\" at upper right. You should be enrolled automatically if you are using the email you have given our team. Head over to the Cyverse Discovery Environment by clicking on 'Services' at the upper right and then 'Discovery Environment' under 'My Services'. 
You should now see the Discovery Environment:","title":"Log in to Cyverse"},{"location":"collaborating-on-the-cloud/cyverse-instructions/#open-up-an-analysis-with-the-hackathon-environment-jupyter-lab","text":"From the Cyverse Discovery Environment, click on Apps in the left menu Select JupyterLab ESIIL Configure and launch your analysis - the defaults are fine for now: Click Go to analysis : Now you should see Jupyter Lab!","title":"Open up an analysis with the hackathon environment (Jupyter Lab)"},{"location":"collaborating-on-the-cloud/cyverse-instructions/#set-up-your-github-credentials","text":"","title":"Set up your GitHub credentials"},{"location":"collaborating-on-the-cloud/cyverse-instructions/#if-you-would-prefer-to-follow-a-video-instead-of-a-written-outline-we-have-prepared-a-video-here","text":"From Jupyter Lab, click on the GitHub icon on the left menu: Click Clone a Repository : Paste the link to the innovation-summit-utils https://github.com/CU-ESIIL/innovation-summit-utils.git and click Clone : You should now see the innovation-summit-utils folder in your directory tree (provided you haven't changed directories from the default /home/jovyan/data-store Go into the innovation-summit-utils folder: open up the create_github_keypair.ipynb notebook by double-clicking: Select the default kernel Now you should see the notebook open. Click the play button at the top. You will be prompted to enter your GitHub username and email: You should now see your Public Key. Copy the WHOLE LINE including ssh-ed25519 at the beginning and the jovyan@... at the end Go to your GitHub settings page (you may need to log in to GitHub first): Select SSH and GPG keys Select New SSH key Give your key a descriptive name, paste your ENTIRE public key in the Key input box, and click Add SSH Key . You may need to re-authenticate with your password or two-factor authentication.: You should now see your new SSH key in your Authentication Keys list! Now you will be able to clone private repositories and push changes to GitHub from your Cyverse analysis! NOTE! Your GitHub authentication is ONLY for the analysis you're working with right now. You will be able to use it as long as you want there, but once you start a new analysis you will need to go through this process again. Feel free to delete keys from old analyses that have been shut down.","title":"If you would prefer to follow a video instead of a written outline, we have prepared a video here:"},{"location":"collaborating-on-the-cloud/cyverse_data_management/","text":"Cyverse data management \u00b6 Cloud-to-instance data access \u00b6 The best and most efficient way to access most data from within your Cyverse instance is via APIs, VSI, or STAC. Examples of such data access can be found throughout the data library. This is the preferred method of data access since it keeps data on the cloud, puts it directly on your instance, and then the data is removed upon instance termination. Note that any data you want to keep must be moved off the instance and to the Cyverse data store prior to instance termination (see below, \"Saving data from your instance to the data store\"). Pre-downloaded data on Cyverse data store \u00b6 Some data can be time consuming or frustrating to access. Or, you or one of your teammates may just be much more comfortable working with data that has effectively been 'downloaded locally'. 
In an attempt to streamline your projects, the ESIIL and Earth Lab teams have loaded a set of data onto the Cyverse data store, which can be read from your Cyverse instance. Pre-downloaded data for the Forest Carbon Codefest can be found in the Cyverse data store at this link. The path directory to this location from within a Cyverse instance is: ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest Note that, while data CAN be read on your instance directly from the data store, it is usually best to move the data to your instance prior to reading and processing the data. Having the data directly on your instance will dramatically improve processing time and performance. (see below, \"Moving data from the data store to your instance\") Moving data from the data store to your instance \u00b6 Use the terminal command line interface on your instance to move data from the data store to your instance (whether that is pre-downloaded data or data that you have saved to your team folder). The home directory of your instance is: /home/jovyan To do so, open the Terminal from your launcher Then, use the 'cp' command to copy data from the data store to your instance. Use the flag -r if you are moving an entire directory or directory structure. The command is in the form: cp -r data-store-location new-location-on-instance For example, the below command will move the entire LCMAP_SR_1985-2021 directory to a new data directory on your instance: cp -r ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/LCMAP_SR_1985_2021 /home/jovyan/data/ Saving data from your instance to the data store \u00b6 Any data or outputs that you want to keep, such as newly derived datasets or figures, must be moved off the instance and to the Cyverse data store prior to instance termination. To do so, you will follow the same steps as in \"Moving data from the data store to your instance\" (see above), but with the directories in the command reversed. All team outputs should be stored in the subdirectories named TeamX in this directory: ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Team_outputs Each team has their own directory; make sure you are saving to the correct one! For example, if you were on Team1 and wanted to save a figures directory, you could use the below command: cp -r /home/jovyan/figures ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Team_outputs/Team1/","title":"Cyverse data management"},{"location":"collaborating-on-the-cloud/cyverse_data_management/#cyverse-data-management","text":"","title":"Cyverse data management"},{"location":"collaborating-on-the-cloud/cyverse_data_management/#cloud-to-instance-data-access","text":"The best and most efficient way to access most data from within your Cyverse instance is via APIs, VSI, or STAC. Examples of such data access can be found throughout the data library. This is the preferred method of data access since it keeps data on the cloud, puts it directly on your instance, and then the data is removed upon instance termination. Note that any data you want to keep must be moved off the instance and to the Cyverse data store prior to instance termination (see below, \"Saving data from your instance to the data store\").","title":"Cloud-to-instance data access"},{"location":"collaborating-on-the-cloud/cyverse_data_management/#pre-downloaded-data-on-cyverse-data-store","text":"Some data can be time consuming or frustrating to access. 
Or, you or one of your teammates may just be much more comfortable working with data that has effectively been 'downloaded locally'. In an attempt to streamline your projects, the ESIIL and Earth Lab teams have loaded a set of data onto the Cyverse data store, which can be read from your Cyverse instance. Pre-downloaded data for the Forest Carbon Codefest can be found in the Cyverse data store at this link. The path directory to this location from within a Cyverse instance is: ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest Note that, while data CAN be read on your instance directly from the data store, it is usually best to move the data to your instance prior to reading and processing the data. Having the data directly on your instance will dramatically improve processing time and performance. (see below, \"Moving data from the data store to your instance\")","title":"Pre-downloaded data on Cyverse data store"},{"location":"collaborating-on-the-cloud/cyverse_data_management/#moving-data-from-the-data-store-to-your-instance","text":"Use the terminal command line interface on your instance to move data from the data store to your instance (whether that is pre-downloaded data or data that you have saved to your team folder). The home directory of your instance is: /home/jovyan To do so, open the Terminal from your launcher Then, use the 'cp' command to copy data from the data store to your instance. Use the flag -r if you are moving an entire directory or directory structure. The command is in the form: cp -r data-store-location new-location-on-instance For example, the below command will move the entire LCMAP_SR_1985-2021 directory to a new data directory on your instance: cp -r ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/LCMAP_SR_1985_2021 /home/jovyan/data/","title":"Moving data from the data store to your instance"},{"location":"collaborating-on-the-cloud/cyverse_data_management/#saving-data-from-your-instance-to-the-data-store","text":"Any data or outputs that you want to keep, such as newly derived datasets or figures, must be moved off the instance and to the Cyverse data store prior to instance termination. To do so, you will follow the same steps as in \"Moving data from the data store to your instance\" (see above), but with the directories in the command reversed. All team outputs should be stored in the subdirectories named TeamX in this directory: ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Team_outputs Each team has their own directory; make sure you are saving to the correct one! For example, if you were on Team1 and wanted to save a figures directory, you could use the below command: cp -r /home/jovyan/figures ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Team_outputs/Team1/","title":"Saving data from your instance to the data store"},{"location":"collaborating-on-the-cloud/github-basics/","text":"Github essentials \u00b6 I. Introduction (2 minutes) \u00b6 A. Brief overview of GitHub: \u00b6 GitHub is a web-based platform that provides version control and collaboration features using Git, a distributed version control system. It enables developers to work together on projects, track changes to code, and efficiently manage different versions of the project. GitHub is widely used in the software development industry and is an essential tool for collaborative projects and maintaining code quality. B. 
Introduce GitHub Desktop and JupyterHub GitHub widget: \u00b6 GitHub Desktop is a graphical user interface (GUI) application that simplifies working with Git and GitHub by providing a more visual and intuitive way to manage repositories, branches, commits, and other Git features. JupyterHub GitHub widget, on the other hand, is a built-in widget that integrates Git and GitHub functionality directly into Jupyter notebooks, allowing users to perform version control and collaboration tasks within the Jupyter environment. Both tools help streamline the process of working with GitHub and make it more accessible to users with varying levels of experience with Git and version control. 1. Download GitHub Desktop \u00b6 Step 1: Download GitHub Desktop \u00b6 Go to the GitHub Desktop download page: https://desktop.github.com/ Click on the \u201cDownload for Windows\u201d or \u201cDownload for macOS\u201d button, depending on your operating system. The download should start automatically. Step 2: Install GitHub Desktop \u00b6 For Windows: Locate the downloaded installer file (usually in the Downloads folder) and double-click on it to run the installer. Follow the installation instructions that appear on the screen, accepting the default settings or customizing them as desired. Once the installation is complete, GitHub Desktop will launch automatically. For macOS: Locate the downloaded .zip file (usually in the Downloads folder) and double-click on it to extract the GitHub Desktop application. Drag the extracted \u201cGitHub Desktop\u201d application into the \u201cApplications\u201d folder. Open the \u201cApplications\u201d folder and double-click on \u201cGitHub Desktop\u201d to launch the application. Step 3: Set up GitHub Desktop \u00b6 When GitHub Desktop launches for the first time, you will be prompted to sign in with your GitHub account. If you don\u2019t have one, you can create one at https://github.com/join . Enter your GitHub username (or email) and password, and click on \u201cSign in.\u201d You will then be prompted to configure Git. Enter your name and email address, which will be used for your commit messages. Click \u201cContinue\u201d when you\u2019re done. Choose whether you want to submit usage data to help improve GitHub Desktop. Click \u201cFinish\u201d to complete the setup. Now, you have successfully installed and set up GitHub Desktop. You can start using it to clone repositories, make changes, commit, and sync with the remote repositories on GitHub. 1. Download GitHub for JupyterHub cloud service \u00b6 Step 1: Accessing JupyterHub on the cloud \u00b6 Visit the JupyterHub cloud service you want to use (e.g., Binder, Google Colab, or a custom JupyterHub deployment provided by your organization). Sign in with your credentials or authenticate using a third-party service if required. Step 2: Launch a new Jupyter Notebook or open an existing one \u00b6 Click on the \u201cNew\u201d button (usually located in the top right corner) and select \u201cPython\u201d to create a new Jupyter Notebook or open an existing one from the file browser. Once the notebook is open, you will see the Jupyter Notebook interface with the familiar cells for writing and executing code. Step 3: Install and enable the JupyterLab Git extension \u00b6 In your Jupyter Notebook, create a new code cell and run the following command to install the JupyterLab Git extension: !pip install jupyterlab-git Restart the Jupyter Notebook server for the changes to take effect. 
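If you prefer to install the extension from a terminal instead of a notebook cell, a minimal sketch looks like the following (assuming a reasonably recent JupyterLab 3+ environment, where jupyterlab-git ships as a prebuilt extension; the listing commands are only an optional sanity check and their exact form can vary between Jupyter versions):

```bash
# Install the JupyterLab Git extension (server extension and frontend in one package)
pip install --upgrade jupyterlab-git

# Optional sanity checks: the extension should appear in both lists
jupyter labextension list
jupyter server extension list
```

After the install, restart the Jupyter server (or stop and relaunch the analysis) so the Git icon appears in the left sidebar, as described in Step 4 below.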
Step 4: Using the JupyterHub GitHub widget \u00b6 In the Jupyter Notebook interface, you should now see a Git icon on the left sidebar. Click on it to open the GitHub widget. To clone a repository, click on the \u201c+\u201d icon in the GitHub widget and enter the repository URL. This will clone the repository into your JupyterHub workspace. You can now navigate through the cloned repository, make changes, and use the GitHub widget to stage, commit, and push your changes back to the remote repository. To create and manage branches, use the branch icon in the GitHub widget. You can create new branches, switch between branches, and merge branches using this interface. To sync your local repository with the remote repository, use the \u201cPull\u201d and \u201cPush\u201d buttons in the GitHub widget. Now, you know how to access and use the JupyterHub GitHub widget running on the cloud. This allows you to work with Git and GitHub directly from your Jupyter Notebook interface, streamlining your workflow and making collaboration easier. C. GitHub in Rstudio: \u00b6 Integrating GitHub with RStudio allows users to manage their Git repositories and collaborate on projects directly within the RStudio environment. It offers similar functionality to GitHub Desktop but caters specifically to R users working within RStudio. By configuring RStudio to work with Git, creating or opening RStudio projects, and linking projects to GitHub repositories, users can enjoy a seamless workflow for version control and collaboration. RStudio\u2019s Git pane enables users to stage, commit, and push changes to remote repositories, as well as manage branches and sync local repositories with remote ones, providing a comprehensive solution for R developers working with GitHub. Step 1: Install Git \u00b6 Before integrating GitHub with RStudio, you need to have Git installed on your computer. Visit the official Git website ( https://git-scm.com/ ) to download and install the latest version of Git for your operating system. Step 2: Configure RStudio to work with Git \u00b6 Open RStudio. Go to \u201cTools\u201d > \u201cGlobal Options\u201d in the top menu. In the \u201cGlobal Options\u201d window, click on the \u201cGit/SVN\u201d tab. Check that the \u201cGit executable\u201d field is pointing to the correct location of the installed Git. If not, click \u201cBrowse\u201d and navigate to the location of the Git executable file (usually found in the \u201cbin\u201d folder of the Git installation directory). Click \u201cOK\u201d to save the changes. Step 3: Create or open an RStudio project \u00b6 To create a new RStudio project, go to \u201cFile\u201d > \u201cNew Project\u201d in the top menu. You can either create a new directory or choose an existing one for your project. To open an existing RStudio project, go to \u201cFile\u201d > \u201cOpen Project\u201d and navigate to the project\u2019s \u201c.Rproj\u201d file. Step 4: Link your RStudio project to a GitHub repository \u00b6 In the RStudio project, go to the \u201cTools\u201d menu and select \u201cVersion Control\u201d > \u201cProject Setup.\u201d In the \u201cProject Setup\u201d window, select \u201cGit\u201d as the version control system and click \u201cOK.\u201d A new \u201c.git\u201d folder will be created in your project directory, initializing it as a Git repository. Commit any changes you have made so far by clicking on the \u201cCommit\u201d button in the \u201cGit\u201d pane in RStudio. 
To link your local repository to a remote GitHub repository, go to your GitHub account and create a new repository. Copy the remote repository\u2019s URL (e.g., \u201c https://github.com/username/repository.git \u201d). In RStudio, open the \u201cShell\u201d by going to \u201cTools\u201d > \u201cShell.\u201d In the shell, run the following command to add the remote repository: git remote add origin https://github.com/username/repository.git Replace the URL with the one you copied from your GitHub repository. Push your changes to the remote repository by running the following command in the shell: git push -u origin master Now, your RStudio project is linked to a GitHub repository. You can use the \u201cGit\u201d pane in RStudio to stage, commit, and push changes to the remote repository, as well as manage branches and sync your local repository with the remote one. By integrating GitHub with RStudio, you can streamline your workflow, collaborate more effectively with your team, and manage your Git repositories directly from the RStudio interface. II. GitHub Basics (4 minutes) \u00b6 A. Repository: \u00b6 A repository, often abbreviated as \u201crepo,\u201d is the fundamental building block of GitHub. It is a storage space for your project files, including the code, documentation, and other related resources. Each repository also contains the complete history of all changes made to the project files, which is crucial for effective version control. Repositories can be public, allowing anyone to access and contribute, or private, restricting access to specific collaborators. B. Fork and Clone: \u00b6 Forking and cloning are two essential operations for working with repositories on GitHub. Forking creates a personal copy of someone else\u2019s repository under your GitHub account, enabling you to make changes to the project without affecting the original repo. Cloning, on the other hand, is the process of downloading a remote repository to your local machine for offline development. In GitHub Desktop, you can clone a repository by selecting \u201cClone a repository from the Internet\u201d and entering the repository URL. In JupyterHub GitHub widget, you can clone a repository by entering the repo URL in the \u201cClone Repository\u201d section of the widget. C. Branches: \u00b6 Branches are a critical aspect of Git version control, as they allow you to create multiple parallel versions of your project within a single repository. This is particularly useful when working on new features or bug fixes, as it prevents changes from interfering with the main (or \u201cmaster\u201d) branch until they are ready to be merged. Creating a new branch in GitHub Desktop can be done by clicking the \u201cCurrent Branch\u201d dropdown and selecting \u201cNew Branch.\u201d In JupyterHub GitHub widget, you can create a new branch by clicking the \u201cNew Branch\u201d button in the \u201cBranches\u201d section of the widget. D. Replace \u2018master\u2019 with \u2018main\u2019: \u00b6 In recent years, there has been a growing awareness of the importance of inclusive language in technology. One such example is the use of the term \u201cmaster\u201d in the context of the default branch in a GitHub repository. The term \u201cmaster\u201d has historical connections to the \u201cmaster/slave\u201d file structure, which evokes an unsavory colonial past associated with slavery. 
In light of this, many developers and organizations have begun to replace the term \u201cmaster\u201d with more neutral terms, such as \u201cmain.\u201d We encourage you to follow this practice and change the default branch name in your repositories from \u201cmaster\u201d to \u201cmain\u201d or another suitable alternative. This small change can help promote a more inclusive and welcoming environment within the technology community. III. Collaboration and Version Control (5 minutes) \u00b6 A. Commits: \u00b6 Commits are snapshots of your project\u2019s changes at a specific point in time, serving as the fundamental building blocks of Git\u2019s version control system. Commits make it possible to track changes, revert to previous versions, and collaborate with others. In GitHub Desktop, you can make a commit by staging the changes you want to include, adding a descriptive commit message, and clicking \u201cCommit to [branch_name].\u201d In JupyterHub GitHub widget, you can create a commit by selecting the files with changes, entering a commit message, and clicking the \u201cCommit\u201d button. B. Push: \u00b6 In GitHub, \u201cpush\u201d is a fundamental operation in the version control process that transfers commits from your local repository to a remote repository, such as the one hosted on GitHub. When you push changes, you synchronize the remote repository with the latest updates made to your local repository, making those changes accessible to other collaborators working on the same project. This operation ensures that the remote repository reflects the most recent state of your work and allows your team members to stay up to date with your changes. Pushing is an essential step in distributed version control systems like Git, as it promotes efficient collaboration among multiple contributors and provides a centralized location for tracking the project\u2019s history and progress. In GitHub, the concepts of \u201ccommit\u201d and \u201cpush\u201d represent two distinct steps in the version control process. A \u201ccommit\u201d is the action of saving changes to your local repository. When you commit changes, you create a snapshot of your work, accompanied by a unique identifier and an optional descriptive message. Commits allow you to track the progress of your work over time and make it easy to revert to a previous state if necessary. On the other hand, \u201cpush\u201d is the action of transferring your local commits to a remote repository, such as the one hosted on GitHub. Pushing makes your changes accessible to others collaborating on the same project and ensures that the remote repository stays up to date with your local repository. In summary, committing saves changes locally, while pushing synchronizes those changes with a remote repository, allowing for seamless collaboration among multiple contributors. C. Pull Requests: \u00b6 Pull requests are a collaboration feature on GitHub that enables developers to propose changes to a repository, discuss those changes, and ultimately merge them into the main branch. To create a pull request, you must first push your changes to a branch on your fork of the repository. Then, using either GitHub Desktop or JupyterHub GitHub widget, you can navigate to the original repository, click the \u201cPull Request\u201d tab, and create a new pull request. After the pull request is reviewed and approved, it can be merged into the main branch. D. 
Merging and Resolving Conflicts: \u00b6 Merging is the process of combining changes from one branch into another. This is typically done when a feature or bugfix has been completed and is ready to be integrated into the main branch. Conflicts can arise during the merging process if the same lines of code have been modified in both branches. To resolve conflicts, you must manually review the changes and decide which version to keep. In GitHub Desktop, you can merge branches by selecting the target branch and choosing \u201cMerge into Current Branch.\u201d Conflicts will be highlighted, and you can edit the files to resolve them before committing the changes. In JupyterHub GitHub widget, you can merge branches by selecting the target branch in the \u201cBranches\u201d section and clicking the \u201cMerge\u201d button. If conflicts occur, the widget will prompt you to resolve them before completing the merge. IV. Additional Features (2 minutes) \u00b6 A. Issues and Project Management: \u00b6 Issues are a powerful feature in GitHub that allows developers to track and manage bugs, enhancements, and other tasks within a project. Issues can be assigned to collaborators, labeled for easy organization, and linked to specific commits or pull requests. They provide a centralized location for discussing and addressing project-related concerns, fostering collaboration and transparent communication among team members. Using issues effectively can significantly improve the overall management and organization of your projects. B. GitHub Pages: \u00b6 GitHub Pages is a service offered by GitHub that allows you to host static websites directly from a repository. By creating a new branch named \u201cgh-pages\u201d in your repository and adding the necessary files (HTML, CSS, JavaScript, etc.), GitHub will automatically build and deploy your website to a publicly accessible URL. This is particularly useful for showcasing project documentation, creating personal portfolios, or hosting project demos. With GitHub Pages, you can take advantage of the version control and collaboration features of GitHub while easily sharing your work with others. V. Conclusion (2 minutes) \u00b6 A. Recap of the essentials of GitHub: \u00b6 In this brief introduction, we have covered the essentials of GitHub, including the basics of repositories, forking, cloning, branching, commits, pull requests, merging, and resolving conflicts. We have also discussed additional features like issues for project management and GitHub Pages for hosting websites directly from a repository. B. Encourage further exploration and learning: \u00b6 While this introduction provides a solid foundation for understanding and using GitHub, there is still much more to learn and explore. As you continue to use GitHub in your projects, you will discover new features and workflows that can enhance your productivity and collaboration. We encourage you to dive deeper into the platform and experiment with different tools and techniques. C. Share resources for learning more about GitHub: \u00b6 There are many resources available for learning more about GitHub and expanding your skills. Some popular resources include GitHub Guides ( https://guides.github.com/ ), which offers a collection of tutorials and best practices, the official GitHub documentation ( https://docs.github.com/ ), and various online tutorials and courses. 
By engaging with these resources and participating in the GitHub community, you can further develop your understanding of the platform and become a more proficient user. By Ty Tuff, ESIIL","title":"Github essentials"},{"location":"collaborating-on-the-cloud/github-basics/#github-essentials","text":"","title":"Github essentials"},{"location":"collaborating-on-the-cloud/github-basics/#i-introduction-2-minutes","text":"","title":"I. Introduction (2 minutes)"},{"location":"collaborating-on-the-cloud/github-basics/#a-brief-overview-of-github","text":"GitHub is a web-based platform that provides version control and collaboration features using Git, a distributed version control system. It enables developers to work together on projects, track changes to code, and efficiently manage different versions of the project. GitHub is widely used in the software development industry and is an essential tool for collaborative projects and maintaining code quality.","title":"A. Brief overview of GitHub:"},{"location":"collaborating-on-the-cloud/github-basics/#b-introduce-github-desktop-and-jupyterhub-github-widget","text":"GitHub Desktop is a graphical user interface (GUI) application that simplifies working with Git and GitHub by providing a more visual and intuitive way to manage repositories, branches, commits, and other Git features. JupyterHub GitHub widget, on the other hand, is a built-in widget that integrates Git and GitHub functionality directly into Jupyter notebooks, allowing users to perform version control and collaboration tasks within the Jupyter environment. Both tools help streamline the process of working with GitHub and make it more accessible to users with varying levels of experience with Git and version control.","title":"B. Introduce GitHub Desktop and JupyterHub GitHub widget:"},{"location":"collaborating-on-the-cloud/github-basics/#1-download-github-desktop","text":"","title":"1. 
Download GitHub Desktop"},{"location":"collaborating-on-the-cloud/github-basics/#step-1-download-github-desktop","text":"Go to the GitHub Desktop download page: https://desktop.github.com/ Click on the \u201cDownload for Windows\u201d or \u201cDownload for macOS\u201d button, depending on your operating system. The download should start automatically.","title":"Step 1: Download GitHub Desktop"},{"location":"collaborating-on-the-cloud/github-basics/#step-2-install-github-desktop","text":"For Windows: Locate the downloaded installer file (usually in the Downloads folder) and double-click on it to run the installer. Follow the installation instructions that appear on the screen, accepting the default settings or customizing them as desired. Once the installation is complete, GitHub Desktop will launch automatically. For macOS: Locate the downloaded .zip file (usually in the Downloads folder) and double-click on it to extract the GitHub Desktop application. Drag the extracted \u201cGitHub Desktop\u201d application into the \u201cApplications\u201d folder. Open the \u201cApplications\u201d folder and double-click on \u201cGitHub Desktop\u201d to launch the application.","title":"Step 2: Install GitHub Desktop"},{"location":"collaborating-on-the-cloud/github-basics/#step-3-set-up-github-desktop","text":"When GitHub Desktop launches for the first time, you will be prompted to sign in with your GitHub account. If you don\u2019t have one, you can create one at https://github.com/join . Enter your GitHub username (or email) and password, and click on \u201cSign in.\u201d You will then be prompted to configure Git. Enter your name and email address, which will be used for your commit messages. Click \u201cContinue\u201d when you\u2019re done. Choose whether you want to submit usage data to help improve GitHub Desktop. Click \u201cFinish\u201d to complete the setup. Now, you have successfully installed and set up GitHub Desktop. You can start using it to clone repositories, make changes, commit, and sync with the remote repositories on GitHub.","title":"Step 3: Set up GitHub Desktop"},{"location":"collaborating-on-the-cloud/github-basics/#1-download-github-for-jupyterhub-cloud-service","text":"","title":"1. Download GitHub for JupyterHub cloud service"},{"location":"collaborating-on-the-cloud/github-basics/#step-1-accessing-jupyterhub-on-the-cloud","text":"Visit the JupyterHub cloud service you want to use (e.g., Binder, Google Colab, or a custom JupyterHub deployment provided by your organization). Sign in with your credentials or authenticate using a third-party service if required.","title":"Step 1: Accessing JupyterHub on the cloud"},{"location":"collaborating-on-the-cloud/github-basics/#step-2-launch-a-new-jupyter-notebook-or-open-an-existing-one","text":"Click on the \u201cNew\u201d button (usually located in the top right corner) and select \u201cPython\u201d to create a new Jupyter Notebook or open an existing one from the file browser. 
Once the notebook is open, you will see the Jupyter Notebook interface with the familiar cells for writing and executing code.","title":"Step 2: Launch a new Jupyter Notebook or open an existing one"},{"location":"collaborating-on-the-cloud/github-basics/#step-3-install-and-enable-the-jupyterlab-git-extension","text":"In your Jupyter Notebook, create a new code cell and run the following command to install the JupyterLab Git extension: !pip install jupyterlab-git Restart the Jupyter Notebook server for the changes to take effect.","title":"Step 3: Install and enable the JupyterLab Git extension"},{"location":"collaborating-on-the-cloud/github-basics/#step-4-using-the-jupyterhub-github-widget","text":"In the Jupyter Notebook interface, you should now see a Git icon on the left sidebar. Click on it to open the GitHub widget. To clone a repository, click on the \u201c+\u201d icon in the GitHub widget and enter the repository URL. This will clone the repository into your JupyterHub workspace. You can now navigate through the cloned repository, make changes, and use the GitHub widget to stage, commit, and push your changes back to the remote repository. To create and manage branches, use the branch icon in the GitHub widget. You can create new branches, switch between branches, and merge branches using this interface. To sync your local repository with the remote repository, use the \u201cPull\u201d and \u201cPush\u201d buttons in the GitHub widget. Now, you know how to access and use the JupyterHub GitHub widget running on the cloud. This allows you to work with Git and GitHub directly from your Jupyter Notebook interface, streamlining your workflow and making collaboration easier.","title":"Step 4: Using the JupyterHub GitHub widget"},{"location":"collaborating-on-the-cloud/github-basics/#c-github-in-rstudio","text":"Integrating GitHub with RStudio allows users to manage their Git repositories and collaborate on projects directly within the RStudio environment. It offers similar functionality to GitHub Desktop but caters specifically to R users working within RStudio. By configuring RStudio to work with Git, creating or opening RStudio projects, and linking projects to GitHub repositories, users can enjoy a seamless workflow for version control and collaboration. RStudio\u2019s Git pane enables users to stage, commit, and push changes to remote repositories, as well as manage branches and sync local repositories with remote ones, providing a comprehensive solution for R developers working with GitHub.","title":"C. GitHub in Rstudio:"},{"location":"collaborating-on-the-cloud/github-basics/#step-1-install-git","text":"Before integrating GitHub with RStudio, you need to have Git installed on your computer. Visit the official Git website ( https://git-scm.com/ ) to download and install the latest version of Git for your operating system.","title":"Step 1: Install Git"},{"location":"collaborating-on-the-cloud/github-basics/#step-2-configure-rstudio-to-work-with-git","text":"Open RStudio. Go to \u201cTools\u201d > \u201cGlobal Options\u201d in the top menu. In the \u201cGlobal Options\u201d window, click on the \u201cGit/SVN\u201d tab. Check that the \u201cGit executable\u201d field is pointing to the correct location of the installed Git. If not, click \u201cBrowse\u201d and navigate to the location of the Git executable file (usually found in the \u201cbin\u201d folder of the Git installation directory). 
Click \u201cOK\u201d to save the changes.","title":"Step 2: Configure RStudio to work with Git"},{"location":"collaborating-on-the-cloud/github-basics/#step-3-create-or-open-an-rstudio-project","text":"To create a new RStudio project, go to \u201cFile\u201d > \u201cNew Project\u201d in the top menu. You can either create a new directory or choose an existing one for your project. To open an existing RStudio project, go to \u201cFile\u201d > \u201cOpen Project\u201d and navigate to the project\u2019s \u201c.Rproj\u201d file.","title":"Step 3: Create or open an RStudio project"},{"location":"collaborating-on-the-cloud/github-basics/#step-4-link-your-rstudio-project-to-a-github-repository","text":"In the RStudio project, go to the \u201cTools\u201d menu and select \u201cVersion Control\u201d > \u201cProject Setup.\u201d In the \u201cProject Setup\u201d window, select \u201cGit\u201d as the version control system and click \u201cOK.\u201d A new \u201c.git\u201d folder will be created in your project directory, initializing it as a Git repository. Commit any changes you have made so far by clicking on the \u201cCommit\u201d button in the \u201cGit\u201d pane in RStudio. To link your local repository to a remote GitHub repository, go to your GitHub account and create a new repository. Copy the remote repository\u2019s URL (e.g., \u201c https://github.com/username/repository.git \u201d). In RStudio, open the \u201cShell\u201d by going to \u201cTools\u201d > \u201cShell.\u201d In the shell, run the following command to add the remote repository: git remote add origin https://github.com/username/repository.git Replace the URL with the one you copied from your GitHub repository. Push your changes to the remote repository by running the following command in the shell: git push -u origin master Now, your RStudio project is linked to a GitHub repository. You can use the \u201cGit\u201d pane in RStudio to stage, commit, and push changes to the remote repository, as well as manage branches and sync your local repository with the remote one. By integrating GitHub with RStudio, you can streamline your workflow, collaborate more effectively with your team, and manage your Git repositories directly from the RStudio interface.","title":"Step 4: Link your RStudio project to a GitHub repository"},{"location":"collaborating-on-the-cloud/github-basics/#ii-github-basics-4-minutes","text":"","title":"II. GitHub Basics (4 minutes)"},{"location":"collaborating-on-the-cloud/github-basics/#a-repository","text":"A repository, often abbreviated as \u201crepo,\u201d is the fundamental building block of GitHub. It is a storage space for your project files, including the code, documentation, and other related resources. Each repository also contains the complete history of all changes made to the project files, which is crucial for effective version control. Repositories can be public, allowing anyone to access and contribute, or private, restricting access to specific collaborators.","title":"A. Repository:"},{"location":"collaborating-on-the-cloud/github-basics/#b-fork-and-clone","text":"Forking and cloning are two essential operations for working with repositories on GitHub. Forking creates a personal copy of someone else\u2019s repository under your GitHub account, enabling you to make changes to the project without affecting the original repo. Cloning, on the other hand, is the process of downloading a remote repository to your local machine for offline development. 
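From the command line, the fork-and-clone pattern described above looks roughly like the sketch below; the repository URLs, user names, and the `upstream` remote name are placeholders for your own fork and the original project.

```bash
# Clone your fork (placeholder URL) to your local machine
git clone https://github.com/your-username/repository.git
cd repository

# Optionally keep a second remote that points at the original repository,
# so you can fetch its newer commits into your fork later
git remote add upstream https://github.com/original-owner/repository.git
git fetch upstream
```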
In GitHub Desktop, you can clone a repository by selecting \u201cClone a repository from the Internet\u201d and entering the repository URL. In JupyterHub GitHub widget, you can clone a repository by entering the repo URL in the \u201cClone Repository\u201d section of the widget.","title":"B. Fork and Clone:"},{"location":"collaborating-on-the-cloud/github-basics/#c-branches","text":"Branches are a critical aspect of Git version control, as they allow you to create multiple parallel versions of your project within a single repository. This is particularly useful when working on new features or bug fixes, as it prevents changes from interfering with the main (or \u201cmaster\u201d) branch until they are ready to be merged. Creating a new branch in GitHub Desktop can be done by clicking the \u201cCurrent Branch\u201d dropdown and selecting \u201cNew Branch.\u201d In JupyterHub GitHub widget, you can create a new branch by clicking the \u201cNew Branch\u201d button in the \u201cBranches\u201d section of the widget.","title":"C. Branches:"},{"location":"collaborating-on-the-cloud/github-basics/#d-replace-master-with-main","text":"In recent years, there has been a growing awareness of the importance of inclusive language in technology. One such example is the use of the term \u201cmaster\u201d in the context of the default branch in a GitHub repository. The term \u201cmaster\u201d has historical connections to the \u201cmaster/slave\u201d file structure, which evokes an unsavory colonial past associated with slavery. In light of this, many developers and organizations have begun to replace the term \u201cmaster\u201d with more neutral terms, such as \u201cmain.\u201d We encourage you to follow this practice and change the default branch name in your repositories from \u201cmaster\u201d to \u201cmain\u201d or another suitable alternative. This small change can help promote a more inclusive and welcoming environment within the technology community.","title":"D. Replace \u2018master\u2019 with \u2018main\u2019:"},{"location":"collaborating-on-the-cloud/github-basics/#iii-collaboration-and-version-control-5-minutes","text":"","title":"III. Collaboration and Version Control (5 minutes)"},{"location":"collaborating-on-the-cloud/github-basics/#a-commits","text":"Commits are snapshots of your project\u2019s changes at a specific point in time, serving as the fundamental building blocks of Git\u2019s version control system. Commits make it possible to track changes, revert to previous versions, and collaborate with others. In GitHub Desktop, you can make a commit by staging the changes you want to include, adding a descriptive commit message, and clicking \u201cCommit to [branch_name].\u201d In JupyterHub GitHub widget, you can create a commit by selecting the files with changes, entering a commit message, and clicking the \u201cCommit\u201d button.","title":"A. Commits:"},{"location":"collaborating-on-the-cloud/github-basics/#b-push","text":"In GitHub, \u201cpush\u201d is a fundamental operation in the version control process that transfers commits from your local repository to a remote repository, such as the one hosted on GitHub. When you push changes, you synchronize the remote repository with the latest updates made to your local repository, making those changes accessible to other collaborators working on the same project. This operation ensures that the remote repository reflects the most recent state of your work and allows your team members to stay up to date with your changes. 
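As a minimal command-line sketch of the branch, commit, and push cycle described above (the branch name and file path are invented for illustration):

```bash
# Create and switch to a new branch so the work stays off the main branch
git checkout -b fix-axis-labels

# Stage the edited file and record a snapshot with a descriptive message
git add analysis/density_plot.R
git commit -m "Fix axis labels in the density plot"

# Publish the branch and its commit to the remote repository on GitHub
git push -u origin fix-axis-labels
```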
Pushing is an essential step in distributed version control systems like Git, as it promotes efficient collaboration among multiple contributors and provides a centralized location for tracking the project\u2019s history and progress. In GitHub, the concepts of \u201ccommit\u201d and \u201cpush\u201d represent two distinct steps in the version control process. A \u201ccommit\u201d is the action of saving changes to your local repository. When you commit changes, you create a snapshot of your work, accompanied by a unique identifier and an optional descriptive message. Commits allow you to track the progress of your work over time and make it easy to revert to a previous state if necessary. On the other hand, \u201cpush\u201d is the action of transferring your local commits to a remote repository, such as the one hosted on GitHub. Pushing makes your changes accessible to others collaborating on the same project and ensures that the remote repository stays up to date with your local repository. In summary, committing saves changes locally, while pushing synchronizes those changes with a remote repository, allowing for seamless collaboration among multiple contributors.","title":"B. Push:"},{"location":"collaborating-on-the-cloud/github-basics/#c-pull-requests","text":"Pull requests are a collaboration feature on GitHub that enables developers to propose changes to a repository, discuss those changes, and ultimately merge them into the main branch. To create a pull request, you must first push your changes to a branch on your fork of the repository. Then, using either GitHub Desktop or JupyterHub GitHub widget, you can navigate to the original repository, click the \u201cPull Request\u201d tab, and create a new pull request. After the pull request is reviewed and approved, it can be merged into the main branch.","title":"C. Pull Requests:"},{"location":"collaborating-on-the-cloud/github-basics/#d-merging-and-resolving-conflicts","text":"Merging is the process of combining changes from one branch into another. This is typically done when a feature or bugfix has been completed and is ready to be integrated into the main branch. Conflicts can arise during the merging process if the same lines of code have been modified in both branches. To resolve conflicts, you must manually review the changes and decide which version to keep. In GitHub Desktop, you can merge branches by selecting the target branch and choosing \u201cMerge into Current Branch.\u201d Conflicts will be highlighted, and you can edit the files to resolve them before committing the changes. In JupyterHub GitHub widget, you can merge branches by selecting the target branch in the \u201cBranches\u201d section and clicking the \u201cMerge\u201d button. If conflicts occur, the widget will prompt you to resolve them before completing the merge.","title":"D. Merging and Resolving Conflicts:"},{"location":"collaborating-on-the-cloud/github-basics/#iv-additional-features-2-minutes","text":"","title":"IV. Additional Features (2 minutes)"},{"location":"collaborating-on-the-cloud/github-basics/#a-issues-and-project-management","text":"Issues are a powerful feature in GitHub that allows developers to track and manage bugs, enhancements, and other tasks within a project. Issues can be assigned to collaborators, labeled for easy organization, and linked to specific commits or pull requests. 
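On the command line, the merge step described above might look like the following sketch (branch and file names are again placeholders); when Git reports a conflict, it marks the disputed lines in each file with `<<<<<<<`, `=======`, and `>>>>>>>` markers that you edit by hand before finishing the merge.

```bash
# Switch to the branch you want to merge into, then bring in the feature branch
git checkout main
git merge fix-axis-labels

# If Git reports conflicts, list the affected files...
git status
# ...edit them to remove the conflict markers, then stage and conclude the merge
git add analysis/density_plot.R
git commit
```

Linking back to issues: GitHub recognizes closing keywords, so a commit message or pull request description containing a phrase like "closes #42" (a placeholder issue number) ties the change to that issue and closes it once the change reaches the default branch; issues thereby stay connected to the code that addresses them.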
They provide a centralized location for discussing and addressing project-related concerns, fostering collaboration and transparent communication among team members. Using issues effectively can significantly improve the overall management and organization of your projects.","title":"A. Issues and Project Management:"},{"location":"collaborating-on-the-cloud/github-basics/#b-github-pages","text":"GitHub Pages is a service offered by GitHub that allows you to host static websites directly from a repository. By creating a new branch named \u201cgh-pages\u201d in your repository and adding the necessary files (HTML, CSS, JavaScript, etc.), GitHub will automatically build and deploy your website to a publicly accessible URL. This is particularly useful for showcasing project documentation, creating personal portfolios, or hosting project demos. With GitHub Pages, you can take advantage of the version control and collaboration features of GitHub while easily sharing your work with others.","title":"B. GitHub Pages:"},{"location":"collaborating-on-the-cloud/github-basics/#v-conclusion-2-minutes","text":"","title":"V. Conclusion (2 minutes)"},{"location":"collaborating-on-the-cloud/github-basics/#a-recap-of-the-essentials-of-github","text":"In this brief introduction, we have covered the essentials of GitHub, including the basics of repositories, forking, cloning, branching, commits, pull requests, merging, and resolving conflicts. We have also discussed additional features like issues for project management and GitHub Pages for hosting websites directly from a repository.","title":"A. Recap of the essentials of GitHub:"},{"location":"collaborating-on-the-cloud/github-basics/#b-encourage-further-exploration-and-learning","text":"While this introduction provides a solid foundation for understanding and using GitHub, there is still much more to learn and explore. As you continue to use GitHub in your projects, you will discover new features and workflows that can enhance your productivity and collaboration. We encourage you to dive deeper into the platform and experiment with different tools and techniques.","title":"B. Encourage further exploration and learning:"},{"location":"collaborating-on-the-cloud/github-basics/#c-share-resources-for-learning-more-about-github","text":"There are many resources available for learning more about GitHub and expanding your skills. Some popular resources include GitHub Guides ( https://guides.github.com/ ), which offers a collection of tutorials and best practices, the official GitHub documentation ( https://docs.github.com/ ), and various online tutorials and courses. By engaging with these resources and participating in the GitHub community, you can further develop your understanding of the platform and become a more proficient user.","title":"C. Share resources for learning more about GitHub:"},{"location":"collaborating-on-the-cloud/github-basics/#v-conclusion-2-minutes_1","text":"","title":"V. Conclusion (2 minutes)"},{"location":"collaborating-on-the-cloud/github-basics/#a-recap-of-the-essentials-of-github_1","text":"In this brief introduction, we have covered the essentials of GitHub, including the basics of repositories, forking, cloning, branching, commits, pull requests, merging, and resolving conflicts. We have also discussed additional features like issues for project management and GitHub Pages for hosting websites directly from a repository.","title":"A. 
Recap of the essentials of GitHub:"},{"location":"collaborating-on-the-cloud/github-basics/#b-encourage-further-exploration-and-learning_1","text":"While this introduction provides a solid foundation for understanding and using GitHub, there is still much more to learn and explore. As you continue to use GitHub in your projects, you will discover new features and workflows that can enhance your productivity and collaboration. We encourage you to dive deeper into the platform and experiment with different tools and techniques.","title":"B. Encourage further exploration and learning:"},{"location":"collaborating-on-the-cloud/github-basics/#c-share-resources-for-learning-more-about-github_1","text":"There are many resources available for learning more about GitHub and expanding your skills. Some popular resources include GitHub Guides ( https://guides.github.com/ ), which offers a collection of tutorials and best practices, the official GitHub documentation ( https://docs.github.com/ ), and various online tutorials and courses. By engaging with these resources and participating in the GitHub community, you can further develop your understanding of the platform and become a more proficient user. By Ty Tuff, ESIIL","title":"C. Share resources for learning more about GitHub:"},{"location":"collaborating-on-the-cloud/markdown_basics/","text":"Markdown for the Modern Researcher at ESIIL \u00b6 Introduction \u00b6 Overview of Markdown's relevance and utility in modern research. How Markdown streamlines documentation in diverse scientific and coding environments. Section 1: Mastering Markdown Syntax \u00b6 Objective: Equip researchers with a thorough understanding of Markdown syntax and its diverse applications. Topics Covered: Fundamentals of Text Formatting (headings, lists, bold, italics) Advanced Structures (tables, blockquotes) Integrating Multimedia (image and video links) Diagrams with Mermaid (creating flowcharts, mind maps, timelines) Interactive Elements (hyperlinks, embedding interactive content) Activities: Crafting a Markdown document with various formatting elements. Developing diagrams using Mermaid for research presentations. Embedding multimedia elements in a Markdown document for enhanced communication. Section 2: Markdown in Research Tools \u00b6 Objective: Showcase the integration of Markdown in RStudio and Jupyter Notebooks for scientific documentation. Topics Covered: Implementing Markdown in RStudio (R Markdown, knitting to HTML/PDF) Utilizing Markdown in Jupyter Notebooks (code and Markdown cells) Best practices for documenting research code Including code outputs and visualizations in documentation Activities: Creating and sharing an R Markdown document with annotated research data. Building a comprehensive Jupyter Notebook with integrated Markdown annotations. Section 3: Disseminating Research with Markdown and GitHub Pages \u00b6 Objective: Teach researchers how to publish and manage Markdown-based documentation as web pages. Topics Covered: Setting up a GitHub repository for hosting documentation Transforming Markdown files into web-friendly formats Customizing web page layouts and themes Advanced features using Jekyll Version control and content management for documentation Activities: Publishing a research project documentation on GitHub Pages. Applying custom themes and layouts to enhance online documentation. Conclusion \u00b6 Review of Markdown's role in enhancing research efficiency and clarity. 
Encouraging the integration of Markdown into daily research activities for improved documentation and dissemination. Additional Resources \u00b6 Curated list of advanced Markdown tutorials, guides for GitHub Pages, and Jekyll resources for researchers. Section 1: Mastering Markdown Syntax \u00b6 1. Fundamentals of Text Formatting \u00b6 Headings : Use # for different levels of headings. Heading Level 1 \u00b6 Heading Level 2 \u00b6 Heading Level 3 \u00b6 Lists : Bulleted lists use asterisks, numbers for ordered lists. Item 1 Item 2 Subitem 2.1 Subitem 2.2 First item Second item Bold and Italics : Use asterisks or underscores. Bold Text Italic Text 2. Advanced Structures \u00b6 Tables : Create tables using dashes and pipes. Header 1 Header 2 Header 3 Row 1 Data Data Row 2 Data Data Add a \":\"\" to change text justification. Here the : is added on the left for left justification. | Header 1 | Header 2 | Header 3 | |---------:|--------- |----------| | Row 1 | Data | Data | | Row 2 | Data | Data | A N A L Y T I C S E N R E I N V I R O N M E N T V E L O P M O C O M U N E G A G E L L A H C N E R A T A D E V E L O P W E I T S I T N E I C S R S O I G O L O I B H T L A H T L A E W E G N E L T I T S I T N E I C S N I E E S R E H T O E N I C S L L A H C E G L A N E G A L L E H C N E I C If you hit the boundaries of Markdown's capabilities, you can start to add html directly. Remember, this entire exercisse is to translate to html. Sudoku Puzzle Fill in the blank cells with numbers from 1 to 9, such that each row, column, and 3x3 subgrid contains all the numbers from 1 to 9 without repetition. 5 3 7 6 1 9 5 9 8 6 8 6 3 4 8 3 1 7 2 6 6 2 8 4 1 9 5 8 7 9 5 3 4 6 7 8 9 1 2 6 7 2 1 9 5 3 4 8 1 9 8 3 4 2 5 6 7 8 5 9 7 6 1 4 2 3 4 2 6 8 5 3 7 9 1 7 1 3 9 2 4 8 5 6 9 6 1 5 3 7 2 8 4 2 8 7 4 1 9 6 3 5 3 4 5 2 8 6 1 7 9 Blockquotes : Use > for blockquotes. This is a blockquote. It can span multiple lines. 3. Integrating Multimedia \u00b6 Images : Add images using the format ![alt text](image_url) . Videos : Embed videos using HTML in Markdown. 4. 
Diagrams with Mermaid \u00b6 Flowcharts : graph TD A[Start] --> B[Analyze Data] B --> C{Is Data Large?} C -->|Yes| D[Apply Big Data Solutions] C -->|No| E[Use Traditional Methods] D --> F[Machine Learning] E --> G[Statistical Analysis] F --> H{Model Accurate?} G --> I[Report Results] H -->|Yes| J[Deploy Model] H -->|No| K[Refine Model] J --> L[Monitor Performance] K --> F L --> M[End: Success] I --> N[End: Report Generated] style A fill:#f9f,stroke:#333,stroke-width:2px style M fill:#9f9,stroke:#333,stroke-width:2px style N fill:#9f9,stroke:#333,stroke-width:2px Mind Maps : mindmap root((ESIIL)) section Data Sources Satellite Imagery ::icon(fa fa-satellite) Remote Sensing Data Drones Aircraft On-ground Sensors Weather Stations IoT Devices Open Environmental Data Public Datasets ::icon(fa fa-database) section Research Focus Climate Change Analysis Ice Melt Patterns Sea Level Rise Biodiversity Monitoring Species Distribution Habitat Fragmentation Geospatial Analysis Techniques Machine Learning Models Predictive Analytics section Applications Conservation Strategies ::icon(fa fa-leaf) Urban Planning Green Spaces Disaster Response Flood Mapping Wildfire Tracking section Tools and Technologies GIS Software QGIS ArcGIS Programming Languages Python R Cloud Computing Platforms AWS Google Earth Engine Data Visualization D3.js Tableau Timelines : gantt title ESIIL Year 2 Project Schedule dateFormat YYYY-MM-DD section CI Sovereign OASIS via private jupiterhubs :2024-08-01, 2024-10-30 OASIS documentation :2024-09-15, 70d Data cube OASIS via cyverse account :2024-09-15, 100d Integrate with ESIIL User Management system :2024-08-01, 2024-11-30 Build badges to deploy DE from mkdoc :2024-09-01, 2024-12-15 Streamline Github ssh key management :2024-10-01, 2024-12-31 Cyverse support (R proxy link) :2024-11-01, 2024-12-31 Cyverse use summary and statistics :2024-08-01, 2024-12-15 section CI Consultation and Education Conferences/Invited talks :2024-08-01, 2024-12-31 Office hours :2024-08-15, 2024-12-15 Proposals :2024-09-01, 2024-11-15 Private lessons :2024-09-15, 2024-11-30 Pre-event trainings :2024-10-01, 2024-12-15 Textbook development w/ education team :2024-08-01, 2024-12-15 Train the trainers / group lessons :2024-08-15, 2024-11-30 Tribal engagement :2024-09-01, 2024-12-15 Ethical Space training :2024-09-15, 2024-12-31 section CI Design and Build Data library (repository) :2024-08-01, 2024-10-30 Analytics library (repository) :2024-08-15, 2024-11-15 Containers (repository) :2024-09-01, 2024-11-30 Cloud infrastructure templates (repository) :2024-09-15, 2024-12-15 Tribal resilience Data Cube :2024-10-01, 2024-12-31 %%{init: { 'logLevel': 'debug', 'theme': 'base', 'gitGraph': {'rotateCommitLabel': true}} }%% gitGraph commit id: \"Start from template\" branch c1 commit id: \"Set up SSH key pair\" commit id: \"Modify _config.yml for GitHub Pages\" commit id: \"Initial website structure\" commit id: \"Add new markdown pages\" commit id: \"Update navigation tree\" commit id: \"Edit existing pages\" commit id: \"Delete old markdown pages\" commit id: \"Finalize website updates\" commit id: \"Add new markdown pages\" commit id: \"Update navigation tree\" checkout c1 branch b1 commit commit checkout c1 merge b1 %%{init: {\"quadrantChart\": {\"chartWidth\": 400, \"chartHeight\": 400}, \"themeVariables\": {\"quadrant1TextFill\": \"#ff0000\"} }}%% quadrantChart x-axis Urgent --> Not Urgent y-axis Not Important --> \"Important \u2764\" quadrant-1 Plan quadrant-2 Do quadrant-3 Delegate quadrant-4 Delete timeline 
title Major Events in Environmental Science and Data Science section Environmental Science 19th century : Foundations in Ecology and Conservation 1962 : Publication of 'Silent Spring' by Rachel Carson 1970 : First Earth Day 1987 : Brundtland Report introduces Sustainable Development 1992 : Rio Earth Summit 2015 : Paris Agreement on Climate Change section Data Science 1960s-1970s : Development of Database Management Systems 1980s : Emergence of Data Warehousing 1990s : Growth of the World Wide Web and Data Mining 2000s : Big Data and Predictive Analytics 2010s : AI and Machine Learning Revolution 2020s : Integration of AI in Environmental Research erDiagram CAR ||--o{ NAMED-DRIVER : allows CAR { string registrationNumber string make string model } PERSON ||--o{ NAMED-DRIVER : is PERSON { string firstName string lastName int age } --- config: sankey: showValues: false --- sankey-beta NASA Data,Big Data Harmonization,100 Satellite Imagery,Big Data Harmonization,80 Open Environmental Data,Big Data Harmonization,70 Remote Sensing Data,Big Data Harmonization,90 Big Data Harmonization, Data Analysis and Integration,340 Data Analysis and Integration,Climate Change Research,100 Data Analysis and Integration,Biodiversity Monitoring,80 Data Analysis and Integration,Geospatial Mapping,60 Data Analysis and Integration,Urban Planning,50 Data Analysis and Integration,Disaster Response,50 5. Interactive Elements \u00b6 Hyperlinks : Use the format [link text](URL) . Google Play Tetris Embedding Interactive Content : Use HTML tags or specific platform embed codes. 6. Math Notation \u00b6 Markdown can be combined with LaTeX for mathematical notation, useful in environmental data science for expressing statistical distributions, coordinate systems, and more. This requires a Markdown renderer with LaTeX support (like MathJax or KaTeX). Inline Math : Use single dollar signs for inline math expressions. Representing the normal distribution. Example: The probability density function of the normal distribution is given by \\(f(x|\\mu,\\sigma) = \\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2}\\) .` Display Math : Use double dollar signs for standalone equations. Example: $$ f(x|\\mu,\\sigma) = \\frac{1}{\\sigma\\sqrt{2\\pi}}e {-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right) 2} $$ Common LaTeX Elements for Environmental Data Science : Statistical Distributions : Normal Distribution: \\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2} for \\(\\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2}\\) Poisson Distribution: P(k; \\lambda) = \\frac{\\lambda^k e^{-\\lambda}}{k!} for \\(P(k; \\lambda) = \\frac{\\lambda^k e^{-\\lambda}}{k!}\\) Coordinate Systems : Spherical Coordinates: (r, \\theta, \\phi) for \\((r, \\theta, \\phi)\\) Cartesian Coordinates: (x, y, z) for \\((x, y, z)\\) Geospatial Equations : Haversine Formula for Distance: a = \\sin^2\\left(\\frac{\\Delta\\phi}{2}\\right) + \\cos(\\phi_1)\\cos(\\phi_2)\\sin^2\\left(\\frac{\\Delta\\lambda}{2}\\right) for \\(a = \\sin^2\\left(\\frac{\\Delta\\phi}{2}\\right) + \\cos(\\phi_1)\\cos(\\phi_2)\\sin^2\\left(\\frac{\\Delta\\lambda}{2}\\right)\\) Note: The rendering of these equations as formatted math will depend on your Markdown viewer's LaTeX capabilities. 7. Effective Citations in Markdown \u00b6 Inline Citations \u00b6 Objective: Learn how to use inline citations in Markdown. 
Example Usage: Inline citation of a single work: Some text with an inline citation. [ @jones :envstudy:2020] Inline citation with specific page or section: More text with a specific section cited. [See @jones :envstudy:2020, \u00a74.2] Contrasting views: Discussion of a topic with a contrasting view. [Contra @smith :climatechange:2019, p. 78] Footnote Citations \u00b6 Objective: Understand how to use footnote citations in Markdown. Example Usage: Citing with a footnote: Some statement in the text. 1 Multiple references to the same footnote: Another statement referring to the same source. 1 A different citation: Additional comment with a new citation. 2 Creating Footnotes \u00b6 Example Syntax: First reference details. Example: Emma Jones, \"Environmental Study,\" Nature Journal, May 2020, https://nature-journal.com/envstudy2020 . \u21a9 \u21a9 Second reference details. Example: David Smith, \"Climate Change Controversies,\" Science Daily, August 2019, https://sciencedaily.com/climatechange2019 . \u21a9","title":"Markdown for the Modern Researcher at ESIIL"},{"location":"collaborating-on-the-cloud/markdown_basics/#markdown-for-the-modern-researcher-at-esiil","text":"","title":"Markdown for the Modern Researcher at ESIIL"},{"location":"collaborating-on-the-cloud/markdown_basics/#introduction","text":"Overview of Markdown's relevance and utility in modern research. How Markdown streamlines documentation in diverse scientific and coding environments.","title":"Introduction"},{"location":"collaborating-on-the-cloud/markdown_basics/#section-1-mastering-markdown-syntax","text":"Objective: Equip researchers with a thorough understanding of Markdown syntax and its diverse applications. Topics Covered: Fundamentals of Text Formatting (headings, lists, bold, italics) Advanced Structures (tables, blockquotes) Integrating Multimedia (image and video links) Diagrams with Mermaid (creating flowcharts, mind maps, timelines) Interactive Elements (hyperlinks, embedding interactive content) Activities: Crafting a Markdown document with various formatting elements. Developing diagrams using Mermaid for research presentations. Embedding multimedia elements in a Markdown document for enhanced communication.","title":"Section 1: Mastering Markdown Syntax"},{"location":"collaborating-on-the-cloud/markdown_basics/#section-2-markdown-in-research-tools","text":"Objective: Showcase the integration of Markdown in RStudio and Jupyter Notebooks for scientific documentation. Topics Covered: Implementing Markdown in RStudio (R Markdown, knitting to HTML/PDF) Utilizing Markdown in Jupyter Notebooks (code and Markdown cells) Best practices for documenting research code Including code outputs and visualizations in documentation Activities: Creating and sharing an R Markdown document with annotated research data. Building a comprehensive Jupyter Notebook with integrated Markdown annotations.","title":"Section 2: Markdown in Research Tools"},{"location":"collaborating-on-the-cloud/markdown_basics/#section-3-disseminating-research-with-markdown-and-github-pages","text":"Objective: Teach researchers how to publish and manage Markdown-based documentation as web pages. Topics Covered: Setting up a GitHub repository for hosting documentation Transforming Markdown files into web-friendly formats Customizing web page layouts and themes Advanced features using Jekyll Version control and content management for documentation Activities: Publishing a research project documentation on GitHub Pages. 
Applying custom themes and layouts to enhance online documentation.","title":"Section 3: Disseminating Research with Markdown and GitHub Pages"},{"location":"collaborating-on-the-cloud/markdown_basics/#conclusion","text":"Review of Markdown's role in enhancing research efficiency and clarity. Encouraging the integration of Markdown into daily research activities for improved documentation and dissemination.","title":"Conclusion"},{"location":"collaborating-on-the-cloud/markdown_basics/#additional-resources","text":"Curated list of advanced Markdown tutorials, guides for GitHub Pages, and Jekyll resources for researchers.","title":"Additional Resources"},{"location":"collaborating-on-the-cloud/markdown_basics/#section-1-mastering-markdown-syntax_1","text":"","title":"Section 1: Mastering Markdown Syntax"},{"location":"collaborating-on-the-cloud/markdown_basics/#1-fundamentals-of-text-formatting","text":"Headings : Use # for different levels of headings.","title":"1. Fundamentals of Text Formatting"},{"location":"collaborating-on-the-cloud/markdown_basics/#heading-level-1","text":"","title":"Heading Level 1"},{"location":"collaborating-on-the-cloud/markdown_basics/#heading-level-2","text":"","title":"Heading Level 2"},{"location":"collaborating-on-the-cloud/markdown_basics/#heading-level-3","text":"Lists : Bulleted lists use asterisks, numbers for ordered lists. Item 1 Item 2 Subitem 2.1 Subitem 2.2 First item Second item Bold and Italics : Use asterisks or underscores. Bold Text Italic Text","title":"Heading Level 3"},{"location":"collaborating-on-the-cloud/markdown_basics/#2-advanced-structures","text":"Tables : Create tables using dashes and pipes. Header 1 Header 2 Header 3 Row 1 Data Data Row 2 Data Data Add a \":\"\" to change text justification. Here the : is added on the left for left justification. | Header 1 | Header 2 | Header 3 | |---------:|--------- |----------| | Row 1 | Data | Data | | Row 2 | Data | Data | A N A L Y T I C S E N R E I N V I R O N M E N T V E L O P M O C O M U N E G A G E L L A H C N E R A T A D E V E L O P W E I T S I T N E I C S R S O I G O L O I B H T L A H T L A E W E G N E L T I T S I T N E I C S N I E E S R E H T O E N I C S L L A H C E G L A N E G A L L E H C N E I C If you hit the boundaries of Markdown's capabilities, you can start to add html directly. Remember, this entire exercisse is to translate to html. Sudoku Puzzle Fill in the blank cells with numbers from 1 to 9, such that each row, column, and 3x3 subgrid contains all the numbers from 1 to 9 without repetition. 5 3 7 6 1 9 5 9 8 6 8 6 3 4 8 3 1 7 2 6 6 2 8 4 1 9 5 8 7 9 5 3 4 6 7 8 9 1 2 6 7 2 1 9 5 3 4 8 1 9 8 3 4 2 5 6 7 8 5 9 7 6 1 4 2 3 4 2 6 8 5 3 7 9 1 7 1 3 9 2 4 8 5 6 9 6 1 5 3 7 2 8 4 2 8 7 4 1 9 6 3 5 3 4 5 2 8 6 1 7 9 Blockquotes : Use > for blockquotes. This is a blockquote. It can span multiple lines.","title":"2. Advanced Structures"},{"location":"collaborating-on-the-cloud/markdown_basics/#3-integrating-multimedia","text":"Images : Add images using the format ![alt text](image_url) . Videos : Embed videos using HTML in Markdown. ","title":"3. 
Integrating Multimedia"},{"location":"collaborating-on-the-cloud/markdown_basics/#4-diagrams-with-mermaid","text":"Flowcharts : graph TD A[Start] --> B[Analyze Data] B --> C{Is Data Large?} C -->|Yes| D[Apply Big Data Solutions] C -->|No| E[Use Traditional Methods] D --> F[Machine Learning] E --> G[Statistical Analysis] F --> H{Model Accurate?} G --> I[Report Results] H -->|Yes| J[Deploy Model] H -->|No| K[Refine Model] J --> L[Monitor Performance] K --> F L --> M[End: Success] I --> N[End: Report Generated] style A fill:#f9f,stroke:#333,stroke-width:2px style M fill:#9f9,stroke:#333,stroke-width:2px style N fill:#9f9,stroke:#333,stroke-width:2px Mind Maps : mindmap root((ESIIL)) section Data Sources Satellite Imagery ::icon(fa fa-satellite) Remote Sensing Data Drones Aircraft On-ground Sensors Weather Stations IoT Devices Open Environmental Data Public Datasets ::icon(fa fa-database) section Research Focus Climate Change Analysis Ice Melt Patterns Sea Level Rise Biodiversity Monitoring Species Distribution Habitat Fragmentation Geospatial Analysis Techniques Machine Learning Models Predictive Analytics section Applications Conservation Strategies ::icon(fa fa-leaf) Urban Planning Green Spaces Disaster Response Flood Mapping Wildfire Tracking section Tools and Technologies GIS Software QGIS ArcGIS Programming Languages Python R Cloud Computing Platforms AWS Google Earth Engine Data Visualization D3.js Tableau Timelines : gantt title ESIIL Year 2 Project Schedule dateFormat YYYY-MM-DD section CI Sovereign OASIS via private jupiterhubs :2024-08-01, 2024-10-30 OASIS documentation :2024-09-15, 70d Data cube OASIS via cyverse account :2024-09-15, 100d Integrate with ESIIL User Management system :2024-08-01, 2024-11-30 Build badges to deploy DE from mkdoc :2024-09-01, 2024-12-15 Streamline Github ssh key management :2024-10-01, 2024-12-31 Cyverse support (R proxy link) :2024-11-01, 2024-12-31 Cyverse use summary and statistics :2024-08-01, 2024-12-15 section CI Consultation and Education Conferences/Invited talks :2024-08-01, 2024-12-31 Office hours :2024-08-15, 2024-12-15 Proposals :2024-09-01, 2024-11-15 Private lessons :2024-09-15, 2024-11-30 Pre-event trainings :2024-10-01, 2024-12-15 Textbook development w/ education team :2024-08-01, 2024-12-15 Train the trainers / group lessons :2024-08-15, 2024-11-30 Tribal engagement :2024-09-01, 2024-12-15 Ethical Space training :2024-09-15, 2024-12-31 section CI Design and Build Data library (repository) :2024-08-01, 2024-10-30 Analytics library (repository) :2024-08-15, 2024-11-15 Containers (repository) :2024-09-01, 2024-11-30 Cloud infrastructure templates (repository) :2024-09-15, 2024-12-15 Tribal resilience Data Cube :2024-10-01, 2024-12-31 %%{init: { 'logLevel': 'debug', 'theme': 'base', 'gitGraph': {'rotateCommitLabel': true}} }%% gitGraph commit id: \"Start from template\" branch c1 commit id: \"Set up SSH key pair\" commit id: \"Modify _config.yml for GitHub Pages\" commit id: \"Initial website structure\" commit id: \"Add new markdown pages\" commit id: \"Update navigation tree\" commit id: \"Edit existing pages\" commit id: \"Delete old markdown pages\" commit id: \"Finalize website updates\" commit id: \"Add new markdown pages\" commit id: \"Update navigation tree\" checkout c1 branch b1 commit commit checkout c1 merge b1 %%{init: {\"quadrantChart\": {\"chartWidth\": 400, \"chartHeight\": 400}, \"themeVariables\": {\"quadrant1TextFill\": \"#ff0000\"} }}%% quadrantChart x-axis Urgent --> Not Urgent y-axis Not Important --> \"Important 
\u2764\" quadrant-1 Plan quadrant-2 Do quadrant-3 Delegate quadrant-4 Delete timeline title Major Events in Environmental Science and Data Science section Environmental Science 19th century : Foundations in Ecology and Conservation 1962 : Publication of 'Silent Spring' by Rachel Carson 1970 : First Earth Day 1987 : Brundtland Report introduces Sustainable Development 1992 : Rio Earth Summit 2015 : Paris Agreement on Climate Change section Data Science 1960s-1970s : Development of Database Management Systems 1980s : Emergence of Data Warehousing 1990s : Growth of the World Wide Web and Data Mining 2000s : Big Data and Predictive Analytics 2010s : AI and Machine Learning Revolution 2020s : Integration of AI in Environmental Research erDiagram CAR ||--o{ NAMED-DRIVER : allows CAR { string registrationNumber string make string model } PERSON ||--o{ NAMED-DRIVER : is PERSON { string firstName string lastName int age } --- config: sankey: showValues: false --- sankey-beta NASA Data,Big Data Harmonization,100 Satellite Imagery,Big Data Harmonization,80 Open Environmental Data,Big Data Harmonization,70 Remote Sensing Data,Big Data Harmonization,90 Big Data Harmonization, Data Analysis and Integration,340 Data Analysis and Integration,Climate Change Research,100 Data Analysis and Integration,Biodiversity Monitoring,80 Data Analysis and Integration,Geospatial Mapping,60 Data Analysis and Integration,Urban Planning,50 Data Analysis and Integration,Disaster Response,50","title":"4. Diagrams with Mermaid"},{"location":"collaborating-on-the-cloud/markdown_basics/#5-interactive-elements","text":"Hyperlinks : Use the format [link text](URL) . Google Play Tetris Embedding Interactive Content : Use HTML tags or specific platform embed codes. ","title":"5. Interactive Elements"},{"location":"collaborating-on-the-cloud/markdown_basics/#6-math-notation","text":"Markdown can be combined with LaTeX for mathematical notation, useful in environmental data science for expressing statistical distributions, coordinate systems, and more. This requires a Markdown renderer with LaTeX support (like MathJax or KaTeX). Inline Math : Use single dollar signs for inline math expressions. Representing the normal distribution. Example: The probability density function of the normal distribution is given by \\(f(x|\\mu,\\sigma) = \\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2}\\) .` Display Math : Use double dollar signs for standalone equations. 
Example: $$ f(x|\\mu,\\sigma) = \\frac{1}{\\sigma\\sqrt{2\\pi}}e {-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right) 2} $$ Common LaTeX Elements for Environmental Data Science : Statistical Distributions : Normal Distribution: \\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2} for \\(\\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2}\\) Poisson Distribution: P(k; \\lambda) = \\frac{\\lambda^k e^{-\\lambda}}{k!} for \\(P(k; \\lambda) = \\frac{\\lambda^k e^{-\\lambda}}{k!}\\) Coordinate Systems : Spherical Coordinates: (r, \\theta, \\phi) for \\((r, \\theta, \\phi)\\) Cartesian Coordinates: (x, y, z) for \\((x, y, z)\\) Geospatial Equations : Haversine Formula for Distance: a = \\sin^2\\left(\\frac{\\Delta\\phi}{2}\\right) + \\cos(\\phi_1)\\cos(\\phi_2)\\sin^2\\left(\\frac{\\Delta\\lambda}{2}\\right) for \\(a = \\sin^2\\left(\\frac{\\Delta\\phi}{2}\\right) + \\cos(\\phi_1)\\cos(\\phi_2)\\sin^2\\left(\\frac{\\Delta\\lambda}{2}\\right)\\) Note: The rendering of these equations as formatted math will depend on your Markdown viewer's LaTeX capabilities.","title":"6. Math Notation"},{"location":"collaborating-on-the-cloud/markdown_basics/#7-effective-citations-in-markdown","text":"","title":"7. Effective Citations in Markdown"},{"location":"collaborating-on-the-cloud/markdown_basics/#inline-citations","text":"Objective: Learn how to use inline citations in Markdown. Example Usage: Inline citation of a single work: Some text with an inline citation. [ @jones :envstudy:2020] Inline citation with specific page or section: More text with a specific section cited. [See @jones :envstudy:2020, \u00a74.2] Contrasting views: Discussion of a topic with a contrasting view. [Contra @smith :climatechange:2019, p. 78]","title":"Inline Citations"},{"location":"collaborating-on-the-cloud/markdown_basics/#footnote-citations","text":"Objective: Understand how to use footnote citations in Markdown. Example Usage: Citing with a footnote: Some statement in the text. 1 Multiple references to the same footnote: Another statement referring to the same source. 1 A different citation: Additional comment with a new citation. 2","title":"Footnote Citations"},{"location":"collaborating-on-the-cloud/markdown_basics/#creating-footnotes","text":"Example Syntax: First reference details. Example: Emma Jones, \"Environmental Study,\" Nature Journal, May 2020, https://nature-journal.com/envstudy2020 . \u21a9 \u21a9 Second reference details. Example: David Smith, \"Climate Change Controversies,\" Science Daily, August 2019, https://sciencedaily.com/climatechange2019 . \u21a9","title":"Creating Footnotes"},{"location":"data-library/Pull_Sentinal2_l2_data/","text":"Pulling Sentinal 2 data \u00b6 Ty Tuff, ESIIL Data Scientist 2023-10-27 Set Java Options \u00b6 # Run these Java options before anything else. options ( java.parameters = \"-Xmx64G\" ) options ( timeout = max ( 600 , getOption ( \"timeout\" ))) R libraries and global setting. 
\u00b6 #library(Rcpp) library ( sf ) library ( gdalcubes ) library ( rstac ) library ( gdalUtils ) library ( terra ) library ( rgdal ) library ( reshape2 ) library ( osmdata ) library ( terra ) library ( dplyr ) #library(glue) library ( stars ) library ( ggplot2 ) library ( colorspace ) library ( geos ) #library(glue) library ( osmdata ) library ( ggthemes ) library ( tidyr ) gdalcubes_options ( parallel = 8 ) sf :: sf_extSoftVersion () GEOS GDAL proj.4 GDAL_with_GEOS USE_PROJ_H \"3.11.0\" \"3.5.3\" \"9.1.0\" \"true\" \"true\" PROJ \"9.1.0\" gdalcubes_gdal_has_geos () [1] TRUE Start timer \u00b6 start <- Sys.time () Set color palette \u00b6 library ( ggtern ) our_yellow <- rgb2hex ( r = 253 , g = 201 , b = 51 ) our_green <- rgb2hex ( r = 10 , g = 84 , b = 62 ) our_grey <- rgb2hex ( r = 92 , g = 96 , b = 95 ) our_white <- rgb2hex ( r = 255 , g = 255 , b = 255 ) Load area of interest \u00b6 # Read the shapefile into an sf object aoi_total <- st_read ( \"/Users/ty/Documents/Github/Southern_California_Edison_Fire_Risk/SCE_Fire_Zone_V2/SCE_Fire_Zone_V2.shp\" ) %>% st_as_sf () Reading layer `SCE_Fire_Zone_V2' from data source `/Users/ty/Documents/Github/Southern_California_Edison_Fire_Risk/SCE_Fire_Zone_V2/SCE_Fire_Zone_V2.shp' using driver `ESRI Shapefile' Simple feature collection with 12 features and 5 fields Geometry type: POLYGON Dimension: XY Bounding box: xmin: 176062.4 ymin: 3674043 xmax: 764123.1 ymax: 4254012 Projected CRS: NAD83 / UTM zone 11N # Plot the entire spatial dataset plot ( aoi_total ) # Filter the dataset to obtain the geometry with OBJECTID 5 aoi <- aoi_total %>% filter ( OBJECTID == 5 ) # Obtain and plot the bounding box of the filtered geometry shape_bbox <- st_bbox ( aoi ) plot ( aoi ) # Transform the filtered geometry to EPSG:4326 and store its bounding box aoi %>% st_transform ( \"EPSG:4326\" ) %>% st_bbox () -> bbox_4326 # Transform the filtered geometry to EPSG:32618 and store its bounding box aoi %>% st_transform ( \"EPSG:32618\" ) %>% st_bbox () -> bbox_32618 Arrange STAC collection \u00b6 In this code chunk, the primary goal is to search for and obtain satellite imagery data. The data source being tapped into is a SpatioTemporal Asset Catalog (STAC) provided by an online service (earth-search by Element84). Here\u2019s a breakdown: A connection is established with the STAC service, searching specifically within the \u201csentinel-s2-l2a-cogs\u201d collection. -The search is spatially constrained to a bounding box (bbox_4326) and temporally limited to a range of one day, between May 15 and May 16, 2021. -Once the search is conducted, the desired assets or spectral bands from the returned satellite images are defined, ranging from Band 1 (B01) to Band 12 (B12) and including the Scene Classification Layer (SCL). -These bands are then organized into an image collection for further processing or analysis. 
# Initialize STAC connection s = stac ( \"https://earth-search.aws.element84.com/v0\" ) # Search for Sentinel-2 images within specified bounding box and date range items = s %>% stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( bbox_4326 [ \"xmin\" ], bbox_4326 [ \"ymin\" ], bbox_4326 [ \"xmax\" ], bbox_4326 [ \"ymax\" ]), datetime = \"2021-05-15/2021-05-16\" ) %>% post_request () %>% items_fetch ( progress = FALSE ) # Print number of found items length ( items $ features ) [1] 12 # Prepare the assets for analysis library ( gdalcubes ) assets = c ( \"B01\" , \"B02\" , \"B03\" , \"B04\" , \"B05\" , \"B06\" , \"B07\" , \"B08\" , \"B8A\" , \"B09\" , \"B11\" , \"B12\" , \"SCL\" ) s2_collection = stac_image_collection ( items $ features , asset_names = assets ) # Display the image collection s2_collection Image collection object, referencing 12 images with 13 bands Images: name left top bottom right 1 S2B_11SNS_20210515_1_L2A -117.0002 33.43957 32.44372 -115.8191 2 S2B_11SPS_20210515_1_L2A -115.9361 33.43490 32.42937 -114.7436 3 S2B_11SQS_20210515_0_L2A -114.8732 33.42092 32.41918 -113.9566 4 S2B_12STB_20210515_0_L2A -114.2244 33.40433 32.61015 -113.9559 5 S2B_11SNT_20210515_0_L2A -117.0002 34.34164 33.34577 -115.8066 6 S2B_11SPT_20210515_0_L2A -115.9253 34.33683 33.33091 -114.7198 datetime srs 1 2021-05-15T18:35:13 EPSG:32611 2 2021-05-15T18:35:10 EPSG:32611 3 2021-05-15T18:35:06 EPSG:32611 4 2021-05-15T18:35:01 EPSG:32612 5 2021-05-15T18:34:59 EPSG:32611 6 2021-05-15T18:34:55 EPSG:32611 [ omitted 6 images ] Bands: name offset scale unit nodata image_count 1 B01 0 1 12 2 B02 0 1 12 3 B03 0 1 12 4 B04 0 1 12 5 B05 0 1 12 6 B06 0 1 12 7 B07 0 1 12 8 B08 0 1 12 9 B09 0 1 12 10 B11 0 1 12 11 B12 0 1 12 12 B8A 0 1 12 13 SCL 0 1 12 Define view window \u00b6 In this code chunk, a \u2018view\u2019 on the previously obtained satellite image collection is being defined. Think of this as setting up a specific lens or perspective to look at the satellite data: -The view is set to the coordinate reference system EPSG:32618. -Spatial resolution is defined as 100x100 meters. -Temporal resolution is defined monthly (P1M), even though the actual range is only one day. -When there are multiple values in a grid cell or timeframe, they are aggregated using the median value. -If any resampling is needed, the nearest neighbor method is used (near). -The spatial and temporal extents are constrained to specific values. -By defining this view, it allows for consistent analysis and visualization of the image collection within the specified spatial and temporal resolutions and extents. # Define a specific view on the satellite image collection v = cube_view ( srs = \"EPSG:32618\" , dx = 100 , dy = 100 , dt = \"P1M\" , aggregation = \"median\" , resampling = \"near\" , extent = list ( t0 = \"2021-05-15\" , t1 = \"2021-05-16\" , left = bbox_32618 [ \"xmin\" ], right = bbox_32618 [ \"xmax\" ], top = bbox_32618 [ \"ymax\" ], bottom = bbox_32618 [ \"ymin\" ] ) ) # Display the defined view v A data cube view object Dimensions: low high count pixel_size t 2021-05-01 2021-05-31 1 P1M y 4471226.41402451 4741326.41402451 2701 100 x -3463720.00044994 -3191420.00044994 2723 100 SRS: \"EPSG:32618\" Temporal aggregation method: \"median\" Spatial resampling method: \"near\" Pull data \u00b6 In this chunk, the primary aim is to transform and prepare satellite imagery data for analysis: -The current time is stored in variable a for tracking the time taken by the process. 
-The previously defined \u2018view\u2019 on the satellite imagery, v, is used to create a raster cube, a multi-dimensional array containing the satellite data. This raster cube contains spatial, spectral, and temporal data. -The desired spectral bands are selected. -The data is limited to a specific area of interest, aoi. -The band names are renamed to their respective wavelengths in nanometers for clarity. -A subset of the data, comprising 50,000 random samples, is selected. -Unwanted columns are removed, and the dataset is transformed into a long format, where each row represents a particular date and wavelength combination. -The entire process duration is computed by taking the difference between the end time (b) and the start time (a). -The transformed dataset y is then displayed. # Record start time a <- Sys.time () # Transform the satellite image collection into a raster cube x <- s2_collection %>% raster_cube ( v ) %>% select_bands ( c ( \"B01\" , \"B02\" , \"B03\" , \"B04\" , \"B05\" , \"B06\" , \"B07\" , \"B08\" , \"B8A\" , \"B09\" , \"B11\" , \"B12\" )) %>% extract_geom ( aoi ) %>% rename ( \"time\" = \"time\" , \"443\" = \"B01\" , \"490\" = \"B02\" , \"560\" = \"B03\" , \"665\" = \"B04\" , \"705\" = \"B05\" , \"740\" = \"B06\" , \"783\" = \"B07\" , \"842\" = \"B08\" , \"865\" = \"B8A\" , \"940\" = \"B09\" , \"1610\" = \"B11\" , \"2190\" = \"B12\" ) # Sample, transform and prepare data for analysis y <- x %>% slice_sample ( n = 50000 ) %>% select ( - FID ) %>% pivot_longer ( ! time , names_to = \"wavelength_nm\" , values_to = \"reflectance\" ) %>% mutate ( wavelength_nm = as.numeric ( wavelength_nm )) # Record end time and compute duration b <- Sys.time () processing_time <- difftime ( b , a ) # Display the processing time and transformed dataset processing_time Time difference of 1.23593 mins y # A tibble: 600,000 \u00d7 3 time wavelength_nm reflectance 1 2021-05-01 443 1855 2 2021-05-01 490 2255 3 2021-05-01 560 2884 4 2021-05-01 665 3711 5 2021-05-01 705 3990 6 2021-05-01 740 4009 7 2021-05-01 783 4078 8 2021-05-01 842 4219 9 2021-05-01 865 4060 10 2021-05-01 940 4120 # \u2139 599,990 more rows Base plot \u00b6 # Set custom colors for the plot our_green <- \"#4CAF50\" our_white <- \"#FFFFFF\" our_yellow <- \"#FFEB3B\" # Create a 2D density plot day_density <- ggplot ( data = y , aes ( x = wavelength_nm , y = reflectance , group = time )) + stat_smooth ( color = our_green , fill = \"lightgrey\" ) + geom_density2d ( colour = \"black\" , bins = 10 , alpha = 0.1 ) + stat_density2d ( aes ( alpha = ..level.. , fill = ..level.. 
), linewidth = 2 , bins = 10 , geom = \"polygon\" ) + scale_fill_gradient ( low = our_white , high = our_yellow ) + scale_alpha ( range = c ( 0.00 , 0.8 ), guide = FALSE ) + theme_tufte () + xlab ( \"wavelength\" ) + ylab ( \"reflectance\" ) + ylim ( 0 , 16000 ) + theme ( aspect.ratio = 5 / 14 , axis.text.x = element_text ( angle = 90 , vjust = 0.5 , hjust = 1 , colour = c ( \"darkblue\" , \"blue\" , \"green\" , \"red\" , \"darkred\" , \"darkred\" , \"darkred\" , \"darkred\" , \"darkred\" , \"black\" , \"black\" , \"black\" , \"black\" )), axis.title.x = element_blank (), axis.title.y = element_blank (), plot.margin = margin ( t = 30 , r = 10 , b = 40 , l = 18 ) ) + scale_x_continuous ( breaks = c ( 443 , 490 , 560 , 665 , 705 , 740 , 783 , 842 , 865 , 940 , 1610 , 2190 )) # Display the plot day_density Inlay 1 - geographic zone \u00b6 guide_map <- ggplot ( data = aoi_total ) + geom_sf ( fill = our_yellow , color = our_white ) + geom_sf ( data = aoi , fill = our_green , color = our_white ) + theme_tufte () + ggtitle ( \"Zone 5\" ) + theme ( axis.text.x = element_blank (), #remove x axis labels axis.ticks.x = element_blank (), #remove x axis ticks axis.text.y = element_blank (), #remove y axis labels axis.ticks.y = element_blank () #remove y axis ticks, bg=none ) + theme ( plot.title = element_text ( hjust = 0.8 , vjust = -2 )) guide_map Inlay 2 - date text \u00b6 library ( geosphere ) aoi_total |> st_centroid () |> st_transform ( crs = \"+proj=longlat\" ) |> st_coordinates () |> colMeans () -> lat_long daylength_line <- daylength ( lat = lat_long [ 2 ], 1 : 365 ) daylengths <- data.frame ( time = 1 : 365 , daylength = daylength_line ) library ( lubridate ) # Create a template date object date <- as.POSIXlt ( \"2021-05-15\" ) doy <- format ( date , format = \"%j\" ) |> as.numeric () display_date <- format ( date , format = \"%e %B %Y \" ) Inlay 3 - daylength \u00b6 date_inlay <- ggplot ( data = daylengths ) + ggtitle ( \"Daylength\" ) + geom_ribbon ( aes ( x = time , ymin = daylength , ymax = 15 ), fill = our_grey , alpha = 0.5 ) + geom_ribbon ( aes ( x = time , ymax = daylength , ymin = 9 ), fill = our_yellow , alpha = 1 ) + geom_hline ( yintercept = 12 , color = our_white ) + geom_vline ( xintercept = doy , color = our_green , size = 1 ) + theme_tufte () + ylim ( 9 , 15 ) + theme ( axis.text.y = element_blank (), axis.ticks.y = element_blank (), axis.title.y = element_blank (), axis.title.x = element_blank (), axis.text.x = element_blank (), axis.ticks.x = element_blank ()) + theme ( plot.title = element_text ( hjust = 0.5 , vjust = 0 )) date_inlay Ensemble map assembly \u00b6 library ( cowplot ) library ( magick ) map_overlay <- ggdraw ( day_density ) + draw_plot ( guide_map , x = 1.08 , y = 1 , hjust = 1 , vjust = 1 , width = 0.3 , height = 0.3 ) + draw_plot ( date_inlay , x = 1 , y = 0.35 , hjust = 1 , vjust = 1 , width = 0.1 , height = 0.25 ) + geom_text ( aes ( x = 1 , y = 0.08 , label = display_date , hjust = 1 ), color = our_grey , cex = 3 , fontface = 'bold' ) + # draw_image(\"Ty_powerline_plots/Southern_California_Edison_Logo.png\", x = -0.24, y = 0.38, scale=.3)+ # draw_image(\"Ty_powerline_plots/earthlab_logo.png\", x = -0.38, y = 0.38, scale=.25)+ geom_text ( aes ( x = 0.4 , y = . 9 , label = \"Spectral library - Monthly average\" ), color = our_green , hjust = 0 , cex = 8 , fontface = 'bold' ) + geom_text ( aes ( x = 0.01 , y = . 04 , label = \"Created by ESIIL (T. Tuff) for Fall Hackathon -- October 2023. 
Sentinel 2 Data from 'https://earth-search.aws.element84.com/v0'\" ), color = our_grey , hjust = 0 , cex = 3 ) + geom_text ( aes ( x = 0.4 , y = . 1 , label = \"wavelength (nm)\" ), color = our_grey , hjust = 0 , cex = 4 , fontface = 'bold' ) + geom_text ( aes ( x = 0.01 , y = . 5 , angle = 90 , label = \"reflectance\" ), color = our_grey , hjust = 0 , cex = 4 , fontface = 'bold' ) map_overlay Save map \u00b6 ggsave ( map_overlay , file = \"day_density_15_May_2021_zone_5.png\" , bg = \"white\" , dpi = 600 , width = 12 , height = 5 ) End timer \u00b6 end <- Sys.time () difftime ( end , start ) Time difference of 3.2202 mins","title":"Pulling Sentinal 2 data"},{"location":"data-library/Pull_Sentinal2_l2_data/#pulling-sentinal-2-data","text":"Ty Tuff, ESIIL Data Scientist 2023-10-27","title":"Pulling Sentinal 2 data"},{"location":"data-library/Pull_Sentinal2_l2_data/#set-java-options","text":"# Run these Java options before anything else. options ( java.parameters = \"-Xmx64G\" ) options ( timeout = max ( 600 , getOption ( \"timeout\" )))","title":"Set Java Options"},{"location":"data-library/Pull_Sentinal2_l2_data/#r-libraries-and-global-setting","text":"#library(Rcpp) library ( sf ) library ( gdalcubes ) library ( rstac ) library ( gdalUtils ) library ( terra ) library ( rgdal ) library ( reshape2 ) library ( osmdata ) library ( terra ) library ( dplyr ) #library(glue) library ( stars ) library ( ggplot2 ) library ( colorspace ) library ( geos ) #library(glue) library ( osmdata ) library ( ggthemes ) library ( tidyr ) gdalcubes_options ( parallel = 8 ) sf :: sf_extSoftVersion () GEOS GDAL proj.4 GDAL_with_GEOS USE_PROJ_H \"3.11.0\" \"3.5.3\" \"9.1.0\" \"true\" \"true\" PROJ \"9.1.0\" gdalcubes_gdal_has_geos () [1] TRUE","title":"R libraries and global setting."},{"location":"data-library/Pull_Sentinal2_l2_data/#start-timer","text":"start <- Sys.time ()","title":"Start timer"},{"location":"data-library/Pull_Sentinal2_l2_data/#set-color-palette","text":"library ( ggtern ) our_yellow <- rgb2hex ( r = 253 , g = 201 , b = 51 ) our_green <- rgb2hex ( r = 10 , g = 84 , b = 62 ) our_grey <- rgb2hex ( r = 92 , g = 96 , b = 95 ) our_white <- rgb2hex ( r = 255 , g = 255 , b = 255 )","title":"Set color palette"},{"location":"data-library/Pull_Sentinal2_l2_data/#load-area-of-interest","text":"# Read the shapefile into an sf object aoi_total <- st_read ( \"/Users/ty/Documents/Github/Southern_California_Edison_Fire_Risk/SCE_Fire_Zone_V2/SCE_Fire_Zone_V2.shp\" ) %>% st_as_sf () Reading layer `SCE_Fire_Zone_V2' from data source `/Users/ty/Documents/Github/Southern_California_Edison_Fire_Risk/SCE_Fire_Zone_V2/SCE_Fire_Zone_V2.shp' using driver `ESRI Shapefile' Simple feature collection with 12 features and 5 fields Geometry type: POLYGON Dimension: XY Bounding box: xmin: 176062.4 ymin: 3674043 xmax: 764123.1 ymax: 4254012 Projected CRS: NAD83 / UTM zone 11N # Plot the entire spatial dataset plot ( aoi_total ) # Filter the dataset to obtain the geometry with OBJECTID 5 aoi <- aoi_total %>% filter ( OBJECTID == 5 ) # Obtain and plot the bounding box of the filtered geometry shape_bbox <- st_bbox ( aoi ) plot ( aoi ) # Transform the filtered geometry to EPSG:4326 and store its bounding box aoi %>% st_transform ( \"EPSG:4326\" ) %>% st_bbox () -> bbox_4326 # Transform the filtered geometry to EPSG:32618 and store its bounding box aoi %>% st_transform ( \"EPSG:32618\" ) %>% st_bbox () -> bbox_32618","title":"Load area of 
interest"},{"location":"data-library/Pull_Sentinal2_l2_data/#arrange-stac-collection","text":"In this code chunk, the primary goal is to search for and obtain satellite imagery data. The data source being tapped into is a SpatioTemporal Asset Catalog (STAC) provided by an online service (earth-search by Element84). Here\u2019s a breakdown: A connection is established with the STAC service, searching specifically within the \u201csentinel-s2-l2a-cogs\u201d collection. -The search is spatially constrained to a bounding box (bbox_4326) and temporally limited to a range of one day, between May 15 and May 16, 2021. -Once the search is conducted, the desired assets or spectral bands from the returned satellite images are defined, ranging from Band 1 (B01) to Band 12 (B12) and including the Scene Classification Layer (SCL). -These bands are then organized into an image collection for further processing or analysis. # Initialize STAC connection s = stac ( \"https://earth-search.aws.element84.com/v0\" ) # Search for Sentinel-2 images within specified bounding box and date range items = s %>% stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( bbox_4326 [ \"xmin\" ], bbox_4326 [ \"ymin\" ], bbox_4326 [ \"xmax\" ], bbox_4326 [ \"ymax\" ]), datetime = \"2021-05-15/2021-05-16\" ) %>% post_request () %>% items_fetch ( progress = FALSE ) # Print number of found items length ( items $ features ) [1] 12 # Prepare the assets for analysis library ( gdalcubes ) assets = c ( \"B01\" , \"B02\" , \"B03\" , \"B04\" , \"B05\" , \"B06\" , \"B07\" , \"B08\" , \"B8A\" , \"B09\" , \"B11\" , \"B12\" , \"SCL\" ) s2_collection = stac_image_collection ( items $ features , asset_names = assets ) # Display the image collection s2_collection Image collection object, referencing 12 images with 13 bands Images: name left top bottom right 1 S2B_11SNS_20210515_1_L2A -117.0002 33.43957 32.44372 -115.8191 2 S2B_11SPS_20210515_1_L2A -115.9361 33.43490 32.42937 -114.7436 3 S2B_11SQS_20210515_0_L2A -114.8732 33.42092 32.41918 -113.9566 4 S2B_12STB_20210515_0_L2A -114.2244 33.40433 32.61015 -113.9559 5 S2B_11SNT_20210515_0_L2A -117.0002 34.34164 33.34577 -115.8066 6 S2B_11SPT_20210515_0_L2A -115.9253 34.33683 33.33091 -114.7198 datetime srs 1 2021-05-15T18:35:13 EPSG:32611 2 2021-05-15T18:35:10 EPSG:32611 3 2021-05-15T18:35:06 EPSG:32611 4 2021-05-15T18:35:01 EPSG:32612 5 2021-05-15T18:34:59 EPSG:32611 6 2021-05-15T18:34:55 EPSG:32611 [ omitted 6 images ] Bands: name offset scale unit nodata image_count 1 B01 0 1 12 2 B02 0 1 12 3 B03 0 1 12 4 B04 0 1 12 5 B05 0 1 12 6 B06 0 1 12 7 B07 0 1 12 8 B08 0 1 12 9 B09 0 1 12 10 B11 0 1 12 11 B12 0 1 12 12 B8A 0 1 12 13 SCL 0 1 12","title":"Arrange STAC collection"},{"location":"data-library/Pull_Sentinal2_l2_data/#define-view-window","text":"In this code chunk, a \u2018view\u2019 on the previously obtained satellite image collection is being defined. Think of this as setting up a specific lens or perspective to look at the satellite data: -The view is set to the coordinate reference system EPSG:32618. -Spatial resolution is defined as 100x100 meters. -Temporal resolution is defined monthly (P1M), even though the actual range is only one day. -When there are multiple values in a grid cell or timeframe, they are aggregated using the median value. -If any resampling is needed, the nearest neighbor method is used (near). -The spatial and temporal extents are constrained to specific values. 
-By defining this view, it allows for consistent analysis and visualization of the image collection within the specified spatial and temporal resolutions and extents. # Define a specific view on the satellite image collection v = cube_view ( srs = \"EPSG:32618\" , dx = 100 , dy = 100 , dt = \"P1M\" , aggregation = \"median\" , resampling = \"near\" , extent = list ( t0 = \"2021-05-15\" , t1 = \"2021-05-16\" , left = bbox_32618 [ \"xmin\" ], right = bbox_32618 [ \"xmax\" ], top = bbox_32618 [ \"ymax\" ], bottom = bbox_32618 [ \"ymin\" ] ) ) # Display the defined view v A data cube view object Dimensions: low high count pixel_size t 2021-05-01 2021-05-31 1 P1M y 4471226.41402451 4741326.41402451 2701 100 x -3463720.00044994 -3191420.00044994 2723 100 SRS: \"EPSG:32618\" Temporal aggregation method: \"median\" Spatial resampling method: \"near\"","title":"Define view window"},{"location":"data-library/Pull_Sentinal2_l2_data/#pull-data","text":"In this chunk, the primary aim is to transform and prepare satellite imagery data for analysis: -The current time is stored in variable a for tracking the time taken by the process. -The previously defined \u2018view\u2019 on the satellite imagery, v, is used to create a raster cube, a multi-dimensional array containing the satellite data. This raster cube contains spatial, spectral, and temporal data. -The desired spectral bands are selected. -The data is limited to a specific area of interest, aoi. -The band names are renamed to their respective wavelengths in nanometers for clarity. -A subset of the data, comprising 50,000 random samples, is selected. -Unwanted columns are removed, and the dataset is transformed into a long format, where each row represents a particular date and wavelength combination. -The entire process duration is computed by taking the difference between the end time (b) and the start time (a). -The transformed dataset y is then displayed. # Record start time a <- Sys.time () # Transform the satellite image collection into a raster cube x <- s2_collection %>% raster_cube ( v ) %>% select_bands ( c ( \"B01\" , \"B02\" , \"B03\" , \"B04\" , \"B05\" , \"B06\" , \"B07\" , \"B08\" , \"B8A\" , \"B09\" , \"B11\" , \"B12\" )) %>% extract_geom ( aoi ) %>% rename ( \"time\" = \"time\" , \"443\" = \"B01\" , \"490\" = \"B02\" , \"560\" = \"B03\" , \"665\" = \"B04\" , \"705\" = \"B05\" , \"740\" = \"B06\" , \"783\" = \"B07\" , \"842\" = \"B08\" , \"865\" = \"B8A\" , \"940\" = \"B09\" , \"1610\" = \"B11\" , \"2190\" = \"B12\" ) # Sample, transform and prepare data for analysis y <- x %>% slice_sample ( n = 50000 ) %>% select ( - FID ) %>% pivot_longer ( ! 
time , names_to = \"wavelength_nm\" , values_to = \"reflectance\" ) %>% mutate ( wavelength_nm = as.numeric ( wavelength_nm )) # Record end time and compute duration b <- Sys.time () processing_time <- difftime ( b , a ) # Display the processing time and transformed dataset processing_time Time difference of 1.23593 mins y # A tibble: 600,000 \u00d7 3 time wavelength_nm reflectance 1 2021-05-01 443 1855 2 2021-05-01 490 2255 3 2021-05-01 560 2884 4 2021-05-01 665 3711 5 2021-05-01 705 3990 6 2021-05-01 740 4009 7 2021-05-01 783 4078 8 2021-05-01 842 4219 9 2021-05-01 865 4060 10 2021-05-01 940 4120 # \u2139 599,990 more rows","title":"Pull data"},{"location":"data-library/Pull_Sentinal2_l2_data/#base-plot","text":"# Set custom colors for the plot our_green <- \"#4CAF50\" our_white <- \"#FFFFFF\" our_yellow <- \"#FFEB3B\" # Create a 2D density plot day_density <- ggplot ( data = y , aes ( x = wavelength_nm , y = reflectance , group = time )) + stat_smooth ( color = our_green , fill = \"lightgrey\" ) + geom_density2d ( colour = \"black\" , bins = 10 , alpha = 0.1 ) + stat_density2d ( aes ( alpha = ..level.. , fill = ..level.. ), linewidth = 2 , bins = 10 , geom = \"polygon\" ) + scale_fill_gradient ( low = our_white , high = our_yellow ) + scale_alpha ( range = c ( 0.00 , 0.8 ), guide = FALSE ) + theme_tufte () + xlab ( \"wavelength\" ) + ylab ( \"reflectance\" ) + ylim ( 0 , 16000 ) + theme ( aspect.ratio = 5 / 14 , axis.text.x = element_text ( angle = 90 , vjust = 0.5 , hjust = 1 , colour = c ( \"darkblue\" , \"blue\" , \"green\" , \"red\" , \"darkred\" , \"darkred\" , \"darkred\" , \"darkred\" , \"darkred\" , \"black\" , \"black\" , \"black\" , \"black\" )), axis.title.x = element_blank (), axis.title.y = element_blank (), plot.margin = margin ( t = 30 , r = 10 , b = 40 , l = 18 ) ) + scale_x_continuous ( breaks = c ( 443 , 490 , 560 , 665 , 705 , 740 , 783 , 842 , 865 , 940 , 1610 , 2190 )) # Display the plot day_density","title":"Base plot"},{"location":"data-library/Pull_Sentinal2_l2_data/#inlay-1-geographic-zone","text":"guide_map <- ggplot ( data = aoi_total ) + geom_sf ( fill = our_yellow , color = our_white ) + geom_sf ( data = aoi , fill = our_green , color = our_white ) + theme_tufte () + ggtitle ( \"Zone 5\" ) + theme ( axis.text.x = element_blank (), #remove x axis labels axis.ticks.x = element_blank (), #remove x axis ticks axis.text.y = element_blank (), #remove y axis labels axis.ticks.y = element_blank () #remove y axis ticks, bg=none ) + theme ( plot.title = element_text ( hjust = 0.8 , vjust = -2 )) guide_map","title":"Inlay 1 - geographic zone"},{"location":"data-library/Pull_Sentinal2_l2_data/#inlay-2-date-text","text":"library ( geosphere ) aoi_total |> st_centroid () |> st_transform ( crs = \"+proj=longlat\" ) |> st_coordinates () |> colMeans () -> lat_long daylength_line <- daylength ( lat = lat_long [ 2 ], 1 : 365 ) daylengths <- data.frame ( time = 1 : 365 , daylength = daylength_line ) library ( lubridate ) # Create a template date object date <- as.POSIXlt ( \"2021-05-15\" ) doy <- format ( date , format = \"%j\" ) |> as.numeric () display_date <- format ( date , format = \"%e %B %Y \" )","title":"Inlay 2 - date text"},{"location":"data-library/Pull_Sentinal2_l2_data/#inlay-3-daylength","text":"date_inlay <- ggplot ( data = daylengths ) + ggtitle ( \"Daylength\" ) + geom_ribbon ( aes ( x = time , ymin = daylength , ymax = 15 ), fill = our_grey , alpha = 0.5 ) + geom_ribbon ( aes ( x = time , ymax = daylength , ymin = 9 ), fill = our_yellow , alpha = 1 ) + 
geom_hline ( yintercept = 12 , color = our_white ) + geom_vline ( xintercept = doy , color = our_green , size = 1 ) + theme_tufte () + ylim ( 9 , 15 ) + theme ( axis.text.y = element_blank (), axis.ticks.y = element_blank (), axis.title.y = element_blank (), axis.title.x = element_blank (), axis.text.x = element_blank (), axis.ticks.x = element_blank ()) + theme ( plot.title = element_text ( hjust = 0.5 , vjust = 0 )) date_inlay","title":"Inlay 3 - daylength"},{"location":"data-library/Pull_Sentinal2_l2_data/#ensemble-map-assembly","text":"library ( cowplot ) library ( magick ) map_overlay <- ggdraw ( day_density ) + draw_plot ( guide_map , x = 1.08 , y = 1 , hjust = 1 , vjust = 1 , width = 0.3 , height = 0.3 ) + draw_plot ( date_inlay , x = 1 , y = 0.35 , hjust = 1 , vjust = 1 , width = 0.1 , height = 0.25 ) + geom_text ( aes ( x = 1 , y = 0.08 , label = display_date , hjust = 1 ), color = our_grey , cex = 3 , fontface = 'bold' ) + # draw_image(\"Ty_powerline_plots/Southern_California_Edison_Logo.png\", x = -0.24, y = 0.38, scale=.3)+ # draw_image(\"Ty_powerline_plots/earthlab_logo.png\", x = -0.38, y = 0.38, scale=.25)+ geom_text ( aes ( x = 0.4 , y = . 9 , label = \"Spectral library - Monthly average\" ), color = our_green , hjust = 0 , cex = 8 , fontface = 'bold' ) + geom_text ( aes ( x = 0.01 , y = . 04 , label = \"Created by ESIIL (T. Tuff) for Fall Hackathon -- October 2023. Sentinel 2 Data from 'https://earth-search.aws.element84.com/v0'\" ), color = our_grey , hjust = 0 , cex = 3 ) + geom_text ( aes ( x = 0.4 , y = . 1 , label = \"wavelength (nm)\" ), color = our_grey , hjust = 0 , cex = 4 , fontface = 'bold' ) + geom_text ( aes ( x = 0.01 , y = . 5 , angle = 90 , label = \"reflectance\" ), color = our_grey , hjust = 0 , cex = 4 , fontface = 'bold' ) map_overlay","title":"Ensemble map assembly"},{"location":"data-library/Pull_Sentinal2_l2_data/#save-map","text":"ggsave ( map_overlay , file = \"day_density_15_May_2021_zone_5.png\" , bg = \"white\" , dpi = 600 , width = 12 , height = 5 )","title":"Save map"},{"location":"data-library/Pull_Sentinal2_l2_data/#end-timer","text":"end <- Sys.time () difftime ( end , start ) Time difference of 3.2202 mins","title":"End timer"},{"location":"data-library/disturbance-stack/","text":"Earth Lab Disturbance Stack derived from Landfire \u00b6 The CU Boulder Earth Lab has integrated annual (1999-2020) disturbance presence data from Landfire with a new index of hotter drought into an easily managed raster data stack. To accelerate your access to this dataset, the ESIIL team has made disturbance stack data for the Southern Rockies available on the Cyverse data store at the below directory: ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/disturbance The stack data is in two versions, full and simplified. 
The full version (dist_stack_Southern_Rockies.tif) has the below values: Code Landfire disturbance status Hotter-drought status 0 none no hotter-drought/fewer than 4 thresholds exceeded 1 fire no hotter-drought/fewer than 4 thresholds exceeded 2 insect/disease no hotter-drought/fewer than 4 thresholds exceeded 3 other Landfire disturbance no hotter-drought/fewer than 4 thresholds exceeded 4 none hotter-drought with 4 thresholds exceeded 5 fire hotter-drought with 4 thresholds exceeded 6 insects/disease hotter-drought with 4 thresholds exceeded 7 other Landfire disturbance hotter-drought with 4 thresholds exceeded 8 none hotter-drought with 5 thresholds exceeded 9 fire hotter-drought with 5 thresholds exceeded 10 insects/disease hotter-drought with 5 thresholds exceeded 11 other Landfire disturbance hotter-drought with 5 thresholds exceeded 12 none hotter-drought with 6 thresholds exceeded 13 fire hotter-drought with 6 thresholds exceeded 14 insects/disease hotter-drought with 6 thresholds exceeded 15 other Landfire disturbance hotter-drought with 6 thresholds exceeded The simplified version (simple_dist_stack_Southern_Rockies.tif) has the below values, and only includes the most extreme hot drought: Code Landfire disturbance status Hotter-drought status 0 none no hotter-drought/fewer than 6 thresholds exceeded 1 fire no hotter-drought/fewer than 6 thresholds exceeded 2 insect/disease no hotter-drought/fewer than 6 thresholds exceeded 3 none hotter-drought with 6 thresholds exceeded 4 fire hotter-drought with 6 thresholds exceeded 5 insect/disease hotter-drought with 6 thresholds exceeded Additional MODIS data is best accessed via VSI or STAC.","title":"Earth Lab Disturbance Stack derived from Landfire"},{"location":"data-library/disturbance-stack/#earth-lab-disturbance-stack-derived-from-landfire","text":"The CU Boulder Earth Lab has integrated annual (1999-2020) disturbance presence data from Landfire with a new index of hotter drought into an easily managed raster data stack. To accelerate your access to this dataset, the ESIIL team has made disturbance stack data for the Southern Rockies available on the Cyverse data store at the below directory: ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/disturbance The stack data is in two versions, full and simplified. 
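For orientation, here is a minimal R sketch of loading and summarizing the full disturbance stack with terra. It assumes the GeoTIFF sits directly in the Cyverse directory above and that the stack carries one layer per year (1999-2020) as the description suggests; layer names and ordering should be checked rather than assumed.
# Minimal sketch, assuming the terra package and the Cyverse path above
library(terra)
dist_path <- "~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/disturbance/dist_stack_Southern_Rockies.tif"  # assumed file location within the directory above
dist_stack <- terra::rast(path.expand(dist_path))
names(dist_stack)                       # inspect layer (year) names rather than assuming order
terra::freq(dist_stack[[1]])            # tabulate codes 0-15 for one layer
fire_codes <- c(1, 5, 9, 13)            # the fire rows of the full-version table above
fire_mask <- dist_stack[[1]] %in% fire_codes
terra::plot(fire_mask, main = "Fire-disturbed pixels (first layer)")
The freq() output can then be joined back to the code table above to label each disturbance/hotter-drought class.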
The full version (dist_stack_Southern_Rockies.tif) has the below values: Code Landfire disturbance status Hotter-drought status 0 none no hotter-drought/fewer than 4 thresholds exceeded 1 fire no hotter-drought/fewer than 4 thresholds exceeded 2 insect/disease no hotter-drought/fewer than 4 thresholds exceeded 3 other Landfire disturbance no hotter-drought/fewer than 4 thresholds exceeded 4 none hotter-drought with 4 thresholds exceeded 5 fire hotter-drought with 4 thresholds exceeded 6 insects/disease hotter-drought with 4 thresholds exceeded 7 other Landfire disturbance hotter-drought with 4 thresholds exceeded 8 none hotter-drought with 5 thresholds exceeded 9 fire hotter-drought with 5 thresholds exceeded 10 insects/disease hotter-drought with 5 thresholds exceeded 11 other Landfire disturbance hotter-drought with 5 thresholds exceeded 12 none hotter-drought with 6 thresholds exceeded 13 fire hotter-drought with 6 thresholds exceeded 14 insects/disease hotter-drought with 6 thresholds exceeded 15 other Landfire disturbance hotter-drought with 6 thresholds exceeded The simplified version (simple_dist_stack_Southern_Rockies.tif) has the below values, and only includes the most extreme hot drought: Code Landfire disturbance status Hotter-drought status 0 none no hotter-drought/fewer than 6 thresholds exceeded 1 fire no hotter-drought/fewer than 6 thresholds exceeded 2 insect/disease no hotter-drought/fewer than 6 thresholds exceeded 3 none hotter-drought with 6 thresholds exceeded 4 fire hotter-drought with 6 thresholds exceeded 5 insect/disease hotter-drought with 6 thresholds exceeded Additional MODIS data is best accessed via VSI or STAC.","title":"Earth Lab Disturbance Stack derived from Landfire"},{"location":"data-library/drought/","text":"Drought Indices \u00b6 There are a wide variety of drought indices and variables used to describe various forms of drought. This data is best accessed via VSI and STAC to enable climate data summarization at the desired temporal and spatial resolution. To accelerate your access to basic drought data, the ESIIL team has made annual averages of SPEI and PDSI for the Southern Rockies available on the Cyverse data store at the below directory: ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Disturbance/drought SPEI, or the Standardised Precipitation-Evapotranspiration Index, is a multiscalar drought index based on climatic data. It can be used for determining the onset, duration and magnitude of drought conditions with respect to normal conditions in a variety of natural and managed systems such as crops, ecosystems, rivers, water resources, etc. An overview of SPEI is available here . The pre-compiled datasets are at the 30 day, 1 year, and 5 year time scales and are from the TerraClimate dataset . PDSI, or the Palmer Drought Severity Index, uses readily available temperature and precipitation data to estimate relative dryness. However, it is not multiscalar. An overview of PDSI from NCAR is here . The pre-compiled dataset is from the TerraClimate dataset .","title":"Drought Indices"},{"location":"data-library/drought/#drought-indices","text":"There are a wide variety of drought indices and variables used to describe various forms of drought. This data is best accessed via VSI and STAC to enable climate data summarization at the desired temporal and spatial resolution. 
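As a hedged illustration of the VSI access pattern mentioned above, the sketch below opens a cloud-hosted drought raster through GDAL's /vsicurl/ driver with terra and summarizes it over an area of interest. The URL and the AOI file are placeholders, not real endpoints, and should be replaced with the TerraClimate (or other) product and boundary you actually intend to use.
# Minimal sketch of /vsicurl/ access, assuming terra and sf; the URL and AOI file are hypothetical
library(terra)
library(sf)
pdsi_url <- "https://example.org/terraclimate/pdsi_monthly.nc"   # hypothetical placeholder URL
pdsi <- terra::rast(paste0("/vsicurl/", pdsi_url))               # stream the raster without downloading it
aoi <- sf::st_read("southern_rockies_aoi.gpkg")                  # hypothetical AOI polygon
aoi_vect <- terra::vect(sf::st_transform(aoi, terra::crs(pdsi)))
pdsi_aoi <- terra::mask(terra::crop(pdsi, aoi_vect), aoi_vect)   # clip to the AOI
pdsi_annual_mean <- terra::app(pdsi_aoi, mean, na.rm = TRUE)     # collapse monthly layers to an annual mean
pdsi_annual_mean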
To accelerate your access to basic drought data, the ESIIL team has made annual averages of SPEI and PDSI for the Southern Rockies available on the Cyverse data store at the below directory: ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Disturbance/drought SPEI, or the Standardised Precipitation-Evapotranspiration Index, is a multiscalar drought index based on climatic data. It can be used for determining the onset, duration and magnitude of drought conditions with respect to normal conditions in a variety of natural and managed systems such as crops, ecosystems, rivers, water resources, etc. An overview of SPEI is available here . The pre-compiled datasets are at the 30 day, 1 year, and 5 year time scales and are from the TerraClimate dataset . PDSI, or the Palmer Drought Severity Index, uses readily available temperature and precipitation data to estimate relative dryness. However, it is not multiscalar. An overview of PDSI from NCAR is here . The pre-compiled dataset is from the TerraClimate dataset .","title":"Drought Indices"},{"location":"data-library/epa-ecoregions/","text":"EPA Ecoregions \u00b6 EPA ecoregions are a convenient spatial framework for ecosystem regions used by the United States Environmental Protection Agency. Full details on EPA ecoregions can be found here. A Roman numeral classification scheme has been adopted for different hierarchical levels of ecoregions, ranging from general regions to more detailed: Level I - 12 ecoregions in the continental U.S. Level II - 25 ecoregions in the continental U.S. Level III -105 ecoregions in the continental U.S. Level IV - 967 ecoregions in the conterminous U.S. Instructions for accessing spatial EPA ecoregion data can be found in the script code/create-data-library/access_epa_ecoregions.R. The script is also copied below: # This brief script demonstrates how to access level 3 and 4 EPA ecoregions for North America. # Directly accessing the files via VSI is recommended, as this uses cloud-hosted data. # A version for downloading the zipped files is also provided in case for some reason you need the actual files. # ESIIL, February 2024 # Tyler L. McIntosh ####### ACCESS SHAPEFILES DIRECTLY VIA VSI ######### require ( glue ) require ( sf ) epa_l3 <- glue :: glue ( \"/vsizip/vsicurl/\" , #magic remote connection \"https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip\" , #copied link to download location \"/us_eco_l3.shp\" ) |> #path inside zip file sf :: st_read () epa_l4 <- glue :: glue ( \"/vsizip/vsicurl/\" , #magic remote connection \"https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l4.zip\" , #copied link to download location \"/us_eco_l4_no_st.shp\" ) |> #path inside zip file sf :: st_read () ######### DOWNLOAD ZIPPED DATA FILES ######### #Set up directory directory <- \"~/data/ecoregions\" if ( ! 
dir.exists ( directory )) { dir.create ( directory ) } #Avoid download timeout options ( timeout = max ( 1000 , getOption ( \"timeout\" ))) #URLs for downloads epaUrls <- c ( \"https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip\" , \"https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l4.zip\" ) destFiles <- file.path ( directory , basename ( epaUrls )) #Download mapply ( FUN = function ( url , destfile ) { download.file ( url = url , destfile = destfile , mode = \"wb\" )}, url = epaUrls , destfile = destFiles ) #Unzip downloaded files mapply ( FUN = function ( destfile , exdir ) { unzip ( zipfile = destfile , files = NULL , exdir = exdir )}, destfile = destFiles , exdir = gsub ( pattern = \".zip\" , replacement = \"\" , x = destFiles ))","title":"EPA Ecoregions"},{"location":"data-library/epa-ecoregions/#epa-ecoregions","text":"EPA ecoregions are a convenient spatial framework for ecosystem regions used by the United States Environmental Protection Agency. Full details on EPA ecoregions can be found here. A Roman numeral classification scheme has been adopted for different hierarchical levels of ecoregions, ranging from general regions to more detailed: Level I - 12 ecoregions in the continental U.S. Level II - 25 ecoregions in the continental U.S. Level III -105 ecoregions in the continental U.S. Level IV - 967 ecoregions in the conterminous U.S. Instructions for accessing spatial EPA ecoregion data can be found in the script code/create-data-library/access_epa_ecoregions.R. The script is also copied below: # This brief script demonstrates how to access level 3 and 4 EPA ecoregions for North America. # Directly accessing the files via VSI is recommended, as this uses cloud-hosted data. # A version for downloading the zipped files is also provided in case for some reason you need the actual files. # ESIIL, February 2024 # Tyler L. McIntosh ####### ACCESS SHAPEFILES DIRECTLY VIA VSI ######### require ( glue ) require ( sf ) epa_l3 <- glue :: glue ( \"/vsizip/vsicurl/\" , #magic remote connection \"https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip\" , #copied link to download location \"/us_eco_l3.shp\" ) |> #path inside zip file sf :: st_read () epa_l4 <- glue :: glue ( \"/vsizip/vsicurl/\" , #magic remote connection \"https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l4.zip\" , #copied link to download location \"/us_eco_l4_no_st.shp\" ) |> #path inside zip file sf :: st_read () ######### DOWNLOAD ZIPPED DATA FILES ######### #Set up directory directory <- \"~/data/ecoregions\" if ( ! dir.exists ( directory )) { dir.create ( directory ) } #Avoid download timeout options ( timeout = max ( 1000 , getOption ( \"timeout\" ))) #URLs for downloads epaUrls <- c ( \"https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip\" , \"https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l4.zip\" ) destFiles <- file.path ( directory , basename ( epaUrls )) #Download mapply ( FUN = function ( url , destfile ) { download.file ( url = url , destfile = destfile , mode = \"wb\" )}, url = epaUrls , destfile = destFiles ) #Unzip downloaded files mapply ( FUN = function ( destfile , exdir ) { unzip ( zipfile = destfile , files = NULL , exdir = exdir )}, destfile = destFiles , exdir = gsub ( pattern = \".zip\" , replacement = \"\" , x = destFiles ))","title":"EPA Ecoregions"},{"location":"data-library/esiil-data-library/","text":"ESIIL Data Libraries \u00b6 ESIIL has compiled additional data libraries for use at summits and hackathons. 
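Building on the EPA ecoregion access script above, a small illustrative follow-on shows how one might subset a single Level III ecoregion once epa_l3 has been read. The attribute name US_L3NAME and the value "Southern Rockies" are assumptions and should be verified against names(epa_l3).
# Illustrative follow-on only; epa_l3 is the object read by the VSI script above
library(sf)
library(dplyr)
names(epa_l3)                                          # confirm the attribute holding Level III names
southern_rockies <- epa_l3 |>
  dplyr::filter(US_L3NAME == "Southern Rockies") |>    # assumed attribute and value
  sf::st_transform("EPSG:4326")
plot(sf::st_geometry(southern_rockies))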
Link to those data libraries are available here, along with a summary of their current contents (February 2024). General ESIIL Data Library \u00b6 Our data library features a diverse range of datasets, each with its own dedicated web page. To help you get started, we provide easy-to-use R and Python code snippets for downloading and working with each dataset. For more advanced users, we also offer comprehensive tutorials and vignettes tailored to individual datasets. Explore our rich collection and unlock the power of environmental data for your research today! Data Contents \u00b6 EDS in Indian Country Global native homelands USA federal tribal reservations All types of tribal land in USA Solving water NEON Aquatic instrument data EPA water quality USGS water services Data librarianship Public libraries survey Cutting-edge remote sensing NEON hyperspectral data Lidar-based canopy height Multispectral sentinel-2 on AWS Nature-based solutions and human development Open street map Extreme events and hazards EPA air quality data Fire Event Delineation (FIRED) US National Incident Management System Uranium mines Spatial scale Spatial occurrence as points Ecological forecasting National Ecological Observation Network (NEON) USA phenology network Forecasting NEON data NEON lidar after fire Data harmonization Data cataloged with publications NEON and LTER NEON lidar and organismal data Food supply UN Food and Agriculture Social justice Redlining Congressional voting Data science in decision making and policy US Census FDIC failed banks list AI in environmental data science WeatherBench Math, modeling, statistics NEON tick pathogen data Everglades food network Mammal primate association network EDS education Education statistics Nonprofit explorer MosAIc Data Library \u00b6 The data library from ESIIL's MosAIc Hackathon is located here . This data library contains some similar content to the general ESIIL data library, in addition to extra resources on cloud collaboration and AI. Data Contents \u00b6 Flood event inventory Flood event area (polygons) River geography River and basin features NEON Lakes NEON Rivers EPA water quality USGS Water Services Global Species Occurrence NEON LIDAR NEON biogeochemistry Open Street Map US Census Remote sensing","title":"ESIIL Data Libraries"},{"location":"data-library/esiil-data-library/#esiil-data-libraries","text":"ESIIL has compiled additional data libraries for use at summits and hackathons. Link to those data libraries are available here, along with a summary of their current contents (February 2024).","title":"ESIIL Data Libraries"},{"location":"data-library/esiil-data-library/#general-esiil-data-library","text":"Our data library features a diverse range of datasets, each with its own dedicated web page. To help you get started, we provide easy-to-use R and Python code snippets for downloading and working with each dataset. For more advanced users, we also offer comprehensive tutorials and vignettes tailored to individual datasets. 
Explore our rich collection and unlock the power of environmental data for your research today!","title":"General ESIIL Data Library"},{"location":"data-library/esiil-data-library/#data-contents","text":"EDS in Indian Country Global native homelands USA federal tribal reservations All types of tribal land in USA Solving water NEON Aquatic instrument data EPA water quality USGS water services Data librarianship Public libraries survey Cutting-edge remote sensing NEON hyperspectral data Lidar-based canopy height Multispectral sentinel-2 on AWS Nature-based solutions and human development Open street map Extreme events and hazards EPA air quality data Fire Event Delineation (FIRED) US National Incident Management System Uranium mines Spatial scale Spatial occurrence as points Ecological forecasting National Ecological Observation Network (NEON) USA phenology network Forecasting NEON data NEON lidar after fire Data harmonization Data cataloged with publications NEON and LTER NEON lidar and organismal data Food supply UN Food and Agriculture Social justice Redlining Congressional voting Data science in decision making and policy US Census FDIC failed banks list AI in environmental data science WeatherBench Math, modeling, statistics NEON tick pathogen data Everglades food network Mammal primate association network EDS education Education statistics Nonprofit explorer","title":"Data Contents"},{"location":"data-library/esiil-data-library/#mosaic-data-library","text":"The data library from ESIIL's MosAIc Hackathon is located here . This data library contains some similar content to the general ESIIL data library, in addition to extra resources on cloud collaboration and AI.","title":"MosAIc Data Library"},{"location":"data-library/esiil-data-library/#data-contents_1","text":"Flood event inventory Flood event area (polygons) River geography River and basin features NEON Lakes NEON Rivers EPA water quality USGS Water Services Global Species Occurrence NEON LIDAR NEON biogeochemistry Open Street Map US Census Remote sensing","title":"Data Contents"},{"location":"data-library/fia/","text":"Forest Inventory and Analysis Database (FIA or FIADB) \u00b6 Database description \u00b6 The Forest Inventory and Analysis (FIA) program of the USDA Forest Service Research and Development Branch collects, processes, analyzes, and reports on data necessary for assessing the extent and condition of forest resources in the United States. This data is collected at the plot level across the US, and includes information such as tree quantity and identifications, downed woody materials, tree regeneration, and more. If you are looking for spatially continuous data, TreeMap is a data product derived from FIA data and uses machine learning algorithms to assign each forested pixel across the US with the id of the FIA plot that best matches it. This is an overview of the FIA program. This is the most recent user guide for the FIADB. Prepared data access functions \u00b6 FIA data is available from the FIA DataMart . Two R functions have been prepared for your use in downloading FIA data directly to your cloud instance. Those functions can be found at code/create-data-library/download_fia.R The functions are also copied here: # This script contains functions to download both individual # FIA data csv files as well as bulk download data types. The two key functions # described are fia_download_individual_data_files and fia_bulk_download_data_files # ESIIL, February 2024 # Tyler L. 
McIntosh options ( timeout = 300 ) ################################ # DOWNLOAD INDIVIDUAL FIA DATASETS # # This function will download individual FIA datasets requested and return the filenames # It will create a new subdirectory for the files, \"fia_individual_data_files\". # If you want to bulk download data by type, use function fia_bulk_download_data_files # Note that you may want to change your environment's download timeout option to allow longer downloads # (e.g. options(timeout = 300)) # #### PARAMETERS #### # state_abbreviations : a vector of state abbreviations as strings (e.g. c(\"CO\", \"WY\", \"NM\")) # file_suffixes : a vector of data file oracle table names (e.g. c(\"DWM_VISIT\", \"COUNTY\") from https://www.fs.usda.gov/research/understory/forest-inventory-and-analysis-database-user-guide-phase-2 # directory : the directory in which to store the data (a new subdirectory will be created for the new files) # #### Example call to the function and read of the data #### # downloaded_files <- fia_download_individual_data_files( # state_abbreviations = c(\"CO\"), # file_suffixes = c(\"DWM_VISIT\", \"COUNTY\"), # directory = \"~/data\") # data_list <- downloaded_files |> lapply(readr::read_csv) # names(data_list) <- basename(downloaded_files) # fia_download_individual_data_files <- function ( state_abbreviations , file_suffixes , directory ) { #Ensure directory exists if ( ! dir.exists ( directory )) { dir.create ( directory ) } base_url <- \"https://apps.fs.usda.gov/fia/datamart/CSV/\" # Define the subdirectory path subdirectory_path <- file.path ( directory , \"fia_individual_data_files\" ) # Create the subdirectory if it does not exist if ( ! dir.exists ( subdirectory_path )) { dir.create ( subdirectory_path , recursive = TRUE ) } downloaded_files <- c () # Initialize an empty vector to store downloaded filenames for ( state in state_abbreviations ) { for ( suffix in file_suffixes ) { # Replace underscores with spaces to match the naming convention in the URL url_suffix <- gsub ( \"_\" , \" \" , suffix ) url_suffix <- gsub ( \" \" , \"_\" , toupper ( url_suffix )) # URL seems to be uppercase # Construct the URL and filename using the subdirectory path url <- paste0 ( base_url , state , \"_\" , url_suffix , \".csv\" ) filename <- paste0 ( subdirectory_path , \"/\" , state , \"_\" , suffix , \".csv\" ) # Attempt to download the file tryCatch ({ download.file ( url , destfile = filename , mode = \"wb\" ) downloaded_files <- c ( downloaded_files , filename ) # Add the filename to the vector message ( \"Downloaded \" , filename ) }, error = function ( e ) { message ( \"Failed to download \" , url , \": \" , e $ message ) }) } } return ( downloaded_files ) # Return the vector of downloaded filenames } ################################ # BULK DOWNLOAD FIA DATASETS # # This function will bulk download FIA datasets requested into associated subdirectories and return the filenames # as a named list of vectors, where each vector contains the files included in that bulk data set. # All bulk data subdirectories will be put into a directory called 'fia_bulk_data_files' # Note that you may want to change your environment's download timeout option to allow longer downloads # (e.g. options(timeout = 300)) # #### PARAMETERS #### # state_abbreviations : a vector of state abbreviations as strings (e.g. c(\"CO\", \"WY\", \"NM\")) # directory : the directory in which to store the data # bulk_data_types : a vector of bulk download mappings as strings (e.g. 
c(\"location level\", \"plot\")) # Available data mappings are: # \"location level\" # \"tree level\" # \"invasives and understory vegetation\" # \"down woody material\" # \"tree regeneration\" # \"ground cover\" # \"soils\" # \"population\" # \"plot\" # \"reference\" # Full descriptions of each of these data mappings can be found at the FIA user guide, # with each mapping associated with a different chapter of tables: # https://www.fs.usda.gov/research/understory/forest-inventory-and-analysis-database-user-guide-phase-2 # #### Example call to the function for multiple bulk data types and read in the data #### # downloaded_files <- fia_bulk_download_data_files( # state = c(\"CO\"), # directory = \"~/data\", # bulk_data_types = c(\"down woody material\", \"plot\") # ) # data_list_dwm <- downloaded_files$`down woody material`|> lapply(readr::read_csv) # names(data_list_dwm) <- basename(downloaded_files$`down woody material`) # fia_bulk_download_data_files <- function ( state , directory , bulk_data_types ) { #Ensure directory exists if ( ! dir.exists ( directory )) { dir.create ( directory ) } # Map bulk data types to their corresponding file suffixes bulk_data_mappings <- list ( \"down woody material\" = c ( \"DWM_VISIT\" , \"DWM_COARSE_WOODY_DEBRIS\" , \"DWM_DUFF_LITTER_FUEL\" , \"DWM_FINE_WOODY_DEBRIS\" , \"DWM_MICROPLOT_FUEL\" , \"DWM_RESIDUAL_PILE\" , \"DWM_TRANSECT_SEGMENT\" , \"COND_DWM_CALC\" ), \"location level\" = c ( \"SURVEY\" , \"PROJECT\" , \"COUNTY\" , \"PLOT\" , \"COND\" , \"SUBPLOT\" , \"SUBP_COND\" , #\"BOUNDARY\", \"SUBP_COND_CHNG_MTRX\" ), \"tree level\" = c ( \"TREE\" , \"WOODLAND_STEMS\" , \"GRM_COMPONENT\" , \"GRM_THRESHOLD\" , \"GRM_MIDPT\" , \"GRM_BEGIN\" , \"GRM_ESTN\" , \"BEGINEND\" , \"SEEDLING\" , \"SITETREE\" ), \"invasives and understory vegetation\" = c ( \"INVASIVE_SUBPLOT_SPP\" , \"P2VEG_SUBPLOT_SPP\" , \"P2VEG_SUBP_STRUCTURE\" ), \"tree regeneration\" = c ( \"PLOT_REGEN\" , \"SUBPLOT_REGEN\" , \"SEEDLING_REGEN\" ), \"ground cover\" = c ( \"GRND_CVR\" , \"GRND_LYR_FNCTL_GRP\" , \"GRND_LYR_MICROQUAD\" ), \"soils\" = c ( \"SUBP_SOIL_SAMPLE_LOC\" , \"SUBP_SOIL_SAMPLE_LAYER\" ), \"population\" = c ( \"POP_ESTN_UNIT\" , \"POP_EVAL\" , \"POP_EVAL_ATTRIBUTE\" , \"POP_EVAL_GRP\" , \"POP_EVAL_TYP\" , \"POP_PLOT_STRATUM_ASSGN\" , \"POP_STRATUM\" ), \"plot\" = c ( \"PLOTGEOM\" , \"PLOTSNAP\" ), \"reference\" = c ( \"REF_POP_ATTRIBUTE\" , \"REF_POP_EVAL_TYP_DESCR\" , \"REF_FOREST_TYPE\" , \"REF_FOREST_TYPE_GROUP\" , \"REF_SPECIES\" , \"REF_PLANT_DICTIONARY\" , \"REF_SPECIES_GROUP\" , \"REF_INVASIVE_SPECIES\" , \"REF_HABTYP_DESCRIPTION\" , \"REF_HABTYP_PUBLICATION\" , \"REF_CITATION\" , \"REF_FIADB_VERSION\" , \"REF_STATE_ELEV\" , \"REF_UNIT\" , \"REF_RESEARCH_STATION\" , \"REF_NVCS_HIERARCHY_STRICT\" , \"REF_NVCS_LEVEL_1_CODES\" , \"REF_NVCS_LEVEL_2_CODES\" , \"REF_NVCS_LEVEL_3_CODES\" , \"REF_NVCS_LEVEL_4_CODES\" , \"REF_NVCS_LEVEL_5_CODES\" , \"REF_NVCS_LEVEL_6_CODES\" , \"REF_NVCS_LEVEL_7_CODES\" , \"REF_NVCS_LEVEL_8_CODES\" , \"REF_AGENT\" , \"REF_DAMAGE_AGENT\" , \"REF_DAMAGE_AGENT_GROUP\" , \"REF_FVS_VAR_NAME\" , \"REF_FVS_LOC_NAME\" , \"REF_OWNGRP_CD\" , \"REF_DIFFERENCE_TEST_PER_ACRE\" , \"REF_DIFFERENCE_TEST_TOTALS\" , \"REF_EQUATION_TABLE\" , \"REF_SEQN\" , \"REF_GRM_TYPE\" , \"REF_INTL_TO_DOYLE_FACTOR\" , \"REF_TREE_CARBON_RATIO_DEAD\" , \"REF_TREE_DECAY_PROP\" , \"REF_TREE_STAND_DEAD_CR_PROP\" , \"REF_GRND_LYR\" ) ) # Initialize a named list to store the filenames for each bulk data type all_downloaded_files <- setNames ( vector ( \"list\" , length ( 
bulk_data_types )), bulk_data_types ) # Define and create the main bulk data directory main_bulk_dir <- file.path ( directory , \"fia_bulk_data_files\" ) if ( ! dir.exists ( main_bulk_dir )) { dir.create ( main_bulk_dir , recursive = TRUE ) } # Loop through each bulk data type for ( bulk_data_type in bulk_data_types ) { # Check if the bulk data type is known if ( ! bulk_data_type %in% names ( bulk_data_mappings )) { stop ( \"Unknown bulk data type: \" , bulk_data_type ) } # Create a subdirectory name by replacing spaces with underscores subdirectory <- gsub ( \" \" , \"_\" , bulk_data_type ) subdirectory_path <- file.path ( main_bulk_dir , subdirectory ) # Create the subdirectory if it does not exist if ( ! dir.exists ( subdirectory_path )) { dir.create ( subdirectory_path , recursive = TRUE ) } # Retrieve the correct set of file suffixes for the current bulk data type file_suffixes <- bulk_data_mappings [[ bulk_data_type ]] # Call the download function for each file suffix and save in the new subdirectory downloaded_files <- download_data_files ( state_abbreviations = state , file_suffixes = file_suffixes , location = subdirectory_path ) # Store the downloaded filenames in the named list under the current bulk data type all_downloaded_files [[ bulk_data_type ]] <- downloaded_files } # Return the named list of vectors with filenames return ( all_downloaded_files ) }","title":"Forest Inventory and Analysis Database (FIA or FIADB)"},{"location":"data-library/fia/#forest-inventory-and-analysis-database-fia-or-fiadb","text":"","title":"Forest Inventory and Analysis Database (FIA or FIADB)"},{"location":"data-library/fia/#database-description","text":"The Forest Inventory and Analysis (FIA) program of the USDA Forest Service Research and Development Branch collects, processes, analyzes, and reports on data necessary for assessing the extent and condition of forest resources in the United States. This data is collected at the plot level across the US, and includes information such as tree quantity and identifications, downed woody materials, tree regeneration, and more. If you are looking for spatially continuous data, TreeMap is a data product derived from FIA data and uses machine learning algorithms to assign each forested pixel across the US with the id of the FIA plot that best matches it. This is an overview of the FIA program. This is the most recent user guide for the FIADB.","title":"Database description"},{"location":"data-library/fia/#prepared-data-access-functions","text":"FIA data is available from the FIA DataMart . Two R functions have been prepared for your use in downloading FIA data directly to your cloud instance. Those functions can be found at code/create-data-library/download_fia.R The functions are also copied here: # This script contains functions to download both individual # FIA data csv files as well as bulk download data types. The two key functions # described are fia_download_individual_data_files and fia_bulk_download_data_files # ESIIL, February 2024 # Tyler L. McIntosh options ( timeout = 300 ) ################################ # DOWNLOAD INDIVIDUAL FIA DATASETS # # This function will download individual FIA datasets requested and return the filenames # It will create a new subdirectory for the files, \"fia_individual_data_files\". # If you want to bulk download data by type, use function fia_bulk_download_data_files # Note that you may want to change your environment's download timeout option to allow longer downloads # (e.g. 
options(timeout = 300)) # #### PARAMETERS #### # state_abbreviations : a vector of state abbreviations as strings (e.g. c(\"CO\", \"WY\", \"NM\")) # file_suffixes : a vector of data file oracle table names (e.g. c(\"DWM_VISIT\", \"COUNTY\") from https://www.fs.usda.gov/research/understory/forest-inventory-and-analysis-database-user-guide-phase-2 # directory : the directory in which to store the data (a new subdirectory will be created for the new files) # #### Example call to the function and read of the data #### # downloaded_files <- fia_download_individual_data_files( # state_abbreviations = c(\"CO\"), # file_suffixes = c(\"DWM_VISIT\", \"COUNTY\"), # directory = \"~/data\") # data_list <- downloaded_files |> lapply(readr::read_csv) # names(data_list) <- basename(downloaded_files) # fia_download_individual_data_files <- function ( state_abbreviations , file_suffixes , directory ) { #Ensure directory exists if ( ! dir.exists ( directory )) { dir.create ( directory ) } base_url <- \"https://apps.fs.usda.gov/fia/datamart/CSV/\" # Define the subdirectory path subdirectory_path <- file.path ( directory , \"fia_individual_data_files\" ) # Create the subdirectory if it does not exist if ( ! dir.exists ( subdirectory_path )) { dir.create ( subdirectory_path , recursive = TRUE ) } downloaded_files <- c () # Initialize an empty vector to store downloaded filenames for ( state in state_abbreviations ) { for ( suffix in file_suffixes ) { # Replace underscores with spaces to match the naming convention in the URL url_suffix <- gsub ( \"_\" , \" \" , suffix ) url_suffix <- gsub ( \" \" , \"_\" , toupper ( url_suffix )) # URL seems to be uppercase # Construct the URL and filename using the subdirectory path url <- paste0 ( base_url , state , \"_\" , url_suffix , \".csv\" ) filename <- paste0 ( subdirectory_path , \"/\" , state , \"_\" , suffix , \".csv\" ) # Attempt to download the file tryCatch ({ download.file ( url , destfile = filename , mode = \"wb\" ) downloaded_files <- c ( downloaded_files , filename ) # Add the filename to the vector message ( \"Downloaded \" , filename ) }, error = function ( e ) { message ( \"Failed to download \" , url , \": \" , e $ message ) }) } } return ( downloaded_files ) # Return the vector of downloaded filenames } ################################ # BULK DOWNLOAD FIA DATASETS # # This function will bulk download FIA datasets requested into associated subdirectories and return the filenames # as a named list of vectors, where each vector contains the files included in that bulk data set. # All bulk data subdirectories will be put into a directory called 'fia_bulk_data_files' # Note that you may want to change your environment's download timeout option to allow longer downloads # (e.g. options(timeout = 300)) # #### PARAMETERS #### # state_abbreviations : a vector of state abbreviations as strings (e.g. c(\"CO\", \"WY\", \"NM\")) # directory : the directory in which to store the data # bulk_data_types : a vector of bulk download mappings as strings (e.g. 
c(\"location level\", \"plot\")) # Available data mappings are: # \"location level\" # \"tree level\" # \"invasives and understory vegetation\" # \"down woody material\" # \"tree regeneration\" # \"ground cover\" # \"soils\" # \"population\" # \"plot\" # \"reference\" # Full descriptions of each of these data mappings can be found at the FIA user guide, # with each mapping associated with a different chapter of tables: # https://www.fs.usda.gov/research/understory/forest-inventory-and-analysis-database-user-guide-phase-2 # #### Example call to the function for multiple bulk data types and read in the data #### # downloaded_files <- fia_bulk_download_data_files( # state = c(\"CO\"), # directory = \"~/data\", # bulk_data_types = c(\"down woody material\", \"plot\") # ) # data_list_dwm <- downloaded_files$`down woody material`|> lapply(readr::read_csv) # names(data_list_dwm) <- basename(downloaded_files$`down woody material`) # fia_bulk_download_data_files <- function ( state , directory , bulk_data_types ) { #Ensure directory exists if ( ! dir.exists ( directory )) { dir.create ( directory ) } # Map bulk data types to their corresponding file suffixes bulk_data_mappings <- list ( \"down woody material\" = c ( \"DWM_VISIT\" , \"DWM_COARSE_WOODY_DEBRIS\" , \"DWM_DUFF_LITTER_FUEL\" , \"DWM_FINE_WOODY_DEBRIS\" , \"DWM_MICROPLOT_FUEL\" , \"DWM_RESIDUAL_PILE\" , \"DWM_TRANSECT_SEGMENT\" , \"COND_DWM_CALC\" ), \"location level\" = c ( \"SURVEY\" , \"PROJECT\" , \"COUNTY\" , \"PLOT\" , \"COND\" , \"SUBPLOT\" , \"SUBP_COND\" , #\"BOUNDARY\", \"SUBP_COND_CHNG_MTRX\" ), \"tree level\" = c ( \"TREE\" , \"WOODLAND_STEMS\" , \"GRM_COMPONENT\" , \"GRM_THRESHOLD\" , \"GRM_MIDPT\" , \"GRM_BEGIN\" , \"GRM_ESTN\" , \"BEGINEND\" , \"SEEDLING\" , \"SITETREE\" ), \"invasives and understory vegetation\" = c ( \"INVASIVE_SUBPLOT_SPP\" , \"P2VEG_SUBPLOT_SPP\" , \"P2VEG_SUBP_STRUCTURE\" ), \"tree regeneration\" = c ( \"PLOT_REGEN\" , \"SUBPLOT_REGEN\" , \"SEEDLING_REGEN\" ), \"ground cover\" = c ( \"GRND_CVR\" , \"GRND_LYR_FNCTL_GRP\" , \"GRND_LYR_MICROQUAD\" ), \"soils\" = c ( \"SUBP_SOIL_SAMPLE_LOC\" , \"SUBP_SOIL_SAMPLE_LAYER\" ), \"population\" = c ( \"POP_ESTN_UNIT\" , \"POP_EVAL\" , \"POP_EVAL_ATTRIBUTE\" , \"POP_EVAL_GRP\" , \"POP_EVAL_TYP\" , \"POP_PLOT_STRATUM_ASSGN\" , \"POP_STRATUM\" ), \"plot\" = c ( \"PLOTGEOM\" , \"PLOTSNAP\" ), \"reference\" = c ( \"REF_POP_ATTRIBUTE\" , \"REF_POP_EVAL_TYP_DESCR\" , \"REF_FOREST_TYPE\" , \"REF_FOREST_TYPE_GROUP\" , \"REF_SPECIES\" , \"REF_PLANT_DICTIONARY\" , \"REF_SPECIES_GROUP\" , \"REF_INVASIVE_SPECIES\" , \"REF_HABTYP_DESCRIPTION\" , \"REF_HABTYP_PUBLICATION\" , \"REF_CITATION\" , \"REF_FIADB_VERSION\" , \"REF_STATE_ELEV\" , \"REF_UNIT\" , \"REF_RESEARCH_STATION\" , \"REF_NVCS_HIERARCHY_STRICT\" , \"REF_NVCS_LEVEL_1_CODES\" , \"REF_NVCS_LEVEL_2_CODES\" , \"REF_NVCS_LEVEL_3_CODES\" , \"REF_NVCS_LEVEL_4_CODES\" , \"REF_NVCS_LEVEL_5_CODES\" , \"REF_NVCS_LEVEL_6_CODES\" , \"REF_NVCS_LEVEL_7_CODES\" , \"REF_NVCS_LEVEL_8_CODES\" , \"REF_AGENT\" , \"REF_DAMAGE_AGENT\" , \"REF_DAMAGE_AGENT_GROUP\" , \"REF_FVS_VAR_NAME\" , \"REF_FVS_LOC_NAME\" , \"REF_OWNGRP_CD\" , \"REF_DIFFERENCE_TEST_PER_ACRE\" , \"REF_DIFFERENCE_TEST_TOTALS\" , \"REF_EQUATION_TABLE\" , \"REF_SEQN\" , \"REF_GRM_TYPE\" , \"REF_INTL_TO_DOYLE_FACTOR\" , \"REF_TREE_CARBON_RATIO_DEAD\" , \"REF_TREE_DECAY_PROP\" , \"REF_TREE_STAND_DEAD_CR_PROP\" , \"REF_GRND_LYR\" ) ) # Initialize a named list to store the filenames for each bulk data type all_downloaded_files <- setNames ( vector ( \"list\" , length ( 
bulk_data_types )), bulk_data_types ) # Define and create the main bulk data directory main_bulk_dir <- file.path ( directory , \"fia_bulk_data_files\" ) if ( ! dir.exists ( main_bulk_dir )) { dir.create ( main_bulk_dir , recursive = TRUE ) } # Loop through each bulk data type for ( bulk_data_type in bulk_data_types ) { # Check if the bulk data type is known if ( ! bulk_data_type %in% names ( bulk_data_mappings )) { stop ( \"Unknown bulk data type: \" , bulk_data_type ) } # Create a subdirectory name by replacing spaces with underscores subdirectory <- gsub ( \" \" , \"_\" , bulk_data_type ) subdirectory_path <- file.path ( main_bulk_dir , subdirectory ) # Create the subdirectory if it does not exist if ( ! dir.exists ( subdirectory_path )) { dir.create ( subdirectory_path , recursive = TRUE ) } # Retrieve the correct set of file suffixes for the current bulk data type file_suffixes <- bulk_data_mappings [[ bulk_data_type ]] # Call the download function for each file suffix and save in the new subdirectory downloaded_files <- download_data_files ( state_abbreviations = state , file_suffixes = file_suffixes , location = subdirectory_path ) # Store the downloaded filenames in the named list under the current bulk data type all_downloaded_files [[ bulk_data_type ]] <- downloaded_files } # Return the named list of vectors with filenames return ( all_downloaded_files ) }","title":"Prepared data access functions"},{"location":"data-library/fire-cbi/","text":"Fire severity: Composite Burn Index (CBI) \u00b6 The Composite Burn Index (CBI) is a commonly used and ecologically meaningful measure of fire severity. Unlike some other measures of fire burn severity (e.g. MTBS fire severity), CBI is more readily comparable across large regions. To calculate this stack of CBI data the ESIIL team used the method described in Parks et al. (2019) , which uses random forests regression to calculate CBI based on Relativized Burn Ratio (RBR), latitude, climatic water deficit, and other factors. RBR was calculated using pre- and post-fire image composites of Landsat 4-9 imagery (Collection 2) during the growing season. A correction was applied to the CBI estimates to prevent overprediction at low values (Parks et al., 2019). This dataset has a layer for each year of data, with NA values at any location that was unburned during that year. Fire disturbance events documented in the Landfire fire events database will appear in these rasters. The data is pre-loaded onto the Cyverse data store and is located in the below file: ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Disturbance/SR_landfire_fire_events_cbi_bc.tif","title":"Fire severity: Composite Burn Index (CBI)"},{"location":"data-library/fire-cbi/#fire-severity-composite-burn-index-cbi","text":"The Composite Burn Index (CBI) is a commonly used and ecologically meaningful measure of fire severity. Unlike some other measures of fire burn severity (e.g. MTBS fire severity), CBI is more readily comparable across large regions. To calculate this stack of CBI data the ESIIL team used the method described in Parks et al. (2019) , which uses random forests regression to calculate CBI based on Relativized Burn Ratio (RBR), latitude, climatic water deficit, and other factors. RBR was calculated using pre- and post-fire image composites of Landsat 4-9 imagery (Collection 2) during the growing season. A correction was applied to the CBI estimates to prevent overprediction at low values (Parks et al., 2019). 
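A minimal sketch for a first look at the CBI stack with terra, assuming the Cyverse file path listed on this page; the layer-per-year structure follows the description here, but layer names and their year labels should be inspected rather than assumed.
# Minimal sketch, assuming terra and the pre-loaded Cyverse file
library(terra)
cbi_path <- "~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Disturbance/SR_landfire_fire_events_cbi_bc.tif"
cbi <- terra::rast(path.expand(cbi_path))
terra::nlyr(cbi)                                      # expect one layer per year
names(cbi)                                            # check how the years are labelled
terra::plot(cbi[[1]], main = names(cbi)[1])           # burn severity for one year; unburned pixels are NA
terra::global(cbi[[1]], fun = "mean", na.rm = TRUE)   # mean CBI across burned pixels in that year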
This dataset has a layer for each year of data, with NA values at any location that was unburned during that year. Fire disturbance events documented in the Landfire fire events database will appear in these rasters. The data is pre-loaded onto the Cyverse data store and is located in the below file: ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Disturbance/SR_landfire_fire_events_cbi_bc.tif","title":"Fire severity: Composite Burn Index (CBI)"},{"location":"data-library/gedi/","text":"GEDI data overview \u00b6 The Global Ecosystem Dynamics Investigation (GEDI) is a joint mission between NASA and the University of Maryland, with the instrument installed aboard the International Space Station. Data acquired using the instrument\u2019s three lasers are used to construct detailed three-dimensional (3D) maps of forest canopy height and the distribution of branches and leaves. By accurately measuring forests in 3D, GEDI data play an important role in understanding the amounts of biomass and carbon forests store and how much they lose when disturbed \u2013 vital information for understanding Earth\u2019s carbon cycle and how it is changing. GEDI data also can be used to study plant and animal habitats and biodiversity, and how these change over time. The GEDI homepage is located here . GEDI data is collected in footprints of ~25m along the track of the sensor. Each footprint is separated by 60m. GEDI footprint based aboveground biomass density (Mg/ha) over the Southern Rocky Mountains have been downloaded by Dr. Nayani Ilangakoon and placed on the Cyverse data store at the below path. The data are from 2019-2022, and are in the form of tiled CSV files. ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/GEDI Brief scripts in both R and Python are available in the GitHub repository demonstrating how to access and manipulate the data. The R script is copied below. ### This file reads, filter basedo on qulaity flag and ecoregion, and plots GEDI biomass data in csv format. # ESIIL, 2024 # Nayani Ilangakoon # Load necessary libraries library ( readr ) # For read_csv library ( dplyr ) # For data manipulation library ( ggplot2 ) # For plotting library ( tidyr ) # For data tidying library ( forcats ) ############### # NOTE: This script is reading the data directly from the data store. It is only actually opening and processing a single csv # If you want to use all of the GEDI data that has been made available for your use, you will want to move it # to your cyverse instance to improve performance ############### # Define the root path to the data drive ROOT_PATH <- \"~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest\" # Create the path to the GEDI data by appending the directory name to the root path indir <- file.path ( ROOT_PATH , \"GEDI/GEDI_SR_footprint_data/GEDI_biomass_SR\" ) # List the contents of the indir directory list.files ( indir ) # List all files that end with .csv in indir polyfiles <- list.files ( indir , pattern = \"\\\\.csv$\" , full.names = TRUE ) # Print the list of .csv files polyfiles out_csv <- file.path ( indir , \"recovery_treat_bms_64.csv\" ) # Reading the csv file created in the last step l4a_df <- read_csv ( out_csv ) # Assign \"NA\" to the values that needs to be discarded. 
l4a_df <- l4a_df %>% mutate ( agbd = if_else ( agbd == -9999 , NA_real_ , agbd )) l4a_df <- na.omit ( l4a_df ) # MCD12Q1 PFT types pft_legend <- c ( 'Water Bodies' , 'Evergreen Needleleaf Trees' , 'Evergreen Broadleaf Trees' , 'Deciduous Needleleaf Trees' , 'Deciduous Broadleaf Trees' , 'Shrub' , 'Grass' , 'Cereal Croplands' , 'Broadleaf Croplands' , 'Urban and Built-up Lands' , 'Permanent Snow and Ice' , 'Barren' , 'Unclassified' ) # label PFT classes with numbers names ( pft_legend ) <- as.character ( 0 : 12 ) # Creating mask with good quality shots and trees/shrubs pft class mask <- l4a_df $ l4_quality_flag == 1 & l4a_df $ `land_cover_data/pft_class` <= 5 # Filter the dataframe based on the mask filtered_df <- l4a_df [ mask , ] # Transforming the PFT class to a factor with labels filtered_df $ `land_cover_data/pft_class` <- factor ( filtered_df $ `land_cover_data/pft_class` , levels = names ( pft_legend ), labels = pft_legend ) # Plotting the distribution of GEDI L4A AGBD estimates by PFTs ggplot ( filtered_df , aes ( x = agbd , fill = `land_cover_data/pft_class` )) + geom_histogram ( bins = 30 , alpha = 0.6 , position = \"identity\" ) + scale_fill_manual ( values = rainbow ( length ( unique ( filtered_df $ `land_cover_data/pft_class` )))) + labs ( title = 'Distribution of GEDI L4A AGBD estimates by PFTs (Plant Functional Types) in ACA in 2020' , x = 'agbd (Mg / ha)' , y = 'Frequency' ) + theme_minimal () + guides ( fill = guide_legend ( title = \"PFT Class\" )) + theme ( legend.position = \"bottom\" ) # Saving the plot ggsave ( \"test.png\" , width = 15 , height = 5 , units = \"in\" ) # Assuming l4a_df and mask have been defined as before # Binning the elevation data l4a_df <- l4a_df %>% mutate ( elev_bin = cut ( elev_lowestmode , breaks = seq ( 0 , 5000 , by = 500 ))) # Ensure PFT class is a factor with proper labels l4a_df $ `land_cover_data/pft_class` <- factor ( l4a_df $ `land_cover_data/pft_class` , levels = names ( pft_legend ), labels = pft_legend ) # Filtering the dataframe based on mask and ensure it is applied correctly filtered_df <- l4a_df %>% filter ( mask ) # Creating the boxplot g <- ggplot ( filtered_df , aes ( x = elev_bin , y = agbd )) + geom_boxplot () + facet_wrap ( ~ `land_cover_data/pft_class` , scales = \"free\" , labeller = labeller ( `land_cover_data/pft_class` = as_labeller ( pft_legend ))) + theme ( axis.text.x = element_text ( angle = 90 , hjust = 1 )) + labs ( x = \"Elevation (m)\" , y = \"agbd\" , title = \"AGBD by Elevation and PFT Class\" ) + theme_minimal () # Print the plot print ( g ) # Save the plot ggsave ( \"agbd_category.png\" , plot = g , width = 15 , height = 10 , units = \"in\" )","title":"GEDI data overview"},{"location":"data-library/gedi/#gedi-data-overview","text":"The Global Ecosystem Dynamics Investigation (GEDI) is a joint mission between NASA and the University of Maryland, with the instrument installed aboard the International Space Station. Data acquired using the instrument\u2019s three lasers are used to construct detailed three-dimensional (3D) maps of forest canopy height and the distribution of branches and leaves. By accurately measuring forests in 3D, GEDI data play an important role in understanding the amounts of biomass and carbon forests store and how much they lose when disturbed \u2013 vital information for understanding Earth\u2019s carbon cycle and how it is changing. GEDI data also can be used to study plant and animal habitats and biodiversity, and how these change over time. The GEDI homepage is located here . 
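As a purely illustrative extension of the R script above, the snippet below converts the footprint table l4a_df into spatial points for mapping. The coordinate column names lon_lowestmode and lat_lowestmode are assumed GEDI L4A fields and should be checked against the actual CSV headers before use.
# Illustrative sketch only; l4a_df is the footprint table read by the script above
library(sf)
gedi_pts <- sf::st_as_sf(
  l4a_df,
  coords = c("lon_lowestmode", "lat_lowestmode"),  # assumed column names -- verify with names(l4a_df)
  crs = "EPSG:4326"
)
plot(gedi_pts["agbd"], pch = 16, cex = 0.3)        # above-ground biomass density per footprint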
GEDI data is collected in footprints of ~25m along the track of the sensor. Each footprint is separated by 60m. GEDI footprint-based aboveground biomass density (Mg/ha) over the Southern Rocky Mountains has been downloaded by Dr. Nayani Ilangakoon and placed on the Cyverse data store at the below path. The data are from 2019-2022, and are in the form of tiled CSV files. ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/GEDI Brief scripts in both R and Python are available in the GitHub repository demonstrating how to access and manipulate the data. The R script is copied below. ### This file reads, filters based on quality flag and ecoregion, and plots GEDI biomass data in csv format. # ESIIL, 2024 # Nayani Ilangakoon # Load necessary libraries library ( readr ) # For read_csv library ( dplyr ) # For data manipulation library ( ggplot2 ) # For plotting library ( tidyr ) # For data tidying library ( forcats ) ############### # NOTE: This script is reading the data directly from the data store. It is only actually opening and processing a single csv # If you want to use all of the GEDI data that has been made available for your use, you will want to move it # to your cyverse instance to improve performance ############### # Define the root path to the data drive ROOT_PATH <- \"~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest\" # Create the path to the GEDI data by appending the directory name to the root path indir <- file.path ( ROOT_PATH , \"GEDI/GEDI_SR_footprint_data/GEDI_biomass_SR\" ) # List the contents of the indir directory list.files ( indir ) # List all files that end with .csv in indir polyfiles <- list.files ( indir , pattern = \"\\\\.csv$\" , full.names = TRUE ) # Print the list of .csv files polyfiles out_csv <- file.path ( indir , \"recovery_treat_bms_64.csv\" ) # Reading the csv file created in the last step l4a_df <- read_csv ( out_csv ) # Assign \"NA\" to the values that need to be discarded. 
l4a_df <- l4a_df %>% mutate ( agbd = if_else ( agbd == -9999 , NA_real_ , agbd )) l4a_df <- na.omit ( l4a_df ) # MCD12Q1 PFT types pft_legend <- c ( 'Water Bodies' , 'Evergreen Needleleaf Trees' , 'Evergreen Broadleaf Trees' , 'Deciduous Needleleaf Trees' , 'Deciduous Broadleaf Trees' , 'Shrub' , 'Grass' , 'Cereal Croplands' , 'Broadleaf Croplands' , 'Urban and Built-up Lands' , 'Permanent Snow and Ice' , 'Barren' , 'Unclassified' ) # label PFT classes with numbers names ( pft_legend ) <- as.character ( 0 : 12 ) # Creating mask with good quality shots and trees/shrubs pft class mask <- l4a_df $ l4_quality_flag == 1 & l4a_df $ `land_cover_data/pft_class` <= 5 # Filter the dataframe based on the mask filtered_df <- l4a_df [ mask , ] # Transforming the PFT class to a factor with labels filtered_df $ `land_cover_data/pft_class` <- factor ( filtered_df $ `land_cover_data/pft_class` , levels = names ( pft_legend ), labels = pft_legend ) # Plotting the distribution of GEDI L4A AGBD estimates by PFTs ggplot ( filtered_df , aes ( x = agbd , fill = `land_cover_data/pft_class` )) + geom_histogram ( bins = 30 , alpha = 0.6 , position = \"identity\" ) + scale_fill_manual ( values = rainbow ( length ( unique ( filtered_df $ `land_cover_data/pft_class` )))) + labs ( title = 'Distribution of GEDI L4A AGBD estimates by PFTs (Plant Functional Types) in ACA in 2020' , x = 'agbd (Mg / ha)' , y = 'Frequency' ) + theme_minimal () + guides ( fill = guide_legend ( title = \"PFT Class\" )) + theme ( legend.position = \"bottom\" ) # Saving the plot ggsave ( \"test.png\" , width = 15 , height = 5 , units = \"in\" ) # Assuming l4a_df and mask have been defined as before # Binning the elevation data l4a_df <- l4a_df %>% mutate ( elev_bin = cut ( elev_lowestmode , breaks = seq ( 0 , 5000 , by = 500 ))) # Ensure PFT class is a factor with proper labels l4a_df $ `land_cover_data/pft_class` <- factor ( l4a_df $ `land_cover_data/pft_class` , levels = names ( pft_legend ), labels = pft_legend ) # Filtering the dataframe based on mask and ensure it is applied correctly filtered_df <- l4a_df %>% filter ( mask ) # Creating the boxplot g <- ggplot ( filtered_df , aes ( x = elev_bin , y = agbd )) + geom_boxplot () + facet_wrap ( ~ `land_cover_data/pft_class` , scales = \"free\" , labeller = labeller ( `land_cover_data/pft_class` = as_labeller ( pft_legend ))) + theme ( axis.text.x = element_text ( angle = 90 , hjust = 1 )) + labs ( x = \"Elevation (m)\" , y = \"agbd\" , title = \"AGBD by Elevation and PFT Class\" ) + theme_minimal () # Print the plot print ( g ) # Save the plot ggsave ( \"agbd_category.png\" , plot = g , width = 15 , height = 10 , units = \"in\" )","title":"GEDI data overview"},{"location":"data-library/landfire-events/","text":"LANDFIRE Public Events Geodatabase \u00b6 From 'LANDFIRE Product Descriptions with References' The LF National (LF 1.X) Public Events Geodatabase is a collection of recent natural disturbance and land management activities used to update existing vegetation and fuel layers during LF Program deliverables. Public Events exclude proprietary and/or sensitive data. This geodatabase includes three feature classes - Raw Events, Model Ready Events, and Exotics. The Public Raw and Model Ready Event feature classes include natural disturbance and vegetation/fuel treatment data. The Public Exotics feature class contains data on the occurrence of exotic or invasive plant species. There is also a look up table for the source code (lutSource_Code), an attribute found in all three feature classes. 
The source code is an LF internal code assigned to each data source. Consult the table \u201clutSource_Code\u201d in the geodatabases for more information about the data sources included in, and excluded from, releases. The data compiled in the three feature classes are collected from disparate sources including federal, state, local, and private organizations. All data submitted to LF are evaluated for inclusion into the LF Events geodatabase. Acceptable Event data must have the following minimum requirements to be included in the Events geodatabase: 1) be represented by a polygon on the landscape and have a defined spatial coordinate system 2) have an acceptable event type (Appendix B) or exotic plant species 3) be attributed with year of occurrence or observation of the current data call. Metadata \u00b6 The LANDFIRE public events geodatabase contents description is available here . This document provides a description of how polygon data of disturbances and treatments are evaluated and processed into the LANDFIRE Events geodatabase. The Raw and Model Ready Events Data Dictionary is available here . Note that this is a large geodatabase (> 1 million polygons). We recommend filtering it as soon as possible. The relevant layers within the .gdb file are: CONUS_230_PublicExotics CONUS_230_PublicModelReadyEvents CONUS_230_PublicRawEvents Access \u00b6 Storage location: ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Disturbance/LF_Public_Events_1999_2022 Example access script: system(\"cp -r ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Disturbance/LF_Public_Events_1999_2022 ~/LF_Events\") #move the data first!! landfireEvents <- sf::st_read(\"~/LF_Events/LF_Public_Events_1999_2022.gdb\", layer = \"CONUS_230_PublicModelReadyEvents\") unique(landfireEvents$Event_Type) # [1] \"Thinning\" \"Other Mechanical\" \"Prescribed Fire\" \"Herbicide\" # [5] \"Clearcut\" \"Harvest\" \"Wildfire\" \"Mastication\" # [9] \"Wildland Fire\" \"Chemical\" \"Development\" \"Biological\" # [13] \"Weather\" \"Planting\" \"Reforestation\" \"Insects\" # [17] \"Seeding\" \"Disease\" \"Wildland Fire Use\" \"Insects/Disease\" # [21] \"Insecticide\" landfireFireEvents <- landfireEvents |> dplyr::filter(Event_Type == \"Wildfire\" | Event_Type == \"Wildland Fire Use\" | Event_Type == \"Prescribed Fire\" | Event_Type == \"Wildland Fire\" | Event_Type == \"Fire\")","title":"LANDFIRE Public Events Geodatabase"},{"location":"data-library/landfire-events/#landfire-public-events-geodatabase","text":"From 'LANDFIRE Product Descriptions with References' The LF National (LF 1.X) Public Events Geodatabase is a collection of recent natural disturbance and land management activities used to update existing vegetation and fuel layers during LF Program deliverables. Public Events exclude proprietary and/or sensitive data. This geodatabase includes three feature classes - Raw Events, Model Ready Events, and Exotics. The Public Raw and Model Ready Event feature classes include natural disturbance and vegetation/fuel treatment data. The Public Exotics feature class contains data on the occurrence of exotic or invasive plant species. There is also a look up table for the source code (lutSource_Code), an attribute found in all three feature classes. The source code is an LF internal code assigned to each data source. Consult the table \u201clutSource_Code\u201d in the geodatabases for more information about the data sources included in, and excluded from, releases. 
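Because the Model Ready layer holds more than a million polygons (see the Metadata note above), it can be cheaper to filter at read time than to load the full layer and filter afterwards. A minimal sketch, assuming the geodatabase has already been copied to ~/LF_Events as in the Access example, using the query argument of sf::st_read() to pull only fire-related events:

# Minimal sketch: use an OGR SQL query so only fire-related events are read,
# rather than loading all >1 million polygons and filtering afterwards.
# Assumes the geodatabase was copied to ~/LF_Events (see the Access example above).
landfireFireEvents <- sf::st_read(
  "~/LF_Events/LF_Public_Events_1999_2022.gdb",
  query = "SELECT * FROM CONUS_230_PublicModelReadyEvents
           WHERE Event_Type IN ('Wildfire', 'Wildland Fire Use',
                                'Prescribed Fire', 'Wildland Fire', 'Fire')"
)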
The data compiled in the three feature classes are collected from disparate sources including federal, state, local, and private organizations. All data submitted to LF are evaluated for inclusion into the LF Events geodatabase. Acceptable Event data must have the following minimum requirements to be included in the Events geodatabase: 1) be represented by a polygon on the landscape and have a defined spatial coordinate system 2) have an acceptable event type (Appendix B) or exotic plant species 3) be attributed with year of occurrence or observation of the current data call.","title":"LANDFIRE Public Events Geodatabase"},{"location":"data-library/landfire-events/#metadata","text":"The LANDFIRE public events geodatabase contents description is available here . This document provides a description of how polygon data of disturbances and treatments are evaluated and processed into the LANDFIRE Events geodatabase. The Raw and Model Ready Events Data Dictionary is available here . Note that this is a large geodatabase (> 1 million polygons). We recommend filtering it as soon as possible. The relevant layers within the .gdb file are: CONUS_230_PublicExotics CONUS_230_PublicModelReadyEvents CONUS_230_PublicRawEvents","title":"Metadata"},{"location":"data-library/landfire-events/#access","text":"Storage location: ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Disturbance/LF_Public_Events_1999_2022 Example access script: system(\"cp -r ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/Disturbance/LF_Public_Events_1999_2022 ~/LF_Events\") #move the data first!! landfireEvents <- sf::st_read(\"~/LF_Events/LF_Public_Events_1999_2022.gdb\", layer = \"CONUS_230_PublicModelReadyEvents\") unique(landfireEvents$Event_Type) # [1] \"Thinning\" \"Other Mechanical\" \"Prescribed Fire\" \"Herbicide\" # [5] \"Clearcut\" \"Harvest\" \"Wildfire\" \"Mastication\" # [9] \"Wildland Fire\" \"Chemical\" \"Development\" \"Biological\" # [13] \"Weather\" \"Planting\" \"Reforestation\" \"Insects\" # [17] \"Seeding\" \"Disease\" \"Wildland Fire Use\" \"Insects/Disease\" # [21] \"Insecticide\" landfireFireEvents <- landfireEvents |> dplyr::filter(Event_Type == \"Wildfire\" | Event_Type == \"Wildland Fire Use\" | Event_Type == \"Prescribed Fire\" | Event_Type == \"Wildland Fire\" | Event_Type == \"Fire\")","title":"Access"},{"location":"data-library/lcmap/","text":"Land Change Monitoring, Assessment, and Projection \u00b6 Land Change Monitoring, Assessment, and Projection (LCMAP) represents a new generation of land cover mapping and change monitoring from the U.S. Geological Survey\u2019s Earth Resources Observation and Science (EROS) Center. LCMAP answers a need for higher quality results at greater frequency with additional land cover and change variables than previous efforts. The USGS website for LCMAP is here. Collection 1.3 of the LCMAP product contains 10 different science products ( details here ). To accelerate your access to this dataset, the ESIIL team has made LCMAP 1.3 Primary Land Cover product (LCPRI) data for the Southern Rockies available on the Cyverse data store at the below directory: ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/LCMAP_SR_1985_2021 Additional LCMAP layers and products may be accessed via STAC and VSI (see below for example). The script used to download the LCMAP data already available is located in the GitHub repo at /code/create-data-library/LCMAP_Direct_Access-adapted.ipynb. The code is from the LCMAP data access tutorial. 
#Access LCMAP data from STAC #Adapted from 'Download data from a STAC API using R, rstac, and GDAL' #https://stacspec.org/en/tutorials/1-download-data-using-r/ require(glue) require(sf) require(terra) require(rstac) #Access ecoregions via VSI epa_l3 <- glue::glue( \"/vsizip/vsicurl/\", #magic remote connection \"https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip\", #copied link to download location \"/us_eco_l3.shp\") |> #path inside zip file sf::st_read() #Get just S.Rockies and ensure that it is in EPSG:4326 southernRockies <- epa_l3 |> dplyr::filter(US_L3NAME == \"Southern Rockies\") |> dplyr::group_by(US_L3NAME) |> dplyr::summarize(geometry = sf::st_union(geometry)) |> sf::st_transform(\"EPSG:4326\") bboxSR4326 <- sf::st_bbox(southernRockies) # Create a stac query for just the 2021 LCMAP data stac_query <- rstac::stac( \"https://planetarycomputer.microsoft.com/api/stac/v1\" ) |> rstac::stac_search( collections = \"usgs-lcmap-conus-v13\", bbox = bboxSR4326, datetime = \"2021-01-01/2021-12-31\" ) |> rstac::get_request() #A function to get a vsicurl url from a base url make_lcmap_vsicurl_url <- function(base_url) { paste0( \"/vsicurl\", \"?pc_url_signing=yes\", \"&pc_collection=usgs-lcmap-conus-v13\", \"&url=\", base_url ) } lcpri_url <- make_lcmap_vsicurl_url(rstac::assets_url(stac_query, \"lcpri\")) #Pull the file out_file <- tempfile(fileext = \".tif\") sf::gdal_utils( \"warp\", source = lcpri_url, destination = out_file, options = c( \"-t_srs\", sf::st_crs(southernRockies)$wkt, \"-te\", sf::st_bbox(southernRockies) ) ) #Create the raster and plot! terra::rast(out_file) |> terra::plot() southernRockies |> sf::st_geometry() |> plot(lwd = 3, add = TRUE)","title":"Land Change Monitoring, Assessment, and Projection"},{"location":"data-library/lcmap/#land-change-monitoring-assessment-and-projection","text":"Land Change Monitoring, Assessment, and Projection (LCMAP) represents a new generation of land cover mapping and change monitoring from the U.S. Geological Survey\u2019s Earth Resources Observation and Science (EROS) Center. LCMAP answers a need for higher quality results at greater frequency with additional land cover and change variables than previous efforts. The USGS website for LCMAP is here. Collection 1.3 of the LCMAP product contains 10 different science products ( details here ). To accelerate your access to this dataset, the ESIIL team has made LCMAP 1.3 Primary Land Cover product (LCPRI) data for the Southern Rockies available on the Cyverse data store at the below directory: ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/LCMAP_SR_1985_2021 Additional LCMAP layers and products may be accessed via STAC and VSI (see below for example). The script used to download the LCMAP data already available is located in the GitHub repo at /code/create-data-library/LCMAP_Direct_Access-adapted.ipynb. The code is from the LCMAP data access tutorial. 
#Access LCMAP data from STAC #Adapted from 'Download data from a STAC API using R, rstac, and GDAL' #https://stacspec.org/en/tutorials/1-download-data-using-r/ require(glue) require(sf) require(terra) require(rstac) #Access ecoregions via VSI epa_l3 <- glue::glue( \"/vsizip/vsicurl/\", #magic remote connection \"https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip\", #copied link to download location \"/us_eco_l3.shp\") |> #path inside zip file sf::st_read() #Get just S.Rockies and ensure that it is in EPSG:4326 southernRockies <- epa_l3 |> dplyr::filter(US_L3NAME == \"Southern Rockies\") |> dplyr::group_by(US_L3NAME) |> dplyr::summarize(geometry = sf::st_union(geometry)) |> sf::st_transform(\"EPSG:4326\") bboxSR4326 <- sf::st_bbox(southernRockies) # Create a stac query for just the 2021 LCMAP data stac_query <- rstac::stac( \"https://planetarycomputer.microsoft.com/api/stac/v1\" ) |> rstac::stac_search( collections = \"usgs-lcmap-conus-v13\", bbox = bboxSR4326, datetime = \"2021-01-01/2021-12-31\" ) |> rstac::get_request() #A function to get a vsicurl url from a base url make_lcmap_vsicurl_url <- function(base_url) { paste0( \"/vsicurl\", \"?pc_url_signing=yes\", \"&pc_collection=usgs-lcmap-conus-v13\", \"&url=\", base_url ) } lcpri_url <- make_lcmap_vsicurl_url(rstac::assets_url(stac_query, \"lcpri\")) #Pull the file out_file <- tempfile(fileext = \".tif\") sf::gdal_utils( \"warp\", source = lcpri_url, destination = out_file, options = c( \"-t_srs\", sf::st_crs(southernRockies)$wkt, \"-te\", sf::st_bbox(southernRockies) ) ) #Create the raster and plot! terra::rast(out_file) |> terra::plot() southernRockies |> sf::st_geometry() |> plot(lwd = 3, add = TRUE)","title":"Land Change Monitoring, Assessment, and Projection"},{"location":"data-library/modis-vcf/","text":"MODIS Vegetation Continuous Fields (VCF) \u00b6 The MODIS VCF product is derived from the MODIS satellite. The dataset provides proportional estimates of varying cover types. This data is developed from global training data derived using high-resolution imagery. The training data and phenological metrics are used with a regression tree to derive percent cover globally. The model is then used to estimate areal proportions of life form, leaf type, and leaf longevity. MODIS Vegetation Continuous Fields (MOD44B) provides global sub-pixel estimates of three land cover components (percent tree cover; percent non-tree vegetation; and percent non-vegetated) at 250 m spatial resolution. NASA MODIS information here . To accelerate your access to this dataset, the ESIIL team has made MODIS VCF data for the Southern Rockies available on the Cyverse data store at the below directory: ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/modis-vcf Additional MODIS data is best accessed via VSI or STAC.","title":"MODIS Vegetation Continuous Fields (VCF)"},{"location":"data-library/modis-vcf/#modis-vegetation-continuous-fields-vcf","text":"The MODIS VCF product is derived from the MODIS satellite. The dataset provides proportional estimates of varying cover types. This data is developed from global training data derived using high-resolution imagery. The training data and phenological metrics are used with a regression tree to derive percent cover globally. The model is then used to estimate areal proportions of life form, leaf type, and leaf longevity. 
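The pre-staged VCF layers in the modis-vcf directory listed above can be treated like any other rasters once copied to your instance. A minimal sketch using terra (the exact file names are not listed here, so this assumes the directory contains GeoTIFFs):

# Minimal sketch: copy the pre-staged MODIS VCF data to the instance, list the
# GeoTIFFs, and open them with terra. If the files do not share the same grid,
# open them individually instead of stacking.
library(terra)
system("cp -r ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/modis-vcf ~/modis-vcf")
vcf_files <- list.files("~/modis-vcf", pattern = "\\.tif$",
                        full.names = TRUE, recursive = TRUE)
vcf <- terra::rast(vcf_files)   # one layer per file, assuming a shared grid
terra::plot(vcf[[1]])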
MODIS Vegetation Continuous Fields (MOD44B) provides global sub-pixel estimates of three land cover components (percent tree cover; percent non-tree vegetation; and percent non-vegetated) at 250 m spatial resolution. NASA MODIS information here . To accelerate your access to this dataset, the ESIIL team has made MODIS VCF data for the Southern Rockies available on the Cyverse data store at the below directory: ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/modis-vcf Additional MODIS data is best accessed via VSI or STAC.","title":"MODIS Vegetation Continuous Fields (VCF)"},{"location":"data-library/mounting-via-vsi/","text":"Mounting data directly from a URL \u00b6 ESIIL, 2024 Tyler McIntosh Data can be directly accessed from where it is hosted on the internet, without the need to download the entire file to your local machine. For spatial data, special protocols from the GDAL library can be used. The first part of enabling remote access is \"vsicurl\". VSI is GDAL's Virtual File System. This is a virtual file system handler that allows access to files hosted on remote servers over protocols like HTTP, HTTPS, and FTP. When you prepend \"vsicurl/\" to a URL, GDAL reads the file directly from the remote location without downloading it entirely to the local disk. It's particularly useful for large files, as it only fetches the portions of the file needed for the current operation. The second part of enabling remote access to a zipped file (most large data files hosted online) is \"vsizip\". This is another virtual file system handler in GDAL that enables reading files inside zip archives as if they were unzipped, without the need to extract them manually. By using \"vsizip/\", you can directly access the contents of a zip file. When combined, \"/vsizip/vsicurl/\" allows GDAL (and, subsequently, a package such as 'terra' or 'sf' in R, or similar Python packages) to access files inside of a zip archive on a remote server. The URL following this protocol specifies the remote location of the zip file, and the path after the URL specifies the particular file within the zip archive that you want to access. Example \u00b6 For example, you may have a url to a spatial dataset that you want to use, \" https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip \". You may have found this link on a website. Figure out your archive contents \u00b6 In order to open a specific file within the zip archive, you need to know the names of the files within the archive. You can either: Download the archive once, view the data structure, and then access it remotely from then on, or, a better solution is to... 
Access the contents of the zip file using GDAL from a command-line environment To access the contents from a command-line environment, you would use a line of code like this: gdalinfo /vsizip/vsicurl/https://example.com/data.zip Or, in our example: gdalinfo /vsizip/vsicurl/https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip If you would like to do this without leaving your R or Python environment, you can use R or Python to execute command line calls: R, using \"system\": zip_url = \"/vsizip//vsicurl/https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip\" system(paste(\"gdalinfo\", zip_url)) Python, using \"subprocess.run\": import subprocess zip_url = \"/vsizip//vsicurl/https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip\" subprocess.run([\"gdalinfo\", zip_url]) This will tell you that the archive contains several files, one of which is \"us_eco_l3.shp\" - our shapefile of interest. (If there were subdirectories within the directory, repeat the process). Mounting the data \u00b6 We now know the full path to our file of interest: \"/vsizip//vsicurl/https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip/us_eco_l3.shp\" To mount the data, we simply feed this string to our spatial data package just as we would any other data location. For example, in R, we could do: require(glue) require(sf) epa_l3 <- glue::glue( \"/vsizip/vsicurl/\", #magic remote connection \"https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip\", #copied link to download location \"/us_eco_l3.shp\") |> #path inside zip file sf::st_read() From this point, we now have the data mounted in our epa_l3 variable, and can manipulate it as usual. Note that, since vsicurl only fetches the portions of the file needed for an operation, the data mounted very quickly. Only once you attempt an operation with the data that requires the entire dataset will it actually fetch the entire dataset!","title":"Mounting data directly from a URL"},{"location":"data-library/mounting-via-vsi/#mounting-data-directly-from-a-url","text":"ESIIL, 2024 Tyler McIntosh Data can be directly accessed from where it is hosted on the internet, without the need to download the entire file to your local machine. For spatial data, special protocols from the GDAL library can be used. The first part of enabling remote access is \"vsicurl\". VSI is GDAL's Virtual File System. This is a virtual file system handler allows access to files hosted on remote servers over protocols like HTTP, HTTPS, and FTP. When you prepend \"vsicurl/\" to a URL, GDAL reads the file directly from the remote location without downloading it entirely to the local disk. It's particularly useful for large files, as it only fetches the portions of the file needed for the current operation. The second part of enabling remote access to a zipped file (most large data files hosted online) is \"vsizip\". This is another virtual file system handler in GDAL that enables reading files inside zip archives as if they were unzipped, without the need to extract them manually. By using \"vsizip/\", you can directly access the contents of a zip file. When combined, \"/vsizip/vsicurl/\" allows GDAL (and, subsequently, a package such as 'terra' or 'sf' in R, or similar Python packages) to access files inside of a zip archive on a remote server. 
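The same combined prefix works for rasters. A minimal sketch using the HydroSHEDS DEM archive that appears later in this library, read with terra instead of sf:

# Minimal sketch: /vsizip/vsicurl/ also works for rasters via terra. Only the
# header is read here; pixels are fetched when an operation needs them.
library(terra)
dem <- terra::rast(paste0(
  "/vsizip/vsicurl/",
  "https://data.hydrosheds.org/file/hydrosheds-v1-dem/hyd_na_dem_15s.zip",
  "/hyd_na_dem_15s.tif"
))
dem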
The URL following this protocol specifies the remote location of the zip file, and the path after the URL specifies the particular file within the zip archive that you want to access.","title":"Mounting data directly from a URL"},{"location":"data-library/mounting-via-vsi/#example","text":"For example, you may have a url to a spatial dataset that you want to use, \" https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip \". You may have found this link on a website.","title":"Example"},{"location":"data-library/mounting-via-vsi/#figure-out-your-archive-contents","text":"In order to open a specific file within the zip archive, you need to know the names of the files within the archive. You can either: Download the archive once, view the data structure, and then access it remotely from then on, or, a better solution is to... Access the contents of the zip file using GDAL from a command-line environment To access the contents from a command-line environment, you would use a line of code like this: gdalinfo /vsizip/vsicurl/https://example.com/data.zip Or, in our example: gdalinfo /vsizip/vsicurl/https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip If you would like to do this without leaving your R or Python environment, you can use R or Python to execute command line calls: R, using \"system\": zip_url = \"/vsizip//vsicurl/https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip\" system(paste(\"gdalinfo\", zip_url)) Python, using \"subprocess.run\": import subprocess zip_url = \"/vsizip//vsicurl/https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip\" subprocess.run([\"gdalinfo\", zip_url]) This will tell you that the archive contains several files, one of which is \"us_eco_l3.shp\" - our shapefile of interest. (If there were subdirectories within the directory, repeat the process).","title":"Figure out your archive contents"},{"location":"data-library/mounting-via-vsi/#mounting-the-data","text":"We now know the full path to our file of interest: \"/vsizip//vsicurl/https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip/us_eco_l3.shp\" To mount the data, we simply feed this string to our spatial data package just as we would any other data location. For example, in R, we could do: require(glue) require(sf) epa_l3 <- glue::glue( \"/vsizip/vsicurl/\", #magic remote connection \"https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip\", #copied link to download location \"/us_eco_l3.shp\") |> #path inside zip file sf::st_read() From this point, we now have the data mounted in our epa_l3 variable, and can manipulate it as usual. Note that, since vsicurl only fetches the portions of the file needed for an operation, the data mounted very quickly. Only once you attempt an operation with the data that requires the entire dataset will it actually fetch the entire dataset!","title":"Mounting the data"},{"location":"data-library/move-data-to-instance/","text":"Moving data to your instance from the data store \u00b6 Some data has been pre-downloaded for you and stored on the CyVerse data store in order to help expedite your projects. While you CAN access that data directly on the data store, it is HIGHLY recommended that you copy the data over to your instance (see \"Cyverse data management\" under \"Collaborating on the cloud\" for more information). This is because your work with the data will be dramatically faster with it located on your instance. Take, for instance, the treemap data. 
If we load and plot the data without moving it, it takes just a few seconds (i.e. ~2.973 seconds). Not bad. require(terra) require(tictoc) tictoc::tic() treemap <- terra::rast(\"~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap/treemap2016_southernrockies.tif\") terra::plot(treemap) tictoc::toc() However, if we load and plot the data after moving it, it takes less than a second (i.e. ~0.302 seconds). Even better! This 10x increase in speed will add up incredibly quickly as soon as you start working more intensively with the data. require(terra) require(tictoc) system(\"cp -r ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap ~/TreeMap\") #move the data first!! tictoc::tic() treemap <- terra::rast(\"~/TreeMap/treemap2016_southernrockies.tif\") terra::plot(treemap) tictoc::toc() Takeaway: seriously, just copy the data over.","title":"Move and Save data"},{"location":"data-library/move-data-to-instance/#moving-data-to-your-instance-from-the-data-store","text":"Some data has been pre-downloaded for you and stored on the CyVerse data store in order to help expedite your projects. While you CAN access that data directly on the data store, it is HIGHLY recommended that you copy the data over to your instance (see \"Cyverse data management\" under \"Collaborating on the cloud\" for more information). This is because your work with the data will be dramatically faster with it located on your instance. Take, for instance, the treemap data. If we load and plot the data without moving it, it takes just a few seconds (i.e. ~2.973 seconds). Not bad. require(terra) require(tictoc) tictoc::tic() treemap <- terra::rast(\"~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap/treemap2016_southernrockies.tif\") terra::plot(treemap) tictoc::toc() However, if we load and plot the data after moving it, it takes less than a second (i.e. ~0.302 seconds). Even better! This 10x increase in speed will add up incredibly quickly as soon as you start working more intensively with the data. require(terra) require(tictoc) system(\"cp -r ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap ~/TreeMap\") #move the data first!! tictoc::tic() treemap <- terra::rast(\"~/TreeMap/treemap2016_southernrockies.tif\") terra::plot(treemap) tictoc::toc() Takeaway: seriously, just copy the data over.","title":"Moving data to your instance from the data store"},{"location":"data-library/stac_mount_save/","text":"The art of making a data cube \u00b6 Ty Tuff, ESIIL Data Scientist 2023-10-27 #library(Rcpp) library ( sf ) library ( gdalcubes ) library ( rstac ) library ( gdalUtils ) library ( terra ) library ( rgdal ) library ( reshape2 ) library ( osmdata ) library ( terra ) library ( dplyr ) library ( stars ) library ( ggplot2 ) library ( colorspace ) library ( geos ) library ( osmdata ) library ( ggthemes ) library ( tidyr ) gdalcubes_options ( parallel = 8 ) sf :: sf_extSoftVersion () ## GEOS GDAL proj.4 GDAL_with_GEOS USE_PROJ_H ## \"3.11.0\" \"3.5.3\" \"9.1.0\" \"true\" \"true\" ## PROJ ## \"9.1.0\" gdalcubes_gdal_has_geos () ## [1] TRUE library ( osmdata ) library ( dplyr ) library ( sf ) library ( terra ) library ( tidyterra ) library ( glue ) library ( ggplot2 ) library ( ggthemes ) library ( stars ) library ( magrittr ) library ( landsat ) The philosophy of moving data in the cloud \u00b6 The philosophy of moving data in the cloud represents a paradigm shift in how we approach data within our analytical processes. 
Instead of the traditional method of transferring entire datasets to our local environments, the cloud encourages a more efficient model: bring your analysis to the data. This approach minimizes data movement and leverages the cloud\u2019s computational power and scalability. By utilizing cloud-native tools and services, we can run our analyses directly on the data where it resides, selectively accessing and processing only what is necessary. This not only streamlines workflows but also significantly reduces overheads related to data transfer and storage management. In essence, the focus is on diverting computational resources to the data rather than the cumbersome and resource-intensive practice of moving large datasets to and fro. \u2018To Make\u2019 or \u2018To Take\u2019 a photo \u00b6 The distinction between making and taking a photograph lies in the approach and intent behind the camera. Taking a photo is often a reactive process, where the photographer captures moments as they naturally unfold, seizing the spontaneity of life without alteration. It\u2019s a passive form of photography where the emphasis is on the right timing and the natural interplay of elements within the frame. On the other hand, making a photo is a proactive and deliberate act. It is akin to craftsmanship, where a professional photographer starts with a concept and utilizes a variety of tools and techniques to stage and construct the desired scene. They actively manipulate lighting, composition, and subjects to create a photograph that aligns with their pre-visualized artistic vision. While both methods use a camera to produce a photograph, making a photo involves a creation process, whereas taking a photo is about finding the scene. David Yarrow is a famous photographer who \u2018makes\u2019 his photographs. What does it mean to \u2018make\u2019 a data cube? \u00b6 The artistry of Ansel Adams\u2019 photography serves as a compelling analogy for the meticulous craft of building a data cube from cloud data sources using tools like STAC and GDAL VSI. Just as Adams would survey the vastness of a landscape, discerning the interplay of light and shadow upon the mountains before him, a data architect surveys the expanse of available data. In this analogy, the raw data are the majestic mountains and sweeping landscapes waiting to be captured. The STAC collection acts as the photographer\u2019s deliberate choice of scene, pointing the camera lens\u2014our data tools\u2014towards the most telling and coherent dataset. Just as Adams\u2019 photographs are more than mere records of a landscape, but rather a confluence of his vision, technique, and the scene\u2019s natural beauty, so too is the data cube more than the sum of its parts. It is the artful synthesis of information, crafted and composed with the skill and intent of an artist, producing not just a tool for analysis but a harmonized, data-driven portrait of the world it represents. The builder of the data cube is, indeed, an artist, and the data cube their masterpiece, revealing not just data, but a story, a perspective, a landscape sewn from the raw material of cloud-sourced information. As Adams would adjust his viewfinder, setting the boundaries of his photographic frame, the data builder sets the view window, filtering and transferring relevant data to their own medium, akin to Adams\u2019 film. 
This is where the raw data is transformed, organized into the structured form of a data frame or data cube, a process not unlike the careful development of a photograph in a darkroom. Here, the data cube creator, much like Adams with his careful dodging and burning, harmonizes disparate elements into a cohesive whole, each decision reflecting an intention and vision for the final product. 1) The Rat through the Snake Problem: Scalability with Cloud Computing \u00b6 Just like a snake that swallows a rat, traditional computing systems often struggle to process the large volumes of environmental data \u2014 they\u2019re constrained by their static hardware limitations. Cloud computing introduces a python-esque capability: massive scalability. By migrating to the cloud, we essentially make the snake bigger, allowing it to handle larger \u201cprey.\u201d Scalable computers in the cloud can grow with the demand, providing the necessary computational power to process extensive datasets, which is vital in a field where data volumes are increasing exponentially. 2) The Antelope through the Python Problem: Streamlining with GDAL VSI \u00b6 As we scale up, we encounter a new challenge: trying to pass an antelope through a python \u2014 a metaphor for the next level of complexity in data processing. The sheer size and complexity of the data can become overwhelming. This is where GDAL\u2019s Virtual File System (VSI) becomes our ecological adaptation. VSI allows us to access remote data transparently and more efficiently. Instead of ingesting the entire \u201cantelope,\u201d VSI enables the \u201cpython\u201d to dynamically access and process only the parts of the data it needs, when it needs them, much like constriction before digestion. This selective access minimizes the need for local storage and expedites the data handling process. 3) Drinking from a Fire Hose: Accelerated Inference with AI and ML \u00b6 Once we\u2019ve enabled the flow of large amounts of digestible data, we encounter the metaphorical challenge of drinking from a fire hose. The data, now flowing and accessible, is immense and rapid \u2014 posing a challenge not just to store and process, but to understand and derive meaning from in real-time. This is where artificial intelligence (AI) and machine learning (ML) step in. These technologies act as a sophisticated filtration system, enabling us to drink safely and beneficially from the torrent. AI and ML can analyze patterns, make predictions, and infer insights at a pace that keeps up with the fast stream of data, turning raw information into actionable knowledge. By addressing these three pivotal challenges with cloud computing, GDAL VSI, and AI/ML, we not only manage to consume the data effectively but also transform our capabilities in environmental data science. We can move from mere data ingestion to meaningful data interpretation, all at a scale and speed necessary for impactful environmental analysis. Mounting data \u00b6 A void-filled Digital Elevation Model (DEM) is a comprehensive topographical representation where any missing data points, known as voids, have been filled in. These voids can occur due to various reasons, such as clouds or technical errors during data collection. In a void-filled DEM, these gaps are interpolated or estimated using the surrounding data to create a continuous, seamless surface model. This process enhances the utility and accuracy of the DEM for hydrological modeling, terrain analysis, and other geographical applications. 
The HydroSHEDS website ( https://www.hydrosheds.org/hydrosheds-core-downloads ) provides access to high-quality, void-filled DEM datasets like the DEM_continuous_CONUS_15s, which users can download and easily integrate into spatial analysis workflows using tools such as \u2018terra\u2019 in R, allowing for sophisticated environmental and geographical research and planning. # Record start time a <- Sys.time () # Create a string with the file path using glue, then download and read the DEM file as a raster object DEM_continuous_CONUS_15s <- glue ( \"/vsizip/vsicurl/\" , #magic remote connection \"https://data.hydrosheds.org/file/hydrosheds-v1-dem/hyd_na_dem_15s.zip\" , #copied link to download location \"/hyd_na_dem_15s.tif\" ) %>% #path inside zip file terra :: rast () # The 'glue' function constructs the file path string, which is then passed to 'terra::rast()' to read the DEM file into R as a raster layer. '/vsizip/vsicurl/' is a special GDAL virtual file system syntax that allows reading directly from a zipped file on a remote server. # Record end time and calculate the time difference b <- Sys.time () difftime ( b , a ) ## Time difference of 4.603666 secs # The resulting raster object is stored in 'DEM_continuous_CONUS_15s', which now contains the void-filled DEM data ready for use DEM_continuous_CONUS_15s # Prints out the details of the 'DEM_continuous_CONUS_15s' raster object ## class : SpatRaster ## dimensions : 13920, 20640, 1 (nrow, ncol, nlyr) ## resolution : 0.004166667, 0.004166667 (x, y) ## extent : -138, -52, 5, 63 (xmin, xmax, ymin, ymax) ## coord. ref. : lon/lat WGS 84 (EPSG:4326) ## source : hyd_na_dem_15s.tif ## name : Band_1 # output is a SpatRaster, which is the object type associated with the 'terra' package. Continuous DEM for North America # Record start time a <- Sys.time () ggplot () + geom_spatraster ( data = DEM_continuous_CONUS_15s ) + theme_tufte () b <- Sys.time () difftime ( b , a ) ## Time difference of 52.49061 secs Calculate Slope from that DEM SLOPE_continuous_CONUS_15s <- terra :: terrain ( DEM_continuous_CONUS_15s , \"slope\" ) SLOPE_continuous_CONUS_15s ## class : SpatRaster ## dimensions : 13920, 20640, 1 (nrow, ncol, nlyr) ## resolution : 0.004166667, 0.004166667 (x, y) ## extent : -138, -52, 5, 63 (xmin, xmax, ymin, ymax) ## coord. ref. : lon/lat WGS 84 (EPSG:4326) ## source(s) : memory ## name : slope ## min value : 0.00000 ## max value : 56.98691 # Record start time a <- Sys.time () ggplot () + geom_spatraster ( data = SLOPE_continuous_CONUS_15s ) + theme_tufte () b <- Sys.time () difftime ( b , a ) ## Time difference of 3.859545 secs Calculate aspect from DEM ASPECT_continuous_CONUS_15s <- terra :: terrain ( DEM_continuous_CONUS_15s , \"aspect\" ) ASPECT_continuous_CONUS_15s ## class : SpatRaster ## dimensions : 13920, 20640, 1 (nrow, ncol, nlyr) ## resolution : 0.004166667, 0.004166667 (x, y) ## extent : -138, -52, 5, 63 (xmin, xmax, ymin, ymax) ## coord. ref. : lon/lat WGS 84 (EPSG:4326) ## source(s) : memory ## name : aspect ## min value : 0 ## max value : 360 # Record start time a <- Sys.time () ggplot () + geom_spatraster ( data = ASPECT_continuous_CONUS_15s ) + theme_tufte () b <- Sys.time () difftime ( b , a ) ## Time difference of 3.650267 secs Create a cube from those layers! 
mini_stack <- c ( DEM_continuous_CONUS_15s , SLOPE_continuous_CONUS_15s , ASPECT_continuous_CONUS_15s ) mini_stack ## class : SpatRaster ## dimensions : 13920, 20640, 3 (nrow, ncol, nlyr) ## resolution : 0.004166667, 0.004166667 (x, y) ## extent : -138, -52, 5, 63 (xmin, xmax, ymin, ymax) ## coord. ref. : lon/lat WGS 84 (EPSG:4326) ## sources : hyd_na_dem_15s.tif ## memory ## memory ## names : Band_1, slope, aspect ## min values : ? , 0.00000, 0 ## max values : ? , 56.98691, 360 Reproject and return the bounding box coordinates for our Area of Interest # Transform the filtered geometry to EPSG:4326 and store its bounding box # Record start time a <- Sys.time () DEM_continuous_CONUS_15s |> stars :: st_as_stars () |> st_transform ( \"EPSG:4326\" ) |> st_bbox () -> bbox_4326 DEM_continuous_CONUS_15s |> stars :: st_as_stars () |> st_transform ( \"EPSG:32618\" ) |> st_bbox () -> bbox_32618 b <- Sys.time () difftime ( b , a ) ## Time difference of 3.7653 mins Get a polygon for Boulder County, reproject, and return bounding box. This is so I can make a smaller search in the stac catalog. boulder_county <- getbb ( \"boulder, co\" , format_out = \"sf_polygon\" ) boulder_county $ multipolygon |> st_transform ( crs = 4326 ) |> st_bbox () -> bbox_4326_boulder boulder_county $ multipolygon |> st_transform ( crs = 32720 ) |> st_bbox () -> bbox_32720_boulder Get a polygon for the United States and crop it to be the same size as the DEM above. aoi <- getbb ( \"United States\" , format_out = \"sf_polygon\" ) conus <- aoi $ multipolygon |> st_crop ( bbox_4326 ) ggplot ( data = conus ) + geom_sf () Search the Stac catalog. STAC, or SpatioTemporal Asset Catalog, is an open-source specification designed to standardize the way geospatial data is indexed and discovered. Developed by Element 84 among others, it facilitates better interoperability and sharing of geospatial assets by providing a common language for describing them. STAC\u2019s flexible design allows for easy cataloging of data, making it simpler for individuals and systems to search and retrieve geospatial information. By effectively organizing data about the Earth\u2019s spatial and temporal characteristics, STAC enables users to harness the full power of the cloud and modern data processing technologies, optimizing the way we access and analyze environmental data on a global scale. stac ( \"https://earth-search.aws.element84.com/v1\" ) |> get_request () ## ###STACCatalog ## - id: earth-search-aws ## - description: A STAC API of public datasets on AWS ## - field(s): stac_version, type, id, title, description, links, conformsTo Element 84\u2019s Earth Search is a STAC compliant search and discovery API that offers users access to a vast collection of geospatial open datasets hosted on AWS. It serves as a centralized search catalog providing standardized metadata for these open datasets, designed to be freely used and integrated into various applications. Alongside the API, Element 84 also provides a web application named Earth Search Console, which is map-centric and allows users to explore and visualize the data contained within the Earth Search API\u2019s catalog. This suite of tools is part of Element 84\u2019s initiative to make geospatial data more accessible and actionable for a wide range of users and applications. 
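Before framing a search it can help to see what the API actually exposes. A minimal sketch, assuming rstac's collections() endpoint helper, that lists the collections served by Earth Search:

# Minimal sketch: list the collections exposed by the Earth Search STAC API so
# you know which collection id to pass to stac_search().
library(rstac)
earth_search_collections <- stac("https://earth-search.aws.element84.com/v1") |>
  collections() |>
  get_request()
earth_search_collections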
collection_formats () ## CHIRPS_v2_0_daily_p05_tif | Image collection format for CHIRPS v 2.0 daily ## | global precipitation dataset (0.05 degrees ## | resolution) from GeoTIFFs, expects list of .tif ## | or .tif.gz files as input. [TAGS: CHIRPS, ## | precipitation] ## CHIRPS_v2_0_monthly_p05_tif | Image collection format for CHIRPS v 2.0 monthly ## | global precipitation dataset (0.05 degrees ## | resolution) from GeoTIFFs, expects list of .tif ## | or .tif.gz files as input. [TAGS: CHIRPS, ## | precipitation] ## ESA_CCI_SM_ACTIVE | Collection format for ESA CCI soil moisture ## | active product (version 4.7) [TAGS: Soil ## | Moisture, ESA, CCI] ## ESA_CCI_SM_PASSIVE | Collection format for ESA CCI soil moisture ## | passive product (version 4.7) [TAGS: Soil ## | Moisture, ESA, CCI] ## GPM_IMERG_3B_DAY_GIS_V06A | Collection format for daily ## | IMERG_3B_DAY_GIS_V06A data [TAGS: Precipitation, ## | GPM, IMERG] ## L8_L1TP | Collection format for Landsat 8 Level 1 TP ## | product [TAGS: Landsat, USGS, Level 1, NASA] ## L8_SR | Collection format for Landsat 8 surface ## | reflectance product [TAGS: Landsat, USGS, Level ## | 2, NASA, surface reflectance] ## MAXAR | Preliminary collection format for MAXAR open ## | data, visual only (under development) [TAGS: ] ## MxD09GA | Collection format for selected bands from the ## | MODIS MxD09GA (Aqua and Terra) product [TAGS: ## | MODIS, surface reflectance] ## MxD10A2 | Collection format for selected bands from the ## | MODIS MxD10A2 (Aqua and Terra) v006 Snow Cover ## | product [TAGS: MODIS, Snow Cover] ## MxD11A1 | Collection format for selected bands from the ## | MODIS MxD11A2 (Aqua and Terra) v006 Land Surface ## | Temperature product [TAGS: MODIS, LST] ## MxD11A2 | Collection format for selected bands from the ## | MODIS MxD11A2 (Aqua and Terra) v006 Land Surface ## | Temperature product [TAGS: MODIS, LST] ## MxD13A2 | Collection format for selected bands from the ## | MODIS MxD13A2 (Aqua and Terra) product [TAGS: ## | MODIS, VI, NDVI, EVI] ## MxD13A3 | Collection format for selected bands from the ## | MODIS MxD13A3 (Aqua and Terra) product [TAGS: ## | MODIS, VI, NDVI, EVI] ## MxD13Q1 | Collection format for selected bands from the ## | MODIS MxD13Q1 (Aqua and Terra) product [TAGS: ## | MODIS, VI, NDVI, EVI] ## MxD14A2 | Collection format for the MODIS MxD14A2 (Aqua ## | and Terra) product [TAGS: MODIS, Fire] ## PlanetScope_3B_AnalyticMS_SR | Image collection format for PlanetScope 4-band ## | scenes [TAGS: PlanetScope, BOA, Surface ## | Reflectance] ## Sentinel2_L1C | Image collection format for Sentinel 2 Level 1C ## | data as downloaded from the Copernicus Open ## | Access Hub, expects a list of file paths as ## | input. The format works on original ZIP ## | compressed as well as uncompressed imagery. ## | [TAGS: Sentinel, Copernicus, ESA, TOA] ## Sentinel2_L1C_AWS | Image collection format for Sentinel 2 Level 1C ## | data in AWS [TAGS: Sentinel, Copernicus, ESA, ## | TOA] ## Sentinel2_L2A | Image collection format for Sentinel 2 Level 2A ## | data as downloaded from the Copernicus Open ## | Access Hub, expects a list of file paths as ## | input. The format should work on original ZIP ## | compressed as well as uncompressed imagery. ## | [TAGS: Sentinel, Copernicus, ESA, BOA, Surface ## | Reflectance] ## Sentinel2_L2A_THEIA | Image collection format for Sentinel 2 Level 2A ## | data as downloaded from Theia. 
[TAGS: Sentinel, ## | ESA, Flat Reflectance, Theia] Building a stac collection by aiming your camera at the landscape Creating a STAC collection is akin to a photographer framing a shot; the landscape is rich with diverse data, mirroring a scene bustling with potential subjects, colors, and light. Just as a photographer selects a portion of the vista to capture, focusing on elements that will compose a compelling image, a data scientist must similarly navigate the vast data terrain. They must \u2018point their camera\u2019 judiciously, ensuring that the \u2018frame\u2019 encapsulates the precise data needed. This careful selection is crucial, as it determines the relevance and quality of the data collection, much like the photographer\u2019s choice dictates the story a photograph will tell. # Record start time a <- Sys.time () # Initialize STAC connection s = stac ( \"https://earth-search.aws.element84.com/v0\" ) # Search for Sentinel-2 images within specified bounding box and date range #22 Million items items = s |> stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( bbox_4326_boulder [ \"xmin\" ], bbox_4326_boulder [ \"ymin\" ], bbox_4326_boulder [ \"xmax\" ], bbox_4326_boulder [ \"ymax\" ]), datetime = \"2021-05-15/2021-05-16\" ) |> post_request () |> items_fetch ( progress = FALSE ) # Print number of found items length ( items $ features ) ## [1] 1 # Prepare the assets for analysis library ( gdalcubes ) assets = c ( \"B01\" , \"B02\" , \"B03\" , \"B04\" , \"B05\" , \"B06\" , \"B07\" , \"B08\" , \"B8A\" , \"B09\" , \"B11\" , \"B12\" , \"SCL\" ) s2_collection = stac_image_collection ( items $ features , asset_names = assets , property_filter = function ( x ) { x [[ \"eo:cloud_cover\" ]] < 20 }) #all images with less than 20% clouds b <- Sys.time () difftime ( b , a ) ## Time difference of 0.4706092 secs # Display the image collection s2_collection ## Image collection object, referencing 1 images with 13 bands ## Images: ## name left top bottom right ## 1 S2B_13TDE_20210516_0_L2A -106.1832 40.65079 39.65576 -104.8846 ## datetime srs ## 1 2021-05-16T18:02:54 EPSG:32613 ## ## Bands: ## name offset scale unit nodata image_count ## 1 B01 0 1 1 ## 2 B02 0 1 1 ## 3 B03 0 1 1 ## 4 B04 0 1 1 ## 5 B05 0 1 1 ## 6 B06 0 1 1 ## 7 B07 0 1 1 ## 8 B08 0 1 1 ## 9 B09 0 1 1 ## 10 B11 0 1 1 ## 11 B12 0 1 1 ## 12 B8A 0 1 1 ## 13 SCL 0 1 1 Setting up your camera and film The camera through which the data scientist frames the shot is multifaceted, akin to the tools and processes they employ. The camera\u2019s film, analogous to the data cube, defines the resolution and dimensions of the captured data, shaping how the final dataset will be utilized. The lens and its settings\u2014focus, aperture, and exposure\u2014determine the clarity, depth, and breadth of the captured information, much like the algorithms and parameters set by the data scientist dictate the granularity and scope of the data cube. The flash, like data enhancement techniques, can illuminate hidden details, ensuring that the data cube, the final product, is as informative and accurate as the landscape it represents. # Record start time a <- Sys.time () # Define a specific view on the satellite image collection v = cube_view ( srs = \"EPSG:32720\" , #this is harder than expected. 
dx = 100 , dy = 100 , dt = \"P1M\" , aggregation = \"median\" , resampling = \"near\" , extent = list ( t0 = \"2021-05-15\" , t1 = \"2021-05-16\" , left = bbox_32720_boulder [ 1 ], right = bbox_32720_boulder [ 2 ], top = bbox_32720_boulder [ 4 ], bottom = bbox_32720_boulder [ 3 ] ) ) b <- Sys.time () difftime ( b , a ) ## Time difference of 0.002738953 secs # Display the defined view v ## A data cube view object ## ## Dimensions: ## low high count pixel_size ## t 2021-05-01 2021-05-31 1 P1M ## y -3103099.52398788 15434400.4760121 185375 100 ## x -3178878.98542359 15369521.0145764 185484 100 ## ## SRS: \"EPSG:32720\" ## Temporal aggregation method: \"median\" ## Spatial resampling method: \"near\" Take a picture! Raster style # Record start time a <- Sys.time () s2_collection |> raster_cube ( v ) |> select_bands ( c ( \"B04\" , \"B05\" )) |> apply_pixel ( c ( \"(B05-B04)/(B05+B04)\" ), names = \"NDVI\" ) |> write_tif () |> raster :: stack () -> x x ## class : RasterStack ## dimensions : 185375, 185484, 34384096500, 1 (nrow, ncol, ncell, nlayers) ## resolution : 100, 100 (x, y) ## extent : -3178879, 15369521, -3103100, 15434400 (xmin, xmax, ymin, ymax) ## crs : +proj=utm +zone=20 +south +datum=WGS84 +units=m +no_defs ## names : NDVI b <- Sys.time () difftime ( b , a ) ## Time difference of 4.132932 mins STARS style # Record start time a <- Sys.time () s2_collection |> raster_cube ( v ) |> select_bands ( c ( \"B04\" , \"B05\" )) |> apply_pixel ( c ( \"(B05-B04)/(B05+B04)\" ), names = \"NDVI\" ) |> stars :: st_as_stars () -> y b <- Sys.time () difftime ( b , a ) ## Time difference of 1.459866 mins y ## stars_proxy object with 1 attribute in 1 file(s): ## $NDVI ## [1] \"[...]/filec5982c38536c.nc:NDVI\" ## ## dimension(s): ## from to offset delta refsys point ## x 1 185484 -3178879 100 WGS 84 / UTM zone 20S NA ## y 1 185375 15434400 -100 WGS 84 / UTM zone 20S NA ## time 1 1 NA NA POSIXct FALSE ## values x/y ## x NULL [x] ## y NULL [y] ## time [2021-05-01,2021-06-01) Extract data # Record start time a <- Sys.time () x <- s2_collection |> raster_cube ( v ) |> select_bands ( c ( \"B01\" , \"B02\" , \"B03\" , \"B04\" , \"B05\" , \"B06\" , \"B07\" , \"B08\" , \"B8A\" , \"B09\" , \"B11\" , \"B12\" )) |> extract_geom ( boulder_county $ multipolygon ) |> rename ( \"time\" = \"time\" , \"443\" = \"B01\" , \"490\" = \"B02\" , \"560\" = \"B03\" , \"665\" = \"B04\" , \"705\" = \"B05\" , \"740\" = \"B06\" , \"783\" = \"B07\" , \"842\" = \"B08\" , \"865\" = \"B8A\" , \"940\" = \"B09\" , \"1610\" = \"B11\" , \"2190\" = \"B12\" ) b <- Sys.time () difftime ( b , a ) ## Time difference of 1.699016 mins head ( x ) ## FID time 443 490 560 665 705 740 783 842 865 940 1610 ## 1 1 2021-05-01 11096 10929 10224 9893 9956 9706 9715 9641 9511 8459 5682 ## 2 1 2021-05-01 11631 11282 10550 10234 10288 10031 10032 9988 9828 9153 5802 ## 3 1 2021-05-01 11900 11393 10666 10337 10398 10142 10138 10093 9927 9461 5754 ## 4 1 2021-05-01 11406 10597 9928 9626 9694 9481 9516 9338 9336 8959 5726 ## 5 1 2021-05-01 11399 10939 10237 9905 9978 9738 9746 9633 9555 8925 5831 ## 6 1 2021-05-01 11600 11174 10462 10147 10209 9952 9960 9890 9760 9153 5773 ## 2190 ## 1 3917 ## 2 3981 ## 3 3937 ## 4 4054 ## 5 4097 ## 6 3990 Make a timeseries # Record start time a <- Sys.time () items <- s |> stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( -105.694362 , 39.912886 , -105.052774 , 40.262785 ), datetime = \"2020-01-01/2022-12-31\" , limit = 500 ) %>% post_request () S2.mask = image_mask ( \"SCL\" , values = c ( 3 , 8 , 9 )) col = 
stac_image_collection ( items $ features , asset_names = assets , property_filter = function ( x ) { x [[ \"eo:cloud_cover\" ]] < 30 }) v = cube_view ( srs = \"EPSG:4326\" , extent = list ( t0 = \"2020-01-01\" , t1 = \"2022-12-31\" , left = -105.694362 , right = -105.052774 , top = 40.262785 , bottom = 39.912886 ), dx = 0.001 , dy = 0.001 , dt = \"P1M\" , aggregation = \"median\" , resampling = \"bilinear\" ) library ( colorspace ) ndvi.col = function ( n ) { rev ( sequential_hcl ( n , \"Green-Yellow\" )) } library ( gdalcubes ) raster_cube ( col , v , mask = S2.mask ) |> select_bands ( c ( \"B04\" , \"B08\" )) |> apply_pixel ( \"(B08-B04)/(B08+B04)\" , \"NDVI\" ) |> gdalcubes :: animate ( col = ndvi.col , zlim = c ( -0.2 , 1 ), key.pos = 1 , save_as = \"anim.gif\" , fps = 4 ) ## [1] \"/Users/ty/Documents/Github/hackathon2023_datacube/docs/code_for_building_cube/anim.gif\" b <- Sys.time () difftime ( b , a ) ## Time difference of 4.716672 mins y ## stars_proxy object with 1 attribute in 1 file(s): ## $NDVI ## [1] \"[...]/filec5982c38536c.nc:NDVI\" ## ## dimension(s): ## from to offset delta refsys point ## x 1 185484 -3178879 100 WGS 84 / UTM zone 20S NA ## y 1 185375 15434400 -100 WGS 84 / UTM zone 20S NA ## time 1 1 NA NA POSIXct FALSE ## values x/y ## x NULL [x] ## y NULL [y] ## time [2021-05-01,2021-06-01) Saving Data Cubes to Local Storage There are occasions when we need to manipulate data cubes using other software. For such purposes, we can save data cubes to our local disk as individual netCDF files or as a series of GeoTIFF files. In the case of the latter, each temporal segment of the cube is saved as a separate (multiband) GeoTIFF file. Both netCDF and GeoTIFF formats allow for file size reduction through compression and data packing. This process involves transforming double precision numbers into smaller integer values using a scale and offset, which can be particularly useful for managing disk space (for more details, refer to the ?write_ncdf and ?write_tif documentation). gdalcubes_options ( ncdf_compression_level = 1 ) write_ncdf ( cube , file.path ( \"~/Desktop\" , basename ( tempfile ( fileext = \".nc\" )))) gdalcubes_options ( ncdf_compression_level = 0 ) write_tif() and write_ncdf() both return the path(s) to created file(s) as a character vector. 
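Because both writers return the created path(s), a saved cube can be reopened later without rebuilding it from the image collection. A minimal sketch, assuming gdalcubes' ncdf_cube() reader and any data cube object named cube (for example, the NDVI cube above):

# Minimal sketch: write a cube to netCDF, keep the returned path, and reopen it
# later with ncdf_cube() instead of re-querying the STAC catalog.
library(gdalcubes)
nc_path <- write_ncdf(cube, file.path(tempdir(), "my_cube.nc"))
reloaded <- ncdf_cube(nc_path)
reloaded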
items_2020 <- s |> stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( -105.694362 , 39.912886 , -105.052774 , 40.262785 ), datetime = \"2020-05-01/2020-06-30\" ) |> post_request () items_2021 <- s |> stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( -105.694362 , 39.912886 , -105.052774 , 40.262785 ), datetime = \"2021-05-01/2021-06-30\" ) |> post_request () col_2020 = stac_image_collection ( items_2020 $ features , asset_names = assets ) col_2021 = stac_image_collection ( items_2021 $ features , asset_names = assets ) v_2020 = cube_view ( srs = \"EPSG:32720\" , extent = list ( t0 = \"2020-05-01\" , t1 = \"2020-06-30\" , left = bbox_32720_boulder [ \"xmin\" ], right = bbox_32720_boulder [ \"xmax\" ], top = bbox_32720_boulder [ \"ymax\" ], bottom = bbox_32720_boulder [ \"ymin\" ]), dx = 100 , dy = 100 , dt = \"P1D\" , aggregation = \"median\" , resampling = \"bilinear\" ) v_2021 = cube_view ( v_2020 , extent = list ( t0 = \"2021-05-01\" , t1 = \"2021-06-30\" )) max_ndvi_mosaic <- function ( col , v ) { raster_cube ( col , v ) |> select_bands ( c ( \"B04\" , \"B08\" )) |> apply_pixel ( c ( \"(B08-B04)/(B08+B04)\" ), names = \"NDVI\" ) |> reduce_time ( \"max(NDVI)\" ) } suppressPackageStartupMessages ( library ( stars )) max_ndvi_mosaic ( col_2020 , v_2020 ) -> maxndvi_2020 max_ndvi_mosaic ( col_2021 , v_2021 ) -> maxndvi_2021 maxndvi_2021 maxndvi_2020 difference = maxndvi_2021 - maxndvi_2020 difference [ difference > -0.15 ] = NA names ( difference ) <- \"Difference of max NDVI (2020 - 2019)\" flood_polygon_data3 <- glue ( \"/vsizip/vsicurl/https://data.hydrosheds.org/file/hydrosheds-associated/gloric/GloRiC_v10_shapefile.zip/GloRiC_v10_shapefile/GloRiC_v10.shp\" ) |> st_read () |> st_as_sf ( coords = c ( \"lon\" , \"lat\" )) flood_polygon_data3 #st_read(\"/Users/ty/Downloads/GloRiC_v10_geodatabase/GloRiC_v10.gdb\") flood_polygon_data3 <- glue ( \"/vsizip/vsicurl/https://data.hydrosheds.org/file/hydrosheds-associated/gloric/GloRiC_v10_geodatabase.zip/GloRiC_v10_geodatabase/GloRiC_v10.gdb\" ) |> st_read () |> st_as_sf ( coords = c ( \"lon\" , \"lat\" )) flood_polygon_data3","title":"Stream data"},{"location":"data-library/stac_mount_save/#the-art-of-making-a-data-cube","text":"Ty Tuff, ESIIL Data Scientist 2023-10-27 #library(Rcpp) library ( sf ) library ( gdalcubes ) library ( rstac ) library ( gdalUtils ) library ( terra ) library ( rgdal ) library ( reshape2 ) library ( osmdata ) library ( terra ) library ( dplyr ) library ( stars ) library ( ggplot2 ) library ( colorspace ) library ( geos ) library ( osmdata ) library ( ggthemes ) library ( tidyr ) gdalcubes_options ( parallel = 8 ) sf :: sf_extSoftVersion () ## GEOS GDAL proj.4 GDAL_with_GEOS USE_PROJ_H ## \"3.11.0\" \"3.5.3\" \"9.1.0\" \"true\" \"true\" ## PROJ ## \"9.1.0\" gdalcubes_gdal_has_geos () ## [1] TRUE library ( osmdata ) library ( dplyr ) library ( sf ) library ( terra ) library ( tidyterra ) library ( glue ) library ( ggplot2 ) library ( ggthemes ) library ( stars ) library ( magrittr ) library ( landsat )","title":"The art of making a data cube"},{"location":"data-library/stac_mount_save/#the-philosophy-of-moving-data-in-the-cloud","text":"The philosophy of moving data in the cloud represents a paradigm shift in how we approach data within our analytical processes. Instead of the traditional method of transferring entire datasets to our local environments, the cloud encourages a more efficient model: bring your analysis to the data. 
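As a minimal sketch of this idea, the snippet below reuses the HydroSHEDS DEM URL and the approximate Boulder-area bounding box that appear later on this page; GDAL's /vsicurl/ driver reads only the file header up front and then fetches just the blocks a computation touches. The `dem_url`, `boulder_window`, and `dem_boulder` names are illustrative.

```r
library(terra)

# The same remote, zipped HydroSHEDS DEM used later on this page
dem_url <- paste0(
  "/vsizip/vsicurl/",
  "https://data.hydrosheds.org/file/hydrosheds-v1-dem/hyd_na_dem_15s.zip",
  "/hyd_na_dem_15s.tif"
)

dem <- terra::rast(dem_url)  # reads only metadata; nothing is downloaded yet

# Cropping sends the computation to the data: GDAL issues HTTP range
# requests for just the blocks overlapping this Boulder-area window
boulder_window <- terra::ext(-105.694362, -105.052774, 39.912886, 40.262785)
dem_boulder <- terra::crop(dem, boulder_window)
dem_boulder
```

Only the blocks that intersect the window ever cross the network; the rest of the continental DEM stays on the remote server.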
This approach minimizes data movement and leverages the cloud\u2019s computational power and scalability. By utilizing cloud-native tools and services, we can run our analyses directly on the data where it resides, selectively accessing and processing only what is necessary. This not only streamlines workflows but also significantly reduces overheads related to data transfer and storage management. In essence, the focus is on diverting computational resources to the data rather than the cumbersome and resource-intensive practice of moving large datasets to and fro.","title":"The philosophy of moving data in the cloud"},{"location":"data-library/stac_mount_save/#to-make-or-to-take-a-photo","text":"The distinction between making and taking a photograph lies in the approach and intent behind the camera. Taking a photo is often a reactive process, where the photographer captures moments as they naturally unfold, seizing the spontaneity of life without alteration. It\u2019s a passive form of photography where the emphasis is on the right timing and the natural interplay of elements within the frame. On the other hand, making a photo is a proactive and deliberate act. It is akin to craftsmanship, where a professional photographer starts with a concept and utilizes a variety of tools and techniques to stage and construct the desired scene. They actively manipulate lighting, composition, and subjects to create a photograph that aligns with their pre-visualized artistic vision. While both methods use a camera to produce a photograph, making a photo involves a creation process, whereas taking a photo is about finding the scene. David Yarrow is a famous photographer who \u2018makes\u2019 his photographs.","title":"\u2018To Make\u2019 or \u2018To Take\u2019 a photo"},{"location":"data-library/stac_mount_save/#what-does-it-mean-to-make-a-data-cube","text":"The artistry of Ansel Adams\u2019 photography serves as a compelling analogy for the meticulous craft of building a data cube from cloud data sources using tools like STAC and GDAL VSI. Just as Adams would survey the vastness of a landscape, discerning the interplay of light and shadow upon the mountains before him, a data architect surveys the expanse of available data. In this analogy, the raw data are the majestic mountains and sweeping landscapes waiting to be captured. The STAC collection acts as the photographer\u2019s deliberate choice of scene, pointing the camera lens\u2014our data tools\u2014towards the most telling and coherent dataset. Just as Adams\u2019 photographs are more than mere records of a landscape, but rather a confluence of his vision, technique, and the scene\u2019s natural beauty, so too is the data cube more than the sum of its parts. It is the artful synthesis of information, crafted and composed with the skill and intent of an artist, producing not just a tool for analysis but a harmonized, data-driven portrait of the world it represents. The builder of the data cube is, indeed, an artist, and the data cube their masterpiece, revealing not just data, but a story, a perspective, a landscape sewn from the raw material of cloud-sourced information. As Adams would adjust his viewfinder, setting the boundaries of his photographic frame, the data builder sets the view window, filtering and transferring relevant data to their own medium, akin to Adams\u2019 film. 
This is where the raw data is transformed, organized into the structured form of a data frame or data cube, a process not unlike the careful development of a photograph in a darkroom. Here, the data cube creator, much like Adams with his careful dodging and burning, harmonizes disparate elements into a cohesive whole, each decision reflecting an intention and vision for the final product.","title":"What does it mean to \u2018make\u2019 a data cube?"},{"location":"data-library/stac_mount_save/#1-the-rat-through-the-snake-problem-scalability-with-cloud-computing","text":"Just like a snake that swallows a rat, traditional computing systems often struggle to process the large volumes of environmental data \u2014 they\u2019re constrained by their static hardware limitations. Cloud computing introduces a python-esque capability: massive scalability. By migrating to the cloud, we essentially make the snake bigger, allowing it to handle larger \u201cprey.\u201d Scalable computers in the cloud can grow with the demand, providing the necessary computational power to process extensive datasets, which is vital in a field where data volumes are increasing exponentially.","title":"1) The Rat through the Snake Problem: Scalability with Cloud Computing"},{"location":"data-library/stac_mount_save/#2-the-antelope-through-the-python-problem-streamlining-with-gdal-vsi","text":"As we scale up, we encounter a new challenge: trying to pass an antelope through a python \u2014 a metaphor for the next level of complexity in data processing. The sheer size and complexity of the data can become overwhelming. This is where GDAL\u2019s Virtual File System (VSI) becomes our ecological adaptation. VSI allows us to access remote data transparently and more efficiently. Instead of ingesting the entire \u201cantelope,\u201d VSI enables the \u201cpython\u201d to dynamically access and process only the parts of the data it needs, when it needs them, much like constriction before digestion. This selective access minimizes the need for local storage and expedites the data handling process.","title":"2) The Antelope through the Python Problem: Streamlining with GDAL VSI"},{"location":"data-library/stac_mount_save/#3-drinking-from-a-fire-hose-accelerated-inference-with-ai-and-ml","text":"Once we\u2019ve enabled the flow of large amounts of digestible data, we encounter the metaphorical challenge of drinking from a fire hose. The data, now flowing and accessible, is immense and rapid \u2014 posing a challenge not just to store and process, but to understand and derive meaning from in real-time. This is where artificial intelligence (AI) and machine learning (ML) step in. These technologies act as a sophisticated filtration system, enabling us to drink safely and beneficially from the torrent. AI and ML can analyze patterns, make predictions, and infer insights at a pace that keeps up with the fast stream of data, turning raw information into actionable knowledge. By addressing these three pivotal challenges with cloud computing, GDAL VSI, and AI/ML, we not only manage to consume the data effectively but also transform our capabilities in environmental data science. 
We can move from mere data ingestion to meaningful data interpretation, all at a scale and speed necessary for impactful environmental analysis.","title":"3) Drinking from a Fire Hose: Accelerated Inference with AI and ML"},{"location":"data-library/stac_mount_save/#mounting-data","text":"A void-filled Digital Elevation Model (DEM) is a comprehensive topographical representation where any missing data points, known as voids, have been filled in. These voids can occur due to various reasons, such as clouds or technical errors during data collection. In a void-filled DEM, these gaps are interpolated or estimated using the surrounding data to create a continuous, seamless surface model. This process enhances the utility and accuracy of the DEM for hydrological modeling, terrain analysis, and other geographical applications. The HydroSHEDS website ( https://www.hydrosheds.org/hydrosheds-core-downloads ) provides access to high-quality, void-filled DEM datasets like the DEM_continuous_CONUS_15s, which users can download and easily integrate into spatial analysis workflows using tools such as \u2018terra\u2019 in R, allowing for sophisticated environmental and geographical research and planning. # Record start time a <- Sys.time () # Create a string with the file path using glue, then download and read the DEM file as a raster object DEM_continuous_CONUS_15s <- glue ( \"/vsizip/vsicurl/\" , #magic remote connection \"https://data.hydrosheds.org/file/hydrosheds-v1-dem/hyd_na_dem_15s.zip\" , #copied link to download location \"/hyd_na_dem_15s.tif\" ) %>% #path inside zip file terra :: rast () # The 'glue' function constructs the file path string, which is then passed to 'terra::rast()' to read the DEM file into R as a raster layer. '/vsizip/vsicurl/' is a special GDAL virtual file system syntax that allows reading directly from a zipped file on a remote server. # Record end time and calculate the time difference b <- Sys.time () difftime ( b , a ) ## Time difference of 4.603666 secs # The resulting raster object is stored in 'DEM_continuous_CONUS_15s', which now contains the void-filled DEM data ready for use DEM_continuous_CONUS_15s # Prints out the details of the 'DEM_continuous_CONUS_15s' raster object ## class : SpatRaster ## dimensions : 13920, 20640, 1 (nrow, ncol, nlyr) ## resolution : 0.004166667, 0.004166667 (x, y) ## extent : -138, -52, 5, 63 (xmin, xmax, ymin, ymax) ## coord. ref. : lon/lat WGS 84 (EPSG:4326) ## source : hyd_na_dem_15s.tif ## name : Band_1 # output is a SpatRaster, which is the object type associated with the 'terra' package. Continuous DEM for North America # Record start time a <- Sys.time () ggplot () + geom_spatraster ( data = DEM_continuous_CONUS_15s ) + theme_tufte () b <- Sys.time () difftime ( b , a ) ## Time difference of 52.49061 secs Calculate Slope from that DEM SLOPE_continuous_CONUS_15s <- terra :: terrain ( DEM_continuous_CONUS_15s , \"slope\" ) SLOPE_continuous_CONUS_15s ## class : SpatRaster ## dimensions : 13920, 20640, 1 (nrow, ncol, nlyr) ## resolution : 0.004166667, 0.004166667 (x, y) ## extent : -138, -52, 5, 63 (xmin, xmax, ymin, ymax) ## coord. ref. 
: lon/lat WGS 84 (EPSG:4326) ## source(s) : memory ## name : slope ## min value : 0.00000 ## max value : 56.98691 # Record start time a <- Sys.time () ggplot () + geom_spatraster ( data = SLOPE_continuous_CONUS_15s ) + theme_tufte () b <- Sys.time () difftime ( b , a ) ## Time difference of 3.859545 secs Calculate aspect from DEM ASPECT_continuous_CONUS_15s <- terra :: terrain ( DEM_continuous_CONUS_15s , \"aspect\" ) ASPECT_continuous_CONUS_15s ## class : SpatRaster ## dimensions : 13920, 20640, 1 (nrow, ncol, nlyr) ## resolution : 0.004166667, 0.004166667 (x, y) ## extent : -138, -52, 5, 63 (xmin, xmax, ymin, ymax) ## coord. ref. : lon/lat WGS 84 (EPSG:4326) ## source(s) : memory ## name : aspect ## min value : 0 ## max value : 360 # Record start time a <- Sys.time () ggplot () + geom_spatraster ( data = ASPECT_continuous_CONUS_15s ) + theme_tufte () b <- Sys.time () difftime ( b , a ) ## Time difference of 3.650267 secs Create a cube from those layers! mini_stack <- c ( DEM_continuous_CONUS_15s , SLOPE_continuous_CONUS_15s , ASPECT_continuous_CONUS_15s ) mini_stack ## class : SpatRaster ## dimensions : 13920, 20640, 3 (nrow, ncol, nlyr) ## resolution : 0.004166667, 0.004166667 (x, y) ## extent : -138, -52, 5, 63 (xmin, xmax, ymin, ymax) ## coord. ref. : lon/lat WGS 84 (EPSG:4326) ## sources : hyd_na_dem_15s.tif ## memory ## memory ## names : Band_1, slope, aspect ## min values : ? , 0.00000, 0 ## max values : ? , 56.98691, 360 Reproject and return the bounding box coordinates for our Area of Interest # Transform the filtered geometry to EPSG:4326 and store its bounding box # Record start time a <- Sys.time () DEM_continuous_CONUS_15s |> stars :: st_as_stars () |> st_transform ( \"EPSG:4326\" ) |> st_bbox () -> bbox_4326 DEM_continuous_CONUS_15s |> stars :: st_as_stars () |> st_transform ( \"EPSG:32618\" ) |> st_bbox () -> bbox_32618 b <- Sys.time () difftime ( b , a ) ## Time difference of 3.7653 mins Get a polygon for Boulder County, reproject, and return bounding box. This is so I can make a smaller search in the stac catalog. boulder_county <- getbb ( \"boulder, co\" , format_out = \"sf_polygon\" ) boulder_county $ multipolygon |> st_transform ( crs = 4326 ) |> st_bbox () -> bbox_4326_boulder boulder_county $ multipolygon |> st_transform ( crs = 32720 ) |> st_bbox () -> bbox_32720_boulder Get a polygon for the United States and crop it to be the same size as the DEM above. aoi <- getbb ( \"United States\" , format_out = \"sf_polygon\" ) conus <- aoi $ multipolygon |> st_crop ( bbox_4326 ) ggplot ( data = conus ) + geom_sf () Search the Stac catalog. STAC, or SpatioTemporal Asset Catalog, is an open-source specification designed to standardize the way geospatial data is indexed and discovered. Developed by Element 84 among others, it facilitates better interoperability and sharing of geospatial assets by providing a common language for describing them. STAC\u2019s flexible design allows for easy cataloging of data, making it simpler for individuals and systems to search and retrieve geospatial information. By effectively organizing data about the Earth\u2019s spatial and temporal characteristics, STAC enables users to harness the full power of the cloud and modern data processing technologies, optimizing the way we access and analyze environmental data on a global scale. 
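Before querying the catalog root below, it can help to see which collections the API exposes. Here is a minimal sketch using rstac's `collections()` endpoint helper (available in recent rstac releases); the `earth_search` and `cols` names are illustrative.

```r
library(rstac)

earth_search <- stac("https://earth-search.aws.element84.com/v1")

# Ask the API for its list of collections
cols <- earth_search |>
  collections() |>
  get_request()

# Each element of cols$collections describes one collection;
# pull the collection ids advertised by the API
vapply(cols$collections, function(x) x$id, character(1))
```

Querying the catalog's root document, as below, confirms the connection and reports which conformance classes the service implements.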
stac ( \"https://earth-search.aws.element84.com/v1\" ) |> get_request () ## ###STACCatalog ## - id: earth-search-aws ## - description: A STAC API of public datasets on AWS ## - field(s): stac_version, type, id, title, description, links, conformsTo Element 84\u2019s Earth Search is a STAC compliant search and discovery API that offers users access to a vast collection of geospatial open datasets hosted on AWS. It serves as a centralized search catalog providing standardized metadata for these open datasets, designed to be freely used and integrated into various applications. Alongside the API, Element 84 also provides a web application named Earth Search Console, which is map-centric and allows users to explore and visualize the data contained within the Earth Search API\u2019s catalog. This suite of tools is part of Element 84\u2019s initiative to make geospatial data more accessible and actionable for a wide range of users and applications. collection_formats () ## CHIRPS_v2_0_daily_p05_tif | Image collection format for CHIRPS v 2.0 daily ## | global precipitation dataset (0.05 degrees ## | resolution) from GeoTIFFs, expects list of .tif ## | or .tif.gz files as input. [TAGS: CHIRPS, ## | precipitation] ## CHIRPS_v2_0_monthly_p05_tif | Image collection format for CHIRPS v 2.0 monthly ## | global precipitation dataset (0.05 degrees ## | resolution) from GeoTIFFs, expects list of .tif ## | or .tif.gz files as input. [TAGS: CHIRPS, ## | precipitation] ## ESA_CCI_SM_ACTIVE | Collection format for ESA CCI soil moisture ## | active product (version 4.7) [TAGS: Soil ## | Moisture, ESA, CCI] ## ESA_CCI_SM_PASSIVE | Collection format for ESA CCI soil moisture ## | passive product (version 4.7) [TAGS: Soil ## | Moisture, ESA, CCI] ## GPM_IMERG_3B_DAY_GIS_V06A | Collection format for daily ## | IMERG_3B_DAY_GIS_V06A data [TAGS: Precipitation, ## | GPM, IMERG] ## L8_L1TP | Collection format for Landsat 8 Level 1 TP ## | product [TAGS: Landsat, USGS, Level 1, NASA] ## L8_SR | Collection format for Landsat 8 surface ## | reflectance product [TAGS: Landsat, USGS, Level ## | 2, NASA, surface reflectance] ## MAXAR | Preliminary collection format for MAXAR open ## | data, visual only (under development) [TAGS: ] ## MxD09GA | Collection format for selected bands from the ## | MODIS MxD09GA (Aqua and Terra) product [TAGS: ## | MODIS, surface reflectance] ## MxD10A2 | Collection format for selected bands from the ## | MODIS MxD10A2 (Aqua and Terra) v006 Snow Cover ## | product [TAGS: MODIS, Snow Cover] ## MxD11A1 | Collection format for selected bands from the ## | MODIS MxD11A2 (Aqua and Terra) v006 Land Surface ## | Temperature product [TAGS: MODIS, LST] ## MxD11A2 | Collection format for selected bands from the ## | MODIS MxD11A2 (Aqua and Terra) v006 Land Surface ## | Temperature product [TAGS: MODIS, LST] ## MxD13A2 | Collection format for selected bands from the ## | MODIS MxD13A2 (Aqua and Terra) product [TAGS: ## | MODIS, VI, NDVI, EVI] ## MxD13A3 | Collection format for selected bands from the ## | MODIS MxD13A3 (Aqua and Terra) product [TAGS: ## | MODIS, VI, NDVI, EVI] ## MxD13Q1 | Collection format for selected bands from the ## | MODIS MxD13Q1 (Aqua and Terra) product [TAGS: ## | MODIS, VI, NDVI, EVI] ## MxD14A2 | Collection format for the MODIS MxD14A2 (Aqua ## | and Terra) product [TAGS: MODIS, Fire] ## PlanetScope_3B_AnalyticMS_SR | Image collection format for PlanetScope 4-band ## | scenes [TAGS: PlanetScope, BOA, Surface ## | Reflectance] ## Sentinel2_L1C | Image collection format for 
Sentinel 2 Level 1C ## | data as downloaded from the Copernicus Open ## | Access Hub, expects a list of file paths as ## | input. The format works on original ZIP ## | compressed as well as uncompressed imagery. ## | [TAGS: Sentinel, Copernicus, ESA, TOA] ## Sentinel2_L1C_AWS | Image collection format for Sentinel 2 Level 1C ## | data in AWS [TAGS: Sentinel, Copernicus, ESA, ## | TOA] ## Sentinel2_L2A | Image collection format for Sentinel 2 Level 2A ## | data as downloaded from the Copernicus Open ## | Access Hub, expects a list of file paths as ## | input. The format should work on original ZIP ## | compressed as well as uncompressed imagery. ## | [TAGS: Sentinel, Copernicus, ESA, BOA, Surface ## | Reflectance] ## Sentinel2_L2A_THEIA | Image collection format for Sentinel 2 Level 2A ## | data as downloaded from Theia. [TAGS: Sentinel, ## | ESA, Flat Reflectance, Theia] Building a stac collection by aiming your camera at the landscape Creating a STAC collection is akin to a photographer framing a shot; the landscape is rich with diverse data, mirroring a scene bustling with potential subjects, colors, and light. Just as a photographer selects a portion of the vista to capture, focusing on elements that will compose a compelling image, a data scientist must similarly navigate the vast data terrain. They must \u2018point their camera\u2019 judiciously, ensuring that the \u2018frame\u2019 encapsulates the precise data needed. This careful selection is crucial, as it determines the relevance and quality of the data collection, much like the photographer\u2019s choice dictates the story a photograph will tell. # Record start time a <- Sys.time () # Initialize STAC connection s = stac ( \"https://earth-search.aws.element84.com/v0\" ) # Search for Sentinel-2 images within specified bounding box and date range #22 Million items items = s |> stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( bbox_4326_boulder [ \"xmin\" ], bbox_4326_boulder [ \"ymin\" ], bbox_4326_boulder [ \"xmax\" ], bbox_4326_boulder [ \"ymax\" ]), datetime = \"2021-05-15/2021-05-16\" ) |> post_request () |> items_fetch ( progress = FALSE ) # Print number of found items length ( items $ features ) ## [1] 1 # Prepare the assets for analysis library ( gdalcubes ) assets = c ( \"B01\" , \"B02\" , \"B03\" , \"B04\" , \"B05\" , \"B06\" , \"B07\" , \"B08\" , \"B8A\" , \"B09\" , \"B11\" , \"B12\" , \"SCL\" ) s2_collection = stac_image_collection ( items $ features , asset_names = assets , property_filter = function ( x ) { x [[ \"eo:cloud_cover\" ]] < 20 }) #all images with less than 20% clouds b <- Sys.time () difftime ( b , a ) ## Time difference of 0.4706092 secs # Display the image collection s2_collection ## Image collection object, referencing 1 images with 13 bands ## Images: ## name left top bottom right ## 1 S2B_13TDE_20210516_0_L2A -106.1832 40.65079 39.65576 -104.8846 ## datetime srs ## 1 2021-05-16T18:02:54 EPSG:32613 ## ## Bands: ## name offset scale unit nodata image_count ## 1 B01 0 1 1 ## 2 B02 0 1 1 ## 3 B03 0 1 1 ## 4 B04 0 1 1 ## 5 B05 0 1 1 ## 6 B06 0 1 1 ## 7 B07 0 1 1 ## 8 B08 0 1 1 ## 9 B09 0 1 1 ## 10 B11 0 1 1 ## 11 B12 0 1 1 ## 12 B8A 0 1 1 ## 13 SCL 0 1 1 Setting up your camera and film The camera through which the data scientist frames the shot is multifaceted, akin to the tools and processes they employ. The camera\u2019s film, analogous to the data cube, defines the resolution and dimensions of the captured data, shaping how the final dataset will be utilized. 
The lens and its settings\u2014focus, aperture, and exposure\u2014determine the clarity, depth, and breadth of the captured information, much like the algorithms and parameters set by the data scientist dictate the granularity and scope of the data cube. The flash, like data enhancement techniques, can illuminate hidden details, ensuring that the data cube, the final product, is as informative and accurate as the landscape it represents. # Record start time a <- Sys.time () # Define a specific view on the satellite image collection v = cube_view ( srs = \"EPSG:32720\" , #this is harder than expected. dx = 100 , dy = 100 , dt = \"P1M\" , aggregation = \"median\" , resampling = \"near\" , extent = list ( t0 = \"2021-05-15\" , t1 = \"2021-05-16\" , left = bbox_32720_boulder [ 1 ], right = bbox_32720_boulder [ 2 ], top = bbox_32720_boulder [ 4 ], bottom = bbox_32720_boulder [ 3 ] ) ) b <- Sys.time () difftime ( b , a ) ## Time difference of 0.002738953 secs # Display the defined view v ## A data cube view object ## ## Dimensions: ## low high count pixel_size ## t 2021-05-01 2021-05-31 1 P1M ## y -3103099.52398788 15434400.4760121 185375 100 ## x -3178878.98542359 15369521.0145764 185484 100 ## ## SRS: \"EPSG:32720\" ## Temporal aggregation method: \"median\" ## Spatial resampling method: \"near\" Take a picture! Raster style # Record start time a <- Sys.time () s2_collection |> raster_cube ( v ) |> select_bands ( c ( \"B04\" , \"B05\" )) |> apply_pixel ( c ( \"(B05-B04)/(B05+B04)\" ), names = \"NDVI\" ) |> write_tif () |> raster :: stack () -> x x ## class : RasterStack ## dimensions : 185375, 185484, 34384096500, 1 (nrow, ncol, ncell, nlayers) ## resolution : 100, 100 (x, y) ## extent : -3178879, 15369521, -3103100, 15434400 (xmin, xmax, ymin, ymax) ## crs : +proj=utm +zone=20 +south +datum=WGS84 +units=m +no_defs ## names : NDVI b <- Sys.time () difftime ( b , a ) ## Time difference of 4.132932 mins STARS style # Record start time a <- Sys.time () s2_collection |> raster_cube ( v ) |> select_bands ( c ( \"B04\" , \"B05\" )) |> apply_pixel ( c ( \"(B05-B04)/(B05+B04)\" ), names = \"NDVI\" ) |> stars :: st_as_stars () -> y b <- Sys.time () difftime ( b , a ) ## Time difference of 1.459866 mins y ## stars_proxy object with 1 attribute in 1 file(s): ## $NDVI ## [1] \"[...]/filec5982c38536c.nc:NDVI\" ## ## dimension(s): ## from to offset delta refsys point ## x 1 185484 -3178879 100 WGS 84 / UTM zone 20S NA ## y 1 185375 15434400 -100 WGS 84 / UTM zone 20S NA ## time 1 1 NA NA POSIXct FALSE ## values x/y ## x NULL [x] ## y NULL [y] ## time [2021-05-01,2021-06-01) Extract data # Record start time a <- Sys.time () x <- s2_collection |> raster_cube ( v ) |> select_bands ( c ( \"B01\" , \"B02\" , \"B03\" , \"B04\" , \"B05\" , \"B06\" , \"B07\" , \"B08\" , \"B8A\" , \"B09\" , \"B11\" , \"B12\" )) |> extract_geom ( boulder_county $ multipolygon ) |> rename ( \"time\" = \"time\" , \"443\" = \"B01\" , \"490\" = \"B02\" , \"560\" = \"B03\" , \"665\" = \"B04\" , \"705\" = \"B05\" , \"740\" = \"B06\" , \"783\" = \"B07\" , \"842\" = \"B08\" , \"865\" = \"B8A\" , \"940\" = \"B09\" , \"1610\" = \"B11\" , \"2190\" = \"B12\" ) b <- Sys.time () difftime ( b , a ) ## Time difference of 1.699016 mins head ( x ) ## FID time 443 490 560 665 705 740 783 842 865 940 1610 ## 1 1 2021-05-01 11096 10929 10224 9893 9956 9706 9715 9641 9511 8459 5682 ## 2 1 2021-05-01 11631 11282 10550 10234 10288 10031 10032 9988 9828 9153 5802 ## 3 1 2021-05-01 11900 11393 10666 10337 10398 10142 10138 10093 9927 9461 5754 ## 4 1 
2021-05-01 11406 10597 9928 9626 9694 9481 9516 9338 9336 8959 5726 ## 5 1 2021-05-01 11399 10939 10237 9905 9978 9738 9746 9633 9555 8925 5831 ## 6 1 2021-05-01 11600 11174 10462 10147 10209 9952 9960 9890 9760 9153 5773 ## 2190 ## 1 3917 ## 2 3981 ## 3 3937 ## 4 4054 ## 5 4097 ## 6 3990 Make a timeseries # Record start time a <- Sys.time () items <- s |> stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( -105.694362 , 39.912886 , -105.052774 , 40.262785 ), datetime = \"2020-01-01/2022-12-31\" , limit = 500 ) %>% post_request () S2.mask = image_mask ( \"SCL\" , values = c ( 3 , 8 , 9 )) col = stac_image_collection ( items $ features , asset_names = assets , property_filter = function ( x ) { x [[ \"eo:cloud_cover\" ]] < 30 }) v = cube_view ( srs = \"EPSG:4326\" , extent = list ( t0 = \"2020-01-01\" , t1 = \"2022-12-31\" , left = -105.694362 , right = -105.052774 , top = 40.262785 , bottom = 39.912886 ), dx = 0.001 , dy = 0.001 , dt = \"P1M\" , aggregation = \"median\" , resampling = \"bilinear\" ) library ( colorspace ) ndvi.col = function ( n ) { rev ( sequential_hcl ( n , \"Green-Yellow\" )) } library ( gdalcubes ) raster_cube ( col , v , mask = S2.mask ) |> select_bands ( c ( \"B04\" , \"B08\" )) |> apply_pixel ( \"(B08-B04)/(B08+B04)\" , \"NDVI\" ) |> gdalcubes :: animate ( col = ndvi.col , zlim = c ( -0.2 , 1 ), key.pos = 1 , save_as = \"anim.gif\" , fps = 4 ) ## [1] \"/Users/ty/Documents/Github/hackathon2023_datacube/docs/code_for_building_cube/anim.gif\" b <- Sys.time () difftime ( b , a ) ## Time difference of 4.716672 mins y ## stars_proxy object with 1 attribute in 1 file(s): ## $NDVI ## [1] \"[...]/filec5982c38536c.nc:NDVI\" ## ## dimension(s): ## from to offset delta refsys point ## x 1 185484 -3178879 100 WGS 84 / UTM zone 20S NA ## y 1 185375 15434400 -100 WGS 84 / UTM zone 20S NA ## time 1 1 NA NA POSIXct FALSE ## values x/y ## x NULL [x] ## y NULL [y] ## time [2021-05-01,2021-06-01) Saving Data Cubes to Local Storage There are occasions when we need to manipulate data cubes using other software. For such purposes, we can save data cubes to our local disk as individual netCDF files or as a series of GeoTIFF files. In the case of the latter, each temporal segment of the cube is saved as a separate (multiband) GeoTIFF file. Both netCDF and GeoTIFF formats allow for file size reduction through compression and data packing. This process involves transforming double precision numbers into smaller integer values using a scale and offset, which can be particularly useful for managing disk space (for more details, refer to the ?write_ncdf and ?write_tif documentation). gdalcubes_options ( ncdf_compression_level = 1 ) write_ncdf ( cube , file.path ( \"~/Desktop\" , basename ( tempfile ( fileext = \".nc\" )))) gdalcubes_options ( ncdf_compression_level = 0 ) write_tif() and write_ncdf() both return the path(s) to created file(s) as a character vector. 
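As a minimal round-trip sketch: export a cube to netCDF with compression and reopen it with other GDAL-aware packages. Here `ndvi_cube` stands in for any cube built with raster_cube() on this page, and the output path is a placeholder.

```r
library(gdalcubes)
library(stars)
library(terra)

nc_file <- file.path(tempdir(), "ndvi_cube.nc")

# Write the cube as a single compressed netCDF file
gdalcubes_options(ncdf_compression_level = 1)
write_ncdf(ndvi_cube, nc_file)
gdalcubes_options(ncdf_compression_level = 0)

# The exported file is self-describing, so other tools can pick it up
ndvi_stars <- stars::read_ncdf(nc_file)  # stars object for the stars workflow
ndvi_terra <- terra::rast(nc_file)       # SpatRaster for the terra workflow
```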
items_2020 <- s |> stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( -105.694362 , 39.912886 , -105.052774 , 40.262785 ), datetime = \"2020-05-01/2020-06-30\" ) |> post_request () items_2021 <- s |> stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( -105.694362 , 39.912886 , -105.052774 , 40.262785 ), datetime = \"2021-05-01/2021-06-30\" ) |> post_request () col_2020 = stac_image_collection ( items_2020 $ features , asset_names = assets ) col_2021 = stac_image_collection ( items_2021 $ features , asset_names = assets ) v_2020 = cube_view ( srs = \"EPSG:32720\" , extent = list ( t0 = \"2020-05-01\" , t1 = \"2020-06-30\" , left = bbox_32720_boulder [ \"xmin\" ], right = bbox_32720_boulder [ \"xmax\" ], top = bbox_32720_boulder [ \"ymax\" ], bottom = bbox_32720_boulder [ \"ymin\" ]), dx = 100 , dy = 100 , dt = \"P1D\" , aggregation = \"median\" , resampling = \"bilinear\" ) v_2021 = cube_view ( v_2020 , extent = list ( t0 = \"2021-05-01\" , t1 = \"2021-06-30\" )) max_ndvi_mosaic <- function ( col , v ) { raster_cube ( col , v ) |> select_bands ( c ( \"B04\" , \"B08\" )) |> apply_pixel ( c ( \"(B08-B04)/(B08+B04)\" ), names = \"NDVI\" ) |> reduce_time ( \"max(NDVI)\" ) } suppressPackageStartupMessages ( library ( stars )) max_ndvi_mosaic ( col_2020 , v_2020 ) -> maxndvi_2020 max_ndvi_mosaic ( col_2021 , v_2021 ) -> maxndvi_2021 maxndvi_2021 maxndvi_2020 difference = maxndvi_2021 - maxndvi_2020 difference [ difference > -0.15 ] = NA names ( difference ) <- \"Difference of max NDVI (2020 - 2019)\" flood_polygon_data3 <- glue ( \"/vsizip/vsicurl/https://data.hydrosheds.org/file/hydrosheds-associated/gloric/GloRiC_v10_shapefile.zip/GloRiC_v10_shapefile/GloRiC_v10.shp\" ) |> st_read () |> st_as_sf ( coords = c ( \"lon\" , \"lat\" )) flood_polygon_data3 #st_read(\"/Users/ty/Downloads/GloRiC_v10_geodatabase/GloRiC_v10.gdb\") flood_polygon_data3 <- glue ( \"/vsizip/vsicurl/https://data.hydrosheds.org/file/hydrosheds-associated/gloric/GloRiC_v10_geodatabase.zip/GloRiC_v10_geodatabase/GloRiC_v10.gdb\" ) |> st_read () |> st_as_sf ( coords = c ( \"lon\" , \"lat\" )) flood_polygon_data3","title":"Mounting data"},{"location":"data-library/stac_simple/","text":"Accessing data via STAC \u00b6 ESIIL, 2024 Ty Tuff & Tyler McIntosh SpatioTemporal Asset Catalog, is an open-source specification designed to standardize the way geospatial data is indexed and discovered. Developed by Element 84 among others, it facilitates better interoperability and sharing of geospatial assets by providing a common language for describing them. STAC\u2019s flexible design allows for easy cataloging of data, making it simpler for individuals and systems to search and retrieve geospatial information. By effectively organizing data about the Earth\u2019s spatial and temporal characteristics, STAC enables users to harness the full power of the cloud and modern data processing technologies, optimizing the way we access and analyze environmental data on a global scale. Element 84\u2019s Earth Search is a STAC compliant search and discovery API that offers users access to a vast collection of geospatial open datasets hosted on AWS. It serves as a centralized search catalog providing standardized metadata for these open datasets, designed to be freely used and integrated into various applications. Alongside the API, Element 84 also provides a web application named Earth Search Console, which is map-centric and allows users to explore and visualize the data contained within the Earth Search API\u2019s catalog. 
This suite of tools is part of Element 84\u2019s initiative to make geospatial data more accessible and actionable for a wide range of users and applications. First, we need an area of interest \u00b6 require(glue) require(sf) require(gdalcubes) require(rstac) #Access ecoregiosn via VSI epa_l3 <- glue::glue( \"/vsizip/vsicurl/\", #magic remote connection \"https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip\", #copied link to download location \"/us_eco_l3.shp\") |> #path inside zip file sf::st_read() #Get just S.Rockies and ensure that it is in EPSG:4326 southernRockies <- epa_l3 |> dplyr::filter(US_L3NAME == \"Southern Rockies\") |> dplyr::group_by(US_L3NAME) |> dplyr::summarize(geometry = sf::st_union(geometry)) |> sf::st_transform(\"EPSG:4326\") bboxSR4326 <- sf::st_bbox(southernRockies) To access data from STAC correctly, we need to request the data in a projected CRS. southernRockies <- southernRockies |> sf::st_transform(\"EPSG:32613\") bboxSRproj <- sf::st_bbox(southernRockies) Search the STAC catalog \u00b6 To get information about a STAC archive, you can use rstac::get_request(). You can also use gdalcubes::collection_formats() to see various collection formats that you may encounter. To search a STAC catalog online, stacindex.org is a useful tool. For example, here is the page for the Earth Search catalog by Element84 that we will use. stac(\"https://earth-search.aws.element84.com/v1\") |> get_request() ## ###STACCatalog ## - id: earth-search-aws ## - description: A STAC API of public datasets on AWS ## - field(s): stac_version, type, id, title, description, links, conformsTo collection_formats() Initialize a STAC connection (rstac::stac()) and search for data that you are interested in (rstac::stac_search()). Note that you will request a spatial area of interest as well as a temporal window of interest. To get more information on the data and how it is structured, you can examine the 'items' object we create. # Record start time a <- Sys.time() # Initialize STAC connection s = rstac::stac(\"https://earth-search.aws.element84.com/v0\") # Search for Sentinel-2 images within specified bounding box and date range #22 Million items items = s |> rstac::stac_search(collections = \"sentinel-s2-l2a-cogs\", bbox = c(bboxSR4326[\"xmin\"], bboxSR4326[\"ymin\"], bboxSR4326[\"xmax\"], bboxSR4326[\"ymax\"]), datetime = \"2021-05-15/2021-05-16\") |> post_request() |> items_fetch(progress = FALSE) # Print number of found items length(items$features) items There is data we want! Now, we need to prepare the assets for us to access. We will list the assets we want, and set any property filters that we would like to apply. # Prepare the assets for analysis library(gdalcubes) assets = c(\"B01\", \"B02\", \"B03\", \"B04\", \"B05\", \"B06\", \"B07\", \"B08\", \"B8A\", \"B09\", \"B11\", \"B12\", \"SCL\") s2_collection = gdalcubes::stac_image_collection(items$features, asset_names = assets, property_filter = function(x) {x[[\"eo:cloud_cover\"]] < 20}) #all images with less than 20% clouds b <- Sys.time() difftime(b, a) # Display the image collection s2_collection Access the data \u00b6 First, we need to set up our view on the collection. We will set our spatial and temporal resolution, as well as how we want the data temporally aggregated and spatially resampled. We then also set our spatial and temporal window. Note that the spatial extent here should be in a projected CRS! 
# Record start time a <- Sys.time() # Define a specific view on the satellite image collection v = gdalcubes::cube_view( srs = \"EPSG:32613\", dx = 100, dy = 100, dt = \"P1M\", aggregation = \"median\", resampling = \"near\", extent = list( t0 = \"2021-05-15\", t1 = \"2021-05-16\", left = bboxSRproj[1], right = bboxSRproj[2], top = bboxSRproj[4], bottom = bboxSRproj[3] ) ) b <- Sys.time() difftime(b, a) # Display the defined view v Finally, let's take our snapshot of the data! Let's also calculate NDVI and then view the data. ``` Record start time \u00b6 a <- Sys.time() s2_collection |> raster_cube(v) |> select_bands(c( \"B04\", \"B05\")) |> apply_pixel(c(\"(B05-B04)/(B05+B04)\"), names=\"NDVI\") |> write_tif() |> raster::stack() -> x View the product \u00b6 x b <- Sys.time() difftime(b, a) Let's view the dat \u00b6 mapview::mapview(x, layer.name = \"NDVI\") + mapview::mapview(southernRockies)","title":"Stream data (light)"},{"location":"data-library/stac_simple/#accessing-data-via-stac","text":"ESIIL, 2024 Ty Tuff & Tyler McIntosh SpatioTemporal Asset Catalog, is an open-source specification designed to standardize the way geospatial data is indexed and discovered. Developed by Element 84 among others, it facilitates better interoperability and sharing of geospatial assets by providing a common language for describing them. STAC\u2019s flexible design allows for easy cataloging of data, making it simpler for individuals and systems to search and retrieve geospatial information. By effectively organizing data about the Earth\u2019s spatial and temporal characteristics, STAC enables users to harness the full power of the cloud and modern data processing technologies, optimizing the way we access and analyze environmental data on a global scale. Element 84\u2019s Earth Search is a STAC compliant search and discovery API that offers users access to a vast collection of geospatial open datasets hosted on AWS. It serves as a centralized search catalog providing standardized metadata for these open datasets, designed to be freely used and integrated into various applications. Alongside the API, Element 84 also provides a web application named Earth Search Console, which is map-centric and allows users to explore and visualize the data contained within the Earth Search API\u2019s catalog. This suite of tools is part of Element 84\u2019s initiative to make geospatial data more accessible and actionable for a wide range of users and applications.","title":"Accessing data via STAC"},{"location":"data-library/stac_simple/#first-we-need-an-area-of-interest","text":"require(glue) require(sf) require(gdalcubes) require(rstac) #Access ecoregiosn via VSI epa_l3 <- glue::glue( \"/vsizip/vsicurl/\", #magic remote connection \"https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip\", #copied link to download location \"/us_eco_l3.shp\") |> #path inside zip file sf::st_read() #Get just S.Rockies and ensure that it is in EPSG:4326 southernRockies <- epa_l3 |> dplyr::filter(US_L3NAME == \"Southern Rockies\") |> dplyr::group_by(US_L3NAME) |> dplyr::summarize(geometry = sf::st_union(geometry)) |> sf::st_transform(\"EPSG:4326\") bboxSR4326 <- sf::st_bbox(southernRockies) To access data from STAC correctly, we need to request the data in a projected CRS. 
southernRockies <- southernRockies |> sf::st_transform(\"EPSG:32613\") bboxSRproj <- sf::st_bbox(southernRockies)","title":"First, we need an area of interest"},{"location":"data-library/stac_simple/#search-the-stac-catalog","text":"To get information about a STAC archive, you can use rstac::get_request(). You can also use gdalcubes::collection_formats() to see various collection formats that you may encounter. To search a STAC catalog online, stacindex.org is a useful tool. For example, here is the page for the Earth Search catalog by Element84 that we will use. stac(\"https://earth-search.aws.element84.com/v1\") |> get_request() ## ###STACCatalog ## - id: earth-search-aws ## - description: A STAC API of public datasets on AWS ## - field(s): stac_version, type, id, title, description, links, conformsTo collection_formats() Initialize a STAC connection (rstac::stac()) and search for data that you are interested in (rstac::stac_search()). Note that you will request a spatial area of interest as well as a temporal window of interest. To get more information on the data and how it is structured, you can examine the 'items' object we create. # Record start time a <- Sys.time() # Initialize STAC connection s = rstac::stac(\"https://earth-search.aws.element84.com/v0\") # Search for Sentinel-2 images within specified bounding box and date range #22 Million items items = s |> rstac::stac_search(collections = \"sentinel-s2-l2a-cogs\", bbox = c(bboxSR4326[\"xmin\"], bboxSR4326[\"ymin\"], bboxSR4326[\"xmax\"], bboxSR4326[\"ymax\"]), datetime = \"2021-05-15/2021-05-16\") |> post_request() |> items_fetch(progress = FALSE) # Print number of found items length(items$features) items There is data we want! Now, we need to prepare the assets for us to access. We will list the assets we want, and set any property filters that we would like to apply. # Prepare the assets for analysis library(gdalcubes) assets = c(\"B01\", \"B02\", \"B03\", \"B04\", \"B05\", \"B06\", \"B07\", \"B08\", \"B8A\", \"B09\", \"B11\", \"B12\", \"SCL\") s2_collection = gdalcubes::stac_image_collection(items$features, asset_names = assets, property_filter = function(x) {x[[\"eo:cloud_cover\"]] < 20}) #all images with less than 20% clouds b <- Sys.time() difftime(b, a) # Display the image collection s2_collection","title":"Search the STAC catalog"},{"location":"data-library/stac_simple/#access-the-data","text":"First, we need to set up our view on the collection. We will set our spatial and temporal resolution, as well as how we want the data temporally aggregated and spatially resampled. We then also set our spatial and temporal window. Note that the spatial extent here should be in a projected CRS! # Record start time a <- Sys.time() # Define a specific view on the satellite image collection v = gdalcubes::cube_view( srs = \"EPSG:32613\", dx = 100, dy = 100, dt = \"P1M\", aggregation = \"median\", resampling = \"near\", extent = list( t0 = \"2021-05-15\", t1 = \"2021-05-16\", left = bboxSRproj[1], right = bboxSRproj[2], top = bboxSRproj[4], bottom = bboxSRproj[3] ) ) b <- Sys.time() difftime(b, a) # Display the defined view v Finally, let's take our snapshot of the data! Let's also calculate NDVI and then view the data. 
```","title":"Access the data"},{"location":"data-library/stac_simple/#record-start-time","text":"a <- Sys.time() s2_collection |> raster_cube(v) |> select_bands(c( \"B04\", \"B05\")) |> apply_pixel(c(\"(B05-B04)/(B05+B04)\"), names=\"NDVI\") |> write_tif() |> raster::stack() -> x","title":"Record start time"},{"location":"data-library/stac_simple/#view-the-product","text":"x b <- Sys.time() difftime(b, a)","title":"View the product"},{"location":"data-library/stac_simple/#lets-view-the-dat","text":"mapview::mapview(x, layer.name = \"NDVI\") + mapview::mapview(southernRockies)","title":"Let's view the dat"},{"location":"data-library/treemap/","text":"TreeMap \u00b6 TreeMap 2016 is a USFS tree-level model of the forests of the conterminous United States created by using machine learning algorithms to match forest plot data from Forest Inventory and Analysis (FIA) to a 30x30 meter (m) grid. The main output of this project is a raster map of imputed plot identifiers at 30\u00d730 m spatial resolution for the conterminous U.S. for landscape conditions circa 2016. The plot identifiers can be associated with data from FIA plots held in the associated csv and SQL files. An overview of the data product can be found here. The TreeMap data dictionary PDF can be found here. A portion of the TreeMap dataset covering the Southern Rockies has been prepared and placed in the CyVerse data store at the below directroy. The associated CSV and SQL DB files are in the same location. A script showing how to access it, as well as how the raster was accessed, is available in the code repository, as well as copied below. ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap # This script demonstrates how to open and access pre-downloaded TreeMap data from the data store # It also, at the bottom, shows how the data was accessed via VSI. # A similar approach could be used to access the SnagHazard data in the zip file via VSI if desired. (Path inside zip: Data/SnagHazard2016.tif) # ESIIL, 2024 # Tyler L. 
McIntosh require ( terra ) #Move data from data store to instance system ( \"cp -r ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap ~/TreeMap \" ) #Open the raster treemap <- terra :: rast ( \"~/TreeMap/treemap2016_southernrockies.tif\" ) terra :: plot ( treemap ) #Open the csv treemapCsv <- readr :: read_csv ( \"~/TreeMap/TreeMap2016_tree_table.csv\" ) head ( treemapCsv ) ####################################################### # DATA ACCESS SCRIPT ####################################################### # Access treemap data, crop to southern rockies, and save to data store require ( glue ) require ( terra ) require ( sf ) #Access EPA L3 data for cropping epa_l3 <- glue :: glue ( \"/vsizip/vsicurl/\" , #magic remote connection \"https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip\" , #copied link to download location \"/us_eco_l3.shp\" ) |> #path inside zip file sf :: st_read () #get just S.Rockies southernRockies <- epa_l3 |> dplyr :: filter ( US_L3NAME == \"Southern Rockies\" ) |> dplyr :: group_by ( US_L3NAME ) |> dplyr :: summarize ( geometry = sf :: st_union ( geometry )) #Access treemap data treemap <- glue :: glue ( \"/vsizip/vsicurl/\" , #magic remote connection \"https://s3-us-west-2.amazonaws.com/fs.usda.rds/RDS-2021-0074/RDS-2021-0074_Data.zip\" , #copied link to download location \"/Data/TreeMap2016.tif\" ) |> #path inside zip file terra :: rast () #Crop to s.rockies treemapSR <- treemap |> terra :: crop ( southernRockies , mask = FALSE ) #check data terra :: plot ( treemapSR ) #Write to instance terra :: writeRaster ( treemapSR , filename = '~/treemap2016_southernrockies.tif' , overwrite = TRUE , gdal = c ( \"COMPRESS=DEFLATE\" )) #Move data to data store system ( \"cp ~/treemap2016_southernrockies.tif ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap/treemap2016_southernrockies_again.tif \" )","title":"TreeMap"},{"location":"data-library/treemap/#treemap","text":"TreeMap 2016 is a USFS tree-level model of the forests of the conterminous United States created by using machine learning algorithms to match forest plot data from Forest Inventory and Analysis (FIA) to a 30x30 meter (m) grid. The main output of this project is a raster map of imputed plot identifiers at 30\u00d730 m spatial resolution for the conterminous U.S. for landscape conditions circa 2016. The plot identifiers can be associated with data from FIA plots held in the associated csv and SQL files. An overview of the data product can be found here. The TreeMap data dictionary PDF can be found here. A portion of the TreeMap dataset covering the Southern Rockies has been prepared and placed in the CyVerse data store at the below directroy. The associated CSV and SQL DB files are in the same location. A script showing how to access it, as well as how the raster was accessed, is available in the code repository, as well as copied below. ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap # This script demonstrates how to open and access pre-downloaded TreeMap data from the data store # It also, at the bottom, shows how the data was accessed via VSI. # A similar approach could be used to access the SnagHazard data in the zip file via VSI if desired. (Path inside zip: Data/SnagHazard2016.tif) # ESIIL, 2024 # Tyler L. 
McIntosh require ( terra ) #Move data from data store to instance system ( \"cp -r ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap ~/TreeMap \" ) #Open the raster treemap <- terra :: rast ( \"~/TreeMap/treemap2016_southernrockies.tif\" ) terra :: plot ( treemap ) #Open the csv treemapCsv <- readr :: read_csv ( \"~/TreeMap/TreeMap2016_tree_table.csv\" ) head ( treemapCsv ) ####################################################### # DATA ACCESS SCRIPT ####################################################### # Access treemap data, crop to southern rockies, and save to data store require ( glue ) require ( terra ) require ( sf ) #Access EPA L3 data for cropping epa_l3 <- glue :: glue ( \"/vsizip/vsicurl/\" , #magic remote connection \"https://gaftp.epa.gov/EPADataCommons/ORD/Ecoregions/us/us_eco_l3.zip\" , #copied link to download location \"/us_eco_l3.shp\" ) |> #path inside zip file sf :: st_read () #get just S.Rockies southernRockies <- epa_l3 |> dplyr :: filter ( US_L3NAME == \"Southern Rockies\" ) |> dplyr :: group_by ( US_L3NAME ) |> dplyr :: summarize ( geometry = sf :: st_union ( geometry )) #Access treemap data treemap <- glue :: glue ( \"/vsizip/vsicurl/\" , #magic remote connection \"https://s3-us-west-2.amazonaws.com/fs.usda.rds/RDS-2021-0074/RDS-2021-0074_Data.zip\" , #copied link to download location \"/Data/TreeMap2016.tif\" ) |> #path inside zip file terra :: rast () #Crop to s.rockies treemapSR <- treemap |> terra :: crop ( southernRockies , mask = FALSE ) #check data terra :: plot ( treemapSR ) #Write to instance terra :: writeRaster ( treemapSR , filename = '~/treemap2016_southernrockies.tif' , overwrite = TRUE , gdal = c ( \"COMPRESS=DEFLATE\" )) #Move data to data store system ( \"cp ~/treemap2016_southernrockies.tif ~/data-store/data/iplant/home/shared/earthlab/forest_carbon_codefest/TreeMap/treemap2016_southernrockies_again.tif \" )","title":"TreeMap"},{"location":"project-documentation/methods/","text":"Project methods overview \u00b6 Data Sources \u00b6 List and describe data sources used, including links to cloud-optimized sources. Highlight permissions and compliance with data ownership guidelines. Data Processing Steps \u00b6 Describe data processing steps taken, the order of scripts, etc. Data Analysis \u00b6 Describe steps taken to analyze data and resulting files in team data store file structure. Visualizations \u00b6 Describe visualizations created and any specialized techniques or libraries that users should be aware of. Conclusions \u00b6 Summary of the full workflow and its outcomes. Reflect on the methods used. References \u00b6 Citations of tools, data sources, and other references used.","title":"Project methods overview"},{"location":"project-documentation/methods/#project-methods-overview","text":"","title":"Project methods overview"},{"location":"project-documentation/methods/#data-sources","text":"List and describe data sources used, including links to cloud-optimized sources. 
Highlight permissions and compliance with data ownership guidelines.","title":"Data Sources"},{"location":"project-documentation/methods/#data-processing-steps","text":"Describe data processing steps taken, the order of scripts, etc.","title":"Data Processing Steps"},{"location":"project-documentation/methods/#data-analysis","text":"Describe steps taken to analyze data and resulting files in team data store file structure.","title":"Data Analysis"},{"location":"project-documentation/methods/#visualizations","text":"Describe visualizations created and any specialized techniques or libraries that users should be aware of.","title":"Visualizations"},{"location":"project-documentation/methods/#conclusions","text":"Summary of the full workflow and its outcomes. Reflect on the methods used.","title":"Conclusions"},{"location":"project-documentation/methods/#references","text":"Citations of tools, data sources, and other references used.","title":"References"},{"location":"project-documentation/project-notes/","text":"Project discussion notes \u00b6 Virtual meeting #3 \u00b6 Team theme, tentative area of interest, or question: \u00b6 Day 1: March 12, 2024 - CU Boulder \u00b6 Selected scientific question: \u00b6 Day 2: March 13, 2024 - CU Boulder \u00b6 Day 3: March 14, 2024 - CU Boulder \u00b6","title":"Project discussion notes"},{"location":"project-documentation/project-notes/#project-discussion-notes","text":"","title":"Project discussion notes"},{"location":"project-documentation/project-notes/#virtual-meeting-3","text":"","title":"Virtual meeting #3"},{"location":"project-documentation/project-notes/#team-theme-tentative-area-of-interest-or-question","text":"","title":"Team theme, tentative area of interest, or question:"},{"location":"project-documentation/project-notes/#day-1-march-12-2024-cu-boulder","text":"","title":"Day 1: March 12, 2024 - CU Boulder"},{"location":"project-documentation/project-notes/#selected-scientific-question","text":"","title":"Selected scientific question:"},{"location":"project-documentation/project-notes/#day-2-march-13-2024-cu-boulder","text":"","title":"Day 2: March 13, 2024 - CU Boulder"},{"location":"project-documentation/project-notes/#day-3-march-14-2024-cu-boulder","text":"","title":"Day 3: March 14, 2024 - CU Boulder"},{"location":"project-documentation/project-presentation/","text":"Project presentation overview \u00b6 All project presentation materials should be made available on this page. Your team may present directly from this page if you would like to; alternatively, if you would prefer to use slides to present, please make sure to export your team's slides as a PDF, add them to your GitHub, and add the link to that PDF here below. Presentation \u00b6","title":"Project presentation overview"},{"location":"project-documentation/project-presentation/#project-presentation-overview","text":"All project presentation materials should be made available on this page. 
Your team may present directly from this page if you would like to; alternatively, if you would prefer to use slides to present, please make sure to export your team's slides as a PDF, add them to your GitHub, and add the link to that PDF here below.","title":"Project presentation overview"},{"location":"project-documentation/project-presentation/#presentation","text":"","title":"Presentation"},{"location":"resources/art%20gallery/","text":"science art \u00b6 2024-01-25 Ty\u2019s art opinion \u00b6 In the context of the ongoing discussions for the redesign of our ESIIL office space, I would like to offer my personal perspective on the art and aesthetic that might enrich our environment: Urban Realism with a Personal Touch : I have a strong appreciation for artworks that reflect a realistic depiction of nature and urban life but with an imaginative twist. Art that integrates with and elevates our daily surroundings could offer a fresh perspective on the mundane. Nature in the Workplace : On a personal note, I find that art which brings elements of the outdoors inside can create a serene and motivating atmosphere, conducive to the values of sustainability that ESIIL embodies. Interactive Art : I believe that art installations which invite interaction or present a playful exaggeration of reality can energize our space. They have the potential to foster a creative dialogue among the team and with visitors. Dimensionality and Engagement : From my viewpoint, art that breaks out of the traditional two-dimensional space and engages with the viewer in three dimensions can transform the feel of an office. Such dynamic pieces could encourage innovative thinking and collaboration. Art with a Message : It\u2019s my opinion that the art we choose should subtly reflect our collective social and environmental commitments. Pieces that prompt introspection about our role in larger societal issues could resonate well with our team\u2019s ethos. Community Connection : Lastly, I feel that our office should not just be a place for work but also a space that invites community interaction. Art can be a bridge between ESIIL and the public, making our office a hub for inspiration and engagement.","title":"science art"},{"location":"resources/art%20gallery/#science-art","text":"2024-01-25","title":"science art"},{"location":"resources/art%20gallery/#tys-art-opinion","text":"In the context of the ongoing discussions for the redesign of our ESIIL office space, I would like to offer my personal perspective on the art and aesthetic that might enrich our environment: Urban Realism with a Personal Touch : I have a strong appreciation for artworks that reflect a realistic depiction of nature and urban life but with an imaginative twist. Art that integrates with and elevates our daily surroundings could offer a fresh perspective on the mundane. Nature in the Workplace : On a personal note, I find that art which brings elements of the outdoors inside can create a serene and motivating atmosphere, conducive to the values of sustainability that ESIIL embodies. Interactive Art : I believe that art installations which invite interaction or present a playful exaggeration of reality can energize our space. They have the potential to foster a creative dialogue among the team and with visitors. Dimensionality and Engagement : From my viewpoint, art that breaks out of the traditional two-dimensional space and engages with the viewer in three dimensions can transform the feel of an office. 
Such dynamic pieces could encourage innovative thinking and collaboration. Art with a Message : It\u2019s my opinion that the art we choose should subtly reflect our collective social and environmental commitments. Pieces that prompt introspection about our role in larger societal issues could resonate well with our team\u2019s ethos. Community Connection : Lastly, I feel that our office should not just be a place for work but also a space that invites community interaction. Art can be a bridge between ESIIL and the public, making our office a hub for inspiration and engagement.","title":"Ty\u2019s art opinion"},{"location":"resources/citations/","text":"Citation Management and Notes Collection in Markdown \u00b6 Introduction \u00b6 This document serves as a guide for managing citations and collecting research notes for our project. We'll use a combination of a .bib file for bibliographic references and Markdown for note-taking. Part 1: Setting Up Your .bib File for Citations \u00b6 Creating a .bib File \u00b6 Create a new file with a .bib extension, for example, project_references.bib . Add bibliographic entries to this file. Each entry should follow the BibTeX format. Example of a .bib Entry \u00b6 ```bibtex @article {Doe2021, author = {Jane Doe and John Smith}, title = {Insights into Environmental Data Science}, journal = {Journal of Data Science}, year = {2021}, volume = {15}, number = {4}, pages = {123-145}, doi = {10.1000/jds.2021.15.4} } Part 2: Using Citations in Markdown \u00b6 Citing in Your Markdown Document \u00b6 Refer to works in your .bib file using citation keys, like [@Doe2021] . Converting Markdown to PDF with Citations \u00b6 Use Pandoc: pandoc yourdoc.md --bibliography=project_references.bib --citeproc -o output.pdf Part 3: Collecting Citations and Research Notes \u00b6 Structuring Your Notes \u00b6 Notes on Doe 2021 [@Doe2021] \u00b6 Key Points: Summary of the article's main arguments. Notable methodologies. Relevance to Our Project: How this research informs our project. Applicable methodologies or theories. Notes on Another Article [@Another2021] \u00b6 Key Points: ... Relevance to Our Project: ... Conclusion \u00b6 This document facilitates efficient management of references and collaborative knowledge building for our project.","title":"Citation Management and Notes Collection in Markdown"},{"location":"resources/citations/#citation-management-and-notes-collection-in-markdown","text":"","title":"Citation Management and Notes Collection in Markdown"},{"location":"resources/citations/#introduction","text":"This document serves as a guide for managing citations and collecting research notes for our project. We'll use a combination of a .bib file for bibliographic references and Markdown for note-taking.","title":"Introduction"},{"location":"resources/citations/#part-1-setting-up-your-bib-file-for-citations","text":"","title":"Part 1: Setting Up Your .bib File for Citations"},{"location":"resources/citations/#creating-a-bib-file","text":"Create a new file with a .bib extension, for example, project_references.bib . Add bibliographic entries to this file. 
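The citation guide above ends by rendering a Markdown file and its .bib bibliography to PDF with the command pandoc yourdoc.md --bibliography=project_references.bib --citeproc -o output.pdf. As a small, hedged illustration only, that same invocation can be driven from a script; the file names are the examples used in the guide, and this sketch simply assumes pandoc (with citeproc support) is installed and on the PATH.

```python
# Minimal sketch: run the pandoc command from the citation guide via Python.
# Assumes pandoc with --citeproc support is installed and on the PATH;
# file names match the examples in the guide (yourdoc.md, project_references.bib).
import subprocess

def render_with_citations(md_file="yourdoc.md",
                          bib_file="project_references.bib",
                          out_file="output.pdf"):
    """Convert a Markdown document containing [@key] citations into a PDF."""
    cmd = [
        "pandoc", md_file,
        f"--bibliography={bib_file}",
        "--citeproc",
        "-o", out_file,
    ]
    subprocess.run(cmd, check=True)  # raises CalledProcessError if pandoc fails

if __name__ == "__main__":
    render_with_citations()
```

Wrapping the command this way keeps the exact flags from the guide in one place, so team members can re-render the PDF without retyping them.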
Each entry should follow the BibTeX format.","title":"Creating a .bib File"},{"location":"resources/citations/#example-of-a-bib-entry","text":"```bibtex @article {Doe2021, author = {Jane Doe and John Smith}, title = {Insights into Environmental Data Science}, journal = {Journal of Data Science}, year = {2021}, volume = {15}, number = {4}, pages = {123-145}, doi = {10.1000/jds.2021.15.4} }","title":"Example of a .bib Entry"},{"location":"resources/citations/#part-2-using-citations-in-markdown","text":"","title":"Part 2: Using Citations in Markdown"},{"location":"resources/citations/#citing-in-your-markdown-document","text":"Refer to works in your .bib file using citation keys, like [@Doe2021] .","title":"Citing in Your Markdown Document"},{"location":"resources/citations/#converting-markdown-to-pdf-with-citations","text":"Use Pandoc: pandoc yourdoc.md --bibliography=project_references.bib --citeproc -o output.pdf","title":"Converting Markdown to PDF with Citations"},{"location":"resources/citations/#part-3-collecting-citations-and-research-notes","text":"","title":"Part 3: Collecting Citations and Research Notes"},{"location":"resources/citations/#structuring-your-notes","text":"","title":"Structuring Your Notes"},{"location":"resources/citations/#notes-on-doe-2021-doe2021","text":"Key Points: Summary of the article's main arguments. Notable methodologies. Relevance to Our Project: How this research informs our project. Applicable methodologies or theories.","title":"Notes on Doe 2021 [@Doe2021]"},{"location":"resources/citations/#notes-on-another-article-another2021","text":"Key Points: ... Relevance to Our Project: ...","title":"Notes on Another Article [@Another2021]"},{"location":"resources/citations/#conclusion","text":"This document facilitates efficient management of references and collaborative knowledge building for our project.","title":"Conclusion"},{"location":"resources/cyverse_basics/","text":"Connecting Cyverse to GitHub \u00b6 Log in to Cyverse \u00b6 Go to the Cyverse user account website https://user.cyverse.org/ Click Sign up (if you do not already have an account) Head over to the Cyverse Discovery Environment https://de.cyverse.org , and log in with your new account. You should now see the Discovery Environment: We will give you permissions to access the Hackathon app. If you haven't already, let us know that you need access Open up an analysis with the hackathon environment (Jupyter Lab) \u00b6 From the Cyverse Discovery Environment, click on Apps in the left menu Select JupyterLab ESIIL Configure and launch your analysis - when choosing the disk size, make sure to choose 64GB or greater. The rest of the settings you can change to suit your computing needs: Click Go to analysis : Now you should see Jupyter Lab! Set up your GitHub credentials \u00b6 If you would prefer to follow a video instead of a written outline, we have prepared a video here: \u00b6 From Jupyter Lab, click on the Git Extension icon on the left menu: Click Clone a Repository and Paste the link to the cyverse-utils https://github.com/CU-ESIIL/cyverse-utils.git and click Clone : You should now see the cyverse-utils folder in your directory tree (provided you haven't changed directories from the default /home/jovyan/data-store Go into the cyverse-utils folder: open up the create_github_keypair.ipynb notebook if you prefer Python or the 'create_github_keypair.R' script if you prefer R by double-clicking and then select the default 'macrosystems' kernel: Now you should see the notebook open. 
Click the play button at the top. You will be prompted to enter your GitHub username and email: You should now see your Public Key. Copy the WHOLE LINE including ssh-ed25519 at the beginning and the jovyan@... at the end Go to your GitHub settings page (you may need to log in to GitHub first): Select SSH and GPG keys Select New SSH key Give your key a descriptive name, paste your ENTIRE public key in the Key input box, and click Add SSH Key . You may need to re-authenticate with your password or two-factor authentication.: You should now see your new SSH key in your Authentication Keys list! Now you will be able to clone private repositories and push changes to GitHub from your Cyverse analysis! NOTE! Your GitHub authentication is ONLY for the analysis you're working with right now. You will be able to use it as long as you want there, but once you start a new analysis you will need to go through this process again. Feel free to delete keys from old analyses that have been shut down.","title":"Cyverse"},{"location":"resources/cyverse_basics/#connecting-cyverse-to-github","text":"","title":"Connecting Cyverse to GitHub"},{"location":"resources/cyverse_basics/#log-in-to-cyverse","text":"Go to the Cyverse user account website https://user.cyverse.org/ Click Sign up (if you do not already have an account) Head over to the Cyverse Discovery Environment https://de.cyverse.org , and log in with your new account. You should now see the Discovery Environment: We will give you permissions to access the Hackathon app. If you haven't already, let us know that you need access","title":"Log in to Cyverse"},{"location":"resources/cyverse_basics/#open-up-an-analysis-with-the-hackathon-environment-jupyter-lab","text":"From the Cyverse Discovery Environment, click on Apps in the left menu Select JupyterLab ESIIL Configure and launch your analysis - when choosing the disk size, make sure to choose 64GB or greater. The rest of the settings you can change to suit your computing needs: Click Go to analysis : Now you should see Jupyter Lab!","title":"Open up an analysis with the hackathon environment (Jupyter Lab)"},{"location":"resources/cyverse_basics/#set-up-your-github-credentials","text":"","title":"Set up your GitHub credentials"},{"location":"resources/cyverse_basics/#if-you-would-prefer-to-follow-a-video-instead-of-a-written-outline-we-have-prepared-a-video-here","text":"From Jupyter Lab, click on the Git Extension icon on the left menu: Click Clone a Repository and Paste the link to the cyverse-utils https://github.com/CU-ESIIL/cyverse-utils.git and click Clone : You should now see the cyverse-utils folder in your directory tree (provided you haven't changed directories from the default /home/jovyan/data-store Go into the cyverse-utils folder: open up the create_github_keypair.ipynb notebook if you prefer Python or the 'create_github_keypair.R' script if you prefer R by double-clicking and then select the default 'macrosystems' kernel: Now you should see the notebook open. Click the play button at the top. You will be prompted to enter your GitHub username and email: You should now see your Public Key. Copy the WHOLE LINE including ssh-ed25519 at the beginning and the jovyan@... at the end Go to your GitHub settings page (you may need to log in to GitHub first): Select SSH and GPG keys Select New SSH key Give your key a descriptive name, paste your ENTIRE public key in the Key input box, and click Add SSH Key . 
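The create_github_keypair notebook and R script referenced above generate an SSH key and print the public half for you to paste into GitHub. Their exact contents are not reproduced in this documentation, so the following is only a rough sketch of that idea, assuming ssh-keygen is available in the session; the key path and email comment are illustrative placeholders.

```python
# Rough sketch (not the cyverse-utils notebook itself): generate an ed25519
# keypair and print the public key to paste into GitHub's "New SSH key" form.
# Assumes ssh-keygen is available; the key path and comment are illustrative.
import pathlib
import subprocess

key_path = pathlib.Path.home() / ".ssh" / "id_ed25519_github"
key_path.parent.mkdir(mode=0o700, exist_ok=True)

if not key_path.exists():
    subprocess.run(
        ["ssh-keygen", "-t", "ed25519",
         "-C", "your-github-email@example.com",  # replace with your GitHub email
         "-f", str(key_path),
         "-N", ""],                               # empty passphrase for convenience
        check=True,
    )

# Copy this ENTIRE line (ssh-ed25519 ... comment) into GitHub > Settings > SSH and GPG keys
print(key_path.with_suffix(".pub").read_text().strip())
```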
You may need to re-authenticate with your password or two-factor authentication.: You should now see your new SSH key in your Authentication Keys list! Now you will be able to clone private repositories and push changes to GitHub from your Cyverse analysis! NOTE! Your GitHub authentication is ONLY for the analysis you're working with right now. You will be able to use it as long as you want there, but once you start a new analysis you will need to go through this process again. Feel free to delete keys from old analyses that have been shut down.","title":"If you would prefer to follow a video instead of a written outline, we have prepared a video here:"},{"location":"resources/cyverse_hacks/","text":"Cyverse fixes \u00b6 Earth Lab Data Storage \u00b6 Path: /home/jovyan/data-store/iplant/home/shared/earthlab/ Ensure your project has a directory within the Earth Lab data storage. Setup \u00b6 CyVerse Account: Create an account if you do not already have one. Contact Tyson for account upgrades after maximizing current limits. GitHub Connection \u00b6 Follow Elsa Culler's guide for connecting GitHub to CyVerse. Select \u201cJupyterLab ESIIL\u201d and choose \u201cmacrosystems\u201d in the version dropdown. Clone into /home/jovyan/data-store . Clone innovation-summit-utils for SSH connection to GitHub. Run conda install -c conda-forge openssh in the terminal if you encounter errors. GitHub authentication is session-specific. RStudio in DE \u00b6 Copy your instance ID. It can be found in your analysis URL, in the form https://<INSTANCE_ID>.cyverse.run/lab. Use your ID in these links: https://<INSTANCE_ID>.cyverse.run/rstudio/auth-sign-in https://<INSTANCE_ID>.cyverse.run/rstudio/ Package Requests \u00b6 List desired packages here for future container updates. Data Transfer to CyVerse \u00b6 Use GoCommands for HPC/CyVerse transfers. Installation: Linux: (Command) Windows Powershell: (Command) Usage: Use put for upload and get for download. Ensure correct CyVerse directory path.","title":"Cyverse hacks"},{"location":"resources/cyverse_hacks/#cyverse-fixes","text":"","title":"Cyverse fixes"},{"location":"resources/cyverse_hacks/#earth-lab-data-storage","text":"Path: /home/jovyan/data-store/iplant/home/shared/earthlab/ Ensure your project has a directory within the Earth Lab data storage.","title":"Earth Lab Data Storage"},{"location":"resources/cyverse_hacks/#setup","text":"CyVerse Account: Create an account if you do not already have one. Contact Tyson for account upgrades after maximizing current limits.","title":"Setup"},{"location":"resources/cyverse_hacks/#github-connection","text":"Follow Elsa Culler's guide for connecting GitHub to CyVerse. Select \u201cJupyterLab ESIIL\u201d and choose \u201cmacrosystems\u201d in the version dropdown. Clone into /home/jovyan/data-store . Clone innovation-summit-utils for SSH connection to GitHub. Run conda install -c conda-forge openssh in the terminal if you encounter errors. GitHub authentication is session-specific.","title":"GitHub Connection"},{"location":"resources/cyverse_hacks/#rstudio-in-de","text":"Copy your instance ID. It can be found in your analysis URL, in the form https://<INSTANCE_ID>.cyverse.run/lab. Use your ID in these links: https://<INSTANCE_ID>.cyverse.run/rstudio/auth-sign-in https://<INSTANCE_ID>.cyverse.run/rstudio/","title":"RStudio in DE"},{"location":"resources/cyverse_hacks/#package-requests","text":"List desired packages here for future container updates.","title":"Package Requests"},{"location":"resources/cyverse_hacks/#data-transfer-to-cyverse","text":"Use GoCommands for HPC/CyVerse transfers. 
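The exact GoCommands installation commands are left as placeholders in the surrounding text, and the usage note only says to use put for upload and get for download. Purely as a hedged sketch, and assuming the GoCommands client is installed as the gocmd binary, such transfers might be wrapped like this; both paths below are illustrative, not required locations.

```python
# Hedged sketch: wrap GoCommands put/get calls from Python.
# Assumes the GoCommands client is installed and available as "gocmd";
# the destination path is an illustrative example based on the Earth Lab share above.
import subprocess

CYVERSE_DIR = "/iplant/home/shared/earthlab/my_project"  # example destination, replace with yours

def upload(local_path: str, remote_dir: str = CYVERSE_DIR) -> None:
    """Upload a local file or directory to the CyVerse Data Store."""
    subprocess.run(["gocmd", "put", local_path, remote_dir], check=True)

def download(remote_path: str, local_dir: str = ".") -> None:
    """Download a file or directory from the CyVerse Data Store."""
    subprocess.run(["gocmd", "get", remote_path, local_dir], check=True)
```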
Installation: Linux: (Command) Windows Powershell: (Command) Usage: Use put for upload and get for download. Ensure correct CyVerse directory path.","title":"Data Transfer to CyVerse"},{"location":"resources/cyverse_move_and_save_data/","text":"Connecting Cyverse to GitHub \u00b6 Log in to Cyverse \u00b6 Go to the Cyverse user account website https://user.cyverse.org/ Click Sign up (if you do not already have an account) Head over to the Cyverse Discovery Environment https://de.cyverse.org , and log in with your new account. You should now see the Discovery Environment: We will give you permissions to access the Hackathon app. If you haven't already, let us know that you need access Open up an analysis with the hackathon environment (Jupyter Lab) \u00b6 From the Cyverse Discovery Environment, click on Apps in the left menu Select JupyterLab ESIIL Configure and launch your analysis - when choosing the disk size, make sure to choose 64GB or greater. The rest of the settings you can change to suit your computing needs: Click Go to analysis : Now you should see Jupyter Lab! Set up your GitHub credentials \u00b6 If you would prefer to follow a video instead of a written outline, we have prepared a video here: \u00b6 From Jupyter Lab, click on the Git Extension icon on the left menu: Click Clone a Repository and Paste the link to the cyverse-utils https://github.com/CU-ESIIL/cyverse-utils.git and click Clone : You should now see the cyverse-utils folder in your directory tree (provided you haven't changed directories from the default /home/jovyan/data-store Go into the cyverse-utils folder: open up the create_github_keypair.ipynb notebook if you prefer Python or the 'create_github_keypair.R' script if you prefer R by double-clicking and then select the default 'macrosystems' kernel: Now you should see the notebook open. Click the play button at the top. You will be prompted to enter your GitHub username and email: You should now see your Public Key. Copy the WHOLE LINE including ssh-ed25519 at the beginning and the jovyan@... at the end Go to your GitHub settings page (you may need to log in to GitHub first): Select SSH and GPG keys Select New SSH key Give your key a descriptive name, paste your ENTIRE public key in the Key input box, and click Add SSH Key . You may need to re-authenticate with your password or two-factor authentication.: You should now see your new SSH key in your Authentication Keys list! Now you will be able to clone private repositories and push changes to GitHub from your Cyverse analysis! NOTE! Your GitHub authentication is ONLY for the analysis you're working with right now. You will be able to use it as long as you want there, but once you start a new analysis you will need to go through this process again. Feel free to delete keys from old analyses that have been shut down.","title":"Connecting Cyverse to GitHub"},{"location":"resources/cyverse_move_and_save_data/#connecting-cyverse-to-github","text":"","title":"Connecting Cyverse to GitHub"},{"location":"resources/cyverse_move_and_save_data/#log-in-to-cyverse","text":"Go to the Cyverse user account website https://user.cyverse.org/ Click Sign up (if you do not already have an account) Head over to the Cyverse Discovery Environment https://de.cyverse.org , and log in with your new account. You should now see the Discovery Environment: We will give you permissions to access the Hackathon app. 
If you haven't already, let us know that you need access","title":"Log in to Cyverse"},{"location":"resources/cyverse_move_and_save_data/#open-up-an-analysis-with-the-hackathon-environment-jupyter-lab","text":"From the Cyverse Discovery Environment, click on Apps in the left menu Select JupyterLab ESIIL Configure and launch your analysis - when choosing the disk size, make sure to choose 64GB or greater. The rest of the settings you can change to suit your computing needs: Click Go to analysis : Now you should see Jupyter Lab!","title":"Open up an analysis with the hackathon environment (Jupyter Lab)"},{"location":"resources/cyverse_move_and_save_data/#set-up-your-github-credentials","text":"","title":"Set up your GitHub credentials"},{"location":"resources/cyverse_move_and_save_data/#if-you-would-prefer-to-follow-a-video-instead-of-a-written-outline-we-have-prepared-a-video-here","text":"From Jupyter Lab, click on the Git Extension icon on the left menu: Click Clone a Repository and Paste the link to the cyverse-utils https://github.com/CU-ESIIL/cyverse-utils.git and click Clone : You should now see the cyverse-utils folder in your directory tree (provided you haven't changed directories from the default /home/jovyan/data-store Go into the cyverse-utils folder: open up the create_github_keypair.ipynb notebook if you prefer Python or the 'create_github_keypair.R' script if you prefer R by double-clicking and then select the default 'macrosystems' kernel: Now you should see the notebook open. Click the play button at the top. You will be prompted to enter your GitHub username and email: You should now see your Public Key. Copy the WHOLE LINE including ssh-ed25519 at the beginning and the jovyan@... at the end Go to your GitHub settings page (you may need to log in to GitHub first): Select SSH and GPG keys Select New SSH key Give your key a descriptive name, paste your ENTIRE public key in the Key input box, and click Add SSH Key . You may need to re-authenticate with your password or two-factor authentication.: You should now see your new SSH key in your Authentication Keys list! Now you will be able to clone private repositories and push changes to GitHub from your Cyverse analysis! NOTE! Your GitHub authentication is ONLY for the analysis you're working with right now. You will be able to use it as long as you want there, but once you start a new analysis you will need to go through this process again. Feel free to delete keys from old analyses that have been shut down.","title":"If you would prefer to follow a video instead of a written outline, we have prepared a video here:"},{"location":"resources/cyverse_shutdown/","text":"Connecting Cyverse to GitHub \u00b6 Log in to Cyverse \u00b6 Go to the Cyverse user account website https://user.cyverse.org/ Click Sign up (if you do not already have an account) Head over to the Cyverse Discovery Environment https://de.cyverse.org , and log in with your new account. You should now see the Discovery Environment: We will give you permissions to access the Hackathon app. If you haven't already, let us know that you need access Open up an analysis with the hackathon environment (Jupyter Lab) \u00b6 From the Cyverse Discovery Environment, click on Apps in the left menu Select JupyterLab ESIIL Configure and launch your analysis - when choosing the disk size, make sure to choose 64GB or greater. The rest of the settings you can change to suit your computing needs: Click Go to analysis : Now you should see Jupyter Lab! 
Set up your GitHub credentials \u00b6 If you would prefer to follow a video instead of a written outline, we have prepared a video here: \u00b6 From Jupyter Lab, click on the Git Extension icon on the left menu: Click Clone a Repository and Paste the link to the cyverse-utils https://github.com/CU-ESIIL/cyverse-utils.git and click Clone : You should now see the cyverse-utils folder in your directory tree (provided you haven't changed directories from the default /home/jovyan/data-store Go into the cyverse-utils folder: open up the create_github_keypair.ipynb notebook if you prefer Python or the 'create_github_keypair.R' script if you prefer R by double-clicking and then select the default 'macrosystems' kernel: Now you should see the notebook open. Click the play button at the top. You will be prompted to enter your GitHub username and email: You should now see your Public Key. Copy the WHOLE LINE including ssh-ed25519 at the beginning and the jovyan@... at the end Go to your GitHub settings page (you may need to log in to GitHub first): Select SSH and GPG keys Select New SSH key Give your key a descriptive name, paste your ENTIRE public key in the Key input box, and click Add SSH Key . You may need to re-authenticate with your password or two-factor authentication.: You should now see your new SSH key in your Authentication Keys list! Now you will be able to clone private repositories and push changes to GitHub from your Cyverse analysis! NOTE! Your GitHub authentication is ONLY for the analysis you're working with right now. You will be able to use it as long as you want there, but once you start a new analysis you will need to go through this process again. Feel free to delete keys from old analyses that have been shut down.","title":"Shutdown procedure"},{"location":"resources/cyverse_shutdown/#connecting-cyverse-to-github","text":"","title":"Connecting Cyverse to GitHub"},{"location":"resources/cyverse_shutdown/#log-in-to-cyverse","text":"Go to the Cyverse user account website https://user.cyverse.org/ Click Sign up (if you do not already have an account) Head over to the Cyverse Discovery Environment https://de.cyverse.org , and log in with your new account. You should now see the Discovery Environment: We will give you permissions to access the Hackathon app. If you haven't already, let us know that you need access","title":"Log in to Cyverse"},{"location":"resources/cyverse_shutdown/#open-up-an-analysis-with-the-hackathon-environment-jupyter-lab","text":"From the Cyverse Discovery Environment, click on Apps in the left menu Select JupyterLab ESIIL Configure and launch your analysis - when choosing the disk size, make sure to choose 64GB or greater. 
The rest of the settings you can change to suit your computing needs: Click Go to analysis : Now you should see Jupyter Lab!","title":"Open up an analysis with the hackathon environment (Jupyter Lab)"},{"location":"resources/cyverse_shutdown/#set-up-your-github-credentials","text":"","title":"Set up your GitHub credentials"},{"location":"resources/cyverse_shutdown/#if-you-would-prefer-to-follow-a-video-instead-of-a-written-outline-we-have-prepared-a-video-here","text":"From Jupyter Lab, click on the Git Extension icon on the left menu: Click Clone a Repository and Paste the link to the cyverse-utils https://github.com/CU-ESIIL/cyverse-utils.git and click Clone : You should now see the cyverse-utils folder in your directory tree (provided you haven't changed directories from the default /home/jovyan/data-store Go into the cyverse-utils folder: open up the create_github_keypair.ipynb notebook if you prefer Python or the 'create_github_keypair.R' script if you prefer R by double-clicking and then select the default 'macrosystems' kernel: Now you should see the notebook open. Click the play button at the top. You will be prompted to enter your GitHub username and email: You should now see your Public Key. Copy the WHOLE LINE including ssh-ed25519 at the beginning and the jovyan@... at the end Go to your GitHub settings page (you may need to log in to GitHub first): Select SSH and GPG keys Select New SSH key Give your key a descriptive name, paste your ENTIRE public key in the Key input box, and click Add SSH Key . You may need to re-authenticate with your password or two-factor authentication.: You should now see your new SSH key in your Authentication Keys list! Now you will be able to clone private repositories and push changes to GitHub from your Cyverse analysis! NOTE! Your GitHub authentication is ONLY for the analysis you're working with right now. You will be able to use it as long as you want there, but once you start a new analysis you will need to go through this process again. Feel free to delete keys from old analyses that have been shut down.","title":"If you would prefer to follow a video instead of a written outline, we have prepared a video here:"},{"location":"resources/cyverse_startup/","text":"Connecting Cyverse to GitHub \u00b6 Log in to Cyverse \u00b6 Go to the Cyverse user account website https://user.cyverse.org/ Click Sign up (if you do not already have an account) Head over to the Cyverse Discovery Environment https://de.cyverse.org , and log in with your new account. You should now see the Discovery Environment: We will give you permissions to access the Hackathon app. If you haven't already, let us know that you need access Open up an analysis with the hackathon environment (Jupyter Lab) \u00b6 From the Cyverse Discovery Environment, click on Apps in the left menu Select JupyterLab ESIIL Configure and launch your analysis - when choosing the disk size, make sure to choose 64GB or greater. The rest of the settings you can change to suit your computing needs: Click Go to analysis : Now you should see Jupyter Lab! 
Set up your GitHub credentials \u00b6 If you would prefer to follow a video instead of a written outline, we have prepared a video here: \u00b6 From Jupyter Lab, click on the Git Extension icon on the left menu: Click Clone a Repository and Paste the link to the cyverse-utils https://github.com/CU-ESIIL/cyverse-utils.git and click Clone : You should now see the cyverse-utils folder in your directory tree (provided you haven't changed directories from the default /home/jovyan/data-store Go into the cyverse-utils folder: open up the create_github_keypair.ipynb notebook if you prefer Python or the 'create_github_keypair.R' script if you prefer R by double-clicking and then select the default 'macrosystems' kernel: Now you should see the notebook open. Click the play button at the top. You will be prompted to enter your GitHub username and email: You should now see your Public Key. Copy the WHOLE LINE including ssh-ed25519 at the beginning and the jovyan@... at the end Go to your GitHub settings page (you may need to log in to GitHub first): Select SSH and GPG keys Select New SSH key Give your key a descriptive name, paste your ENTIRE public key in the Key input box, and click Add SSH Key . You may need to re-authenticate with your password or two-factor authentication.: You should now see your new SSH key in your Authentication Keys list! Now you will be able to clone private repositories and push changes to GitHub from your Cyverse analysis! NOTE! Your GitHub authentication is ONLY for the analysis you're working with right now. You will be able to use it as long as you want there, but once you start a new analysis you will need to go through this process again. Feel free to delete keys from old analyses that have been shut down.","title":"Startup procedure"},{"location":"resources/cyverse_startup/#connecting-cyverse-to-github","text":"","title":"Connecting Cyverse to GitHub"},{"location":"resources/cyverse_startup/#log-in-to-cyverse","text":"Go to the Cyverse user account website https://user.cyverse.org/ Click Sign up (if you do not already have an account) Head over to the Cyverse Discovery Environment https://de.cyverse.org , and log in with your new account. You should now see the Discovery Environment: We will give you permissions to access the Hackathon app. If you haven't already, let us know that you need access","title":"Log in to Cyverse"},{"location":"resources/cyverse_startup/#open-up-an-analysis-with-the-hackathon-environment-jupyter-lab","text":"From the Cyverse Discovery Environment, click on Apps in the left menu Select JupyterLab ESIIL Configure and launch your analysis - when choosing the disk size, make sure to choose 64GB or greater. 
The rest of the settings you can change to suit your computing needs: Click Go to analysis : Now you should see Jupyter Lab!","title":"Open up an analysis with the hackathon environment (Jupyter Lab)"},{"location":"resources/cyverse_startup/#set-up-your-github-credentials","text":"","title":"Set up your GitHub credentials"},{"location":"resources/cyverse_startup/#if-you-would-prefer-to-follow-a-video-instead-of-a-written-outline-we-have-prepared-a-video-here","text":"From Jupyter Lab, click on the Git Extension icon on the left menu: Click Clone a Repository and Paste the link to the cyverse-utils https://github.com/CU-ESIIL/cyverse-utils.git and click Clone : You should now see the cyverse-utils folder in your directory tree (provided you haven't changed directories from the default /home/jovyan/data-store Go into the cyverse-utils folder: open up the create_github_keypair.ipynb notebook if you prefer Python or the 'create_github_keypair.R' script if you prefer R by double-clicking and then select the default 'macrosystems' kernel: Now you should see the notebook open. Click the play button at the top. You will be prompted to enter your GitHub username and email: You should now see your Public Key. Copy the WHOLE LINE including ssh-ed25519 at the beginning and the jovyan@... at the end Go to your GitHub settings page (you may need to log in to GitHub first): Select SSH and GPG keys Select New SSH key Give your key a descriptive name, paste your ENTIRE public key in the Key input box, and click Add SSH Key . You may need to re-authenticate with your password or two-factor authentication.: You should now see your new SSH key in your Authentication Keys list! Now you will be able to clone private repositories and push changes to GitHub from your Cyverse analysis! NOTE! Your GitHub authentication is ONLY for the analysis you're working with right now. You will be able to use it as long as you want there, but once you start a new analysis you will need to go through this process again. Feel free to delete keys from old analyses that have been shut down.","title":"If you would prefer to follow a video instead of a written outline, we have prepared a video here:"},{"location":"resources/cyverse_stream_data/","text":"Connecting Cyverse to GitHub \u00b6 Log in to Cyverse \u00b6 Go to the Cyverse user account website https://user.cyverse.org/ Click Sign up (if you do not already have an account) Head over to the Cyverse Discovery Environment https://de.cyverse.org , and log in with your new account. You should now see the Discovery Environment: We will give you permissions to access the Hackathon app. If you haven't already, let us know that you need access Open up an analysis with the hackathon environment (Jupyter Lab) \u00b6 From the Cyverse Discovery Environment, click on Apps in the left menu Select JupyterLab ESIIL Configure and launch your analysis - when choosing the disk size, make sure to choose 64GB or greater. The rest of the settings you can change to suit your computing needs: Click Go to analysis : Now you should see Jupyter Lab! 
Set up your GitHub credentials \u00b6 If you would prefer to follow a video instead of a written outline, we have prepared a video here: \u00b6 From Jupyter Lab, click on the Git Extension icon on the left menu: Click Clone a Repository and Paste the link to the cyverse-utils https://github.com/CU-ESIIL/cyverse-utils.git and click Clone : You should now see the cyverse-utils folder in your directory tree (provided you haven't changed directories from the default /home/jovyan/data-store Go into the cyverse-utils folder: open up the create_github_keypair.ipynb notebook if you prefer Python or the 'create_github_keypair.R' script if you prefer R by double-clicking and then select the default 'macrosystems' kernel: Now you should see the notebook open. Click the play button at the top. You will be prompted to enter your GitHub username and email: You should now see your Public Key. Copy the WHOLE LINE including ssh-ed25519 at the beginning and the jovyan@... at the end Go to your GitHub settings page (you may need to log in to GitHub first): Select SSH and GPG keys Select New SSH key Give your key a descriptive name, paste your ENTIRE public key in the Key input box, and click Add SSH Key . You may need to re-authenticate with your password or two-factor authentication.: You should now see your new SSH key in your Authentication Keys list! Now you will be able to clone private repositories and push changes to GitHub from your Cyverse analysis! NOTE! Your GitHub authentication is ONLY for the analysis you're working with right now. You will be able to use it as long as you want there, but once you start a new analysis you will need to go through this process again. Feel free to delete keys from old analyses that have been shut down.","title":"Connecting Cyverse to GitHub"},{"location":"resources/cyverse_stream_data/#connecting-cyverse-to-github","text":"","title":"Connecting Cyverse to GitHub"},{"location":"resources/cyverse_stream_data/#log-in-to-cyverse","text":"Go to the Cyverse user account website https://user.cyverse.org/ Click Sign up (if you do not already have an account) Head over to the Cyverse Discovery Environment https://de.cyverse.org , and log in with your new account. You should now see the Discovery Environment: We will give you permissions to access the Hackathon app. If you haven't already, let us know that you need access","title":"Log in to Cyverse"},{"location":"resources/cyverse_stream_data/#open-up-an-analysis-with-the-hackathon-environment-jupyter-lab","text":"From the Cyverse Discovery Environment, click on Apps in the left menu Select JupyterLab ESIIL Configure and launch your analysis - when choosing the disk size, make sure to choose 64GB or greater. 
The rest of the settings you can change to suit your computing needs: Click Go to analysis : Now you should see Jupyter Lab!","title":"Open up an analysis with the hackathon environment (Jupyter Lab)"},{"location":"resources/cyverse_stream_data/#set-up-your-github-credentials","text":"","title":"Set up your GitHub credentials"},{"location":"resources/cyverse_stream_data/#if-you-would-prefer-to-follow-a-video-instead-of-a-written-outline-we-have-prepared-a-video-here","text":"From Jupyter Lab, click on the Git Extension icon on the left menu: Click Clone a Repository and Paste the link to the cyverse-utils https://github.com/CU-ESIIL/cyverse-utils.git and click Clone : You should now see the cyverse-utils folder in your directory tree (provided you haven't changed directories from the default /home/jovyan/data-store Go into the cyverse-utils folder: open up the create_github_keypair.ipynb notebook if you prefer Python or the 'create_github_keypair.R' script if you prefer R by double-clicking and then select the default 'macrosystems' kernel: Now you should see the notebook open. Click the play button at the top. You will be prompted to enter your GitHub username and email: You should now see your Public Key. Copy the WHOLE LINE including ssh-ed25519 at the beginning and the jovyan@... at the end Go to your GitHub settings page (you may need to log in to GitHub first): Select SSH and GPG keys Select New SSH key Give your key a descriptive name, paste your ENTIRE public key in the Key input box, and click Add SSH Key . You may need to re-authenticate with your password or two-factor authentication.: You should now see your new SSH key in your Authentication Keys list! Now you will be able to clone private repositories and push changes to GitHub from your Cyverse analysis! NOTE! Your GitHub authentication is ONLY for the analysis you're working with right now. You will be able to use it as long as you want there, but once you start a new analysis you will need to go through this process again. Feel free to delete keys from old analyses that have been shut down.","title":"If you would prefer to follow a video instead of a written outline, we have prepared a video here:"},{"location":"resources/data_analysis/","text":"Data Analysis Documentation \u00b6 Overview \u00b6 Brief overview of the data analysis goals and the analytical questions being addressed. Analysis Methodology \u00b6 Description of the analytical approach, methods used, and justification for the chosen techniques. Code Overview \u00b6 Explanation of the structure of the analysis code, including key functions and their roles. Running the Analysis \u00b6 Instructions and example commands for executing the analysis scripts. python analysis_script.py Analysis Results \u00b6 Summary of key findings from the analysis, including interpretation and relevance. Challenges and Solutions \u00b6 Discussion of challenges faced during the analysis and solutions or workarounds implemented. Conclusions \u00b6 Concluding remarks on the analysis, insights gained, and their potential impact. Future Work \u00b6 Suggestions for extending or refining the analysis and potential areas for further research. 
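The data analysis template above runs the analysis as python analysis_script.py, but the script itself is not specified anywhere in this documentation. The following is therefore only a hypothetical skeleton showing one way such an entry point could be organized; the input and output paths are invented placeholders, not project conventions.

```python
# Hypothetical skeleton for an analysis_script.py entry point; nothing here
# comes from the actual project code. Paths and steps are placeholders.
import argparse
import pathlib


def run_analysis(input_path: pathlib.Path, output_dir: pathlib.Path) -> None:
    """Load data, run the analysis steps, and write results under output_dir."""
    output_dir.mkdir(parents=True, exist_ok=True)
    # ... load input_path, compute summaries/figures, write files under output_dir ...
    print(f"Analysis complete: results written to {output_dir}")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Run the team's data analysis.")
    parser.add_argument("--input", type=pathlib.Path, default=pathlib.Path("data/processed"))
    parser.add_argument("--output", type=pathlib.Path, default=pathlib.Path("results"))
    args = parser.parse_args()
    run_analysis(args.input, args.output)
```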
References \u00b6 Citations or references to external sources or literature used.","title":"Data Analysis Documentation"},{"location":"resources/data_analysis/#data-analysis-documentation","text":"","title":"Data Analysis Documentation"},{"location":"resources/data_analysis/#overview","text":"Brief overview of the data analysis goals and the analytical questions being addressed.","title":"Overview"},{"location":"resources/data_analysis/#analysis-methodology","text":"Description of the analytical approach, methods used, and justification for the chosen techniques.","title":"Analysis Methodology"},{"location":"resources/data_analysis/#code-overview","text":"Explanation of the structure of the analysis code, including key functions and their roles.","title":"Code Overview"},{"location":"resources/data_analysis/#running-the-analysis","text":"Instructions and example commands for executing the analysis scripts. python analysis_script.py","title":"Running the Analysis"},{"location":"resources/data_analysis/#analysis-results","text":"Summary of key findings from the analysis, including interpretation and relevance.","title":"Analysis Results"},{"location":"resources/data_analysis/#challenges-and-solutions","text":"Discussion of challenges faced during the analysis and solutions or workarounds implemented.","title":"Challenges and Solutions"},{"location":"resources/data_analysis/#conclusions","text":"Concluding remarks on the analysis, insights gained, and their potential impact.","title":"Conclusions"},{"location":"resources/data_analysis/#future-work","text":"Suggestions for extending or refining the analysis and potential areas for further research.","title":"Future Work"},{"location":"resources/data_analysis/#references","text":"Citations or references to external sources or literature used.","title":"References"},{"location":"resources/data_processing/","text":"Data Processing Documentation \u00b6 Overview \u00b6 Brief description of the data processing objectives and scope. Reminder to adhere to data ownership and usage guidelines. Data Sources \u00b6 List and describe data sources used, including links to cloud-optimized sources. Highlight permissions and compliance with data ownership guidelines. CyVerse Discovery Environment \u00b6 Instructions for setting up and using the CyVerse Discovery Environment for data processing. Tips for cloud-based data access and processing. Data Processing Steps \u00b6 Using GDAL VSI \u00b6 Guidance on using GDAL VSI (Virtual System Interface) for data access and processing. Example commands or scripts: gdal_translate /vsicurl/http://example.com/data.tif output.tif Cloud-Optimized Data \u00b6 Advantages of using cloud-optimized data formats and processing data without downloading. Instructions for such processes. Data Storage \u00b6 Information on storing processed data, with guidelines for choosing between the repository and CyVerse Data Store. Best Practices \u00b6 Recommendations for efficient and responsible data processing in the cloud. Tips to ensure data integrity and reproducibility. Challenges and Troubleshooting \u00b6 Common challenges in data processing and potential solutions. Resources for troubleshooting in the CyVerse Discovery Environment. Conclusions \u00b6 Summary of the data processing phase and its outcomes. Reflect on the methods used. 
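The GDAL VSI example above (gdal_translate /vsicurl/http://example.com/data.tif output.tif) reads a remote raster over HTTP without downloading it first. A hedged Python equivalent using the GDAL bindings might look like the sketch below; it assumes the osgeo package is installed in the environment, and the URL is the same placeholder used in the example command.

```python
# Sketch of the GDAL VSI pattern using the Python bindings instead of the
# gdal_translate CLI. Assumes the osgeo (GDAL) package is installed; the URL
# is the same placeholder used in the example command above.
from osgeo import gdal

gdal.UseExceptions()  # raise Python exceptions instead of silently returning None

src = gdal.Open("/vsicurl/http://example.com/data.tif")  # streamed over HTTP, not downloaded
print("Size:", src.RasterXSize, "x", src.RasterYSize, "bands:", src.RasterCount)

# Write a local copy (equivalent to: gdal_translate /vsicurl/... output.tif)
gdal.Translate("output.tif", src)
src = None  # close the dataset
```

Working through /vsicurl/ this way is what makes cloud-optimized formats attractive: only the bytes needed for the requested window or overview are fetched.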
References \u00b6 Citations of tools, data sources, and other references used in the data processing phase.","title":"Data Processing Documentation"},{"location":"resources/data_processing/#data-processing-documentation","text":"","title":"Data Processing Documentation"},{"location":"resources/data_processing/#overview","text":"Brief description of the data processing objectives and scope. Reminder to adhere to data ownership and usage guidelines.","title":"Overview"},{"location":"resources/data_processing/#data-sources","text":"List and describe data sources used, including links to cloud-optimized sources. Highlight permissions and compliance with data ownership guidelines.","title":"Data Sources"},{"location":"resources/data_processing/#cyverse-discovery-environment","text":"Instructions for setting up and using the CyVerse Discovery Environment for data processing. Tips for cloud-based data access and processing.","title":"CyVerse Discovery Environment"},{"location":"resources/data_processing/#data-processing-steps","text":"","title":"Data Processing Steps"},{"location":"resources/data_processing/#using-gdal-vsi","text":"Guidance on using GDAL VSI (Virtual System Interface) for data access and processing. Example commands or scripts: gdal_translate /vsicurl/http://example.com/data.tif output.tif","title":"Using GDAL VSI"},{"location":"resources/data_processing/#cloud-optimized-data","text":"Advantages of using cloud-optimized data formats and processing data without downloading. Instructions for such processes.","title":"Cloud-Optimized Data"},{"location":"resources/data_processing/#data-storage","text":"Information on storing processed data, with guidelines for choosing between the repository and CyVerse Data Store.","title":"Data Storage"},{"location":"resources/data_processing/#best-practices","text":"Recommendations for efficient and responsible data processing in the cloud. Tips to ensure data integrity and reproducibility.","title":"Best Practices"},{"location":"resources/data_processing/#challenges-and-troubleshooting","text":"Common challenges in data processing and potential solutions. Resources for troubleshooting in the CyVerse Discovery Environment.","title":"Challenges and Troubleshooting"},{"location":"resources/data_processing/#conclusions","text":"Summary of the data processing phase and its outcomes. Reflect on the methods used.","title":"Conclusions"},{"location":"resources/data_processing/#references","text":"Citations of tools, data sources, and other references used in the data processing phase.","title":"References"},{"location":"resources/esiil_training/","text":"ESIIL Working Groups training sessions \u00b6 Introduction to ESIIL Training \u00b6 Brief overview of the training program. Objectives and expected outcomes for the working groups. Session 1: The Science of Team Science (2 Hours) \u00b6 Part 1: Creating Ethical and Innovative Work Spaces \u00b6 Strategies for fostering ethical and inclusive environments. Techniques for encouraging innovation and creativity in team settings. Part 2: Effective Communication and Collaboration \u00b6 Best practices for ensuring every team member's voice is heard. Approaches for maintaining productivity and positive team dynamics. Overview of the code of conduct and participant agreement. Session 2: Foundations of Environmental Data Science (2 Hours) \u00b6 Part 1: Data Management, Ethics, and GitHub Usage \u00b6 Principles of data management in environmental science. Understanding data ethics and ownership guidelines. 
Tour of GitHub repositories and setup instructions for effective collaboration. Part 2: Essential Tools and Technologies \u00b6 Introduction to key tools and technologies used in ESIIL. Basic training on software and platforms essential for data analysis. Session 3: Practical Application and Project Execution (2 Hours) \u00b6 Part 1: Travel Planning and Reimbursement \u00b6 Learn how to manage finances and submit paperwork to the University. Part 2: Hands-on Data Analysis Workflow \u00b6 Interactive session on constructing a data analysis pipeline using ESIIL/CyVerse tools. Practical exercises on data processing, analysis, and visualization techniques. Troubleshooting common issues and optimizing workflow efficiency. Part 3: Wrap-up and Project Planning \u00b6 Strategies for sustaining project momentum and managing long-term research goals. Planning for publication, data sharing, and broader impact. Final Q&A session to address any outstanding questions or concerns. Conclusion and Feedback \u00b6 Summary of key learnings from all sessions. Encouragement for participants to apply these skills in their respective projects. Collection of feedback for future training improvements. Additional Resources \u00b6 List of resources for further learning and exploration. Links to community forums or groups for ongoing support and collaboration. Roundtable Event 1: PI/Team Leads Discussion (2 Hours) \u00b6 A roundtable discussion for Principal Investigators and team leads. Sharing experiences, challenges, and strategies among group leaders. Fostering a collaborative network and problem-solving atmosphere. Roundtable Event 2: Technical Leads Office Hours (2 Hours) \u00b6 A roundtable and office hours session for technical leads. Ensuring a thorough understanding of the ESIIL/CyVerse cyberinfrastructure. Providing technical support and knowledge exchange. Conclusion and Feedback \u00b6 Recap of key takeaways from the training sessions and roundtables. Collection of feedback for continuous improvement of the training program. Additional Resources \u00b6 Supplementary materials, reading lists, and links to online tutorials and documentation.","title":"ESIIL Working Groups training sessions"},{"location":"resources/esiil_training/#esiil-working-groups-training-sessions","text":"","title":"ESIIL Working Groups training sessions"},{"location":"resources/esiil_training/#introduction-to-esiil-training","text":"Brief overview of the training program. Objectives and expected outcomes for the working groups.","title":"Introduction to ESIIL Training"},{"location":"resources/esiil_training/#session-1-the-science-of-team-science-2-hours","text":"","title":"Session 1: The Science of Team Science (2 Hours)"},{"location":"resources/esiil_training/#part-1-creating-ethical-and-innovative-work-spaces","text":"Strategies for fostering ethical and inclusive environments. Techniques for encouraging innovation and creativity in team settings.","title":"Part 1: Creating Ethical and Innovative Work Spaces"},{"location":"resources/esiil_training/#part-2-effective-communication-and-collaboration","text":"Best practices for ensuring every team member's voice is heard. Approaches for maintaining productivity and positive team dynamics. 
Overview of the code of conduct and participant agreement.","title":"Part 2: Effective Communication and Collaboration"},{"location":"resources/esiil_training/#session-2-foundations-of-environmental-data-science-2-hours","text":"","title":"Session 2: Foundations of Environmental Data Science (2 Hours)"},{"location":"resources/esiil_training/#part-1-data-management-ethics-and-github-usage","text":"Principles of data management in environmental science. Understanding data ethics and ownership guidelines. Tour of GitHub repositories and setup instructions for effective collaboration.","title":"Part 1: Data Management, Ethics, and GitHub Usage"},{"location":"resources/esiil_training/#part-2-essential-tools-and-technologies","text":"Introduction to key tools and technologies used in ESIIL. Basic training on software and platforms essential for data analysis.","title":"Part 2: Essential Tools and Technologies"},{"location":"resources/esiil_training/#session-3-practical-application-and-project-execution-2-hours","text":"","title":"Session 3: Practical Application and Project Execution (2 Hours)"},{"location":"resources/esiil_training/#part-1-travel-planning-and-reimbursement","text":"Learn how to manage finances and submit paperwork to the University.","title":"Part 1: Travel Planning and Reimbursement"},{"location":"resources/esiil_training/#part-2-hands-on-data-analysis-workflow","text":"Interactive session on constructing a data analysis pipeline using ESIIL/CyVerse tools. Practical exercises on data processing, analysis, and visualization techniques. Troubleshooting common issues and optimizing workflow efficiency.","title":"Part 2: Hands-on Data Analysis Workflow"},{"location":"resources/esiil_training/#part-3-wrap-up-and-project-planning","text":"Strategies for sustaining project momentum and managing long-term research goals. Planning for publication, data sharing, and broader impact. Final Q&A session to address any outstanding questions or concerns.","title":"Part 3: Wrap-up and Project Planning"},{"location":"resources/esiil_training/#conclusion-and-feedback","text":"Summary of key learnings from all sessions. Encouragement for participants to apply these skills in their respective projects. Collection of feedback for future training improvements.","title":"Conclusion and Feedback"},{"location":"resources/esiil_training/#additional-resources","text":"List of resources for further learning and exploration. Links to community forums or groups for ongoing support and collaboration.","title":"Additional Resources"},{"location":"resources/esiil_training/#roundtable-event-1-piteam-leads-discussion-2-hours","text":"A roundtable discussion for Principal Investigators and team leads. Sharing experiences, challenges, and strategies among group leaders. Fostering a collaborative network and problem-solving atmosphere.","title":"Roundtable Event 1: PI/Team Leads Discussion (2 Hours)"},{"location":"resources/esiil_training/#roundtable-event-2-technical-leads-office-hours-2-hours","text":"A roundtable and office hours session for technical leads. Ensuring a thorough understanding of the ESIIL/CyVerse cyberinfrastructure. Providing technical support and knowledge exchange.","title":"Roundtable Event 2: Technical Leads Office Hours (2 Hours)"},{"location":"resources/esiil_training/#conclusion-and-feedback_1","text":"Recap of key takeaways from the training sessions and roundtables. 
Collection of feedback for continuous improvement of the training program.","title":"Conclusion and Feedback"},{"location":"resources/esiil_training/#additional-resources_1","text":"Supplementary materials, reading lists, and links to online tutorials and documentation.","title":"Additional Resources"},{"location":"resources/first_meeting_notes/","text":"Primary Meeting 1 \u00b6 Day 1-5: Project Kickoff and Strategy \u00b6 Meeting Details \u00b6 Dates: Times: Location: Facilitator: Attendees \u00b6 List of attendees Daily Agenda \u00b6 Day 1: Setting the Stage \u00b6 Opening Remarks \u00b6 Welcoming speech and outline of the week's objectives. Project Overview \u00b6 Presentation of the project goals and significance. Theoretical Framework \u00b6 Discussion on the theoretical underpinnings of the project. Data Overview \u00b6 Review available data and any gaps that need addressing. Day 2-4: Deep Dives \u00b6 Daily Goals \u00b6 Outline specific goals for each day. Task Assignments \u00b6 Assign tasks and areas of responsibility to team members. Theory and Data Synthesis \u00b6 Host focused discussions on how theory will inform data analysis. Explore different methodological approaches and data integration strategies. Evening Social and Soft Work Sessions \u00b6 Casual gatherings to further discuss ideas and foster team bonding. Day 5: Roadmap and Closure \u00b6 Project Roadmap \u00b6 Draft a detailed plan of action for the project going forward. Responsibilities \u00b6 Confirm individual responsibilities and deadlines. Review and Feedback \u00b6 Reflect on the week's discussions and adjust the project plan as needed. Closing Remarks \u00b6 Summarize achievements and express appreciation for the team's efforts. Detailed Notes \u00b6 Day 1 Notes \u00b6 Summary of discussions, decisions, and key points. Day 2 Notes \u00b6 ... Day 3 Notes \u00b6 ... Day 4 Notes \u00b6 ... Day 5 Notes \u00b6 ... Action Items \u00b6 Specific task: Assigned to - Deadline Specific task: Assigned to - Deadline ... Reflections and Comments \u00b6 (Space for any additional thoughts, insights, or personal reflections on the meeting.) Next Steps \u00b6 Schedule for follow-up meetings or checkpoints. Outline of expected progress before the next primary meeting. 
Additional Documentation \u00b6 (Include or link to any additional documents, charts, or resources that were created or referenced during the meeting.)","title":"Primary Meeting 1"},{"location":"resources/first_meeting_notes/#primary-meeting-1","text":"","title":"Primary Meeting 1"},{"location":"resources/first_meeting_notes/#day-1-5-project-kickoff-and-strategy","text":"","title":"Day 1-5: Project Kickoff and Strategy"},{"location":"resources/first_meeting_notes/#meeting-details","text":"Dates: Times: Location: Facilitator:","title":"Meeting Details"},{"location":"resources/first_meeting_notes/#attendees","text":"List of attendees","title":"Attendees"},{"location":"resources/first_meeting_notes/#daily-agenda","text":"","title":"Daily Agenda"},{"location":"resources/first_meeting_notes/#day-1-setting-the-stage","text":"","title":"Day 1: Setting the Stage"},{"location":"resources/first_meeting_notes/#opening-remarks","text":"Welcoming speech and outline of the week's objectives.","title":"Opening Remarks"},{"location":"resources/first_meeting_notes/#project-overview","text":"Presentation of the project goals and significance.","title":"Project Overview"},{"location":"resources/first_meeting_notes/#theoretical-framework","text":"Discussion on the theoretical underpinnings of the project.","title":"Theoretical Framework"},{"location":"resources/first_meeting_notes/#data-overview","text":"Review available data and any gaps that need addressing.","title":"Data Overview"},{"location":"resources/first_meeting_notes/#day-2-4-deep-dives","text":"","title":"Day 2-4: Deep Dives"},{"location":"resources/first_meeting_notes/#daily-goals","text":"Outline specific goals for each day.","title":"Daily Goals"},{"location":"resources/first_meeting_notes/#task-assignments","text":"Assign tasks and areas of responsibility to team members.","title":"Task Assignments"},{"location":"resources/first_meeting_notes/#theory-and-data-synthesis","text":"Host focused discussions on how theory will inform data analysis. 
Explore different methodological approaches and data integration strategies.","title":"Theory and Data Synthesis"},{"location":"resources/first_meeting_notes/#evening-social-and-soft-work-sessions","text":"Casual gatherings to further discuss ideas and foster team bonding.","title":"Evening Social and Soft Work Sessions"},{"location":"resources/first_meeting_notes/#day-5-roadmap-and-closure","text":"","title":"Day 5: Roadmap and Closure"},{"location":"resources/first_meeting_notes/#project-roadmap","text":"Draft a detailed plan of action for the project going forward.","title":"Project Roadmap"},{"location":"resources/first_meeting_notes/#responsibilities","text":"Confirm individual responsibilities and deadlines.","title":"Responsibilities"},{"location":"resources/first_meeting_notes/#review-and-feedback","text":"Reflect on the week's discussions and adjust the project plan as needed.","title":"Review and Feedback"},{"location":"resources/first_meeting_notes/#closing-remarks","text":"Summarize achievements and express appreciation for the team's efforts.","title":"Closing Remarks"},{"location":"resources/first_meeting_notes/#detailed-notes","text":"","title":"Detailed Notes"},{"location":"resources/first_meeting_notes/#day-1-notes","text":"Summary of discussions, decisions, and key points.","title":"Day 1 Notes"},{"location":"resources/first_meeting_notes/#day-2-notes","text":"...","title":"Day 2 Notes"},{"location":"resources/first_meeting_notes/#day-3-notes","text":"...","title":"Day 3 Notes"},{"location":"resources/first_meeting_notes/#day-4-notes","text":"...","title":"Day 4 Notes"},{"location":"resources/first_meeting_notes/#day-5-notes","text":"...","title":"Day 5 Notes"},{"location":"resources/first_meeting_notes/#action-items","text":"Specific task: Assigned to - Deadline Specific task: Assigned to - Deadline ...","title":"Action Items"},{"location":"resources/first_meeting_notes/#reflections-and-comments","text":"(Space for any additional thoughts, insights, or personal reflections on the meeting.)","title":"Reflections and Comments"},{"location":"resources/first_meeting_notes/#next-steps","text":"Schedule for follow-up meetings or checkpoints. Outline of expected progress before the next primary meeting.","title":"Next Steps"},{"location":"resources/first_meeting_notes/#additional-documentation","text":"(Include or link to any additional documents, charts, or resources that were created or referenced during the meeting.)","title":"Additional Documentation"},{"location":"resources/github_basics/","text":"Github essentials \u00b6 I. Introduction (2 minutes) \u00b6 A. Brief overview of GitHub: \u00b6 GitHub is a web-based platform that provides version control and collaboration features using Git, a distributed version control system. It enables developers to work together on projects, track changes to code, and efficiently manage different versions of the project. GitHub is widely used in the software development industry and is an essential tool for collaborative projects and maintaining code quality. Image source: Artwork by @allison_horst B. Introduce GitHub Desktop and JupyterHub GitHub widget: \u00b6 GitHub Desktop is a graphical user interface (GUI) application that simplifies working with Git and GitHub by providing a more visual and intuitive way to manage repositories, branches, commits, and other Git features. 
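GitHub Desktop, and the JupyterHub widget introduced next, are graphical front ends over ordinary Git commands. Purely as an illustration of what those tools do under the hood (this is not part of the guide itself), the same identity setup and clone/commit/push cycle can be scripted; the repository URL, name, and email below are placeholders to replace with your own.

```python
# Illustration only: the plain Git operations that GitHub Desktop and the
# JupyterLab Git widget wrap in a GUI. Assumes git is installed; the URL,
# name, and email are placeholders.
import subprocess

def git(*args, cwd=None):
    subprocess.run(["git", *args], cwd=cwd, check=True)

# Identity used for commit messages (what GitHub Desktop asks for during setup)
git("config", "--global", "user.name", "Your Name")
git("config", "--global", "user.email", "you@example.com")

# Clone a repository, record a change, and push it back
git("clone", "https://github.com/your-org/your-repo.git")
git("add", "-A", cwd="your-repo")
git("commit", "-m", "Describe your change here", cwd="your-repo")
git("push", cwd="your-repo")
```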
JupyterHub GitHub widget, on the other hand, is a built-in widget that integrates Git and GitHub functionality directly into Jupyter notebooks, allowing users to perform version control and collaboration tasks within the Jupyter environment. Both tools help streamline the process of working with GitHub and make it more accessible to users with varying levels of experience with Git and version control. 1. Download GitHub Desktop \u00b6 Step 1: Download GitHub Desktop \u00b6 Go to the GitHub Desktop download page: https://desktop.github.com/ Click on the \u201cDownload for Windows\u201d or \u201cDownload for macOS\u201d button, depending on your operating system. The download should start automatically. Step 2: Install GitHub Desktop \u00b6 For Windows: Locate the downloaded installer file (usually in the Downloads folder) and double-click on it to run the installer. Follow the installation instructions that appear on the screen, accepting the default settings or customizing them as desired. Once the installation is complete, GitHub Desktop will launch automatically. For macOS: Locate the downloaded .zip file (usually in the Downloads folder) and double-click on it to extract the GitHub Desktop application. Drag the extracted \u201cGitHub Desktop\u201d application into the \u201cApplications\u201d folder. Open the \u201cApplications\u201d folder and double-click on \u201cGitHub Desktop\u201d to launch the application. Step 3: Set up GitHub Desktop \u00b6 When GitHub Desktop launches for the first time, you will be prompted to sign in with your GitHub account. If you don\u2019t have one, you can create one at https://github.com/join . Enter your GitHub username (or email) and password, and click on \u201cSign in.\u201d You will then be prompted to configure Git. Enter your name and email address, which will be used for your commit messages. Click \u201cContinue\u201d when you\u2019re done. Choose whether you want to submit usage data to help improve GitHub Desktop. Click \u201cFinish\u201d to complete the setup. Now, you have successfully installed and set up GitHub Desktop. You can start using it to clone repositories, make changes, commit, and sync with the remote repositories on GitHub. 1. Download GitHub for JupyterHub cloud service \u00b6 Step 1: Accessing JupyterHub on the cloud \u00b6 Visit the JupyterHub cloud service you want to use (e.g., Binder, Google Colab, or a custom JupyterHub deployment provided by your organization). Sign in with your credentials or authenticate using a third-party service if required. Step 2: Launch a new Jupyter Notebook or open an existing one \u00b6 Click on the \u201cNew\u201d button (usually located in the top right corner) and select \u201cPython\u201d to create a new Jupyter Notebook or open an existing one from the file browser. Once the notebook is open, you will see the Jupyter Notebook interface with the familiar cells for writing and executing code. Step 3: Install and enable the JupyterLab Git extension \u00b6 In your Jupyter Notebook, create a new code cell and run the following command to install the JupyterLab Git extension: !pip install jupyterlab-git Restart the Jupyter Notebook server for the changes to take effect. Step 4: Using the JupyterHub GitHub widget \u00b6 In the Jupyter Notebook interface, you should now see a Git icon on the left sidebar. Click on it to open the GitHub widget. To clone a repository, click on the \u201c+\u201d icon in the GitHub widget and enter the repository URL. 
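For readers who prefer a terminal to the widget, the same setup can be reproduced from a JupyterHub terminal. This is a minimal sketch under the assumption that pip and git are available in your session; the repository URL is a placeholder, not a real project.

```bash
# Install the JupyterLab Git extension into the active environment
pip install --upgrade jupyterlab-git

# Restart the Jupyter server so the extension loads, then clone a repository
# (placeholder URL -- substitute your own)
git clone https://github.com/username/repository.git
cd repository

# Confirm which remote the widget (or git) will pull from and push to
git remote -v
```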
This will clone the repository into your JupyterHub workspace. You can now navigate through the cloned repository, make changes, and use the GitHub widget to stage, commit, and push your changes back to the remote repository. To create and manage branches, use the branch icon in the GitHub widget. You can create new branches, switch between branches, and merge branches using this interface. To sync your local repository with the remote repository, use the \u201cPull\u201d and \u201cPush\u201d buttons in the GitHub widget. Now, you know how to access and use the JupyterHub GitHub widget running on the cloud. This allows you to work with Git and GitHub directly from your Jupyter Notebook interface, streamlining your workflow and making collaboration easier. C. GitHub in Rstudio: \u00b6 Integrating GitHub with RStudio allows users to manage their Git repositories and collaborate on projects directly within the RStudio environment. It offers similar functionality to GitHub Desktop but caters specifically to R users working within RStudio. By configuring RStudio to work with Git, creating or opening RStudio projects, and linking projects to GitHub repositories, users can enjoy a seamless workflow for version control and collaboration. RStudio\u2019s Git pane enables users to stage, commit, and push changes to remote repositories, as well as manage branches and sync local repositories with remote ones, providing a comprehensive solution for R developers working with GitHub. Step 1: Install Git \u00b6 Before integrating GitHub with RStudio, you need to have Git installed on your computer. Visit the official Git website ( https://git-scm.com/ ) to download and install the latest version of Git for your operating system. Step 2: Configure RStudio to work with Git \u00b6 Open RStudio. Go to \u201cTools\u201d > \u201cGlobal Options\u201d in the top menu. In the \u201cGlobal Options\u201d window, click on the \u201cGit/SVN\u201d tab. Check that the \u201cGit executable\u201d field is pointing to the correct location of the installed Git. If not, click \u201cBrowse\u201d and navigate to the location of the Git executable file (usually found in the \u201cbin\u201d folder of the Git installation directory). Click \u201cOK\u201d to save the changes. Step 3: Create or open an RStudio project \u00b6 To create a new RStudio project, go to \u201cFile\u201d > \u201cNew Project\u201d in the top menu. You can either create a new directory or choose an existing one for your project. To open an existing RStudio project, go to \u201cFile\u201d > \u201cOpen Project\u201d and navigate to the project\u2019s \u201c.Rproj\u201d file. Step 4: Link your RStudio project to a GitHub repository \u00b6 In the RStudio project, go to the \u201cTools\u201d menu and select \u201cVersion Control\u201d > \u201cProject Setup.\u201d In the \u201cProject Setup\u201d window, select \u201cGit\u201d as the version control system and click \u201cOK.\u201d A new \u201c.git\u201d folder will be created in your project directory, initializing it as a Git repository. Commit any changes you have made so far by clicking on the \u201cCommit\u201d button in the \u201cGit\u201d pane in RStudio. To link your local repository to a remote GitHub repository, go to your GitHub account and create a new repository. Copy the remote repository\u2019s URL (e.g., \u201c https://github.com/username/repository.git \u201d). 
In RStudio, open the \u201cShell\u201d by going to \u201cTools\u201d > \u201cShell.\u201d In the shell, run the following command to add the remote repository: git remote add origin https://github.com/username/repository.git Replace the URL with the one you copied from your GitHub repository. Push your changes to the remote repository by running the following command in the shell: git push -u origin master Now, your RStudio project is linked to a GitHub repository. You can use the \u201cGit\u201d pane in RStudio to stage, commit, and push changes to the remote repository, as well as manage branches and sync your local repository with the remote one. By integrating GitHub with RStudio, you can streamline your workflow, collaborate more effectively with your team, and manage your Git repositories directly from the RStudio interface. II. GitHub Basics (4 minutes) \u00b6 A. Repository: \u00b6 A repository, often abbreviated as \u201crepo,\u201d is the fundamental building block of GitHub. It is a storage space for your project files, including the code, documentation, and other related resources. Each repository also contains the complete history of all changes made to the project files, which is crucial for effective version control. Repositories can be public, allowing anyone to access and contribute, or private, restricting access to specific collaborators. B. Fork and Clone: \u00b6 Forking and cloning are two essential operations for working with repositories on GitHub. Forking creates a personal copy of someone else\u2019s repository under your GitHub account, enabling you to make changes to the project without affecting the original repo. Cloning, on the other hand, is the process of downloading a remote repository to your local machine for offline development. In GitHub Desktop, you can clone a repository by selecting \u201cClone a repository from the Internet\u201d and entering the repository URL. In JupyterHub GitHub widget, you can clone a repository by entering the repo URL in the \u201cClone Repository\u201d section of the widget. C. Branches: \u00b6 Branches are a critical aspect of Git version control, as they allow you to create multiple parallel versions of your project within a single repository. This is particularly useful when working on new features or bug fixes, as it prevents changes from interfering with the main (or \u201cmaster\u201d) branch until they are ready to be merged. Creating a new branch in GitHub Desktop can be done by clicking the \u201cCurrent Branch\u201d dropdown and selecting \u201cNew Branch.\u201d In JupyterHub GitHub widget, you can create a new branch by clicking the \u201cNew Branch\u201d button in the \u201cBranches\u201d section of the widget. D. Replace \u2018master\u2019 with \u2018main\u2019: \u00b6 In recent years, there has been a growing awareness of the importance of inclusive language in technology. One such example is the use of the term \u201cmaster\u201d in the context of the default branch in a GitHub repository. The term \u201cmaster\u201d has historical connections to the \u201cmaster/slave\u201d file structure, which evokes an unsavory colonial past associated with slavery. In light of this, many developers and organizations have begun to replace the term \u201cmaster\u201d with more neutral terms, such as \u201cmain.\u201d We encourage you to follow this practice and change the default branch name in your repositories from \u201cmaster\u201d to \u201cmain\u201d or another suitable alternative. 
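To make the rename concrete, here is a minimal command-line sketch; it assumes your GitHub remote is named origin and that you switch the default branch to main in the repository settings on GitHub before deleting the old branch.

```bash
# Rename the local default branch from "master" to "main"
git branch -m master main

# Publish "main" to GitHub and set it as the tracking branch
git push -u origin main

# After changing the default branch to "main" in the repository settings on GitHub,
# the old remote branch can be removed
git push origin --delete master
```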
This small change can help promote a more inclusive and welcoming environment within the technology community. III. Collaboration and Version Control (5 minutes) \u00b6 A. Commits: \u00b6 Commits are snapshots of your project\u2019s changes at a specific point in time, serving as the fundamental building blocks of Git\u2019s version control system. Commits make it possible to track changes, revert to previous versions, and collaborate with others. In GitHub Desktop, you can make a commit by staging the changes you want to include, adding a descriptive commit message, and clicking \u201cCommit to [branch_name].\u201d In JupyterHub GitHub widget, you can create a commit by selecting the files with changes, entering a commit message, and clicking the \u201cCommit\u201d button. B. Push: \u00b6 In GitHub, \u201cpush\u201d is a fundamental operation in the version control process that transfers commits from your local repository to a remote repository, such as the one hosted on GitHub. When you push changes, you synchronize the remote repository with the latest updates made to your local repository, making those changes accessible to other collaborators working on the same project. This operation ensures that the remote repository reflects the most recent state of your work and allows your team members to stay up to date with your changes. Pushing is an essential step in distributed version control systems like Git, as it promotes efficient collaboration among multiple contributors and provides a centralized location for tracking the project\u2019s history and progress. In GitHub, the concepts of \u201ccommit\u201d and \u201cpush\u201d represent two distinct steps in the version control process. A \u201ccommit\u201d is the action of saving changes to your local repository. When you commit changes, you create a snapshot of your work, accompanied by a unique identifier and an optional descriptive message. Commits allow you to track the progress of your work over time and make it easy to revert to a previous state if necessary. On the other hand, \u201cpush\u201d is the action of transferring your local commits to a remote repository, such as the one hosted on GitHub. Pushing makes your changes accessible to others collaborating on the same project and ensures that the remote repository stays up to date with your local repository. In summary, committing saves changes locally, while pushing synchronizes those changes with a remote repository, allowing for seamless collaboration among multiple contributors. C. Pull Requests: \u00b6 Pull requests are a collaboration feature on GitHub that enables developers to propose changes to a repository, discuss those changes, and ultimately merge them into the main branch. To create a pull request, you must first push your changes to a branch on your fork of the repository. Then, using either GitHub Desktop or JupyterHub GitHub widget, you can navigate to the original repository, click the \u201cPull Request\u201d tab, and create a new pull request. After the pull request is reviewed and approved, it can be merged into the main branch. D. Merging and Resolving Conflicts: \u00b6 Merging is the process of combining changes from one branch into another. This is typically done when a feature or bugfix has been completed and is ready to be integrated into the main branch. Conflicts can arise during the merging process if the same lines of code have been modified in both branches. 
To resolve conflicts, you must manually review the changes and decide which version to keep. In GitHub Desktop, you can merge branches by selecting the target branch and choosing \u201cMerge into Current Branch.\u201d Conflicts will be highlighted, and you can edit the files to resolve them before committing the changes. In JupyterHub GitHub widget, you can merge branches by selecting the target branch in the \u201cBranches\u201d section and clicking the \u201cMerge\u201d button. If conflicts occur, the widget will prompt you to resolve them before completing the merge. IV. Additional Features (2 minutes) \u00b6 A. Issues and Project Management: \u00b6 Issues are a powerful feature in GitHub that allows developers to track and manage bugs, enhancements, and other tasks within a project. Issues can be assigned to collaborators, labeled for easy organization, and linked to specific commits or pull requests. They provide a centralized location for discussing and addressing project-related concerns, fostering collaboration and transparent communication among team members. Using issues effectively can significantly improve the overall management and organization of your projects. B. GitHub Pages: \u00b6 GitHub Pages is a service offered by GitHub that allows you to host static websites directly from a repository. By creating a new branch named \u201cgh-pages\u201d in your repository and adding the necessary files (HTML, CSS, JavaScript, etc.), GitHub will automatically build and deploy your website to a publicly accessible URL. This is particularly useful for showcasing project documentation, creating personal portfolios, or hosting project demos. With GitHub Pages, you can take advantage of the version control and collaboration features of GitHub while easily sharing your work with others. V. Conclusion (2 minutes) \u00b6 A. Recap of the essentials of GitHub: \u00b6 In this brief introduction, we have covered the essentials of GitHub, including the basics of repositories, forking, cloning, branching, commits, pull requests, merging, and resolving conflicts. We have also discussed additional features like issues for project management and GitHub Pages for hosting websites directly from a repository. B. Encourage further exploration and learning: \u00b6 While this introduction provides a solid foundation for understanding and using GitHub, there is still much more to learn and explore. As you continue to use GitHub in your projects, you will discover new features and workflows that can enhance your productivity and collaboration. We encourage you to dive deeper into the platform and experiment with different tools and techniques. C. Share resources for learning more about GitHub: \u00b6 There are many resources available for learning more about GitHub and expanding your skills. Some popular resources include GitHub Guides ( https://guides.github.com/ ), which offers a collection of tutorials and best practices, the official GitHub documentation ( https://docs.github.com/ ), and various online tutorials and courses. By engaging with these resources and participating in the GitHub community, you can further develop your understanding of the platform and become a more proficient user.","title":"Github"},
{"location":"resources/github_basics/#github-essentials","text":"","title":"Github essentials"},{"location":"resources/github_basics/#i-introduction-2-minutes","text":"","title":"I. Introduction (2 minutes)"},{"location":"resources/github_basics/#a-brief-overview-of-github","text":"GitHub is a web-based platform that provides version control and collaboration features using Git, a distributed version control system. It enables developers to work together on projects, track changes to code, and efficiently manage different versions of the project. GitHub is widely used in the software development industry and is an essential tool for collaborative projects and maintaining code quality. Image source: Artwork by @allison_horst","title":"A. Brief overview of GitHub:"},{"location":"resources/github_basics/#b-introduce-github-desktop-and-jupyterhub-github-widget","text":"GitHub Desktop is a graphical user interface (GUI) application that simplifies working with Git and GitHub by providing a more visual and intuitive way to manage repositories, branches, commits, and other Git features. JupyterHub GitHub widget, on the other hand, is a built-in widget that integrates Git and GitHub functionality directly into Jupyter notebooks, allowing users to perform version control and collaboration tasks within the Jupyter environment. Both tools help streamline the process of working with GitHub and make it more accessible to users with varying levels of experience with Git and version control.","title":"B. Introduce GitHub Desktop and JupyterHub GitHub widget:"},{"location":"resources/github_basics/#1-download-github-desktop","text":"","title":"1. Download GitHub Desktop"},{"location":"resources/github_basics/#step-1-download-github-desktop","text":"Go to the GitHub Desktop download page: https://desktop.github.com/ Click on the \u201cDownload for Windows\u201d or \u201cDownload for macOS\u201d button, depending on your operating system. The download should start automatically.","title":"Step 1: Download GitHub Desktop"},{"location":"resources/github_basics/#step-2-install-github-desktop","text":"For Windows: Locate the downloaded installer file (usually in the Downloads folder) and double-click on it to run the installer. Follow the installation instructions that appear on the screen, accepting the default settings or customizing them as desired.
Once the installation is complete, GitHub Desktop will launch automatically. For macOS: Locate the downloaded .zip file (usually in the Downloads folder) and double-click on it to extract the GitHub Desktop application. Drag the extracted \u201cGitHub Desktop\u201d application into the \u201cApplications\u201d folder. Open the \u201cApplications\u201d folder and double-click on \u201cGitHub Desktop\u201d to launch the application.","title":"Step 2: Install GitHub Desktop"},{"location":"resources/github_basics/#step-3-set-up-github-desktop","text":"When GitHub Desktop launches for the first time, you will be prompted to sign in with your GitHub account. If you don\u2019t have one, you can create one at https://github.com/join . Enter your GitHub username (or email) and password, and click on \u201cSign in.\u201d You will then be prompted to configure Git. Enter your name and email address, which will be used for your commit messages. Click \u201cContinue\u201d when you\u2019re done. Choose whether you want to submit usage data to help improve GitHub Desktop. Click \u201cFinish\u201d to complete the setup. Now, you have successfully installed and set up GitHub Desktop. You can start using it to clone repositories, make changes, commit, and sync with the remote repositories on GitHub.","title":"Step 3: Set up GitHub Desktop"},{"location":"resources/github_basics/#1-download-github-for-jupyterhub-cloud-service","text":"","title":"1. Download GitHub for JupyterHub cloud service"},{"location":"resources/github_basics/#step-1-accessing-jupyterhub-on-the-cloud","text":"Visit the JupyterHub cloud service you want to use (e.g., Binder, Google Colab, or a custom JupyterHub deployment provided by your organization). Sign in with your credentials or authenticate using a third-party service if required.","title":"Step 1: Accessing JupyterHub on the cloud"},{"location":"resources/github_basics/#step-2-launch-a-new-jupyter-notebook-or-open-an-existing-one","text":"Click on the \u201cNew\u201d button (usually located in the top right corner) and select \u201cPython\u201d to create a new Jupyter Notebook or open an existing one from the file browser. Once the notebook is open, you will see the Jupyter Notebook interface with the familiar cells for writing and executing code.","title":"Step 2: Launch a new Jupyter Notebook or open an existing one"},{"location":"resources/github_basics/#step-3-install-and-enable-the-jupyterlab-git-extension","text":"In your Jupyter Notebook, create a new code cell and run the following command to install the JupyterLab Git extension: !pip install jupyterlab-git Restart the Jupyter Notebook server for the changes to take effect.","title":"Step 3: Install and enable the JupyterLab Git extension"},{"location":"resources/github_basics/#step-4-using-the-jupyterhub-github-widget","text":"In the Jupyter Notebook interface, you should now see a Git icon on the left sidebar. Click on it to open the GitHub widget. To clone a repository, click on the \u201c+\u201d icon in the GitHub widget and enter the repository URL. This will clone the repository into your JupyterHub workspace. You can now navigate through the cloned repository, make changes, and use the GitHub widget to stage, commit, and push your changes back to the remote repository. To create and manage branches, use the branch icon in the GitHub widget. You can create new branches, switch between branches, and merge branches using this interface. 
To sync your local repository with the remote repository, use the \u201cPull\u201d and \u201cPush\u201d buttons in the GitHub widget. Now, you know how to access and use the JupyterHub GitHub widget running on the cloud. This allows you to work with Git and GitHub directly from your Jupyter Notebook interface, streamlining your workflow and making collaboration easier.","title":"Step 4: Using the JupyterHub GitHub widget"},{"location":"resources/github_basics/#c-github-in-rstudio","text":"Integrating GitHub with RStudio allows users to manage their Git repositories and collaborate on projects directly within the RStudio environment. It offers similar functionality to GitHub Desktop but caters specifically to R users working within RStudio. By configuring RStudio to work with Git, creating or opening RStudio projects, and linking projects to GitHub repositories, users can enjoy a seamless workflow for version control and collaboration. RStudio\u2019s Git pane enables users to stage, commit, and push changes to remote repositories, as well as manage branches and sync local repositories with remote ones, providing a comprehensive solution for R developers working with GitHub.","title":"C. GitHub in Rstudio:"},{"location":"resources/github_basics/#step-1-install-git","text":"Before integrating GitHub with RStudio, you need to have Git installed on your computer. Visit the official Git website ( https://git-scm.com/ ) to download and install the latest version of Git for your operating system.","title":"Step 1: Install Git"},{"location":"resources/github_basics/#step-2-configure-rstudio-to-work-with-git","text":"Open RStudio. Go to \u201cTools\u201d > \u201cGlobal Options\u201d in the top menu. In the \u201cGlobal Options\u201d window, click on the \u201cGit/SVN\u201d tab. Check that the \u201cGit executable\u201d field is pointing to the correct location of the installed Git. If not, click \u201cBrowse\u201d and navigate to the location of the Git executable file (usually found in the \u201cbin\u201d folder of the Git installation directory). Click \u201cOK\u201d to save the changes.","title":"Step 2: Configure RStudio to work with Git"},{"location":"resources/github_basics/#step-3-create-or-open-an-rstudio-project","text":"To create a new RStudio project, go to \u201cFile\u201d > \u201cNew Project\u201d in the top menu. You can either create a new directory or choose an existing one for your project. To open an existing RStudio project, go to \u201cFile\u201d > \u201cOpen Project\u201d and navigate to the project\u2019s \u201c.Rproj\u201d file.","title":"Step 3: Create or open an RStudio project"},{"location":"resources/github_basics/#step-4-link-your-rstudio-project-to-a-github-repository","text":"In the RStudio project, go to the \u201cTools\u201d menu and select \u201cVersion Control\u201d > \u201cProject Setup.\u201d In the \u201cProject Setup\u201d window, select \u201cGit\u201d as the version control system and click \u201cOK.\u201d A new \u201c.git\u201d folder will be created in your project directory, initializing it as a Git repository. Commit any changes you have made so far by clicking on the \u201cCommit\u201d button in the \u201cGit\u201d pane in RStudio. To link your local repository to a remote GitHub repository, go to your GitHub account and create a new repository. Copy the remote repository\u2019s URL (e.g., \u201c https://github.com/username/repository.git \u201d). 
In RStudio, open the \u201cShell\u201d by going to \u201cTools\u201d > \u201cShell.\u201d In the shell, run the following command to add the remote repository: git remote add origin https://github.com/username/repository.git Replace the URL with the one you copied from your GitHub repository. Push your changes to the remote repository by running the following command in the shell: git push -u origin master Now, your RStudio project is linked to a GitHub repository. You can use the \u201cGit\u201d pane in RStudio to stage, commit, and push changes to the remote repository, as well as manage branches and sync your local repository with the remote one. By integrating GitHub with RStudio, you can streamline your workflow, collaborate more effectively with your team, and manage your Git repositories directly from the RStudio interface.","title":"Step 4: Link your RStudio project to a GitHub repository"},{"location":"resources/github_basics/#ii-github-basics-4-minutes","text":"","title":"II. GitHub Basics (4 minutes)"},{"location":"resources/github_basics/#a-repository","text":"A repository, often abbreviated as \u201crepo,\u201d is the fundamental building block of GitHub. It is a storage space for your project files, including the code, documentation, and other related resources. Each repository also contains the complete history of all changes made to the project files, which is crucial for effective version control. Repositories can be public, allowing anyone to access and contribute, or private, restricting access to specific collaborators.","title":"A. Repository:"},{"location":"resources/github_basics/#b-fork-and-clone","text":"Forking and cloning are two essential operations for working with repositories on GitHub. Forking creates a personal copy of someone else\u2019s repository under your GitHub account, enabling you to make changes to the project without affecting the original repo. Cloning, on the other hand, is the process of downloading a remote repository to your local machine for offline development. In GitHub Desktop, you can clone a repository by selecting \u201cClone a repository from the Internet\u201d and entering the repository URL. In JupyterHub GitHub widget, you can clone a repository by entering the repo URL in the \u201cClone Repository\u201d section of the widget.","title":"B. Fork and Clone:"},{"location":"resources/github_basics/#c-branches","text":"Branches are a critical aspect of Git version control, as they allow you to create multiple parallel versions of your project within a single repository. This is particularly useful when working on new features or bug fixes, as it prevents changes from interfering with the main (or \u201cmaster\u201d) branch until they are ready to be merged. Creating a new branch in GitHub Desktop can be done by clicking the \u201cCurrent Branch\u201d dropdown and selecting \u201cNew Branch.\u201d In JupyterHub GitHub widget, you can create a new branch by clicking the \u201cNew Branch\u201d button in the \u201cBranches\u201d section of the widget.","title":"C. Branches:"},{"location":"resources/github_basics/#d-replace-master-with-main","text":"In recent years, there has been a growing awareness of the importance of inclusive language in technology. One such example is the use of the term \u201cmaster\u201d in the context of the default branch in a GitHub repository. The term \u201cmaster\u201d has historical connections to the \u201cmaster/slave\u201d file structure, which evokes an unsavory colonial past associated with slavery. 
In light of this, many developers and organizations have begun to replace the term \u201cmaster\u201d with more neutral terms, such as \u201cmain.\u201d We encourage you to follow this practice and change the default branch name in your repositories from \u201cmaster\u201d to \u201cmain\u201d or another suitable alternative. This small change can help promote a more inclusive and welcoming environment within the technology community.","title":"D. Replace \u2018master\u2019 with \u2018main\u2019:"},{"location":"resources/github_basics/#iii-collaboration-and-version-control-5-minutes","text":"","title":"III. Collaboration and Version Control (5 minutes)"},{"location":"resources/github_basics/#a-commits","text":"Commits are snapshots of your project\u2019s changes at a specific point in time, serving as the fundamental building blocks of Git\u2019s version control system. Commits make it possible to track changes, revert to previous versions, and collaborate with others. In GitHub Desktop, you can make a commit by staging the changes you want to include, adding a descriptive commit message, and clicking \u201cCommit to [branch_name].\u201d In JupyterHub GitHub widget, you can create a commit by selecting the files with changes, entering a commit message, and clicking the \u201cCommit\u201d button.","title":"A. Commits:"},{"location":"resources/github_basics/#b-push","text":"In GitHub, \u201cpush\u201d is a fundamental operation in the version control process that transfers commits from your local repository to a remote repository, such as the one hosted on GitHub. When you push changes, you synchronize the remote repository with the latest updates made to your local repository, making those changes accessible to other collaborators working on the same project. This operation ensures that the remote repository reflects the most recent state of your work and allows your team members to stay up to date with your changes. Pushing is an essential step in distributed version control systems like Git, as it promotes efficient collaboration among multiple contributors and provides a centralized location for tracking the project\u2019s history and progress. In GitHub, the concepts of \u201ccommit\u201d and \u201cpush\u201d represent two distinct steps in the version control process. A \u201ccommit\u201d is the action of saving changes to your local repository. When you commit changes, you create a snapshot of your work, accompanied by a unique identifier and an optional descriptive message. Commits allow you to track the progress of your work over time and make it easy to revert to a previous state if necessary. On the other hand, \u201cpush\u201d is the action of transferring your local commits to a remote repository, such as the one hosted on GitHub. Pushing makes your changes accessible to others collaborating on the same project and ensures that the remote repository stays up to date with your local repository. In summary, committing saves changes locally, while pushing synchronizes those changes with a remote repository, allowing for seamless collaboration among multiple contributors.","title":"B. Push:"},{"location":"resources/github_basics/#c-pull-requests","text":"Pull requests are a collaboration feature on GitHub that enables developers to propose changes to a repository, discuss those changes, and ultimately merge them into the main branch. To create a pull request, you must first push your changes to a branch on your fork of the repository. 
Then, using either GitHub Desktop or JupyterHub GitHub widget, you can navigate to the original repository, click the \u201cPull Request\u201d tab, and create a new pull request. After the pull request is reviewed and approved, it can be merged into the main branch.","title":"C. Pull Requests:"},{"location":"resources/github_basics/#d-merging-and-resolving-conflicts","text":"Merging is the process of combining changes from one branch into another. This is typically done when a feature or bugfix has been completed and is ready to be integrated into the main branch. Conflicts can arise during the merging process if the same lines of code have been modified in both branches. To resolve conflicts, you must manually review the changes and decide which version to keep. In GitHub Desktop, you can merge branches by selecting the target branch and choosing \u201cMerge into Current Branch.\u201d Conflicts will be highlighted, and you can edit the files to resolve them before committing the changes. In JupyterHub GitHub widget, you can merge branches by selecting the target branch in the \u201cBranches\u201d section and clicking the \u201cMerge\u201d button. If conflicts occur, the widget will prompt you to resolve them before completing the merge.","title":"D. Merging and Resolving Conflicts:"},{"location":"resources/github_basics/#iv-additional-features-2-minutes","text":"","title":"IV. Additional Features (2 minutes)"},{"location":"resources/github_basics/#a-issues-and-project-management","text":"Issues are a powerful feature in GitHub that allows developers to track and manage bugs, enhancements, and other tasks within a project. Issues can be assigned to collaborators, labeled for easy organization, and linked to specific commits or pull requests. They provide a centralized location for discussing and addressing project-related concerns, fostering collaboration and transparent communication among team members. Using issues effectively can significantly improve the overall management and organization of your projects.","title":"A. Issues and Project Management:"},{"location":"resources/github_basics/#b-github-pages","text":"GitHub Pages is a service offered by GitHub that allows you to host static websites directly from a repository. By creating a new branch named \u201cgh-pages\u201d in your repository and adding the necessary files (HTML, CSS, JavaScript, etc.), GitHub will automatically build and deploy your website to a publicly accessible URL. This is particularly useful for showcasing project documentation, creating personal portfolios, or hosting project demos. With GitHub Pages, you can take advantage of the version control and collaboration features of GitHub while easily sharing your work with others.","title":"B. GitHub Pages:"},{"location":"resources/github_basics/#v-conclusion-2-minutes","text":"","title":"V. Conclusion (2 minutes)"},{"location":"resources/github_basics/#a-recap-of-the-essentials-of-github","text":"In this brief introduction, we have covered the essentials of GitHub, including the basics of repositories, forking, cloning, branching, commits, pull requests, merging, and resolving conflicts. We have also discussed additional features like issues for project management and GitHub Pages for hosting websites directly from a repository.","title":"A. 
Recap of the essentials of GitHub:"},{"location":"resources/github_basics/#b-encourage-further-exploration-and-learning","text":"While this introduction provides a solid foundation for understanding and using GitHub, there is still much more to learn and explore. As you continue to use GitHub in your projects, you will discover new features and workflows that can enhance your productivity and collaboration. We encourage you to dive deeper into the platform and experiment with different tools and techniques.","title":"B. Encourage further exploration and learning:"},{"location":"resources/github_basics/#c-share-resources-for-learning-more-about-github","text":"There are many resources available for learning more about GitHub and expanding your skills. Some popular resources include GitHub Guides ( https://guides.github.com/ ), which offers a collection of tutorials and best practices, the official GitHub documentation ( https://docs.github.com/ ), and various online tutorials and courses. By engaging with these resources and participating in the GitHub community, you can further develop your understanding of the platform and become a more proficient user.","title":"C. Share resources for learning more about GitHub:"},{"location":"resources/manuscript/","text":"Manuscript Title \u00b6 Authors \u00b6 Author 1, Affiliation Author 2, Affiliation ... Abstract \u00b6 A brief summary of the research, its objectives, main findings, and conclusions. Introduction \u00b6 Background information and context setting for the research. Statement of the problem and research objectives. Overview of the methodology and approach. Literature Review \u00b6 Discussion of relevant previous work and how this research contributes to the field.
Methodology \u00b6 Detailed description of the research methodology. Explanation of data collection and analysis techniques. Justification for methodological choices. Results \u00b6 Presentation of the research findings. Use of tables, graphs, and figures to illustrate key points. Analysis and interpretation of the results. Discussion \u00b6 Discussion of the implications of the findings. Comparison with previous research in the field. Consideration of the limitations of the study. Conclusion \u00b6 Summary of the main findings. Reflection on the research's significance and potential impact. Suggestions for future research directions. Acknowledgements \u00b6 Acknowledgement of any assistance, funding, or contributions from others. References \u00b6 Bibliographic details of the cited works. Use a consistent citation style throughout. Appendices \u00b6 Additional material that supports the manuscript but is too detailed for the main sections.","title":"Manuscript Title"},{"location":"resources/manuscript/#manuscript-title","text":"","title":"Manuscript Title"},{"location":"resources/manuscript/#authors","text":"Author 1, Affiliation Author 2, Affiliation ...","title":"Authors"},{"location":"resources/manuscript/#abstract","text":"A brief summary of the research, its objectives, main findings, and conclusions.","title":"Abstract"},{"location":"resources/manuscript/#introduction","text":"Background information and context setting for the research. Statement of the problem and research objectives. Overview of the methodology and approach.","title":"Introduction"},{"location":"resources/manuscript/#literature-review","text":"Discussion of relevant previous work and how this research contributes to the field.","title":"Literature Review"},{"location":"resources/manuscript/#methodology","text":"Detailed description of the research methodology. Explanation of data collection and analysis techniques. Justification for methodological choices.","title":"Methodology"},{"location":"resources/manuscript/#results","text":"Presentation of the research findings. Use of tables, graphs, and figures to illustrate key points. Analysis and interpretation of the results.","title":"Results"},{"location":"resources/manuscript/#discussion","text":"Discussion of the implications of the findings. Comparison with previous research in the field. Consideration of the limitations of the study.","title":"Discussion"},{"location":"resources/manuscript/#conclusion","text":"Summary of the main findings. Reflection on the research's significance and potential impact. Suggestions for future research directions.","title":"Conclusion"},{"location":"resources/manuscript/#acknowledgements","text":"Acknowledgement of any assistance, funding, or contributions from others.","title":"Acknowledgements"},{"location":"resources/manuscript/#references","text":"Bibliographic details of the cited works. Use a consistent citation style throughout.","title":"References"},{"location":"resources/manuscript/#appendices","text":"Additional material that supports the manuscript but is too detailed for the main sections.","title":"Appendices"},{"location":"resources/markdown_basics/","text":"Markdown for the Modern Researcher at ESIIL \u00b6 Join us on a HackMD page to practice Markdown Section 1: Mastering Markdown Syntax \u00b6 1. Fundamentals of Text Formatting \u00b6 Headings : Use # for different levels of headings. 
Heading Level 1 \u00b6 Heading Level 2 \u00b6 Heading Level 3 \u00b6 Lists : Bulleted lists use asterisks, numbers for ordered lists. Item 1 Item 2 Subitem 2.1 Subitem 2.2 First item Second item Bold and Italics : Use asterisks or underscores. Bold Text Italic Text 2. Advanced Structures \u00b6 Tables : Create tables using dashes and pipes. Header 1 Header 2 Header 3 Row 1 Data Data Row 2 Data Data Add a \":\" to change text justification. Here the : is added on the right of the dashes for right justification. | Header 1 | Header 2 | Header 3 | |---------:|--------- |----------| | Row 1 | Data | Data | | Row 2 | Data | Data | (Word-search puzzle grid rendered with inline HTML.) If you hit the boundaries of Markdown's capabilities, you can start to add HTML directly. Remember, this entire exercise is to translate to HTML. Sudoku Puzzle Fill in the blank cells with numbers from 1 to 9, such that each row, column, and 3x3 subgrid contains all the numbers from 1 to 9 without repetition. (Sudoku puzzle and solution grids rendered as HTML tables.) Blockquotes : Use > for blockquotes. This is a blockquote. It can span multiple lines. 3. Integrating Multimedia \u00b6 Images : Add images using the format ![alt text](image_url) . Videos : Embed videos using HTML in Markdown. 4. Diagrams with Mermaid \u00b6 Flowcharts : graph TD A[Start] --> B[Analyze Data] B --> C{Is Data Large?} C -->|Yes| D[Apply Big Data Solutions] C -->|No| E[Use Traditional Methods] D --> F[Machine Learning] E --> G[Statistical Analysis] F --> H{Model Accurate?} G --> I[Report Results] H -->|Yes| J[Deploy Model] H -->|No| K[Refine Model] J --> L[Monitor Performance] K --> F L --> M[End: Success] I --> N[End: Report Generated] style A fill:#f9f,stroke:#333,stroke-width:2px style M fill:#9f9,stroke:#333,stroke-width:2px style N fill:#9f9,stroke:#333,stroke-width:2px Mind Maps : mindmap root((ESIIL)) section Data Sources Satellite Imagery ::icon(fa fa-satellite) Remote Sensing Data Drones Aircraft On-ground Sensors Weather Stations IoT Devices Open Environmental Data Public Datasets ::icon(fa fa-database) section Research Focus Climate Change Analysis Ice Melt Patterns Sea Level Rise Biodiversity Monitoring Species Distribution Habitat Fragmentation Geospatial Analysis Techniques Machine Learning Models Predictive Analytics section Applications Conservation Strategies ::icon(fa fa-leaf) Urban Planning Green Spaces Disaster Response Flood Mapping Wildfire Tracking section Tools and Technologies GIS Software QGIS ArcGIS Programming Languages Python R Cloud Computing Platforms AWS Google Earth Engine Data Visualization D3.js Tableau Timelines : gantt title ESIIL Year 2 Project Schedule dateFormat YYYY-MM-DD section CI Sovereign OASIS via private jupyterhubs :2024-08-01, 2024-10-30 OASIS documentation :2024-09-15, 70d Data cube OASIS via cyverse account :2024-09-15, 100d Integrate with ESIIL User Management system :2024-08-01, 2024-11-30 Build badges to deploy DE from mkdoc :2024-09-01, 2024-12-15 Streamline Github ssh key management :2024-10-01, 2024-12-31 Cyverse support (R proxy link) :2024-11-01, 2024-12-31 Cyverse use summary and statistics :2024-08-01,
2024-12-15 section CI Consultation and Education Conferences/Invited talks :2024-08-01, 2024-12-31 Office hours :2024-08-15, 2024-12-15 Proposals :2024-09-01, 2024-11-15 Private lessons :2024-09-15, 2024-11-30 Pre-event trainings :2024-10-01, 2024-12-15 Textbook development w/ education team :2024-08-01, 2024-12-15 Train the trainers / group lessons :2024-08-15, 2024-11-30 Tribal engagement :2024-09-01, 2024-12-15 Ethical Space training :2024-09-15, 2024-12-31 section CI Design and Build Data library (repository) :2024-08-01, 2024-10-30 Analytics library (repository) :2024-08-15, 2024-11-15 Containers (repository) :2024-09-01, 2024-11-30 Cloud infrastructure templates (repository) :2024-09-15, 2024-12-15 Tribal resilience Data Cube :2024-10-01, 2024-12-31 %%{init: { 'logLevel': 'debug', 'theme': 'base', 'gitGraph': {'rotateCommitLabel': true}} }%% gitGraph commit id: \"Start from template\" branch c1 commit id: \"Set up SSH key pair\" commit id: \"Modify _config.yml for GitHub Pages\" commit id: \"Initial website structure\" commit id: \"Add new markdown pages\" commit id: \"Update navigation tree\" commit id: \"Edit existing pages\" commit id: \"Delete old markdown pages\" commit id: \"Finalize website updates\" commit id: \"Add new markdown pages\" commit id: \"Update navigation tree\" checkout c1 branch b1 commit commit checkout c1 merge b1 %%{init: {\"quadrantChart\": {\"chartWidth\": 400, \"chartHeight\": 400}, \"themeVariables\": {\"quadrant1TextFill\": \"#ff0000\"} }}%% quadrantChart x-axis Urgent --> Not Urgent y-axis Not Important --> \"Important \u2764\" quadrant-1 Plan quadrant-2 Do quadrant-3 Delegate quadrant-4 Delete timeline title Major Events in Environmental Science and Data Science section Environmental Science 19th century : Foundations in Ecology and Conservation 1962 : Publication of 'Silent Spring' by Rachel Carson 1970 : First Earth Day 1987 : Brundtland Report introduces Sustainable Development 1992 : Rio Earth Summit 2015 : Paris Agreement on Climate Change section Data Science 1960s-1970s : Development of Database Management Systems 1980s : Emergence of Data Warehousing 1990s : Growth of the World Wide Web and Data Mining 2000s : Big Data and Predictive Analytics 2010s : AI and Machine Learning Revolution 2020s : Integration of AI in Environmental Research erDiagram CAR ||--o{ NAMED-DRIVER : allows CAR { string registrationNumber string make string model } PERSON ||--o{ NAMED-DRIVER : is PERSON { string firstName string lastName int age } --- config: sankey: showValues: false --- sankey-beta NASA Data,Big Data Harmonization,100 Satellite Imagery,Big Data Harmonization,80 Open Environmental Data,Big Data Harmonization,70 Remote Sensing Data,Big Data Harmonization,90 Big Data Harmonization, Data Analysis and Integration,340 Data Analysis and Integration,Climate Change Research,100 Data Analysis and Integration,Biodiversity Monitoring,80 Data Analysis and Integration,Geospatial Mapping,60 Data Analysis and Integration,Urban Planning,50 Data Analysis and Integration,Disaster Response,50 5. Interactive Elements \u00b6 Hyperlinks : Use the format [link text](URL) . Google Play Tetris Embedding Interactive Content : Use HTML tags or specific platform embed codes. 6. Math Notation \u00b6 Markdown can be combined with LaTeX for mathematical notation, useful in environmental data science for expressing statistical distributions, coordinate systems, and more. This requires a Markdown renderer with LaTeX support (like MathJax or KaTeX). 
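As a quick reference before the syntax notes that follow, here is the display form of the normal-distribution example written out as a standalone LaTeX snippet; it assumes a MathJax- or KaTeX-enabled renderer and uses the same symbols (mu for the mean, sigma for the standard deviation) as this section.

```latex
% Probability density function of the normal distribution,
% as it would appear between $$ ... $$ delimiters in a Markdown file
\[
  f(x \mid \mu, \sigma)
    = \frac{1}{\sigma\sqrt{2\pi}}
      \, e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^{2}}
\]
```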
Inline Math : Use single dollar signs for inline math expressions. Representing the normal distribution. Example: The probability density function of the normal distribution is given by \\(f(x|\\mu,\\sigma) = \\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2}\\) . Display Math : Use double dollar signs for standalone equations. Example: $$ f(x|\\mu,\\sigma) = \\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2} $$ Common LaTeX Elements for Environmental Data Science : Statistical Distributions : Normal Distribution: \\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2} for \\(\\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2}\\) Poisson Distribution: P(k; \\lambda) = \\frac{\\lambda^k e^{-\\lambda}}{k!} for \\(P(k; \\lambda) = \\frac{\\lambda^k e^{-\\lambda}}{k!}\\) Coordinate Systems : Spherical Coordinates: (r, \\theta, \\phi) for \\((r, \\theta, \\phi)\\) Cartesian Coordinates: (x, y, z) for \\((x, y, z)\\) Geospatial Equations : Haversine Formula for Distance: a = \\sin^2\\left(\\frac{\\Delta\\phi}{2}\\right) + \\cos(\\phi_1)\\cos(\\phi_2)\\sin^2\\left(\\frac{\\Delta\\lambda}{2}\\right) for \\(a = \\sin^2\\left(\\frac{\\Delta\\phi}{2}\\right) + \\cos(\\phi_1)\\cos(\\phi_2)\\sin^2\\left(\\frac{\\Delta\\lambda}{2}\\right)\\) Note: The rendering of these equations as formatted math will depend on your Markdown viewer's LaTeX capabilities. 7. Effective Citations in Markdown \u00b6 Inline Citations \u00b6 Objective: Learn how to use inline citations in Markdown. Example Usage: Inline citation of a single work: Some text with an inline citation. [ @jones :envstudy:2020] Inline citation with specific page or section: More text with a specific section cited. [See @jones :envstudy:2020, \u00a74.2] Contrasting views: Discussion of a topic with a contrasting view. [Contra @smith :climatechange:2019, p. 78] Footnote Citations \u00b6 Objective: Understand how to use footnote citations in Markdown. Example Usage: Citing with a footnote: Some statement in the text. 1 Multiple references to the same footnote: Another statement referring to the same source. 1 A different citation: Additional comment with a new citation. 2 Creating Footnotes \u00b6 Example Syntax: First reference details. Example: Emma Jones, \"Environmental Study,\" Nature Journal, May 2020, https://nature-journal.com/envstudy2020 . \u21a9 \u21a9 Second reference details. Example: David Smith, \"Climate Change Controversies,\" Science Daily, August 2019, https://sciencedaily.com/climatechange2019 . \u21a9
Item 1 Item 2 Subitem 2.1 Subitem 2.2 First item Second item Bold and Italics : Use asterisks or underscores. Bold Text Italic Text","title":"Heading Level 3"},{"location":"resources/markdown_basics/#2-advanced-structures","text":"Tables : Create tables using dashes and pipes. Header 1 Header 2 Header 3 Row 1 Data Data Row 2 Data Data Add a \":\" to change text justification. Here the : is added on the right of the dashes for right justification. | Header 1 | Header 2 | Header 3 | |---------:|--------- |----------| | Row 1 | Data | Data | | Row 2 | Data | Data | (Word-search puzzle grid rendered with inline HTML.) If you hit the boundaries of Markdown's capabilities, you can start to add HTML directly. Remember, this entire exercise is to translate to HTML. Sudoku Puzzle Fill in the blank cells with numbers from 1 to 9, such that each row, column, and 3x3 subgrid contains all the numbers from 1 to 9 without repetition. (Sudoku puzzle and solution grids rendered as HTML tables.) Blockquotes : Use > for blockquotes. This is a blockquote. It can span multiple lines.","title":"2. Advanced Structures"},{"location":"resources/markdown_basics/#3-integrating-multimedia","text":"Images : Add images using the format ![alt text](image_url) . Videos : Embed videos using HTML in Markdown. ","title":"3. Integrating Multimedia"},{"location":"resources/markdown_basics/#4-diagrams-with-mermaid","text":"Flowcharts : graph TD A[Start] --> B[Analyze Data] B --> C{Is Data Large?} C -->|Yes| D[Apply Big Data Solutions] C -->|No| E[Use Traditional Methods] D --> F[Machine Learning] E --> G[Statistical Analysis] F --> H{Model Accurate?} G --> I[Report Results] H -->|Yes| J[Deploy Model] H -->|No| K[Refine Model] J --> L[Monitor Performance] K --> F L --> M[End: Success] I --> N[End: Report Generated] style A fill:#f9f,stroke:#333,stroke-width:2px style M fill:#9f9,stroke:#333,stroke-width:2px style N fill:#9f9,stroke:#333,stroke-width:2px Mind Maps : mindmap root((ESIIL)) section Data Sources Satellite Imagery ::icon(fa fa-satellite) Remote Sensing Data Drones Aircraft On-ground Sensors Weather Stations IoT Devices Open Environmental Data Public Datasets ::icon(fa fa-database) section Research Focus Climate Change Analysis Ice Melt Patterns Sea Level Rise Biodiversity Monitoring Species Distribution Habitat Fragmentation Geospatial Analysis Techniques Machine Learning Models Predictive Analytics section Applications Conservation Strategies ::icon(fa fa-leaf) Urban Planning Green Spaces Disaster Response Flood Mapping Wildfire Tracking section Tools and Technologies GIS Software QGIS ArcGIS Programming Languages Python R Cloud Computing Platforms AWS Google Earth Engine Data Visualization D3.js Tableau Timelines : gantt title ESIIL Year 2 Project Schedule dateFormat YYYY-MM-DD section CI Sovereign OASIS via private jupyterhubs :2024-08-01, 2024-10-30 OASIS documentation :2024-09-15, 70d Data cube OASIS via cyverse account :2024-09-15, 100d Integrate with ESIIL User Management system :2024-08-01, 2024-11-30 Build badges to deploy DE from mkdoc :2024-09-01, 2024-12-15 Streamline Github ssh key management :2024-10-01, 2024-12-31 Cyverse
support (R proxy link) :2024-11-01, 2024-12-31 Cyverse use summary and statistics :2024-08-01, 2024-12-15 section CI Consultation and Education Conferences/Invited talks :2024-08-01, 2024-12-31 Office hours :2024-08-15, 2024-12-15 Proposals :2024-09-01, 2024-11-15 Private lessons :2024-09-15, 2024-11-30 Pre-event trainings :2024-10-01, 2024-12-15 Textbook development w/ education team :2024-08-01, 2024-12-15 Train the trainers / group lessons :2024-08-15, 2024-11-30 Tribal engagement :2024-09-01, 2024-12-15 Ethical Space training :2024-09-15, 2024-12-31 section CI Design and Build Data library (repository) :2024-08-01, 2024-10-30 Analytics library (repository) :2024-08-15, 2024-11-15 Containers (repository) :2024-09-01, 2024-11-30 Cloud infrastructure templates (repository) :2024-09-15, 2024-12-15 Tribal resilience Data Cube :2024-10-01, 2024-12-31 %%{init: { 'logLevel': 'debug', 'theme': 'base', 'gitGraph': {'rotateCommitLabel': true}} }%% gitGraph commit id: \"Start from template\" branch c1 commit id: \"Set up SSH key pair\" commit id: \"Modify _config.yml for GitHub Pages\" commit id: \"Initial website structure\" commit id: \"Add new markdown pages\" commit id: \"Update navigation tree\" commit id: \"Edit existing pages\" commit id: \"Delete old markdown pages\" commit id: \"Finalize website updates\" commit id: \"Add new markdown pages\" commit id: \"Update navigation tree\" checkout c1 branch b1 commit commit checkout c1 merge b1 %%{init: {\"quadrantChart\": {\"chartWidth\": 400, \"chartHeight\": 400}, \"themeVariables\": {\"quadrant1TextFill\": \"#ff0000\"} }}%% quadrantChart x-axis Urgent --> Not Urgent y-axis Not Important --> \"Important \u2764\" quadrant-1 Plan quadrant-2 Do quadrant-3 Delegate quadrant-4 Delete timeline title Major Events in Environmental Science and Data Science section Environmental Science 19th century : Foundations in Ecology and Conservation 1962 : Publication of 'Silent Spring' by Rachel Carson 1970 : First Earth Day 1987 : Brundtland Report introduces Sustainable Development 1992 : Rio Earth Summit 2015 : Paris Agreement on Climate Change section Data Science 1960s-1970s : Development of Database Management Systems 1980s : Emergence of Data Warehousing 1990s : Growth of the World Wide Web and Data Mining 2000s : Big Data and Predictive Analytics 2010s : AI and Machine Learning Revolution 2020s : Integration of AI in Environmental Research erDiagram CAR ||--o{ NAMED-DRIVER : allows CAR { string registrationNumber string make string model } PERSON ||--o{ NAMED-DRIVER : is PERSON { string firstName string lastName int age } --- config: sankey: showValues: false --- sankey-beta NASA Data,Big Data Harmonization,100 Satellite Imagery,Big Data Harmonization,80 Open Environmental Data,Big Data Harmonization,70 Remote Sensing Data,Big Data Harmonization,90 Big Data Harmonization, Data Analysis and Integration,340 Data Analysis and Integration,Climate Change Research,100 Data Analysis and Integration,Biodiversity Monitoring,80 Data Analysis and Integration,Geospatial Mapping,60 Data Analysis and Integration,Urban Planning,50 Data Analysis and Integration,Disaster Response,50","title":"4. Diagrams with Mermaid"},{"location":"resources/markdown_basics/#5-interactive-elements","text":"Hyperlinks : Use the format [link text](URL) . Google Play Tetris Embedding Interactive Content : Use HTML tags or specific platform embed codes. ","title":"5. 
Interactive Elements"},{"location":"resources/markdown_basics/#6-math-notation","text":"Markdown can be combined with LaTeX for mathematical notation, useful in environmental data science for expressing statistical distributions, coordinate systems, and more. This requires a Markdown renderer with LaTeX support (like MathJax or KaTeX). Inline Math : Use single dollar signs for inline math expressions. Representing the normal distribution. Example: The probability density function of the normal distribution is given by \\(f(x|\\mu,\\sigma) = \\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2}\\) .` Display Math : Use double dollar signs for standalone equations. Example: $$ f(x|\\mu,\\sigma) = \\frac{1}{\\sigma\\sqrt{2\\pi}}e {-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right) 2} $$ Common LaTeX Elements for Environmental Data Science : Statistical Distributions : Normal Distribution: \\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2} for \\(\\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2}\\) Poisson Distribution: P(k; \\lambda) = \\frac{\\lambda^k e^{-\\lambda}}{k!} for \\(P(k; \\lambda) = \\frac{\\lambda^k e^{-\\lambda}}{k!}\\) Coordinate Systems : Spherical Coordinates: (r, \\theta, \\phi) for \\((r, \\theta, \\phi)\\) Cartesian Coordinates: (x, y, z) for \\((x, y, z)\\) Geospatial Equations : Haversine Formula for Distance: a = \\sin^2\\left(\\frac{\\Delta\\phi}{2}\\right) + \\cos(\\phi_1)\\cos(\\phi_2)\\sin^2\\left(\\frac{\\Delta\\lambda}{2}\\right) for \\(a = \\sin^2\\left(\\frac{\\Delta\\phi}{2}\\right) + \\cos(\\phi_1)\\cos(\\phi_2)\\sin^2\\left(\\frac{\\Delta\\lambda}{2}\\right)\\) Note: The rendering of these equations as formatted math will depend on your Markdown viewer's LaTeX capabilities.","title":"6. Math Notation"},{"location":"resources/markdown_basics/#7-effective-citations-in-markdown","text":"","title":"7. Effective Citations in Markdown"},{"location":"resources/markdown_basics/#inline-citations","text":"Objective: Learn how to use inline citations in Markdown. Example Usage: Inline citation of a single work: Some text with an inline citation. [ @jones :envstudy:2020] Inline citation with specific page or section: More text with a specific section cited. [See @jones :envstudy:2020, \u00a74.2] Contrasting views: Discussion of a topic with a contrasting view. [Contra @smith :climatechange:2019, p. 78]","title":"Inline Citations"},{"location":"resources/markdown_basics/#footnote-citations","text":"Objective: Understand how to use footnote citations in Markdown. Example Usage: Citing with a footnote: Some statement in the text. 1 Multiple references to the same footnote: Another statement referring to the same source. 1 A different citation: Additional comment with a new citation. 2","title":"Footnote Citations"},{"location":"resources/markdown_basics/#creating-footnotes","text":"Example Syntax: First reference details. Example: Emma Jones, \"Environmental Study,\" Nature Journal, May 2020, https://nature-journal.com/envstudy2020 . \u21a9 \u21a9 Second reference details. Example: David Smith, \"Climate Change Controversies,\" Science Daily, August 2019, https://sciencedaily.com/climatechange2019 . 
\u21a9","title":"Creating Footnotes"},{"location":"resources/notes_from_readings/","text":"Literature Reading Notes \u00b6 Reference Information \u00b6 Title: Authors: Publication Year: Journal/Source: DOI/URL: Summary \u00b6 Brief summary of the main objective, research question, or thesis of the literature. Key Findings \u00b6 Major findings or conclusions: Finding 1 Finding 2 ... Methodology \u00b6 Description of research methodology, techniques, or approaches. Notable tools, datasets, or analytical methods used. Theoretical Framework \u00b6 Theoretical models or frameworks underpinning the research. Positioning within the broader field. Critical Analysis \u00b6 Strengths: Well-executed aspects or convincing arguments. Limitations: Weaknesses, gaps, or biases. Insights: New understandings or perspectives gained. Connections to Other Work \u00b6 Similarities or differences with other readings. Complementarity to other studies. Quotations and Notes \u00b6 Significant quotes: \"Quote here.\" - Author Name, page number Additional notes or comments. Personal Reflections \u00b6 Influence on understanding or perspective. Potential impact on future research or studies. Action Items \u00b6 Follow-up actions such as readings, discussions, or research activities: Action item 1 Action item 2 ...","title":"Literature Reading Notes"},{"location":"resources/notes_from_readings/#literature-reading-notes","text":"","title":"Literature Reading Notes"},{"location":"resources/notes_from_readings/#reference-information","text":"Title: Authors: Publication Year: Journal/Source: DOI/URL:","title":"Reference Information"},{"location":"resources/notes_from_readings/#summary","text":"Brief summary of the main objective, research question, or thesis of the literature.","title":"Summary"},{"location":"resources/notes_from_readings/#key-findings","text":"Major findings or conclusions: Finding 1 Finding 2 ...","title":"Key Findings"},{"location":"resources/notes_from_readings/#methodology","text":"Description of research methodology, techniques, or approaches. Notable tools, datasets, or analytical methods used.","title":"Methodology"},{"location":"resources/notes_from_readings/#theoretical-framework","text":"Theoretical models or frameworks underpinning the research. Positioning within the broader field.","title":"Theoretical Framework"},{"location":"resources/notes_from_readings/#critical-analysis","text":"Strengths: Well-executed aspects or convincing arguments. Limitations: Weaknesses, gaps, or biases. Insights: New understandings or perspectives gained.","title":"Critical Analysis"},{"location":"resources/notes_from_readings/#connections-to-other-work","text":"Similarities or differences with other readings. Complementarity to other studies.","title":"Connections to Other Work"},{"location":"resources/notes_from_readings/#quotations-and-notes","text":"Significant quotes: \"Quote here.\" - Author Name, page number Additional notes or comments.","title":"Quotations and Notes"},{"location":"resources/notes_from_readings/#personal-reflections","text":"Influence on understanding or perspective. 
Potential impact on future research or studies.","title":"Personal Reflections"},{"location":"resources/notes_from_readings/#action-items","text":"Follow-up actions such as readings, discussions, or research activities: Action item 1 Action item 2 ...","title":"Action Items"},{"location":"resources/post_meeting_notes/","text":"Post-Meeting Notes Template \u00b6 Meeting Details \u00b6 Date: Time: Location: Facilitator: Attendees \u00b6 List of attendees Agenda \u00b6 1. Review of Meeting Goals \u00b6 Recap the primary objectives and if they were met. 2. Manuscript Development \u00b6 Discuss the status of current manuscript drafts. Assign writing and editing tasks for different sections of the manuscript. Set deadlines for draft completion and review. 3. Research Highlights \u00b6 Identify key findings and outcomes that should be emphasized in the publications. Discuss any new research insights that emerged from the meeting. 4. Publication Strategy \u00b6 Decide on target journals or conferences for publication submission. Discuss authorship order and contributions. Plan for any additional data or research needed to strengthen the manuscript. 5. Editing and Review Process \u00b6 Establish a peer-review process within the group for initial feedback. Assign members to focus on specific aspects of editing, such as clarity, grammar, and technical accuracy. Agree on a schedule for review rounds to ensure timely submission. 6. Responsibilities and Expectations \u00b6 Clearly define what is expected from each member before the next meeting. Discuss communication methods for progress updates and questions. 7. Closing Remarks \u00b6 Summarize the discussion and confirm the action plan. Reiterate the importance of meeting the set deadlines and maintaining communication. Action Items \u00b6 Draft introduction section: Responsible person(s) - Deadline Compile and analyze additional data: Responsible person(s) - Deadline Draft methodology section: Responsible person(s) - Deadline ... Coordinate manuscript peer review: Responsible person(s) - Deadline Next Steps \u00b6 Define the timeline for the submission process. Schedule follow-up meetings or check-ins to monitor progress. Notes \u00b6 (Additional notes, comments, or observations made during the meeting.)","title":"Post-Meeting Notes Template"},{"location":"resources/post_meeting_notes/#post-meeting-notes-template","text":"","title":"Post-Meeting Notes Template"},{"location":"resources/post_meeting_notes/#meeting-details","text":"Date: Time: Location: Facilitator:","title":"Meeting Details"},{"location":"resources/post_meeting_notes/#attendees","text":"List of attendees","title":"Attendees"},{"location":"resources/post_meeting_notes/#agenda","text":"","title":"Agenda"},{"location":"resources/post_meeting_notes/#1-review-of-meeting-goals","text":"Recap the primary objectives and if they were met.","title":"1. Review of Meeting Goals"},{"location":"resources/post_meeting_notes/#2-manuscript-development","text":"Discuss the status of current manuscript drafts. Assign writing and editing tasks for different sections of the manuscript. Set deadlines for draft completion and review.","title":"2. Manuscript Development"},{"location":"resources/post_meeting_notes/#3-research-highlights","text":"Identify key findings and outcomes that should be emphasized in the publications. Discuss any new research insights that emerged from the meeting.","title":"3. 
Research Highlights"},{"location":"resources/post_meeting_notes/#4-publication-strategy","text":"Decide on target journals or conferences for publication submission. Discuss authorship order and contributions. Plan for any additional data or research needed to strengthen the manuscript.","title":"4. Publication Strategy"},{"location":"resources/post_meeting_notes/#5-editing-and-review-process","text":"Establish a peer-review process within the group for initial feedback. Assign members to focus on specific aspects of editing, such as clarity, grammar, and technical accuracy. Agree on a schedule for review rounds to ensure timely submission.","title":"5. Editing and Review Process"},{"location":"resources/post_meeting_notes/#6-responsibilities-and-expectations","text":"Clearly define what is expected from each member before the next meeting. Discuss communication methods for progress updates and questions.","title":"6. Responsibilities and Expectations"},{"location":"resources/post_meeting_notes/#7-closing-remarks","text":"Summarize the discussion and confirm the action plan. Reiterate the importance of meeting the set deadlines and maintaining communication.","title":"7. Closing Remarks"},{"location":"resources/post_meeting_notes/#action-items","text":"Draft introduction section: Responsible person(s) - Deadline Compile and analyze additional data: Responsible person(s) - Deadline Draft methodology section: Responsible person(s) - Deadline ... Coordinate manuscript peer review: Responsible person(s) - Deadline","title":"Action Items"},{"location":"resources/post_meeting_notes/#next-steps","text":"Define the timeline for the submission process. Schedule follow-up meetings or check-ins to monitor progress.","title":"Next Steps"},{"location":"resources/post_meeting_notes/#notes","text":"(Additional notes, comments, or observations made during the meeting.)","title":"Notes"},{"location":"resources/pre_meeting_notes/","text":"Pre-Meeting Notes \u00b6 Meeting Details \u00b6 Date: Time: Location: Facilitator: Attendees \u00b6 List of attendees Agenda \u00b6 1. Opening Remarks \u00b6 Brief welcome and overview of the meeting's objectives. 2. Introductions \u00b6 Roundtable introductions for all attendees. Share a personal note or interesting fact to foster camaraderie. 3. Planning \u00b6 Discuss the agenda for the primary meetings. Outline the key topics and issues to address. Assign roles for note-taking, timekeeping, and facilitation in primary meetings. 4. Goal Setting \u00b6 Establish clear, actionable goals for the upcoming period. Identify specific outcomes desired from the primary meetings. Agree on metrics or indicators of success for these goals. 5. Camaraderie Building \u00b6 Icebreaker activity or team-building exercise. Share expectations and aspirations for the group's progress. Highlight the importance of collaboration and mutual support. 6. Open Discussion \u00b6 Allow for any additional topics, concerns, or ideas to be brought forward. 7. Closing Remarks \u00b6 Summarize the discussions and confirm the next steps. Confirm dates and times for primary meetings. Express appreciation for participation. Action Items \u00b6 Action item 1: Responsible person(s) - Deadline Action item 2: Responsible person(s) - Deadline ... 
Notes \u00b6 (Any additional notes or comments about the meeting.)","title":"Pre-Meeting Notes"},{"location":"resources/pre_meeting_notes/#pre-meeting-notes","text":"","title":"Pre-Meeting Notes"},{"location":"resources/pre_meeting_notes/#meeting-details","text":"Date: Time: Location: Facilitator:","title":"Meeting Details"},{"location":"resources/pre_meeting_notes/#attendees","text":"List of attendees","title":"Attendees"},{"location":"resources/pre_meeting_notes/#agenda","text":"","title":"Agenda"},{"location":"resources/pre_meeting_notes/#1-opening-remarks","text":"Brief welcome and overview of the meeting's objectives.","title":"1. Opening Remarks"},{"location":"resources/pre_meeting_notes/#2-introductions","text":"Roundtable introductions for all attendees. Share a personal note or interesting fact to foster camaraderie.","title":"2. Introductions"},{"location":"resources/pre_meeting_notes/#3-planning","text":"Discuss the agenda for the primary meetings. Outline the key topics and issues to address. Assign roles for note-taking, timekeeping, and facilitation in primary meetings.","title":"3. Planning"},{"location":"resources/pre_meeting_notes/#4-goal-setting","text":"Establish clear, actionable goals for the upcoming period. Identify specific outcomes desired from the primary meetings. Agree on metrics or indicators of success for these goals.","title":"4. Goal Setting"},{"location":"resources/pre_meeting_notes/#5-camaraderie-building","text":"Icebreaker activity or team-building exercise. Share expectations and aspirations for the group's progress. Highlight the importance of collaboration and mutual support.","title":"5. Camaraderie Building"},{"location":"resources/pre_meeting_notes/#6-open-discussion","text":"Allow for any additional topics, concerns, or ideas to be brought forward.","title":"6. Open Discussion"},{"location":"resources/pre_meeting_notes/#7-closing-remarks","text":"Summarize the discussions and confirm the next steps. Confirm dates and times for primary meetings. Express appreciation for participation.","title":"7. Closing Remarks"},{"location":"resources/pre_meeting_notes/#action-items","text":"Action item 1: Responsible person(s) - Deadline Action item 2: Responsible person(s) - Deadline ...","title":"Action Items"},{"location":"resources/pre_meeting_notes/#notes","text":"(Any additional notes or comments about the meeting.)","title":"Notes"},{"location":"resources/second_meeting_notes/","text":"Primary Meeting Day 6-10: Progress and Development \u00b6 Meeting Details \u00b6 Dates: Times: Location: Facilitator: Attendees \u00b6 List of attendees Daily Agenda \u00b6 Day 6: Review and Refine \u00b6 Recap of Previous Sessions \u00b6 Summary of progress made since the last meeting. Review of action items and milestones achieved. Refinement of Goals and Tasks \u00b6 Reassessment and adjustment of goals based on current progress. Identification of any new challenges or opportunities. Day 7-9: In-Depth Work Sessions \u00b6 Daily Goals \u00b6 Clear objectives for each day\u2019s work sessions. Task Progress Updates \u00b6 Brief reports from team members on their assigned tasks. Collaborative problem-solving for any issues encountered. Theory and Data Integration \u00b6 Continued discussions on aligning theoretical frameworks with data analysis. Workshops or breakout sessions for detailed aspects of the project. Evening Collaborative Activities \u00b6 Informal sessions to encourage ongoing dialogue and collaboration. 
Day 10: Mid-Point Review \u00b6 Progress Evaluation \u00b6 Assessment of the work done during the week. Feedback sessions to ensure quality and consistency in outputs. Documentation and Record-Keeping \u00b6 Ensure thorough documentation of methods, results, and decisions. Establish a system for organizing and sharing this documentation. Planning Forward \u00b6 Setting objectives for the next phase of the project. Adjusting the roadmap as necessary based on insights from the week\u2019s work. Detailed Notes \u00b6 Day 6 Notes \u00b6 ... Day 7 Notes \u00b6 ... Day 8 Notes \u00b6 ... Day 9 Notes \u00b6 ... Day 10 Notes \u00b6 ... Action Items \u00b6 Specific task: Assigned to - Deadline Specific task: Assigned to - Deadline ... Reflections and Comments \u00b6 (Space for any additional thoughts, insights, or personal reflections on the meeting.)","title":"Primary Meeting Day 6-10: Progress and Development"},{"location":"resources/second_meeting_notes/#primary-meeting-day-6-10-progress-and-development","text":"","title":"Primary Meeting Day 6-10: Progress and Development"},{"location":"resources/second_meeting_notes/#meeting-details","text":"Dates: Times: Location: Facilitator:","title":"Meeting Details"},{"location":"resources/second_meeting_notes/#attendees","text":"List of attendees","title":"Attendees"},{"location":"resources/second_meeting_notes/#daily-agenda","text":"","title":"Daily Agenda"},{"location":"resources/second_meeting_notes/#day-6-review-and-refine","text":"","title":"Day 6: Review and Refine"},{"location":"resources/second_meeting_notes/#recap-of-previous-sessions","text":"Summary of progress made since the last meeting. Review of action items and milestones achieved.","title":"Recap of Previous Sessions"},{"location":"resources/second_meeting_notes/#refinement-of-goals-and-tasks","text":"Reassessment and adjustment of goals based on current progress. Identification of any new challenges or opportunities.","title":"Refinement of Goals and Tasks"},{"location":"resources/second_meeting_notes/#day-7-9-in-depth-work-sessions","text":"","title":"Day 7-9: In-Depth Work Sessions"},{"location":"resources/second_meeting_notes/#daily-goals","text":"Clear objectives for each day\u2019s work sessions.","title":"Daily Goals"},{"location":"resources/second_meeting_notes/#task-progress-updates","text":"Brief reports from team members on their assigned tasks. Collaborative problem-solving for any issues encountered.","title":"Task Progress Updates"},{"location":"resources/second_meeting_notes/#theory-and-data-integration","text":"Continued discussions on aligning theoretical frameworks with data analysis. Workshops or breakout sessions for detailed aspects of the project.","title":"Theory and Data Integration"},{"location":"resources/second_meeting_notes/#evening-collaborative-activities","text":"Informal sessions to encourage ongoing dialogue and collaboration.","title":"Evening Collaborative Activities"},{"location":"resources/second_meeting_notes/#day-10-mid-point-review","text":"","title":"Day 10: Mid-Point Review"},{"location":"resources/second_meeting_notes/#progress-evaluation","text":"Assessment of the work done during the week. Feedback sessions to ensure quality and consistency in outputs.","title":"Progress Evaluation"},{"location":"resources/second_meeting_notes/#documentation-and-record-keeping","text":"Ensure thorough documentation of methods, results, and decisions. 
Establish a system for organizing and sharing this documentation.","title":"Documentation and Record-Keeping"},{"location":"resources/second_meeting_notes/#planning-forward","text":"Setting objectives for the next phase of the project. Adjusting the roadmap as necessary based on insights from the week\u2019s work.","title":"Planning Forward"},{"location":"resources/second_meeting_notes/#detailed-notes","text":"","title":"Detailed Notes"},{"location":"resources/second_meeting_notes/#day-6-notes","text":"...","title":"Day 6 Notes"},{"location":"resources/second_meeting_notes/#day-7-notes","text":"...","title":"Day 7 Notes"},{"location":"resources/second_meeting_notes/#day-8-notes","text":"...","title":"Day 8 Notes"},{"location":"resources/second_meeting_notes/#day-9-notes","text":"...","title":"Day 9 Notes"},{"location":"resources/second_meeting_notes/#day-10-notes","text":"...","title":"Day 10 Notes"},{"location":"resources/second_meeting_notes/#action-items","text":"Specific task: Assigned to - Deadline Specific task: Assigned to - Deadline ...","title":"Action Items"},{"location":"resources/second_meeting_notes/#reflections-and-comments","text":"(Space for any additional thoughts, insights, or personal reflections on the meeting.)","title":"Reflections and Comments"},{"location":"resources/third_meeting_notes/","text":"Primary Meeting Day 11-15: Finalization and Conclusion \u00b6 Meeting Details \u00b6 Dates: Times: Location: Facilitator: Attendees \u00b6 List of attendees Daily Agenda \u00b6 Day 11: Alignment and Focus \u00b6 Realigning Objectives \u00b6 Review the project's main goals to ensure alignment with the final output. Address any misalignments or deviations from the original plan. Prioritization of Tasks \u00b6 Identify critical tasks that need to be completed. Allocate resources and efforts to ensure these priorities are met. Day 12-14: Intensive Work Period \u00b6 Task Completion \u00b6 Dedicated time for team members to complete their individual contributions. Regular check-ins to track progress and address any blockers. Integration of Work \u00b6 Begin to combine individual contributions into a cohesive whole. Review the integration to ensure consistency and coherency across the project. Final Reviews and Edits \u00b6 Conduct thorough reviews of the project's outputs. Perform final edits to refine the quality of the work. Day 15: Closure and Celebration \u00b6 Final Presentation \u00b6 Present the completed project to the group. Discuss any last-minute adjustments or refinements needed. Reflective Session \u00b6 Reflect on the achievements and learnings from the project. Share appreciation for the team's hard work and dedication. Celebration \u00b6 Acknowledge the successful completion of the project. Plan for any dissemination of the project's findings or outputs. Detailed Notes \u00b6 Day 11 Notes \u00b6 ... Day 12 Notes \u00b6 ... Day 13 Notes \u00b6 ... Day 14 Notes \u00b6 ... Day 15 Notes \u00b6 ... Action Items \u00b6 Finalize manuscript for publication: Assigned to - Deadline Prepare data for repository submission: Assigned to - Deadline Organize project materials for archival: Assigned to - Deadline ... Reflections and Comments \u00b6 (Space for any additional thoughts, insights, or personal reflections on the meeting and the project as a whole.) Next Steps \u00b6 Define the publication and dissemination plan. Outline any follow-up research or projects that have stemmed from this work. 
Additional Documentation \u00b6 (Include or link to any additional documents, charts, or resources that were created or referenced during the meeting.)","title":"Primary Meeting Day 11-15: Finalization and Conclusion"},{"location":"resources/third_meeting_notes/#primary-meeting-day-11-15-finalization-and-conclusion","text":"","title":"Primary Meeting Day 11-15: Finalization and Conclusion"},{"location":"resources/third_meeting_notes/#meeting-details","text":"Dates: Times: Location: Facilitator:","title":"Meeting Details"},{"location":"resources/third_meeting_notes/#attendees","text":"List of attendees","title":"Attendees"},{"location":"resources/third_meeting_notes/#daily-agenda","text":"","title":"Daily Agenda"},{"location":"resources/third_meeting_notes/#day-11-alignment-and-focus","text":"","title":"Day 11: Alignment and Focus"},{"location":"resources/third_meeting_notes/#realigning-objectives","text":"Review the project's main goals to ensure alignment with the final output. Address any misalignments or deviations from the original plan.","title":"Realigning Objectives"},{"location":"resources/third_meeting_notes/#prioritization-of-tasks","text":"Identify critical tasks that need to be completed. Allocate resources and efforts to ensure these priorities are met.","title":"Prioritization of Tasks"},{"location":"resources/third_meeting_notes/#day-12-14-intensive-work-period","text":"","title":"Day 12-14: Intensive Work Period"},{"location":"resources/third_meeting_notes/#task-completion","text":"Dedicated time for team members to complete their individual contributions. Regular check-ins to track progress and address any blockers.","title":"Task Completion"},{"location":"resources/third_meeting_notes/#integration-of-work","text":"Begin to combine individual contributions into a cohesive whole. Review the integration to ensure consistency and coherency across the project.","title":"Integration of Work"},{"location":"resources/third_meeting_notes/#final-reviews-and-edits","text":"Conduct thorough reviews of the project's outputs. Perform final edits to refine the quality of the work.","title":"Final Reviews and Edits"},{"location":"resources/third_meeting_notes/#day-15-closure-and-celebration","text":"","title":"Day 15: Closure and Celebration"},{"location":"resources/third_meeting_notes/#final-presentation","text":"Present the completed project to the group. Discuss any last-minute adjustments or refinements needed.","title":"Final Presentation"},{"location":"resources/third_meeting_notes/#reflective-session","text":"Reflect on the achievements and learnings from the project. Share appreciation for the team's hard work and dedication.","title":"Reflective Session"},{"location":"resources/third_meeting_notes/#celebration","text":"Acknowledge the successful completion of the project. 
Plan for any dissemination of the project's findings or outputs.","title":"Celebration"},{"location":"resources/third_meeting_notes/#detailed-notes","text":"","title":"Detailed Notes"},{"location":"resources/third_meeting_notes/#day-11-notes","text":"...","title":"Day 11 Notes"},{"location":"resources/third_meeting_notes/#day-12-notes","text":"...","title":"Day 12 Notes"},{"location":"resources/third_meeting_notes/#day-13-notes","text":"...","title":"Day 13 Notes"},{"location":"resources/third_meeting_notes/#day-14-notes","text":"...","title":"Day 14 Notes"},{"location":"resources/third_meeting_notes/#day-15-notes","text":"...","title":"Day 15 Notes"},{"location":"resources/third_meeting_notes/#action-items","text":"Finalize manuscript for publication: Assigned to - Deadline Prepare data for repository submission: Assigned to - Deadline Organize project materials for archival: Assigned to - Deadline ...","title":"Action Items"},{"location":"resources/third_meeting_notes/#reflections-and-comments","text":"(Space for any additional thoughts, insights, or personal reflections on the meeting and the project as a whole.)","title":"Reflections and Comments"},{"location":"resources/third_meeting_notes/#next-steps","text":"Define the publication and dissemination plan. Outline any follow-up research or projects that have stemmed from this work.","title":"Next Steps"},{"location":"resources/third_meeting_notes/#additional-documentation","text":"(Include or link to any additional documents, charts, or resources that were created or referenced during the meeting.)","title":"Additional Documentation"},{"location":"resources/visualizations/","text":"Visualization Strategy and Development Documentation \u00b6 Overview \u00b6 Brief overview of the visualization goals and their alignment with the overall project objectives. Visualization Strategy \u00b6 Identifying Key Messages \u00b6 Discuss main messages or insights to communicate through visualizations. Identify target audience and their specific needs. Selecting Appropriate Visualization Types \u00b6 Explore different types of visualizations (charts, graphs, 3D, interactive elements) suitable for the data and message. Brainstorm creative visualization approaches. Visualization Development \u00b6 Code-Generated Visualizations \u00b6 Outline initial visualizations generated from the data pipeline. Include code snippets and explanations. # Example Python code for a basic plot import matplotlib.pyplot as plt plt . plot ( data [ 'x' ], data [ 'y' ]) plt . show () Enhancing Visualizations \u00b6 Steps for annotating, animating, creating 3D, immersive, or interactive visualizations. Discuss challenges and solutions in enhancing visuals. Versioning and Iterations \u00b6 Document different versions and iterations of visualizations. Reflect on improvements or changes in each version. Finalizing Visualizations \u00b6 Process of finalizing visuals for presentation or publication. Feedback incorporation from team or test audiences. Documentation of Tools and Resources \u00b6 List software, libraries, and tools used for visualization. Reference external resources or tutorials. Conclusions \u00b6 Summarize the visualization process and contributions to the project. Reflect on lessons learned and potential future improvements. 
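The basic-plot snippet under Code-Generated Visualizations above assumes a data object that already contains x and y columns from the data pipeline. A self-contained variant of that sketch, using placeholder values so it runs on its own (the numbers are made up for illustration, not project data):

```python
# Example Python code for a basic plot, expanded to run on its own
import matplotlib.pyplot as plt

# Placeholder data standing in for the pipeline output referenced on the page
data = {
    "x": [2018, 2019, 2020, 2021, 2022],
    "y": [1.2, 1.5, 1.1, 1.8, 2.0],
}

plt.plot(data["x"], data["y"], marker="o")
plt.xlabel("Year")
plt.ylabel("Observed value")
plt.title("Example code-generated visualization")
plt.tight_layout()
plt.show()
```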
References \u00b6 Cite external sources, inspirations, or frameworks used in visualization.","title":"Visualization Strategy and Development Documentation"},{"location":"resources/visualizations/#visualization-strategy-and-development-documentation","text":"","title":"Visualization Strategy and Development Documentation"},{"location":"resources/visualizations/#overview","text":"Brief overview of the visualization goals and their alignment with the overall project objectives.","title":"Overview"},{"location":"resources/visualizations/#visualization-strategy","text":"","title":"Visualization Strategy"},{"location":"resources/visualizations/#identifying-key-messages","text":"Discuss main messages or insights to communicate through visualizations. Identify target audience and their specific needs.","title":"Identifying Key Messages"},{"location":"resources/visualizations/#selecting-appropriate-visualization-types","text":"Explore different types of visualizations (charts, graphs, 3D, interactive elements) suitable for the data and message. Brainstorm creative visualization approaches.","title":"Selecting Appropriate Visualization Types"},{"location":"resources/visualizations/#visualization-development","text":"","title":"Visualization Development"},{"location":"resources/visualizations/#code-generated-visualizations","text":"Outline initial visualizations generated from the data pipeline. Include code snippets and explanations. # Example Python code for a basic plot import matplotlib.pyplot as plt plt . plot ( data [ 'x' ], data [ 'y' ]) plt . show ()","title":"Code-Generated Visualizations"},{"location":"resources/visualizations/#enhancing-visualizations","text":"Steps for annotating, animating, creating 3D, immersive, or interactive visualizations. Discuss challenges and solutions in enhancing visuals.","title":"Enhancing Visualizations"},{"location":"resources/visualizations/#versioning-and-iterations","text":"Document different versions and iterations of visualizations. Reflect on improvements or changes in each version.","title":"Versioning and Iterations"},{"location":"resources/visualizations/#finalizing-visualizations","text":"Process of finalizing visuals for presentation or publication. Feedback incorporation from team or test audiences.","title":"Finalizing Visualizations"},{"location":"resources/visualizations/#documentation-of-tools-and-resources","text":"List software, libraries, and tools used for visualization. Reference external resources or tutorials.","title":"Documentation of Tools and Resources"},{"location":"resources/visualizations/#conclusions","text":"Summarize the visualization process and contributions to the project. Reflect on lessons learned and potential future improvements.","title":"Conclusions"},{"location":"resources/visualizations/#references","text":"Cite external sources, inspirations, or frameworks used in visualization.","title":"References"},{"location":"resources/working_groups_and_postdocs/","text":"ESIIL Postdoctoral Researcher Responsibilities and Opportunities \u00b6 Primary Responsibilities \u00b6 Independent Research: Conducting self-proposed research projects. Adhering to open data principles. Data and Code Storage: Storing all research code and data in the designated ESIIL repository. Use of CyVerse: Utilizing CyVerse as the primary computational platform. Opportunities for Collaboration \u00b6 Joining Working Groups: Opportunity to collaborate with working groups within ESIIL, subject to invitation. 
Networking and Collaboration: Engaging in regular meetings and seminars for networking. Additional Responsibilities \u00b6 Reviewing Working Group Applications: Assisting in the review process of working group applications. Supporting Working Groups: Providing support to working groups in various capacities, even if not an author. Note \u00b6 Primary research commitments should be prioritized unless otherwise directed by supervisors or ESIIL's administrative body. This framework ensures that ESIIL postdocs balance independent research with collaborative opportunities, adhering to open data principles, and utilizing designated platforms for their work.","title":"ESIIL Postdoctoral Researcher Responsibilities and Opportunities"},{"location":"resources/working_groups_and_postdocs/#esiil-postdoctoral-researcher-responsibilities-and-opportunities","text":"","title":"ESIIL Postdoctoral Researcher Responsibilities and Opportunities"},{"location":"resources/working_groups_and_postdocs/#primary-responsibilities","text":"Independent Research: Conducting self-proposed research projects. Adhering to open data principles. Data and Code Storage: Storing all research code and data in the designated ESIIL repository. Use of CyVerse: Utilizing CyVerse as the primary computational platform.","title":"Primary Responsibilities"},{"location":"resources/working_groups_and_postdocs/#opportunities-for-collaboration","text":"Joining Working Groups: Opportunity to collaborate with working groups within ESIIL, subject to invitation. Networking and Collaboration: Engaging in regular meetings and seminars for networking.","title":"Opportunities for Collaboration"},{"location":"resources/working_groups_and_postdocs/#additional-responsibilities","text":"Reviewing Working Group Applications: Assisting in the review process of working group applications. Supporting Working Groups: Providing support to working groups in various capacities, even if not an author.","title":"Additional Responsibilities"},{"location":"resources/working_groups_and_postdocs/#note","text":"Primary research commitments should be prioritized unless otherwise directed by supervisors or ESIIL's administrative body. This framework ensures that ESIIL postdocs balance independent research with collaborative opportunities, adhering to open data principles, and utilizing designated platforms for their work.","title":"Note"},{"location":"trainings/training_2_code/","text":"Pre-summit training \u00b6","title":"Pre-summit training"},{"location":"trainings/training_2_code/#pre-summit-training","text":"","title":"Pre-summit training"},{"location":"trainings/training_one/","text":"Markdown for the Modern Researcher at ESIIL \u00b6 Introduction \u00b6 Overview of Markdown's relevance and utility in modern research. How Markdown streamlines documentation in diverse scientific and coding environments. Section 1: Mastering Markdown Syntax \u00b6 Objective: Equip researchers with a thorough understanding of Markdown syntax and its diverse applications. Topics Covered: Fundamentals of Text Formatting (headings, lists, bold, italics) Advanced Structures (tables, blockquotes) Integrating Multimedia (image and video links) Diagrams with Mermaid (creating flowcharts, mind maps, timelines) Interactive Elements (hyperlinks, embedding interactive content) Activities: Crafting a Markdown document with various formatting elements. Developing diagrams using Mermaid for research presentations. Embedding multimedia elements in a Markdown document for enhanced communication. 
Section 2: Markdown in Research Tools \u00b6 Objective: Showcase the integration of Markdown in RStudio and Jupyter Notebooks for scientific documentation. Topics Covered: Implementing Markdown in RStudio (R Markdown, knitting to HTML/PDF) Utilizing Markdown in Jupyter Notebooks (code and Markdown cells) Best practices for documenting research code Including code outputs and visualizations in documentation Activities: Creating and sharing an R Markdown document with annotated research data. Building a comprehensive Jupyter Notebook with integrated Markdown annotations. Section 3: Disseminating Research with Markdown and GitHub Pages \u00b6 Objective: Teach researchers how to publish and manage Markdown-based documentation as web pages. Topics Covered: Setting up a GitHub repository for hosting documentation Transforming Markdown files into web-friendly formats Customizing web page layouts and themes Advanced features using Jekyll Version control and content management for documentation Activities: Publishing a research project documentation on GitHub Pages. Applying custom themes and layouts to enhance online documentation. Conclusion \u00b6 Review of Markdown's role in enhancing research efficiency and clarity. Encouraging the integration of Markdown into daily research activities for improved documentation and dissemination. Additional Resources \u00b6 Curated list of advanced Markdown tutorials, guides for GitHub Pages, and Jekyll resources for researchers. Section 1: Mastering Markdown Syntax \u00b6 1. Fundamentals of Text Formatting \u00b6 Headings : Use # for different levels of headings. Heading Level 1 \u00b6 Heading Level 2 \u00b6 Heading Level 3 \u00b6 Lists : Bulleted lists use asterisks, numbers for ordered lists. Item 1 Item 2 Subitem 2.1 Subitem 2.2 First item Second item Bold and Italics : Use asterisks or underscores. Bold Text Italic Text 2. Advanced Structures \u00b6 Tables : Create tables using dashes and pipes. Header 1 Header 2 Header 3 Row 1 Data Data Row 2 Data Data Add a \":\"\" to change text justification. Here the : is added on the left for left justification. | Header 1 | Header 2 | Header 3 | |---------:|--------- |----------| | Row 1 | Data | Data | | Row 2 | Data | Data | A N A L Y T I C S E N R E I N V I R O N M E N T V E L O P M O C O M U N E G A G E L L A H C N E R A T A D E V E L O P W E I T S I T N E I C S R S O I G O L O I B H T L A H T L A E W E G N E L T I T S I T N E I C S N I E E S R E H T O E N I C S L L A H C E G L A N E G A L L E H C N E I C If you hit the boundaries of Markdown's capabilities, you can start to add html directly. Remember, this entire exercisse is to translate to html. Sudoku Puzzle Fill in the blank cells with numbers from 1 to 9, such that each row, column, and 3x3 subgrid contains all the numbers from 1 to 9 without repetition. 5 3 7 6 1 9 5 9 8 6 8 6 3 4 8 3 1 7 2 6 6 2 8 4 1 9 5 8 7 9 5 3 4 6 7 8 9 1 2 6 7 2 1 9 5 3 4 8 1 9 8 3 4 2 5 6 7 8 5 9 7 6 1 4 2 3 4 2 6 8 5 3 7 9 1 7 1 3 9 2 4 8 5 6 9 6 1 5 3 7 2 8 4 2 8 7 4 1 9 6 3 5 3 4 5 2 8 6 1 7 9 Blockquotes : Use > for blockquotes. This is a blockquote. It can span multiple lines. 3. Integrating Multimedia \u00b6 Images : Add images using the format ![alt text](image_url) . Videos : Embed videos using HTML in Markdown. 4. 
Diagrams with Mermaid \u00b6 Flowcharts : graph TD A[Start] --> B[Analyze Data] B --> C{Is Data Large?} C -->|Yes| D[Apply Big Data Solutions] C -->|No| E[Use Traditional Methods] D --> F[Machine Learning] E --> G[Statistical Analysis] F --> H{Model Accurate?} G --> I[Report Results] H -->|Yes| J[Deploy Model] H -->|No| K[Refine Model] J --> L[Monitor Performance] K --> F L --> M[End: Success] I --> N[End: Report Generated] style A fill:#f9f,stroke:#333,stroke-width:2px style M fill:#9f9,stroke:#333,stroke-width:2px style N fill:#9f9,stroke:#333,stroke-width:2px Mind Maps : mindmap root((ESIIL)) section Data Sources Satellite Imagery ::icon(fa fa-satellite) Remote Sensing Data Drones Aircraft On-ground Sensors Weather Stations IoT Devices Open Environmental Data Public Datasets ::icon(fa fa-database) section Research Focus Climate Change Analysis Ice Melt Patterns Sea Level Rise Biodiversity Monitoring Species Distribution Habitat Fragmentation Geospatial Analysis Techniques Machine Learning Models Predictive Analytics section Applications Conservation Strategies ::icon(fa fa-leaf) Urban Planning Green Spaces Disaster Response Flood Mapping Wildfire Tracking section Tools and Technologies GIS Software QGIS ArcGIS Programming Languages Python R Cloud Computing Platforms AWS Google Earth Engine Data Visualization D3.js Tableau Timelines : gantt title ESIIL Year 2 Project Schedule dateFormat YYYY-MM-DD section CI Sovereign OASIS via private jupiterhubs :2024-08-01, 2024-10-30 OASIS documentation :2024-09-15, 70d Data cube OASIS via cyverse account :2024-09-15, 100d Integrate with ESIIL User Management system :2024-08-01, 2024-11-30 Build badges to deploy DE from mkdoc :2024-09-01, 2024-12-15 Streamline Github ssh key management :2024-10-01, 2024-12-31 Cyverse support (R proxy link) :2024-11-01, 2024-12-31 Cyverse use summary and statistics :2024-08-01, 2024-12-15 section CI Consultation and Education Conferences/Invited talks :2024-08-01, 2024-12-31 Office hours :2024-08-15, 2024-12-15 Proposals :2024-09-01, 2024-11-15 Private lessons :2024-09-15, 2024-11-30 Pre-event trainings :2024-10-01, 2024-12-15 Textbook development w/ education team :2024-08-01, 2024-12-15 Train the trainers / group lessons :2024-08-15, 2024-11-30 Tribal engagement :2024-09-01, 2024-12-15 Ethical Space training :2024-09-15, 2024-12-31 section CI Design and Build Data library (repository) :2024-08-01, 2024-10-30 Analytics library (repository) :2024-08-15, 2024-11-15 Containers (repository) :2024-09-01, 2024-11-30 Cloud infrastructure templates (repository) :2024-09-15, 2024-12-15 Tribal resilience Data Cube :2024-10-01, 2024-12-31 %%{init: { 'logLevel': 'debug', 'theme': 'base', 'gitGraph': {'rotateCommitLabel': true}} }%% gitGraph commit id: \"Start from template\" branch c1 commit id: \"Set up SSH key pair\" commit id: \"Modify _config.yml for GitHub Pages\" commit id: \"Initial website structure\" commit id: \"Add new markdown pages\" commit id: \"Update navigation tree\" commit id: \"Edit existing pages\" commit id: \"Delete old markdown pages\" commit id: \"Finalize website updates\" commit id: \"Add new markdown pages\" commit id: \"Update navigation tree\" checkout c1 branch b1 commit commit checkout c1 merge b1 %%{init: {\"quadrantChart\": {\"chartWidth\": 400, \"chartHeight\": 400}, \"themeVariables\": {\"quadrant1TextFill\": \"#ff0000\"} }}%% quadrantChart x-axis Urgent --> Not Urgent y-axis Not Important --> \"Important \u2764\" quadrant-1 Plan quadrant-2 Do quadrant-3 Delegate quadrant-4 Delete timeline 
title Major Events in Environmental Science and Data Science section Environmental Science 19th century : Foundations in Ecology and Conservation 1962 : Publication of 'Silent Spring' by Rachel Carson 1970 : First Earth Day 1987 : Brundtland Report introduces Sustainable Development 1992 : Rio Earth Summit 2015 : Paris Agreement on Climate Change section Data Science 1960s-1970s : Development of Database Management Systems 1980s : Emergence of Data Warehousing 1990s : Growth of the World Wide Web and Data Mining 2000s : Big Data and Predictive Analytics 2010s : AI and Machine Learning Revolution 2020s : Integration of AI in Environmental Research erDiagram CAR ||--o{ NAMED-DRIVER : allows CAR { string registrationNumber string make string model } PERSON ||--o{ NAMED-DRIVER : is PERSON { string firstName string lastName int age } --- config: sankey: showValues: false --- sankey-beta NASA Data,Big Data Harmonization,100 Satellite Imagery,Big Data Harmonization,80 Open Environmental Data,Big Data Harmonization,70 Remote Sensing Data,Big Data Harmonization,90 Big Data Harmonization, Data Analysis and Integration,340 Data Analysis and Integration,Climate Change Research,100 Data Analysis and Integration,Biodiversity Monitoring,80 Data Analysis and Integration,Geospatial Mapping,60 Data Analysis and Integration,Urban Planning,50 Data Analysis and Integration,Disaster Response,50 5. Interactive Elements \u00b6 Hyperlinks : Use the format [link text](URL) . Google Play Tetris Embedding Interactive Content : Use HTML tags or specific platform embed codes. 6. Math Notation \u00b6 Markdown can be combined with LaTeX for mathematical notation, useful in environmental data science for expressing statistical distributions, coordinate systems, and more. This requires a Markdown renderer with LaTeX support (like MathJax or KaTeX). Inline Math : Use single dollar signs for inline math expressions. Representing the normal distribution. Example: The probability density function of the normal distribution is given by \\(f(x|\\mu,\\sigma) = \\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2}\\) .` Display Math : Use double dollar signs for standalone equations. Example: $$ f(x|\\mu,\\sigma) = \\frac{1}{\\sigma\\sqrt{2\\pi}}e {-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right) 2} $$ Common LaTeX Elements for Environmental Data Science : Statistical Distributions : Normal Distribution: \\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2} for \\(\\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2}\\) Poisson Distribution: P(k; \\lambda) = \\frac{\\lambda^k e^{-\\lambda}}{k!} for \\(P(k; \\lambda) = \\frac{\\lambda^k e^{-\\lambda}}{k!}\\) Coordinate Systems : Spherical Coordinates: (r, \\theta, \\phi) for \\((r, \\theta, \\phi)\\) Cartesian Coordinates: (x, y, z) for \\((x, y, z)\\) Geospatial Equations : Haversine Formula for Distance: a = \\sin^2\\left(\\frac{\\Delta\\phi}{2}\\right) + \\cos(\\phi_1)\\cos(\\phi_2)\\sin^2\\left(\\frac{\\Delta\\lambda}{2}\\right) for \\(a = \\sin^2\\left(\\frac{\\Delta\\phi}{2}\\right) + \\cos(\\phi_1)\\cos(\\phi_2)\\sin^2\\left(\\frac{\\Delta\\lambda}{2}\\right)\\) Note: The rendering of these equations as formatted math will depend on your Markdown viewer's LaTeX capabilities. 7. Effective Citations in Markdown \u00b6 Inline Citations \u00b6 Objective: Learn how to use inline citations in Markdown. 
Example Usage: Inline citation of a single work: Some text with an inline citation. [ @jones :envstudy:2020] Inline citation with specific page or section: More text with a specific section cited. [See @jones :envstudy:2020, \u00a74.2] Contrasting views: Discussion of a topic with a contrasting view. [Contra @smith :climatechange:2019, p. 78] Footnote Citations \u00b6 Objective: Understand how to use footnote citations in Markdown. Example Usage: Citing with a footnote: Some statement in the text. 1 Multiple references to the same footnote: Another statement referring to the same source. 1 A different citation: Additional comment with a new citation. 2 Creating Footnotes \u00b6 Example Syntax: First reference details. Example: Emma Jones, \"Environmental Study,\" Nature Journal, May 2020, https://nature-journal.com/envstudy2020 . \u21a9 \u21a9 Second reference details. Example: David Smith, \"Climate Change Controversies,\" Science Daily, August 2019, https://sciencedaily.com/climatechange2019 . \u21a9","title":"Markdown for the Modern Researcher at ESIIL"},{"location":"trainings/training_one/#markdown-for-the-modern-researcher-at-esiil","text":"","title":"Markdown for the Modern Researcher at ESIIL"},{"location":"trainings/training_one/#introduction","text":"Overview of Markdown's relevance and utility in modern research. How Markdown streamlines documentation in diverse scientific and coding environments.","title":"Introduction"},{"location":"trainings/training_one/#section-1-mastering-markdown-syntax","text":"Objective: Equip researchers with a thorough understanding of Markdown syntax and its diverse applications. Topics Covered: Fundamentals of Text Formatting (headings, lists, bold, italics) Advanced Structures (tables, blockquotes) Integrating Multimedia (image and video links) Diagrams with Mermaid (creating flowcharts, mind maps, timelines) Interactive Elements (hyperlinks, embedding interactive content) Activities: Crafting a Markdown document with various formatting elements. Developing diagrams using Mermaid for research presentations. Embedding multimedia elements in a Markdown document for enhanced communication.","title":"Section 1: Mastering Markdown Syntax"},{"location":"trainings/training_one/#section-2-markdown-in-research-tools","text":"Objective: Showcase the integration of Markdown in RStudio and Jupyter Notebooks for scientific documentation. Topics Covered: Implementing Markdown in RStudio (R Markdown, knitting to HTML/PDF) Utilizing Markdown in Jupyter Notebooks (code and Markdown cells) Best practices for documenting research code Including code outputs and visualizations in documentation Activities: Creating and sharing an R Markdown document with annotated research data. Building a comprehensive Jupyter Notebook with integrated Markdown annotations.","title":"Section 2: Markdown in Research Tools"},{"location":"trainings/training_one/#section-3-disseminating-research-with-markdown-and-github-pages","text":"Objective: Teach researchers how to publish and manage Markdown-based documentation as web pages. Topics Covered: Setting up a GitHub repository for hosting documentation Transforming Markdown files into web-friendly formats Customizing web page layouts and themes Advanced features using Jekyll Version control and content management for documentation Activities: Publishing a research project documentation on GitHub Pages. 
Applying custom themes and layouts to enhance online documentation.","title":"Section 3: Disseminating Research with Markdown and GitHub Pages"},{"location":"trainings/training_one/#conclusion","text":"Review of Markdown's role in enhancing research efficiency and clarity. Encouraging the integration of Markdown into daily research activities for improved documentation and dissemination.","title":"Conclusion"},{"location":"trainings/training_one/#additional-resources","text":"Curated list of advanced Markdown tutorials, guides for GitHub Pages, and Jekyll resources for researchers.","title":"Additional Resources"},{"location":"trainings/training_one/#section-1-mastering-markdown-syntax_1","text":"","title":"Section 1: Mastering Markdown Syntax"},{"location":"trainings/training_one/#1-fundamentals-of-text-formatting","text":"Headings : Use # for different levels of headings.","title":"1. Fundamentals of Text Formatting"},{"location":"trainings/training_one/#heading-level-1","text":"","title":"Heading Level 1"},{"location":"trainings/training_one/#heading-level-2","text":"","title":"Heading Level 2"},{"location":"trainings/training_one/#heading-level-3","text":"Lists : Bulleted lists use asterisks, numbers for ordered lists. Item 1 Item 2 Subitem 2.1 Subitem 2.2 First item Second item Bold and Italics : Use asterisks or underscores. Bold Text Italic Text","title":"Heading Level 3"},{"location":"trainings/training_one/#2-advanced-structures","text":"Tables : Create tables using dashes and pipes. Header 1 Header 2 Header 3 Row 1 Data Data Row 2 Data Data Add a \":\"\" to change text justification. Here the : is added on the left for left justification. | Header 1 | Header 2 | Header 3 | |---------:|--------- |----------| | Row 1 | Data | Data | | Row 2 | Data | Data | A N A L Y T I C S E N R E I N V I R O N M E N T V E L O P M O C O M U N E G A G E L L A H C N E R A T A D E V E L O P W E I T S I T N E I C S R S O I G O L O I B H T L A H T L A E W E G N E L T I T S I T N E I C S N I E E S R E H T O E N I C S L L A H C E G L A N E G A L L E H C N E I C If you hit the boundaries of Markdown's capabilities, you can start to add html directly. Remember, this entire exercisse is to translate to html. Sudoku Puzzle Fill in the blank cells with numbers from 1 to 9, such that each row, column, and 3x3 subgrid contains all the numbers from 1 to 9 without repetition. 5 3 7 6 1 9 5 9 8 6 8 6 3 4 8 3 1 7 2 6 6 2 8 4 1 9 5 8 7 9 5 3 4 6 7 8 9 1 2 6 7 2 1 9 5 3 4 8 1 9 8 3 4 2 5 6 7 8 5 9 7 6 1 4 2 3 4 2 6 8 5 3 7 9 1 7 1 3 9 2 4 8 5 6 9 6 1 5 3 7 2 8 4 2 8 7 4 1 9 6 3 5 3 4 5 2 8 6 1 7 9 Blockquotes : Use > for blockquotes. This is a blockquote. It can span multiple lines.","title":"2. Advanced Structures"},{"location":"trainings/training_one/#3-integrating-multimedia","text":"Images : Add images using the format ![alt text](image_url) . Videos : Embed videos using HTML in Markdown. ","title":"3. 
Integrating Multimedia"},{"location":"trainings/training_one/#4-diagrams-with-mermaid","text":"Flowcharts : graph TD A[Start] --> B[Analyze Data] B --> C{Is Data Large?} C -->|Yes| D[Apply Big Data Solutions] C -->|No| E[Use Traditional Methods] D --> F[Machine Learning] E --> G[Statistical Analysis] F --> H{Model Accurate?} G --> I[Report Results] H -->|Yes| J[Deploy Model] H -->|No| K[Refine Model] J --> L[Monitor Performance] K --> F L --> M[End: Success] I --> N[End: Report Generated] style A fill:#f9f,stroke:#333,stroke-width:2px style M fill:#9f9,stroke:#333,stroke-width:2px style N fill:#9f9,stroke:#333,stroke-width:2px Mind Maps : mindmap root((ESIIL)) section Data Sources Satellite Imagery ::icon(fa fa-satellite) Remote Sensing Data Drones Aircraft On-ground Sensors Weather Stations IoT Devices Open Environmental Data Public Datasets ::icon(fa fa-database) section Research Focus Climate Change Analysis Ice Melt Patterns Sea Level Rise Biodiversity Monitoring Species Distribution Habitat Fragmentation Geospatial Analysis Techniques Machine Learning Models Predictive Analytics section Applications Conservation Strategies ::icon(fa fa-leaf) Urban Planning Green Spaces Disaster Response Flood Mapping Wildfire Tracking section Tools and Technologies GIS Software QGIS ArcGIS Programming Languages Python R Cloud Computing Platforms AWS Google Earth Engine Data Visualization D3.js Tableau Timelines : gantt title ESIIL Year 2 Project Schedule dateFormat YYYY-MM-DD section CI Sovereign OASIS via private jupiterhubs :2024-08-01, 2024-10-30 OASIS documentation :2024-09-15, 70d Data cube OASIS via cyverse account :2024-09-15, 100d Integrate with ESIIL User Management system :2024-08-01, 2024-11-30 Build badges to deploy DE from mkdoc :2024-09-01, 2024-12-15 Streamline Github ssh key management :2024-10-01, 2024-12-31 Cyverse support (R proxy link) :2024-11-01, 2024-12-31 Cyverse use summary and statistics :2024-08-01, 2024-12-15 section CI Consultation and Education Conferences/Invited talks :2024-08-01, 2024-12-31 Office hours :2024-08-15, 2024-12-15 Proposals :2024-09-01, 2024-11-15 Private lessons :2024-09-15, 2024-11-30 Pre-event trainings :2024-10-01, 2024-12-15 Textbook development w/ education team :2024-08-01, 2024-12-15 Train the trainers / group lessons :2024-08-15, 2024-11-30 Tribal engagement :2024-09-01, 2024-12-15 Ethical Space training :2024-09-15, 2024-12-31 section CI Design and Build Data library (repository) :2024-08-01, 2024-10-30 Analytics library (repository) :2024-08-15, 2024-11-15 Containers (repository) :2024-09-01, 2024-11-30 Cloud infrastructure templates (repository) :2024-09-15, 2024-12-15 Tribal resilience Data Cube :2024-10-01, 2024-12-31 %%{init: { 'logLevel': 'debug', 'theme': 'base', 'gitGraph': {'rotateCommitLabel': true}} }%% gitGraph commit id: \"Start from template\" branch c1 commit id: \"Set up SSH key pair\" commit id: \"Modify _config.yml for GitHub Pages\" commit id: \"Initial website structure\" commit id: \"Add new markdown pages\" commit id: \"Update navigation tree\" commit id: \"Edit existing pages\" commit id: \"Delete old markdown pages\" commit id: \"Finalize website updates\" commit id: \"Add new markdown pages\" commit id: \"Update navigation tree\" checkout c1 branch b1 commit commit checkout c1 merge b1 %%{init: {\"quadrantChart\": {\"chartWidth\": 400, \"chartHeight\": 400}, \"themeVariables\": {\"quadrant1TextFill\": \"#ff0000\"} }}%% quadrantChart x-axis Urgent --> Not Urgent y-axis Not Important --> \"Important \u2764\" quadrant-1 Plan 
quadrant-2 Do quadrant-3 Delegate quadrant-4 Delete timeline title Major Events in Environmental Science and Data Science section Environmental Science 19th century : Foundations in Ecology and Conservation 1962 : Publication of 'Silent Spring' by Rachel Carson 1970 : First Earth Day 1987 : Brundtland Report introduces Sustainable Development 1992 : Rio Earth Summit 2015 : Paris Agreement on Climate Change section Data Science 1960s-1970s : Development of Database Management Systems 1980s : Emergence of Data Warehousing 1990s : Growth of the World Wide Web and Data Mining 2000s : Big Data and Predictive Analytics 2010s : AI and Machine Learning Revolution 2020s : Integration of AI in Environmental Research erDiagram CAR ||--o{ NAMED-DRIVER : allows CAR { string registrationNumber string make string model } PERSON ||--o{ NAMED-DRIVER : is PERSON { string firstName string lastName int age } --- config: sankey: showValues: false --- sankey-beta NASA Data,Big Data Harmonization,100 Satellite Imagery,Big Data Harmonization,80 Open Environmental Data,Big Data Harmonization,70 Remote Sensing Data,Big Data Harmonization,90 Big Data Harmonization, Data Analysis and Integration,340 Data Analysis and Integration,Climate Change Research,100 Data Analysis and Integration,Biodiversity Monitoring,80 Data Analysis and Integration,Geospatial Mapping,60 Data Analysis and Integration,Urban Planning,50 Data Analysis and Integration,Disaster Response,50","title":"4. Diagrams with Mermaid"},{"location":"trainings/training_one/#5-interactive-elements","text":"Hyperlinks : Use the format [link text](URL) . Google Play Tetris Embedding Interactive Content : Use HTML tags or specific platform embed codes. ","title":"5. Interactive Elements"},{"location":"trainings/training_one/#6-math-notation","text":"Markdown can be combined with LaTeX for mathematical notation, useful in environmental data science for expressing statistical distributions, coordinate systems, and more. This requires a Markdown renderer with LaTeX support (like MathJax or KaTeX). Inline Math : Use single dollar signs for inline math expressions. Representing the normal distribution. Example: The probability density function of the normal distribution is given by \\(f(x|\\mu,\\sigma) = \\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2}\\) .` Display Math : Use double dollar signs for standalone equations. 
Example: $$ f(x|\\mu,\\sigma) = \\frac{1}{\\sigma\\sqrt{2\\pi}}e {-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right) 2} $$ Common LaTeX Elements for Environmental Data Science : Statistical Distributions : Normal Distribution: \\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2} for \\(\\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2}\\) Poisson Distribution: P(k; \\lambda) = \\frac{\\lambda^k e^{-\\lambda}}{k!} for \\(P(k; \\lambda) = \\frac{\\lambda^k e^{-\\lambda}}{k!}\\) Coordinate Systems : Spherical Coordinates: (r, \\theta, \\phi) for \\((r, \\theta, \\phi)\\) Cartesian Coordinates: (x, y, z) for \\((x, y, z)\\) Geospatial Equations : Haversine Formula for Distance: a = \\sin^2\\left(\\frac{\\Delta\\phi}{2}\\right) + \\cos(\\phi_1)\\cos(\\phi_2)\\sin^2\\left(\\frac{\\Delta\\lambda}{2}\\right) for \\(a = \\sin^2\\left(\\frac{\\Delta\\phi}{2}\\right) + \\cos(\\phi_1)\\cos(\\phi_2)\\sin^2\\left(\\frac{\\Delta\\lambda}{2}\\right)\\) Note: The rendering of these equations as formatted math will depend on your Markdown viewer's LaTeX capabilities.","title":"6. Math Notation"},{"location":"trainings/training_one/#7-effective-citations-in-markdown","text":"","title":"7. Effective Citations in Markdown"},{"location":"trainings/training_one/#inline-citations","text":"Objective: Learn how to use inline citations in Markdown. Example Usage: Inline citation of a single work: Some text with an inline citation. [ @jones :envstudy:2020] Inline citation with specific page or section: More text with a specific section cited. [See @jones :envstudy:2020, \u00a74.2] Contrasting views: Discussion of a topic with a contrasting view. [Contra @smith :climatechange:2019, p. 78]","title":"Inline Citations"},{"location":"trainings/training_one/#footnote-citations","text":"Objective: Understand how to use footnote citations in Markdown. Example Usage: Citing with a footnote: Some statement in the text. 1 Multiple references to the same footnote: Another statement referring to the same source. 1 A different citation: Additional comment with a new citation. 2","title":"Footnote Citations"},{"location":"trainings/training_one/#creating-footnotes","text":"Example Syntax: First reference details. Example: Emma Jones, \"Environmental Study,\" Nature Journal, May 2020, https://nature-journal.com/envstudy2020 . \u21a9 \u21a9 Second reference details. Example: David Smith, \"Climate Change Controversies,\" Science Daily, August 2019, https://sciencedaily.com/climatechange2019 . 
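The formulas listed in the math-notation section above can be checked numerically. Below is a minimal R sketch (not part of the original training materials): the normal density is implemented directly from the formula and compared against base R's dnorm(), and the Haversine term a is extended to a great-circle distance with the standard step d = 2 * R * asin(sqrt(a)). Function names, test values, and the Denver/Boulder coordinates are illustrative assumptions.

```r
# Minimal R sketch of two formulas from the math-notation section above.
# All inputs are illustrative; dnorm() is used only as a numerical check.

# Normal probability density: f(x | mu, sigma) = 1/(sigma*sqrt(2*pi)) * exp(-0.5*((x - mu)/sigma)^2)
normal_pdf <- function(x, mu = 0, sigma = 1) {
  (1 / (sigma * sqrt(2 * pi))) * exp(-0.5 * ((x - mu) / sigma)^2)
}
all.equal(normal_pdf(1.5, mu = 0, sigma = 2), dnorm(1.5, mean = 0, sd = 2))  # TRUE

# Haversine term: a = sin^2(dphi/2) + cos(phi1) * cos(phi2) * sin^2(dlambda/2)
# Great-circle distance then follows as d = 2 * R * asin(sqrt(a)), with R = Earth radius (~6371 km).
haversine_km <- function(lat1, lon1, lat2, lon2, R = 6371) {
  to_rad  <- pi / 180
  phi1    <- lat1 * to_rad
  phi2    <- lat2 * to_rad
  dphi    <- (lat2 - lat1) * to_rad
  dlambda <- (lon2 - lon1) * to_rad
  a <- sin(dphi / 2)^2 + cos(phi1) * cos(phi2) * sin(dlambda / 2)^2
  2 * R * asin(sqrt(a))
}
# Approximate Denver-to-Boulder distance (coordinates are rough): roughly 38 km
haversine_km(39.74, -104.99, 40.01, -105.27)
```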
\u21a9","title":"Creating Footnotes"},{"location":"trainings/training_three/","text":"Team Science Community Skills ESIIL Behavior Expectations Poster Calling People In Handout Tool: Interrupting Microagressions How to Apologize Voices in Concert Jamboard Cultural Intelligence","title":"Voices in Concert"},{"location":"trainings/training_two/","text":"Markdown for the Modern Researcher at ESIIL \u00b6 Definitions \"Open Science is defined as an inclusive construct that combines various movements and practices aiming to make multilingual scientific knowledge openly available, accessible and reusable for everyone, to increase scientific collaborations and sharing of information for the benefits of science and society, and to open the processes of scientific knowledge creation, evaluation and communication to societal actors beyond the traditional scientific community.\" - UNESCO Definition UNESCO's Recommendation on Open Science \"Open Science is the movement to make scientific research (including publications, data, physical samples, and software) and its dissemination accessible to all levels of society, amateur or professional...\" Wikipedia definition Open and Collaborative Science Network's Open Science Manifesto Six Pillars of Open Science Open Access Publications Open Data Open Educational Resources Open Methodology Open Peer Review Open Source Software Wait, how many pillars of Open Science Really Are There? The number can be from 4 to 8 Foster Open Science Diagram Graphic by Foster Open Science Introduction \u00b6 Overview of Markdown's relevance and utility in modern research. How Markdown streamlines documentation in diverse scientific and coding environments. Section 1: Mastering Markdown Syntax \u00b6 Objective: Equip researchers with a thorough understanding of Markdown syntax and its diverse applications. Topics Covered: Fundamentals of Text Formatting (headings, lists, bold, italics) Advanced Structures (tables, blockquotes) Integrating Multimedia (image and video links) Diagrams with Mermaid (creating flowcharts, mind maps, timelines) Interactive Elements (hyperlinks, embedding interactive content) Activities: Crafting a Markdown document with various formatting elements. Developing diagrams using Mermaid for research presentations. Embedding multimedia elements in a Markdown document for enhanced communication. Section 2: Markdown in Research Tools \u00b6 Objective: Showcase the integration of Markdown in RStudio and Jupyter Notebooks for scientific documentation. Topics Covered: Implementing Markdown in RStudio (R Markdown, knitting to HTML/PDF) Utilizing Markdown in Jupyter Notebooks (code and Markdown cells) Best practices for documenting research code Including code outputs and visualizations in documentation Activities: Creating and sharing an R Markdown document with annotated research data. Building a comprehensive Jupyter Notebook with integrated Markdown annotations. Section 3: Disseminating Research with Markdown and GitHub Pages \u00b6 Objective: Teach researchers how to publish and manage Markdown-based documentation as web pages. Topics Covered: Setting up a GitHub repository for hosting documentation Transforming Markdown files into web-friendly formats Customizing web page layouts and themes Advanced features using Jekyll Version control and content management for documentation Activities: Publishing a research project documentation on GitHub Pages. Applying custom themes and layouts to enhance online documentation. 
Conclusion \u00b6 Review of Markdown's role in enhancing research efficiency and clarity. Encouraging the integration of Markdown into daily research activities for improved documentation and dissemination. Additional Resources \u00b6 Curated list of advanced Markdown tutorials, guides for GitHub Pages, and Jekyll resources for researchers. Section 1: Mastering Markdown Syntax \u00b6 1. Fundamentals of Text Formatting \u00b6 Headings : Use # for different levels of headings. Heading Level 1 \u00b6 Heading Level 2 \u00b6 Heading Level 3 \u00b6 Lists : Bulleted lists use asterisks, numbers for ordered lists. Item 1 Item 2 Subitem 2.1 Subitem 2.2 First item Second item Bold and Italics : Use asterisks or underscores. Bold Text Italic Text 2. Advanced Structures \u00b6 Tables : Create tables using dashes and pipes. Header 1 Header 2 Header 3 Row 1 Data Data Row 2 Data Data Add a \":\"\" to change text justification. Here the : is added on the left for left justification. | Header 1 | Header 2 | Header 3 | |---------:|--------- |----------| | Row 1 | Data | Data | | Row 2 | Data | Data | A N A L Y T I C S E N R E I N V I R O N M E N T V E L O P M O C O M U N E G A G E L L A H C N E R A T A D E V E L O P W E I T S I T N E I C S R S O I G O L O I B H T L A H T L A E W E G N E L T I T S I T N E I C S N I E E S R E H T O E N I C S L L A H C E G L A N E G A L L E H C N E I C If you hit the boundaries of Markdown's capabilities, you can start to add html directly. Remember, this entire exercisse is to translate to html. Sudoku Puzzle Fill in the blank cells with numbers from 1 to 9, such that each row, column, and 3x3 subgrid contains all the numbers from 1 to 9 without repetition. 5 3 7 6 1 9 5 9 8 6 8 6 3 4 8 3 1 7 2 6 6 2 8 4 1 9 5 8 7 9 5 3 4 6 7 8 9 1 2 6 7 2 1 9 5 3 4 8 1 9 8 3 4 2 5 6 7 8 5 9 7 6 1 4 2 3 4 2 6 8 5 3 7 9 1 7 1 3 9 2 4 8 5 6 9 6 1 5 3 7 2 8 4 2 8 7 4 1 9 6 3 5 3 4 5 2 8 6 1 7 9 Blockquotes : Use > for blockquotes. This is a blockquote. It can span multiple lines. 3. Integrating Multimedia \u00b6 Images : Add images using the format ![alt text](image_url) . Videos : Embed videos using HTML in Markdown. 4. 
Diagrams with Mermaid \u00b6 Flowcharts : graph TD A[Start] --> B[Analyze Data] B --> C{Is Data Large?} C -->|Yes| D[Apply Big Data Solutions] C -->|No| E[Use Traditional Methods] D --> F[Machine Learning] E --> G[Statistical Analysis] F --> H{Model Accurate?} G --> I[Report Results] H -->|Yes| J[Deploy Model] H -->|No| K[Refine Model] J --> L[Monitor Performance] K --> F L --> M[End: Success] I --> N[End: Report Generated] style A fill:#f9f,stroke:#333,stroke-width:2px style M fill:#9f9,stroke:#333,stroke-width:2px style N fill:#9f9,stroke:#333,stroke-width:2px Mind Maps : mindmap root((ESIIL)) section Data Sources Satellite Imagery ::icon(fa fa-satellite) Remote Sensing Data Drones Aircraft On-ground Sensors Weather Stations IoT Devices Open Environmental Data Public Datasets ::icon(fa fa-database) section Research Focus Climate Change Analysis Ice Melt Patterns Sea Level Rise Biodiversity Monitoring Species Distribution Habitat Fragmentation Geospatial Analysis Techniques Machine Learning Models Predictive Analytics section Applications Conservation Strategies ::icon(fa fa-leaf) Urban Planning Green Spaces Disaster Response Flood Mapping Wildfire Tracking section Tools and Technologies GIS Software QGIS ArcGIS Programming Languages Python R Cloud Computing Platforms AWS Google Earth Engine Data Visualization D3.js Tableau Timelines : gantt title ESIIL Year 2 Project Schedule dateFormat YYYY-MM-DD section CI Sovereign OASIS via private jupiterhubs :2024-08-01, 2024-10-30 OASIS documentation :2024-09-15, 70d Data cube OASIS via cyverse account :2024-09-15, 100d Integrate with ESIIL User Management system :2024-08-01, 2024-11-30 Build badges to deploy DE from mkdoc :2024-09-01, 2024-12-15 Streamline Github ssh key management :2024-10-01, 2024-12-31 Cyverse support (R proxy link) :2024-11-01, 2024-12-31 Cyverse use summary and statistics :2024-08-01, 2024-12-15 section CI Consultation and Education Conferences/Invited talks :2024-08-01, 2024-12-31 Office hours :2024-08-15, 2024-12-15 Proposals :2024-09-01, 2024-11-15 Private lessons :2024-09-15, 2024-11-30 Pre-event trainings :2024-10-01, 2024-12-15 Textbook development w/ education team :2024-08-01, 2024-12-15 Train the trainers / group lessons :2024-08-15, 2024-11-30 Tribal engagement :2024-09-01, 2024-12-15 Ethical Space training :2024-09-15, 2024-12-31 section CI Design and Build Data library (repository) :2024-08-01, 2024-10-30 Analytics library (repository) :2024-08-15, 2024-11-15 Containers (repository) :2024-09-01, 2024-11-30 Cloud infrastructure templates (repository) :2024-09-15, 2024-12-15 Tribal resilience Data Cube :2024-10-01, 2024-12-31 %%{init: { 'logLevel': 'debug', 'theme': 'base', 'gitGraph': {'rotateCommitLabel': true}} }%% gitGraph commit id: \"Start from template\" branch c1 commit id: \"Set up SSH key pair\" commit id: \"Modify _config.yml for GitHub Pages\" commit id: \"Initial website structure\" commit id: \"Add new markdown pages\" commit id: \"Update navigation tree\" commit id: \"Edit existing pages\" commit id: \"Delete old markdown pages\" commit id: \"Finalize website updates\" commit id: \"Add new markdown pages\" commit id: \"Update navigation tree\" checkout c1 branch b1 commit commit checkout c1 merge b1 %%{init: {\"quadrantChart\": {\"chartWidth\": 400, \"chartHeight\": 400}, \"themeVariables\": {\"quadrant1TextFill\": \"#ff0000\"} }}%% quadrantChart x-axis Urgent --> Not Urgent y-axis Not Important --> \"Important \u2764\" quadrant-1 Plan quadrant-2 Do quadrant-3 Delegate quadrant-4 Delete timeline 
title Major Events in Environmental Science and Data Science section Environmental Science 19th century : Foundations in Ecology and Conservation 1962 : Publication of 'Silent Spring' by Rachel Carson 1970 : First Earth Day 1987 : Brundtland Report introduces Sustainable Development 1992 : Rio Earth Summit 2015 : Paris Agreement on Climate Change section Data Science 1960s-1970s : Development of Database Management Systems 1980s : Emergence of Data Warehousing 1990s : Growth of the World Wide Web and Data Mining 2000s : Big Data and Predictive Analytics 2010s : AI and Machine Learning Revolution 2020s : Integration of AI in Environmental Research erDiagram CAR ||--o{ NAMED-DRIVER : allows CAR { string registrationNumber string make string model } PERSON ||--o{ NAMED-DRIVER : is PERSON { string firstName string lastName int age } --- config: sankey: showValues: false --- sankey-beta NASA Data,Big Data Harmonization,100 Satellite Imagery,Big Data Harmonization,80 Open Environmental Data,Big Data Harmonization,70 Remote Sensing Data,Big Data Harmonization,90 Big Data Harmonization, Data Analysis and Integration,340 Data Analysis and Integration,Climate Change Research,100 Data Analysis and Integration,Biodiversity Monitoring,80 Data Analysis and Integration,Geospatial Mapping,60 Data Analysis and Integration,Urban Planning,50 Data Analysis and Integration,Disaster Response,50 5. Interactive Elements \u00b6 Hyperlinks : Use the format [link text](URL) . Google Play Tetris Embedding Interactive Content : Use HTML tags or specific platform embed codes. 6. Math Notation \u00b6 Markdown can be combined with LaTeX for mathematical notation, useful in environmental data science for expressing statistical distributions, coordinate systems, and more. This requires a Markdown renderer with LaTeX support (like MathJax or KaTeX). Inline Math : Use single dollar signs for inline math expressions. Representing the normal distribution. Example: The probability density function of the normal distribution is given by \\(f(x|\\mu,\\sigma) = \\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2}\\) .` Display Math : Use double dollar signs for standalone equations. Example: $$ f(x|\\mu,\\sigma) = \\frac{1}{\\sigma\\sqrt{2\\pi}}e {-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right) 2} $$ Common LaTeX Elements for Environmental Data Science : Statistical Distributions : Normal Distribution: \\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2} for \\(\\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2}\\) Poisson Distribution: P(k; \\lambda) = \\frac{\\lambda^k e^{-\\lambda}}{k!} for \\(P(k; \\lambda) = \\frac{\\lambda^k e^{-\\lambda}}{k!}\\) Coordinate Systems : Spherical Coordinates: (r, \\theta, \\phi) for \\((r, \\theta, \\phi)\\) Cartesian Coordinates: (x, y, z) for \\((x, y, z)\\) Geospatial Equations : Haversine Formula for Distance: a = \\sin^2\\left(\\frac{\\Delta\\phi}{2}\\right) + \\cos(\\phi_1)\\cos(\\phi_2)\\sin^2\\left(\\frac{\\Delta\\lambda}{2}\\right) for \\(a = \\sin^2\\left(\\frac{\\Delta\\phi}{2}\\right) + \\cos(\\phi_1)\\cos(\\phi_2)\\sin^2\\left(\\frac{\\Delta\\lambda}{2}\\right)\\) Note: The rendering of these equations as formatted math will depend on your Markdown viewer's LaTeX capabilities. 7. Effective Citations in Markdown \u00b6 Inline Citations \u00b6 Objective: Learn how to use inline citations in Markdown. 
Example Usage: Inline citation of a single work: Some text with an inline citation. [ @jones :envstudy:2020] Inline citation with specific page or section: More text with a specific section cited. [See @jones :envstudy:2020, \u00a74.2] Contrasting views: Discussion of a topic with a contrasting view. [Contra @smith :climatechange:2019, p. 78] Footnote Citations \u00b6 Objective: Understand how to use footnote citations in Markdown. Example Usage: Citing with a footnote: Some statement in the text. 1 Multiple references to the same footnote: Another statement referring to the same source. 1 A different citation: Additional comment with a new citation. 2 Creating Footnotes \u00b6 Example Syntax: First reference details. Example: Emma Jones, \"Environmental Study,\" Nature Journal, May 2020, https://nature-journal.com/envstudy2020 . \u21a9 \u21a9 Second reference details. Example: David Smith, \"Climate Change Controversies,\" Science Daily, August 2019, https://sciencedaily.com/climatechange2019 . \u21a9","title":"Markdown for the Modern Researcher at ESIIL"},{"location":"trainings/training_two/#markdown-for-the-modern-researcher-at-esiil","text":"Definitions \"Open Science is defined as an inclusive construct that combines various movements and practices aiming to make multilingual scientific knowledge openly available, accessible and reusable for everyone, to increase scientific collaborations and sharing of information for the benefits of science and society, and to open the processes of scientific knowledge creation, evaluation and communication to societal actors beyond the traditional scientific community.\" - UNESCO Definition UNESCO's Recommendation on Open Science \"Open Science is the movement to make scientific research (including publications, data, physical samples, and software) and its dissemination accessible to all levels of society, amateur or professional...\" Wikipedia definition Open and Collaborative Science Network's Open Science Manifesto Six Pillars of Open Science Open Access Publications Open Data Open Educational Resources Open Methodology Open Peer Review Open Source Software Wait, how many pillars of Open Science Really Are There? The number can be from 4 to 8 Foster Open Science Diagram Graphic by Foster Open Science","title":"Markdown for the Modern Researcher at ESIIL"},{"location":"trainings/training_two/#introduction","text":"Overview of Markdown's relevance and utility in modern research. How Markdown streamlines documentation in diverse scientific and coding environments.","title":"Introduction"},{"location":"trainings/training_two/#section-1-mastering-markdown-syntax","text":"Objective: Equip researchers with a thorough understanding of Markdown syntax and its diverse applications. Topics Covered: Fundamentals of Text Formatting (headings, lists, bold, italics) Advanced Structures (tables, blockquotes) Integrating Multimedia (image and video links) Diagrams with Mermaid (creating flowcharts, mind maps, timelines) Interactive Elements (hyperlinks, embedding interactive content) Activities: Crafting a Markdown document with various formatting elements. Developing diagrams using Mermaid for research presentations. Embedding multimedia elements in a Markdown document for enhanced communication.","title":"Section 1: Mastering Markdown Syntax"},{"location":"trainings/training_two/#section-2-markdown-in-research-tools","text":"Objective: Showcase the integration of Markdown in RStudio and Jupyter Notebooks for scientific documentation. 
Topics Covered: Implementing Markdown in RStudio (R Markdown, knitting to HTML/PDF) Utilizing Markdown in Jupyter Notebooks (code and Markdown cells) Best practices for documenting research code Including code outputs and visualizations in documentation Activities: Creating and sharing an R Markdown document with annotated research data. Building a comprehensive Jupyter Notebook with integrated Markdown annotations.","title":"Section 2: Markdown in Research Tools"},{"location":"trainings/training_two/#section-3-disseminating-research-with-markdown-and-github-pages","text":"Objective: Teach researchers how to publish and manage Markdown-based documentation as web pages. Topics Covered: Setting up a GitHub repository for hosting documentation Transforming Markdown files into web-friendly formats Customizing web page layouts and themes Advanced features using Jekyll Version control and content management for documentation Activities: Publishing a research project documentation on GitHub Pages. Applying custom themes and layouts to enhance online documentation.","title":"Section 3: Disseminating Research with Markdown and GitHub Pages"},{"location":"trainings/training_two/#conclusion","text":"Review of Markdown's role in enhancing research efficiency and clarity. Encouraging the integration of Markdown into daily research activities for improved documentation and dissemination.","title":"Conclusion"},{"location":"trainings/training_two/#additional-resources","text":"Curated list of advanced Markdown tutorials, guides for GitHub Pages, and Jekyll resources for researchers.","title":"Additional Resources"},{"location":"trainings/training_two/#section-1-mastering-markdown-syntax_1","text":"","title":"Section 1: Mastering Markdown Syntax"},{"location":"trainings/training_two/#1-fundamentals-of-text-formatting","text":"Headings : Use # for different levels of headings.","title":"1. Fundamentals of Text Formatting"},{"location":"trainings/training_two/#heading-level-1","text":"","title":"Heading Level 1"},{"location":"trainings/training_two/#heading-level-2","text":"","title":"Heading Level 2"},{"location":"trainings/training_two/#heading-level-3","text":"Lists : Bulleted lists use asterisks, numbers for ordered lists. Item 1 Item 2 Subitem 2.1 Subitem 2.2 First item Second item Bold and Italics : Use asterisks or underscores. Bold Text Italic Text","title":"Heading Level 3"},{"location":"trainings/training_two/#2-advanced-structures","text":"Tables : Create tables using dashes and pipes. Header 1 Header 2 Header 3 Row 1 Data Data Row 2 Data Data Add a \":\"\" to change text justification. Here the : is added on the left for left justification. | Header 1 | Header 2 | Header 3 | |---------:|--------- |----------| | Row 1 | Data | Data | | Row 2 | Data | Data | A N A L Y T I C S E N R E I N V I R O N M E N T V E L O P M O C O M U N E G A G E L L A H C N E R A T A D E V E L O P W E I T S I T N E I C S R S O I G O L O I B H T L A H T L A E W E G N E L T I T S I T N E I C S N I E E S R E H T O E N I C S L L A H C E G L A N E G A L L E H C N E I C If you hit the boundaries of Markdown's capabilities, you can start to add html directly. Remember, this entire exercisse is to translate to html. Sudoku Puzzle Fill in the blank cells with numbers from 1 to 9, such that each row, column, and 3x3 subgrid contains all the numbers from 1 to 9 without repetition. 
5 3 7 6 1 9 5 9 8 6 8 6 3 4 8 3 1 7 2 6 6 2 8 4 1 9 5 8 7 9 5 3 4 6 7 8 9 1 2 6 7 2 1 9 5 3 4 8 1 9 8 3 4 2 5 6 7 8 5 9 7 6 1 4 2 3 4 2 6 8 5 3 7 9 1 7 1 3 9 2 4 8 5 6 9 6 1 5 3 7 2 8 4 2 8 7 4 1 9 6 3 5 3 4 5 2 8 6 1 7 9 Blockquotes : Use > for blockquotes. This is a blockquote. It can span multiple lines.","title":"2. Advanced Structures"},{"location":"trainings/training_two/#3-integrating-multimedia","text":"Images : Add images using the format ![alt text](image_url) . Videos : Embed videos using HTML in Markdown. ","title":"3. Integrating Multimedia"},{"location":"trainings/training_two/#4-diagrams-with-mermaid","text":"Flowcharts : graph TD A[Start] --> B[Analyze Data] B --> C{Is Data Large?} C -->|Yes| D[Apply Big Data Solutions] C -->|No| E[Use Traditional Methods] D --> F[Machine Learning] E --> G[Statistical Analysis] F --> H{Model Accurate?} G --> I[Report Results] H -->|Yes| J[Deploy Model] H -->|No| K[Refine Model] J --> L[Monitor Performance] K --> F L --> M[End: Success] I --> N[End: Report Generated] style A fill:#f9f,stroke:#333,stroke-width:2px style M fill:#9f9,stroke:#333,stroke-width:2px style N fill:#9f9,stroke:#333,stroke-width:2px Mind Maps : mindmap root((ESIIL)) section Data Sources Satellite Imagery ::icon(fa fa-satellite) Remote Sensing Data Drones Aircraft On-ground Sensors Weather Stations IoT Devices Open Environmental Data Public Datasets ::icon(fa fa-database) section Research Focus Climate Change Analysis Ice Melt Patterns Sea Level Rise Biodiversity Monitoring Species Distribution Habitat Fragmentation Geospatial Analysis Techniques Machine Learning Models Predictive Analytics section Applications Conservation Strategies ::icon(fa fa-leaf) Urban Planning Green Spaces Disaster Response Flood Mapping Wildfire Tracking section Tools and Technologies GIS Software QGIS ArcGIS Programming Languages Python R Cloud Computing Platforms AWS Google Earth Engine Data Visualization D3.js Tableau Timelines : gantt title ESIIL Year 2 Project Schedule dateFormat YYYY-MM-DD section CI Sovereign OASIS via private jupiterhubs :2024-08-01, 2024-10-30 OASIS documentation :2024-09-15, 70d Data cube OASIS via cyverse account :2024-09-15, 100d Integrate with ESIIL User Management system :2024-08-01, 2024-11-30 Build badges to deploy DE from mkdoc :2024-09-01, 2024-12-15 Streamline Github ssh key management :2024-10-01, 2024-12-31 Cyverse support (R proxy link) :2024-11-01, 2024-12-31 Cyverse use summary and statistics :2024-08-01, 2024-12-15 section CI Consultation and Education Conferences/Invited talks :2024-08-01, 2024-12-31 Office hours :2024-08-15, 2024-12-15 Proposals :2024-09-01, 2024-11-15 Private lessons :2024-09-15, 2024-11-30 Pre-event trainings :2024-10-01, 2024-12-15 Textbook development w/ education team :2024-08-01, 2024-12-15 Train the trainers / group lessons :2024-08-15, 2024-11-30 Tribal engagement :2024-09-01, 2024-12-15 Ethical Space training :2024-09-15, 2024-12-31 section CI Design and Build Data library (repository) :2024-08-01, 2024-10-30 Analytics library (repository) :2024-08-15, 2024-11-15 Containers (repository) :2024-09-01, 2024-11-30 Cloud infrastructure templates (repository) :2024-09-15, 2024-12-15 Tribal resilience Data Cube :2024-10-01, 2024-12-31 %%{init: { 'logLevel': 'debug', 'theme': 'base', 'gitGraph': {'rotateCommitLabel': true}} }%% gitGraph commit id: \"Start from template\" branch c1 commit id: \"Set up SSH key pair\" commit id: \"Modify _config.yml for GitHub Pages\" commit id: \"Initial website structure\" commit id: \"Add new 
markdown pages\" commit id: \"Update navigation tree\" commit id: \"Edit existing pages\" commit id: \"Delete old markdown pages\" commit id: \"Finalize website updates\" commit id: \"Add new markdown pages\" commit id: \"Update navigation tree\" checkout c1 branch b1 commit commit checkout c1 merge b1 %%{init: {\"quadrantChart\": {\"chartWidth\": 400, \"chartHeight\": 400}, \"themeVariables\": {\"quadrant1TextFill\": \"#ff0000\"} }}%% quadrantChart x-axis Urgent --> Not Urgent y-axis Not Important --> \"Important \u2764\" quadrant-1 Plan quadrant-2 Do quadrant-3 Delegate quadrant-4 Delete timeline title Major Events in Environmental Science and Data Science section Environmental Science 19th century : Foundations in Ecology and Conservation 1962 : Publication of 'Silent Spring' by Rachel Carson 1970 : First Earth Day 1987 : Brundtland Report introduces Sustainable Development 1992 : Rio Earth Summit 2015 : Paris Agreement on Climate Change section Data Science 1960s-1970s : Development of Database Management Systems 1980s : Emergence of Data Warehousing 1990s : Growth of the World Wide Web and Data Mining 2000s : Big Data and Predictive Analytics 2010s : AI and Machine Learning Revolution 2020s : Integration of AI in Environmental Research erDiagram CAR ||--o{ NAMED-DRIVER : allows CAR { string registrationNumber string make string model } PERSON ||--o{ NAMED-DRIVER : is PERSON { string firstName string lastName int age } --- config: sankey: showValues: false --- sankey-beta NASA Data,Big Data Harmonization,100 Satellite Imagery,Big Data Harmonization,80 Open Environmental Data,Big Data Harmonization,70 Remote Sensing Data,Big Data Harmonization,90 Big Data Harmonization, Data Analysis and Integration,340 Data Analysis and Integration,Climate Change Research,100 Data Analysis and Integration,Biodiversity Monitoring,80 Data Analysis and Integration,Geospatial Mapping,60 Data Analysis and Integration,Urban Planning,50 Data Analysis and Integration,Disaster Response,50","title":"4. Diagrams with Mermaid"},{"location":"trainings/training_two/#5-interactive-elements","text":"Hyperlinks : Use the format [link text](URL) . Google Play Tetris Embedding Interactive Content : Use HTML tags or specific platform embed codes. ","title":"5. Interactive Elements"},{"location":"trainings/training_two/#6-math-notation","text":"Markdown can be combined with LaTeX for mathematical notation, useful in environmental data science for expressing statistical distributions, coordinate systems, and more. This requires a Markdown renderer with LaTeX support (like MathJax or KaTeX). Inline Math : Use single dollar signs for inline math expressions. Representing the normal distribution. Example: The probability density function of the normal distribution is given by \\(f(x|\\mu,\\sigma) = \\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2}\\) .` Display Math : Use double dollar signs for standalone equations. 
Example: $$ f(x|\\mu,\\sigma) = \\frac{1}{\\sigma\\sqrt{2\\pi}}e {-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right) 2} $$ Common LaTeX Elements for Environmental Data Science : Statistical Distributions : Normal Distribution: \\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2} for \\(\\frac{1}{\\sigma\\sqrt{2\\pi}}e^{-\\frac{1}{2}\\left(\\frac{x-\\mu}{\\sigma}\\right)^2}\\) Poisson Distribution: P(k; \\lambda) = \\frac{\\lambda^k e^{-\\lambda}}{k!} for \\(P(k; \\lambda) = \\frac{\\lambda^k e^{-\\lambda}}{k!}\\) Coordinate Systems : Spherical Coordinates: (r, \\theta, \\phi) for \\((r, \\theta, \\phi)\\) Cartesian Coordinates: (x, y, z) for \\((x, y, z)\\) Geospatial Equations : Haversine Formula for Distance: a = \\sin^2\\left(\\frac{\\Delta\\phi}{2}\\right) + \\cos(\\phi_1)\\cos(\\phi_2)\\sin^2\\left(\\frac{\\Delta\\lambda}{2}\\right) for \\(a = \\sin^2\\left(\\frac{\\Delta\\phi}{2}\\right) + \\cos(\\phi_1)\\cos(\\phi_2)\\sin^2\\left(\\frac{\\Delta\\lambda}{2}\\right)\\) Note: The rendering of these equations as formatted math will depend on your Markdown viewer's LaTeX capabilities.","title":"6. Math Notation"},{"location":"trainings/training_two/#7-effective-citations-in-markdown","text":"","title":"7. Effective Citations in Markdown"},{"location":"trainings/training_two/#inline-citations","text":"Objective: Learn how to use inline citations in Markdown. Example Usage: Inline citation of a single work: Some text with an inline citation. [ @jones :envstudy:2020] Inline citation with specific page or section: More text with a specific section cited. [See @jones :envstudy:2020, \u00a74.2] Contrasting views: Discussion of a topic with a contrasting view. [Contra @smith :climatechange:2019, p. 78]","title":"Inline Citations"},{"location":"trainings/training_two/#footnote-citations","text":"Objective: Understand how to use footnote citations in Markdown. Example Usage: Citing with a footnote: Some statement in the text. 1 Multiple references to the same footnote: Another statement referring to the same source. 1 A different citation: Additional comment with a new citation. 2","title":"Footnote Citations"},{"location":"trainings/training_two/#creating-footnotes","text":"Example Syntax: First reference details. Example: Emma Jones, \"Environmental Study,\" Nature Journal, May 2020, https://nature-journal.com/envstudy2020 . \u21a9 \u21a9 Second reference details. Example: David Smith, \"Climate Change Controversies,\" Science Daily, August 2019, https://sciencedaily.com/climatechange2019 . \u21a9","title":"Creating Footnotes"},{"location":"worksheets/worksheet_0/","text":"Exploring Resilience with Data in your Third Space (CyVerse) \u00b6 Instructions \u00b6 Work through the prompts below with the people at your table. Please use a decision-making method to decide before moving to a new section of the activity. Introductions \u00b6 Please share the following information with your team: - Name - Pronouns - Where did you travel from? - Reflecting back on the polarities exercise, share one thing you observed about yourself. Objectives of this group activity \u00b6 Increase comfort with Cyverse Practice decision-making with a group Get to know other Summit participants Explore how historic policies continue to affect the spatial distribution of environmental amenities. 
Background \u00b6 Introduction to Redlining \u00b6 This group exploration delves into the long-term impacts of historical redlining on urban greenspace, emphasizing the powerful role of maps in shaping environmental and social landscapes. By drawing on the research by Nardone et al. (2021), you will collaboratively investigate how discriminatory practices encoded in maps have led to persistent disparities in urban settings. This exploration aims to uncover the resilience of communities in adapting to these entrenched injustices and to foster a deeper understanding of how mapping can serve both as a tool of exclusion and as a means for promoting social equity. Understanding Redlining as a Systemic Disturbance \u00b6 Redlining originated in the 1930s as a discriminatory practice where the Home Owners' Loan Corporation (HOLC) systematically denied mortgages or offered unfavorable terms based on racial and ethnic compositions. This methodical exclusion, executed through maps that color-coded \"risky\" investment areas in red, marked minority-populated areas, denying them crucial investment and development opportunities and initiating a profound and lasting disturbance in the urban fabric. Maps serve as powerful tools beyond navigation; they communicate and enforce control. By defining neighborhood boundaries through redlining, HOLC maps not only mirrored societal biases but also perpetuated and embedded them into the urban landscape. This manipulation of geographic data set a trajectory that limited economic growth, dictated the allocation of services, and influenced the development or deterioration of community infrastructure. Figure 1: 1938 Map of Atlanta uses colors as grades for neighborhoods. The red swaths identify each area with large African-American populations that were deemed \u201cless safe.\u201d Explore the Story Map: Click on the image above to explore the interactive story map about [subject of the story map]. Resilience and Adaptation in Urban Environments \u00b6 The legacy of redlining presents both a challenge and an opportunity for resilience and adaptation. Economically and socially, redlining entrenched cycles of poverty and racial segregation, creating a resilient wealth gap that has been difficult to dismantle. Environmentally, the neighborhoods targeted by redlining continue to face significant challenges\u2014they generally feature less greenspace, suffer from higher pollution levels, and are more vulnerable to the impacts of climate change. These factors compound the health and wellness challenges faced by residents. Despite these adversities, urban communities have continually demonstrated remarkable resilience. Adaptation strategies, such as community-led green initiatives, urban agriculture, and grassroots activism, have emerged as responses to these systemic disturbances. By enhancing green infrastructure and advocating for equitable environmental policies, these communities strive to increase their resilience against both historical inequities and environmental challenges. The following group exercise will uncover the impact of redlining on urban greenspace and highlight the adaptive strategies developed in response to this enduring disturbance. Through mapping and analysis, we aim to illustrate the powerful role that geographic data can play in understanding and fostering urban resilience and social equity. References \u00b6 Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. (2021). 
Redlines and Greenspace: The Relationship between Historical Redlining and 2010 Greenspace across the United States. Environmental Health Perspectives , 129(1), 017006. DOI:10.1289/EHP7495. Hoffman, J. S., Shandas, V., & Pendleton, N. (2020). The Effects of Historical Housing Policies on Resident Exposure to Intra-Urban Heat: A Study of 108 US Urban Areas. Climate , 8(1), 12. DOI:10.3390/cli8010012. Group Activity \u00b6 Setting up CyVerse \u00b6 Log into CyVerse Use the startup procedure to start an instance on CyVerse. Be sure to: Create an SSH key and add it to your Github account (2 nd half of start-up procedure instructions) Clone the Innovation-Summit-2024 repository Complete the R Studio hack Please raise your hand if you have questions or run into technical issues. ESIIL represenatives will be walking around to help. Once you initiate your CyVerse instance, DO NOT close it . You can keep this instance running the entire Summit so you don't have to do the start-up procedure again. Create a Map \u00b6 We'll be using pre-developed code to visualize redlining impacts on Denver, CO. Please follow these steps: Open R Studio in CyVerse Use \"files\" (lower right) to navigate to this markdown document: innovation-summit-2024/code/worksheet_redlining_student_edition.qmd Start at the beginning of the code and complete the following: Create a map of historically redlined districts in Denver Overlay current-day NDVI (vegetation greenness) data onto your map You can choose \"Run All\" to run all the code at once. Note: It will take about 5 minutes to run. Now, it's your turn to choose a variable to observe. Use the provided code to select the variable you want to add to your map. More detailed instructions are included in the code. Variable Options: Tree inventory Traffic accidents Stream sampling effort Soil sampling effort Public art density Liquor license density Crime density Decision-Making \u00b6 Use the gradient of agreement (Kaner 2014) to make a decision as a team about which variable you want to explore. Unique Title \u00b6 Come up with a unique title for your anaylysis. Write it down on a sticky note at your table. Discussion Questions \u00b6 After completing your anaylysis, discuss these questions with your group: What patterns do you notice? What are the immediate questions that come to mind? How does big data help illustrate resilience? Redlining has a long-term impact. How is the impact of redlining still evident today? Still have time? \u00b6 As a group, choose another variable to explore and then discuss your findings. Look through all the variables: \u00b6 Once you're done, you can see all the code and variable maps on the \"Teacher Edition\" version of the activity: https://cu-esiil.github.io/Innovation-Summit-2024/worksheets/worksheet_redlining/","title":"Explore an example (student edition)"},{"location":"worksheets/worksheet_0/#exploring-resilience-with-data-in-your-third-space-cyverse","text":"","title":"Exploring Resilience with Data in your Third Space (CyVerse)"},{"location":"worksheets/worksheet_0/#instructions","text":"Work through the prompts below with the people at your table. Please use a decision-making method to decide before moving to a new section of the activity.","title":"Instructions"},{"location":"worksheets/worksheet_0/#introductions","text":"Please share the following information with your team: - Name - Pronouns - Where did you travel from? 
- Reflecting back on the polarities exercise, share one thing you observed about yourself.","title":"Introductions"},{"location":"worksheets/worksheet_0/#objectives-of-this-group-activity","text":"Increase comfort with Cyverse Practice decision-making with a group Get to know other Summit participants Explore how historic policies continue to affect the spatial distribution of environmental amenities.","title":"Objectives of this group activity"},{"location":"worksheets/worksheet_0/#background","text":"","title":"Background"},{"location":"worksheets/worksheet_0/#introduction-to-redlining","text":"This group exploration delves into the long-term impacts of historical redlining on urban greenspace, emphasizing the powerful role of maps in shaping environmental and social landscapes. By drawing on the research by Nardone et al. (2021), you will collaboratively investigate how discriminatory practices encoded in maps have led to persistent disparities in urban settings. This exploration aims to uncover the resilience of communities in adapting to these entrenched injustices and to foster a deeper understanding of how mapping can serve both as a tool of exclusion and as a means for promoting social equity.","title":"Introduction to Redlining"},{"location":"worksheets/worksheet_0/#understanding-redlining-as-a-systemic-disturbance","text":"Redlining originated in the 1930s as a discriminatory practice where the Home Owners' Loan Corporation (HOLC) systematically denied mortgages or offered unfavorable terms based on racial and ethnic compositions. This methodical exclusion, executed through maps that color-coded \"risky\" investment areas in red, marked minority-populated areas, denying them crucial investment and development opportunities and initiating a profound and lasting disturbance in the urban fabric. Maps serve as powerful tools beyond navigation; they communicate and enforce control. By defining neighborhood boundaries through redlining, HOLC maps not only mirrored societal biases but also perpetuated and embedded them into the urban landscape. This manipulation of geographic data set a trajectory that limited economic growth, dictated the allocation of services, and influenced the development or deterioration of community infrastructure. Figure 1: 1938 Map of Atlanta uses colors as grades for neighborhoods. The red swaths identify each area with large African-American populations that were deemed \u201cless safe.\u201d Explore the Story Map: Click on the image above to explore the interactive story map about [subject of the story map].","title":"Understanding Redlining as a Systemic Disturbance"},{"location":"worksheets/worksheet_0/#resilience-and-adaptation-in-urban-environments","text":"The legacy of redlining presents both a challenge and an opportunity for resilience and adaptation. Economically and socially, redlining entrenched cycles of poverty and racial segregation, creating a resilient wealth gap that has been difficult to dismantle. Environmentally, the neighborhoods targeted by redlining continue to face significant challenges\u2014they generally feature less greenspace, suffer from higher pollution levels, and are more vulnerable to the impacts of climate change. These factors compound the health and wellness challenges faced by residents. Despite these adversities, urban communities have continually demonstrated remarkable resilience. 
Adaptation strategies, such as community-led green initiatives, urban agriculture, and grassroots activism, have emerged as responses to these systemic disturbances. By enhancing green infrastructure and advocating for equitable environmental policies, these communities strive to increase their resilience against both historical inequities and environmental challenges. The following group exercise will uncover the impact of redlining on urban greenspace and highlight the adaptive strategies developed in response to this enduring disturbance. Through mapping and analysis, we aim to illustrate the powerful role that geographic data can play in understanding and fostering urban resilience and social equity.","title":"Resilience and Adaptation in Urban Environments"},{"location":"worksheets/worksheet_0/#references","text":"Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. (2021). Redlines and Greenspace: The Relationship between Historical Redlining and 2010 Greenspace across the United States. Environmental Health Perspectives , 129(1), 017006. DOI:10.1289/EHP7495. Hoffman, J. S., Shandas, V., & Pendleton, N. (2020). The Effects of Historical Housing Policies on Resident Exposure to Intra-Urban Heat: A Study of 108 US Urban Areas. Climate , 8(1), 12. DOI:10.3390/cli8010012.","title":"References"},{"location":"worksheets/worksheet_0/#group-activity","text":"","title":"Group Activity"},{"location":"worksheets/worksheet_0/#setting-up-cyverse","text":"Log into CyVerse Use the startup procedure to start an instance on CyVerse. Be sure to: Create an SSH key and add it to your Github account (2 nd half of start-up procedure instructions) Clone the Innovation-Summit-2024 repository Complete the R Studio hack Please raise your hand if you have questions or run into technical issues. ESIIL represenatives will be walking around to help. Once you initiate your CyVerse instance, DO NOT close it . You can keep this instance running the entire Summit so you don't have to do the start-up procedure again.","title":"Setting up CyVerse"},{"location":"worksheets/worksheet_0/#create-a-map","text":"We'll be using pre-developed code to visualize redlining impacts on Denver, CO. Please follow these steps: Open R Studio in CyVerse Use \"files\" (lower right) to navigate to this markdown document: innovation-summit-2024/code/worksheet_redlining_student_edition.qmd Start at the beginning of the code and complete the following: Create a map of historically redlined districts in Denver Overlay current-day NDVI (vegetation greenness) data onto your map You can choose \"Run All\" to run all the code at once. Note: It will take about 5 minutes to run. Now, it's your turn to choose a variable to observe. Use the provided code to select the variable you want to add to your map. More detailed instructions are included in the code. Variable Options: Tree inventory Traffic accidents Stream sampling effort Soil sampling effort Public art density Liquor license density Crime density","title":"Create a Map"},{"location":"worksheets/worksheet_0/#decision-making","text":"Use the gradient of agreement (Kaner 2014) to make a decision as a team about which variable you want to explore.","title":"Decision-Making"},{"location":"worksheets/worksheet_0/#unique-title","text":"Come up with a unique title for your anaylysis. 
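The "Create a Map" steps above run pre-developed code in worksheet_redlining_student_edition.qmd. For orientation only, here is a heavily simplified R sketch of the same general idea (redlining polygons overlaid on an NDVI layer); the file paths and the grade column are hypothetical placeholders, not the worksheet's actual data or code:

```r
# Simplified sketch only -- not the worksheet's actual code.
# File paths and the 'grade' column name are hypothetical placeholders.
library(sf)     # vector data (HOLC redlining polygons)
library(terra)  # raster data (NDVI = (NIR - Red) / (NIR + Red))

holc <- st_read("data/denver_holc_grades.geojson")  # hypothetical path
ndvi <- rast("data/denver_ndvi.tif")                # hypothetical path

holc <- st_transform(holc, crs(ndvi))  # put both layers in the same CRS

# Base map: NDVI raster with redlining district boundaries drawn on top
plot(ndvi, main = "NDVI over historically redlined districts (illustrative)")
plot(st_geometry(holc), border = "red", lwd = 2, add = TRUE)

# Mean NDVI within each district, summarized by HOLC grade
holc$mean_ndvi <- terra::extract(ndvi, vect(holc), fun = mean, na.rm = TRUE)[, 2]
aggregate(mean_ndvi ~ grade, data = st_drop_geometry(holc), FUN = mean)
```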
Write it down on a sticky note at your table.","title":"Unique Title"},{"location":"worksheets/worksheet_0/#discussion-questions","text":"After completing your anaylysis, discuss these questions with your group: What patterns do you notice? What are the immediate questions that come to mind? How does big data help illustrate resilience? Redlining has a long-term impact. How is the impact of redlining still evident today?","title":"Discussion Questions"},{"location":"worksheets/worksheet_0/#still-have-time","text":"As a group, choose another variable to explore and then discuss your findings.","title":"Still have time?"},{"location":"worksheets/worksheet_0/#look-through-all-the-variables","text":"Once you're done, you can see all the code and variable maps on the \"Teacher Edition\" version of the activity: https://cu-esiil.github.io/Innovation-Summit-2024/worksheets/worksheet_redlining/","title":"Look through all the variables:"},{"location":"worksheets/worksheet_2/","text":"TEAM ACTIVITY Day 1: Make a plan \u00b6 Instructions \u00b6 Work through the prompts in order. Please use a decision-making method \u201cto decide\u201d before moving to a new section of the activity. Day 1 Objectives \u00b6 Get to know your group members. Decide on a research question and project title. Start exploring potential datasets. Introductions (approx. time: 10 mins total or \"1-2 breaths\" per prompt) \u00b6 Please share the following information about yourself. Each team member should type their response in the space below (create more as needed). Name: [Your Name] Pronouns: [Your Pronouns] Expertise: [Your Expertise] Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science] Reflection on Polarities Exercise: [Share one thing you observed about yourself] Name: [Your Name] Pronouns: [Your Pronouns] Expertise: [Your Expertise] Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science] Reflection on Polarities Exercise: [Share one thing you observed about yourself] Name: [Your Name] Pronouns: [Your Pronouns] Expertise: [Your Expertise] Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science] Reflection on Polarities Exercise: [Share one thing you observed about yourself] Name: [Your Name] Pronouns: [Your Pronouns] Expertise: [Your Expertise] Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science] Reflection on Polarities Exercise: [Share one thing you observed about yourself] Continue adding more team members following the same format, with a line break after each person. Research Question: Innovation for Inclusion or Computation (approx. time: 5-10 mins) \u00b6 Write the research question your team selected in the space below. Feel free to revise the original question. [Insert research question here] Project Title (approx. time: 5-10 mins) \u00b6 Craft a catchy title for your team\u2019s project. Think of something that would grab attention at a conference or in a headline. [Insert title here] Promoting Resilience and Adaptation \u00b6 Describe how your proposed project aligns with the Summit's themes of resilience and adaptation. Please provide 1-2 sentences that clearly connect your project's goals or methods to these themes. [Insert your response here] Choosing Big Data Sets \u00b6 Explore potential data sets for your project's topic from the data library . 
List your options below, organizing them by whether they represent the system you're studying (e.g., deciduous forests) or the disruption to it (e.g., wildfire). Then discuss your choices and indicate your final selections. Draft Potential Data Sets \u00b6 System Being Perturbed/Disrupted: [List all potential data sets here] Perturbator/Disrupter: [List all potential data sets here] Final Choice \u00b6 System Being Perturbed/Disrupted (Final Choice): [Indicate your final selected data set here] Perturbator/Disrupter (Final Choice): [Indicate your final selected data set here] Brief Check-in: Definition of Resilience (approx. 5 mins) \u00b6 Below is a working definition of the word \"Resilience\" for the Summit. Please edit the definition below based on your earlier discussion and chosen project. \"Resilience is the capacity of a system, community, organization, or individual to absorb stress, recover from disruptions, adapt to change, and continue to develop and thrive.\" [Edit or reaffirm this definition here] Day 1 Report Back \u00b6 Select one representative from your group to present your proposed project. For the report back, each group will have 30-60 seconds to present their responses to the questions below. Keep it concise and focused. This is just a quick oral presentation - you will not be able to use slides/images. Project Title: [Insert your team's project title here] Research Question: [Insert your team's refined research question here] Selected Data Sets: [List the data sets your team has chosen to use here]","title":"Make a plan"},{"location":"worksheets/worksheet_2/#team-activity-day-1-make-a-plan","text":"","title":"TEAM ACTIVITY Day 1: Make a plan"},{"location":"worksheets/worksheet_2/#instructions","text":"Work through the prompts in order. Please use a decision-making method \u201cto decide\u201d before moving to a new section of the activity.","title":"Instructions"},{"location":"worksheets/worksheet_2/#day-1-objectives","text":"Get to know your group members. Decide on a research question and project title. Start exploring potential datasets.","title":"Day 1 Objectives"},{"location":"worksheets/worksheet_2/#introductions-approx-time-10-mins-total-or-1-2-breaths-per-prompt","text":"Please share the following information about yourself. Each team member should type their response in the space below (create more as needed). 
Name: [Your Name] Pronouns: [Your Pronouns] Expertise: [Your Expertise] Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science] Reflection on Polarities Exercise: [Share one thing you observed about yourself] Name: [Your Name] Pronouns: [Your Pronouns] Expertise: [Your Expertise] Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science] Reflection on Polarities Exercise: [Share one thing you observed about yourself] Name: [Your Name] Pronouns: [Your Pronouns] Expertise: [Your Expertise] Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science] Reflection on Polarities Exercise: [Share one thing you observed about yourself] Name: [Your Name] Pronouns: [Your Pronouns] Expertise: [Your Expertise] Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science] Reflection on Polarities Exercise: [Share one thing you observed about yourself] Continue adding more team members following the same format, with a line break after each person.","title":"Introductions (approx. time: 10 mins total or \"1-2 breaths\" per prompt)"},{"location":"worksheets/worksheet_2/#research-question-innovation-for-inclusion-or-computation-approx-time-5-10-mins","text":"Write the research question your team selected in the space below. Feel free to revise the original question. [Insert research question here]","title":"Research Question: Innovation for Inclusion or Computation (approx. time: 5-10 mins)"},{"location":"worksheets/worksheet_2/#project-title-approx-time-5-10-mins","text":"Craft a catchy title for your team\u2019s project. Think of something that would grab attention at a conference or in a headline. [Insert title here]","title":"Project Title (approx. time: 5-10 mins)"},{"location":"worksheets/worksheet_2/#promoting-resilience-and-adaptation","text":"Describe how your proposed project aligns with the Summit's themes of resilience and adaptation. Please provide 1-2 sentences that clearly connect your project's goals or methods to these themes. [Insert your response here]","title":"Promoting Resilience and Adaptation"},{"location":"worksheets/worksheet_2/#choosing-big-data-sets","text":"Explore potential data sets for your project's topic from the data library . List your options below, organizing them by whether they represent the system you're studying (e.g., deciduous forests) or the disruption to it (e.g., wildfire). Then discuss your choices and indicate your final selections.","title":"Choosing Big Data Sets"},{"location":"worksheets/worksheet_2/#draft-potential-data-sets","text":"System Being Perturbed/Disrupted: [List all potential data sets here] Perturbator/Disrupter: [List all potential data sets here]","title":"Draft Potential Data Sets"},{"location":"worksheets/worksheet_2/#final-choice","text":"System Being Perturbed/Disrupted (Final Choice): [Indicate your final selected data set here] Perturbator/Disrupter (Final Choice): [Indicate your final selected data set here]","title":"Final Choice"},{"location":"worksheets/worksheet_2/#brief-check-in-definition-of-resilience-approx-5-mins","text":"Below is a working definition of the word \"Resilience\" for the Summit. Please edit the definition below based on your earlier discussion and chosen project. 
\"Resilience is the capacity of a system, community, organization, or individual to absorb stress, recover from disruptions, adapt to change, and continue to develop and thrive.\" [Edit or reaffirm this definition here]","title":"Brief Check-in: Definition of Resilience (approx. 5 mins)"},{"location":"worksheets/worksheet_2/#day-1-report-back","text":"Select one representative from your group to present your proposed project. For the report back, each group will have 30-60 seconds to present their responses to the questions below. Keep it concise and focused. This is just a quick oral presentation - you will not be able to use slides/images. Project Title: [Insert your team's project title here] Research Question: [Insert your team's refined research question here] Selected Data Sets: [List the data sets your team has chosen to use here]","title":"Day 1 Report Back"},{"location":"worksheets/worksheet_3/","text":"TEAM ACTIVITY 2: Innovate as a Team \u00b6 Welcome back! We hope today is a productive day getting to know your team and coding. Day 2 summary: \u00b6 Please complete the warm-up with your team, briefly review today\u2019s objectives, and carefully read the Day 2 and Day 3 report out items to guide your efforts. Objectives for Day 2 \u00b6 Work together to decide on the data sets you will use. Reminder: Use a decision-making technique discussed during Day 1. Kaner\u2019s Gradient of Agreement is below for reference. Practice joining your datasets together. Discuss and try creating interesting graphics. Report back on your results at the end of the day. Today\u2019s report back is short and focused on your team process. The Day 3 report back is more detailed. Morning Warm-up \u00b6 Please share the following informaton with your team. (No need to write down your responses this time) - Name - Pronouns - Reflecting on Day 1, what is something that surprised you? Decision-Making \u00b6 Use the gradient of agreement (Kaner 20214) to make decisions as a team. Day 2 Report Back \u00b6 Day 2 report-back questions are about the team process . We are interested in your team\u2019s unique experience. Below are some prompts you might consider. You don't need to address all of them - choose which ones you want to present. Please limit your reflection to 2-3 mins. What worked well for your team? What\u2019s one thing you would change? Did your group ever have an \u201cah-ha\u201d moment? What led up to that moment? Did your group experience the groan zone? What is one tip you want to share with future groups at the Summit about getting through the groan zone? [insert your group reflection responses here] Looking Ahead: Day 3 Report Back \u00b6 These are the prompts for the final Report Back tomorrow (Day 3) - start thinking about these questions as you work today. Each group will share their Day 3 GitHub page on the screen and give a 4 minute presentation. Project Title: Research Question: One interesting graphic/finding: What are you thinking about doing next with your team? Long-term, short-term? What\u2019s missing: what resources, people, data sets, etc. does your team need? Reminder \u00b6 There is the opportunity for groups to continue working on their projects as an ESIIL Working Group. If you love your team and want to continue working together, considering submitting a Working Group Application this fall. 
See the ESIIL website for more information: https://esiil.org/working-groups .","title":"Innovate as a team"},{"location":"worksheets/worksheet_3/#team-activity-2-innovate-as-a-team","text":"Welcome back! We hope today is a productive day getting to know your team and coding.","title":"TEAM ACTIVITY 2: Innovate as a Team"},{"location":"worksheets/worksheet_3/#day-2-summary","text":"Please complete the warm-up with your team, briefly review today\u2019s objectives, and carefully read the Day 2 and Day 3 report-out items to guide your efforts.","title":"Day 2 summary:"},{"location":"worksheets/worksheet_3/#objectives-for-day-2","text":"Work together to decide on the data sets you will use. Reminder: Use a decision-making technique discussed during Day 1. Kaner\u2019s Gradient of Agreement is below for reference. Practice joining your datasets together. Discuss and try creating interesting graphics. Report back on your results at the end of the day. Today\u2019s report back is short and focused on your team process. The Day 3 report back is more detailed.","title":"Objectives for Day 2"},{"location":"worksheets/worksheet_3/#morning-warm-up","text":"Please share the following information with your team. (No need to write down your responses this time) - Name - Pronouns - Reflecting on Day 1, what is something that surprised you?","title":"Morning Warm-up"},{"location":"worksheets/worksheet_3/#decision-making","text":"Use the gradient of agreement (Kaner 2014) to make decisions as a team.","title":"Decision-Making"},{"location":"worksheets/worksheet_3/#day-2-report-back","text":"Day 2 report-back questions are about the team process . We are interested in your team\u2019s unique experience. Below are some prompts you might consider. You don't need to address all of them - choose which ones you want to present. Please limit your reflection to 2-3 mins. What worked well for your team? What\u2019s one thing you would change? Did your group ever have an \u201cah-ha\u201d moment? What led up to that moment? Did your group experience the groan zone? What is one tip you want to share with future groups at the Summit about getting through the groan zone? [insert your group reflection responses here]","title":"Day 2 Report Back"},{"location":"worksheets/worksheet_3/#looking-ahead-day-3-report-back","text":"These are the prompts for the final Report Back tomorrow (Day 3) - start thinking about these questions as you work today. Each group will share their Day 3 GitHub page on the screen and give a 4 minute presentation. Project Title: Research Question: One interesting graphic/finding: What are you thinking about doing next with your team? Long-term, short-term? What\u2019s missing: what resources, people, data sets, etc. does your team need?","title":"Looking Ahead: Day 3 Report Back"},{"location":"worksheets/worksheet_3/#reminder","text":"There is the opportunity for groups to continue working on their projects as an ESIIL Working Group. If you love your team and want to continue working together, consider submitting a Working Group Application this fall. See the ESIIL website for more information: https://esiil.org/working-groups .","title":"Reminder"},{"location":"worksheets/worksheet_4/","text":"TEAM ACTIVITY 3: Share Your Progress \u00b6 Use this time to prepare for the final report back, where you'll share an interesting result/outcome from your project and discuss potential future plans. Day 3 Report Back \u00b6 Select one or more people from your group to give a final report back.
You will share this page on the screen as your presentation. Presentations should be no longer than 4 minutes. Project Title: [Insert project title here] Research Question: [Insert research question here] One interesting graphic/finding: [Insert graphic/finding here] What are you thinking about doing next with your team? Long-term, short-term? [Insert response here] What\u2019s missing: what resources, people, data sets, etc. does your team need? [Insert response here] Reminder \u00b6 There is the opportunity for groups to continue working on their projects as an ESIIL Working Group. If you love your team and want to continue working together, consider submitting a Working Group Application this fall. See the ESIIL website for more information: https://esiil.org/working-groups . Thank you for participating in the 2024 ESIIL Innovation Summit!!","title":"Share your progress"},{"location":"worksheets/worksheet_4/#team-activity-3-share-your-progress","text":"Use this time to prepare for the final report back, where you'll share an interesting result/outcome from your project and discuss potential future plans.","title":"TEAM ACTIVITY 3: Share Your Progress"},{"location":"worksheets/worksheet_4/#day-3-report-back","text":"Select one or more people from your group to give a final report back. You will share this page on the screen as your presentation. Presentations should be no longer than 4 minutes. Project Title: [Insert project title here] Research Question: [Insert research question here] One interesting graphic/finding: [Insert graphic/finding here] What are you thinking about doing next with your team? Long-term, short-term? [Insert response here] What\u2019s missing: what resources, people, data sets, etc. does your team need? [Insert response here]","title":"Day 3 Report Back"},{"location":"worksheets/worksheet_4/#reminder","text":"There is the opportunity for groups to continue working on their projects as an ESIIL Working Group. If you love your team and want to continue working together, consider submitting a Working Group Application this fall. See the ESIIL website for more information: https://esiil.org/working-groups . Thank you for participating in the 2024 ESIIL Innovation Summit!!","title":"Reminder"},{"location":"worksheets/worksheet_5/","text":"TEAM ACTIVITY 2: Make a plan \u00b6 Instructions: \u00b6 Follow the Prompts Sequentially: Work through the prompts in the order they are presented. Decision-Making Process: Before advancing to the next section of the handout, use a structured decision-making method. Ensure that all team members agree on the decisions made. This approach helps in maintaining coherence and collective agreement throughout the activities. Introductions (approx.
time: 10 mins total or \"2 breaths\" per person) \u00b6 Each team member please share the following information about yourself: Name: [Your Name] Preferred Pronouns: [Your Pronouns] Expertise: [Your Expertise] Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science] Reflection on Polarities Exercise: [Share one thing you observed about yourself] Name: [Your Name] Preferred Pronouns: [Your Pronouns] Expertise: [Your Expertise] Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science] Reflection on Polarities Exercise: [Share one thing you observed about yourself] Name: [Your Name] Preferred Pronouns: [Your Pronouns] Expertise: [Your Expertise] Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science] Reflection on Polarities Exercise: [Share one thing you observed about yourself] Name: [Your Name] Preferred Pronouns: [Your Pronouns] Expertise: [Your Expertise] Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science] Reflection on Polarities Exercise: [Share one thing you observed about yourself] Continue adding more team members following the same format, with a line break after each person. Research Question: Innovation for Inclusion or Computation (approx. time: 5-10 mins) \u00b6 Refine the initial research question your team developed earlier. Please make any necessary edits or adjustments below: [Edit or refine your team's previously selected research question here] Title: Innovation for Inclusion or Computation (approx. time: 5-10 mins) \u00b6 Craft a catchy and public-facing title for your team\u2019s project. Think of something that would grab attention at a conference or in a headline: [Create an engaging title that captures the essence of your project here] Promoting Resilience and Adaptation \u00b6 Describe how your proposed project aligns with the summit's themes of resilience and adaptation. Please provide 1-2 sentences that clearly connect your project's goals or methods to these themes: [Insert your response here] Which Big Data Sets \u00b6 Explore potential data sets for your project's topic from the data library . List your options below, and after discussion and review, indicate your final choice for both the system being perturbed/disrupted and the perturbator/disrupter. Draft Potential Data Sets \u00b6 System Being Perturbed/Disrupted: [List all potential data sets here] Perturbator/Disrupter: [List all potential data sets here] Final Choice \u00b6 System Being Perturbed/Disrupted (Final Choice): [Indicate your final selected data set here] Perturbator/Disrupter (Final Choice): [Indicate your final selected data set here] Brief Check-in: Definition of Resilience (approx. 5 mins) \u00b6 Review and refine the working definition of 'Resilience' provided below, based on your discussions and insights from earlier sections of this worksheet. Adjust the definition to better align with your team\u2019s understanding or reaffirm it if it resonates with your views: \"Resilience is the capacity of a system, community, organization, or individual to absorb stress, recover from disruptions, adapt to change, and continue to develop and thrive.\" [Edit or reaffirm this definition here] Day 1 Report Back \u00b6 Select one representative from your group to present your proposed project to all Summit attendees (~125 people). 
This is an opportunity for your breakout group to summarize your project\u2019s approach as it relates to the Summit themes of adaptation and resilience. Presentation Content: Project Title: [Insert your team's project title here] Research Question: [Insert your team's refined research question here] Selected Data Sets: [List the data sets your team has chosen to use here] Presentation Guidelines: Duration: Your presentation should last between 30-60 seconds. Keep it concise and focused. This is just a quick oral presentation -you will not be able to use slides/images. Objective: Clearly communicate how your project aligns with the conference themes and highlight actionable insights that can aid decision makers.","title":"TEAM ACTIVITY 2: Make a plan"},{"location":"worksheets/worksheet_5/#team-activity-2-make-a-plan","text":"","title":"TEAM ACTIVITY 2: Make a plan"},{"location":"worksheets/worksheet_5/#instructions","text":"Follow the Prompts Sequentially: Work through the prompts in the order they are presented. Decision-Making Process: Before advancing to the next section of the handout, use a structured decision-making method. Ensure that all team members agree on the decisions made. This approach helps in maintaining coherence and collective agreement throughout the activities.","title":"Instructions:"},{"location":"worksheets/worksheet_5/#introductions-approx-time-10-mins-total-or-2-breaths-per-person","text":"Each team member please share the following information about yourself: Name: [Your Name] Preferred Pronouns: [Your Pronouns] Expertise: [Your Expertise] Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science] Reflection on Polarities Exercise: [Share one thing you observed about yourself] Name: [Your Name] Preferred Pronouns: [Your Pronouns] Expertise: [Your Expertise] Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science] Reflection on Polarities Exercise: [Share one thing you observed about yourself] Name: [Your Name] Preferred Pronouns: [Your Pronouns] Expertise: [Your Expertise] Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science] Reflection on Polarities Exercise: [Share one thing you observed about yourself] Name: [Your Name] Preferred Pronouns: [Your Pronouns] Expertise: [Your Expertise] Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science] Reflection on Polarities Exercise: [Share one thing you observed about yourself] Continue adding more team members following the same format, with a line break after each person.","title":"Introductions (approx. time: 10 mins total or \"2 breaths\" per person)"},{"location":"worksheets/worksheet_5/#research-question-innovation-for-inclusion-or-computation-approx-time-5-10-mins","text":"Refine the initial research question your team developed earlier. Please make any necessary edits or adjustments below: [Edit or refine your team's previously selected research question here]","title":"Research Question: Innovation for Inclusion or Computation (approx. time: 5-10 mins)"},{"location":"worksheets/worksheet_5/#title-innovation-for-inclusion-or-computation-approx-time-5-10-mins","text":"Craft a catchy and public-facing title for your team\u2019s project. 
Think of something that would grab attention at a conference or in a headline: [Create an engaging title that captures the essence of your project here]","title":"Title: Innovation for Inclusion or Computation (approx. time: 5-10 mins)"},{"location":"worksheets/worksheet_5/#promoting-resilience-and-adaptation","text":"Describe how your proposed project aligns with the summit's themes of resilience and adaptation. Please provide 1-2 sentences that clearly connect your project's goals or methods to these themes: [Insert your response here]","title":"Promoting Resilience and Adaptation"},{"location":"worksheets/worksheet_5/#which-big-data-sets","text":"Explore potential data sets for your project's topic from the data library . List your options below, and after discussion and review, indicate your final choice for both the system being perturbed/disrupted and the perturbator/disrupter.","title":"Which Big Data Sets"},{"location":"worksheets/worksheet_5/#draft-potential-data-sets","text":"System Being Perturbed/Disrupted: [List all potential data sets here] Perturbator/Disrupter: [List all potential data sets here]","title":"Draft Potential Data Sets"},{"location":"worksheets/worksheet_5/#final-choice","text":"System Being Perturbed/Disrupted (Final Choice): [Indicate your final selected data set here] Perturbator/Disrupter (Final Choice): [Indicate your final selected data set here]","title":"Final Choice"},{"location":"worksheets/worksheet_5/#brief-check-in-definition-of-resilience-approx-5-mins","text":"Review and refine the working definition of 'Resilience' provided below, based on your discussions and insights from earlier sections of this worksheet. Adjust the definition to better align with your team\u2019s understanding or reaffirm it if it resonates with your views: \"Resilience is the capacity of a system, community, organization, or individual to absorb stress, recover from disruptions, adapt to change, and continue to develop and thrive.\" [Edit or reaffirm this definition here]","title":"Brief Check-in: Definition of Resilience (approx. 5 mins)"},{"location":"worksheets/worksheet_5/#day-1-report-back","text":"Select one representative from your group to present your proposed project to all Summit attendees (~125 people). This is an opportunity for your breakout group to summarize your project\u2019s approach as it relates to the Summit themes of adaptation and resilience. Presentation Content: Project Title: [Insert your team's project title here] Research Question: [Insert your team's refined research question here] Selected Data Sets: [List the data sets your team has chosen to use here] Presentation Guidelines: Duration: Your presentation should last between 30-60 seconds. Keep it concise and focused. This is just a quick oral presentation -you will not be able to use slides/images. Objective: Clearly communicate how your project aligns with the conference themes and highlight actionable insights that can aid decision makers.","title":"Day 1 Report Back"},{"location":"worksheets/worksheet_redlining/","text":"Redlining \u00b6 Exploring the Impact of Historical Redlining on Urban Greenspace: A Collaborative Examination of Maps, Justice, and Resilience \u00b6 Introduction \u00b6 This group exploration delves into the long-term impacts of historical redlining on urban greenspace, emphasizing the powerful role of maps in shaping environmental and social landscapes. By drawing on the research by Nardone et al. 
(2021), you will collaboratively investigate how discriminatory practices encoded in maps have led to persistent disparities in urban settings. This exploration aims to uncover the resilience of communities in adapting to these entrenched injustices and to foster a deeper understanding of how mapping can serve both as a tool of exclusion and as a means for promoting social equity. Understanding Redlining as a Systemic Disturbance \u00b6 Redlining originated in the 1930s as a discriminatory practice in which the Home Owners\u2019 Loan Corporation (HOLC) systematically denied mortgages or offered unfavorable terms based on racial and ethnic compositions. This methodical exclusion, executed through maps that color-coded \u201crisky\u201d investment areas in red, marked minority-populated areas, denying them crucial investment and development opportunities and initiating a profound and lasting disturbance in the urban fabric. Maps serve as powerful tools beyond navigation; they communicate and enforce control. By defining neighborhood boundaries through redlining, HOLC maps not only mirrored societal biases but also perpetuated and embedded them into the urban landscape. This manipulation of geographic data set a trajectory that limited economic growth, dictated the allocation of services, and influenced the development or deterioration of community infrastructure. Figure 1: A 1938 map of Atlanta uses colors as grades for neighborhoods. The red swaths identify areas with large African-American populations that were deemed \u201cless safe.\u201d ArcGIS Story Map Explore the Story Map: Click on the image above to explore the interactive story map about [subject of the story map]. Resilience and Adaptation in Urban Environments \u00b6 The legacy of redlining presents both a challenge and an opportunity for resilience and adaptation. Economically and socially, redlining entrenched cycles of poverty and racial segregation, creating a resilient wealth gap that has been difficult to dismantle. Environmentally, the neighborhoods targeted by redlining continue to face significant challenges\u2014they generally feature less greenspace, suffer from higher pollution levels, and are more vulnerable to the impacts of climate change. These factors compound the health and wellness challenges faced by residents. Despite these adversities, urban communities have continually demonstrated remarkable resilience. Adaptation strategies, such as community-led green initiatives, urban agriculture, and grassroots activism, have emerged as responses to these systemic disturbances. By enhancing green infrastructure and advocating for equitable environmental policies, these communities strive to increase their resilience against both historical inequities and environmental challenges. Watch the video Video Title: Exploring the Impacts of Historical Redlining on Urban Development Description: Click on the image above to watch a video that delves into the consequences of historical redlining and its ongoing impact on urban environments. This educational piece offers insights into how such discriminatory practices have shaped cities and what can be learned from them. The following group exercise will not only uncover the impact of redlining on urban greenspace but also highlight the adaptive strategies developed in response to this enduring disturbance. Through mapping and analysis, we aim to illustrate the powerful role that geographic data can play in understanding and fostering urban resilience and social equity.
References \u00b6 Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. (2021). Redlines and Greenspace: The Relationship between Historical Redlining and 2010 Greenspace across the United States. Environmental Health Perspectives , 129(1), 017006. DOI:10.1289/EHP7495. Hoffman, J. S., Shandas, V., & Pendleton, N. (2020). The Effects of Historical Housing Policies on Resident Exposure to Intra-Urban Heat: A Study of 108 US Urban Areas. Climate , 8(1), 12. DOI:10.3390/cli8010012. Goals of this group activity \u00b6 The primary objectives of this tutorial are: 1. To practice coding in CyVerse. 2. To analyze the relationship between HOLC grades and the presence of urban greenspace. 3. To understand how historic policies continue to affect the spatial distribution of environmental amenities. Part 1: Accessing and Visualizing Historic Redlining Data \u00b6 We will begin by accessing HOLC maps from the Mapping Inequality project and overlaying this data with modern geographic datasets to visualize the historical impact on contemporary urban landscapes. Data Acquisition \u00b6 Download HOLC map shapefiles from the University of Richmond\u2019s Mapping Inequality Project. Utilize satellite imagery and other geospatial data to map current greenspace using the normalized difference vegetation index (NDVI). Analysis Methodology \u00b6 Replicate the approach used by Nardone et al. to calculate NDVI values for each HOLC neighborhood, assessing greenspace as a health-promoting resource. Employ statistical methods such as propensity score matching to control for confounding variables and estimate the true impact of HOLC grades on urban greenspace. R libraries we use in this analysis if ( ! requireNamespace ( \"tidytext\" , quietly = TRUE )) { install.packages ( \"tidytext\" ) } library ( tidytext ) ## Warning: package 'tidytext' was built under R version 4.3.2 library ( sf ) ## Warning: package 'sf' was built under R version 4.3.2 ## Linking to GEOS 3.11.0, GDAL 3.5.3, PROJ 9.1.0; sf_use_s2() is TRUE library ( ggplot2 ) ## Warning: package 'ggplot2' was built under R version 4.3.2 library ( ggthemes ) ## Warning: package 'ggthemes' was built under R version 4.3.2 library ( dplyr ) ## ## Attaching package: 'dplyr' ## The following objects are masked from 'package:stats': ## ## filter, lag ## The following objects are masked from 'package:base': ## ## intersect, setdiff, setequal, union library ( rstac ) ## Warning: package 'rstac' was built under R version 4.3.2 library ( gdalcubes ) ## Warning: package 'gdalcubes' was built under R version 4.3.2 library ( gdalUtils ) ## Please note that rgdal will be retired during October 2023, ## plan transition to sf/stars/terra functions using GDAL and PROJ ## at your earliest convenience. ## See https://r-spatial.org/r/2023/05/15/evolution4.html and https://github.com/r-spatial/evolution ## rgdal: version: 1.6-7, (SVN revision 1203) ## Geospatial Data Abstraction Library extensions to R successfully loaded ## Loaded GDAL runtime: GDAL 3.5.3, released 2022/10/21 ## Path to GDAL shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/rgdal/gdal ## GDAL does not use iconv for recoding strings. ## GDAL binary built with GEOS: TRUE ## Loaded PROJ runtime: Rel.
9.1.0, September 1st, 2022, [PJ_VERSION: 910] ## Path to PROJ shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/gdalcubes/proj ## PROJ CDN enabled: FALSE ## Linking to sp version:1.6-1 ## To mute warnings of possible GDAL/OSR exportToProj4() degradation, ## use options(\"rgdal_show_exportToProj4_warnings\"=\"none\") before loading sp or rgdal. ## ## Attaching package: 'gdalUtils' ## The following object is masked from 'package:sf': ## ## gdal_rasterize library ( gdalcubes ) library ( colorspace ) library ( terra ) ## Warning: package 'terra' was built under R version 4.3.2 ## terra 1.7.71 ## ## Attaching package: 'terra' ## The following object is masked from 'package:colorspace': ## ## RGB ## The following objects are masked from 'package:gdalcubes': ## ## animate, crop, size library ( tidyterra ) ## ## Attaching package: 'tidyterra' ## The following object is masked from 'package:stats': ## ## filter library ( basemapR ) library ( tidytext ) library ( ggwordcloud ) library ( osmextract ) ## Data (c) OpenStreetMap contributors, ODbL 1.0. https://www.openstreetmap.org/copyright. ## Check the package website, https://docs.ropensci.org/osmextract/, for more details. library ( sf ) library ( ggplot2 ) library ( ggthemes ) library ( glue ) ## ## Attaching package: 'glue' ## The following object is masked from 'package:terra': ## ## trim library ( purrr ) FUNCTION: List cities where HOLC data are available # Function to get a list of unique cities and states from the redlining data get_city_state_list_from_redlining_data <- function () { # URL to the GeoJSON data url <- \"https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson\" # Read the GeoJSON file into an sf object redlining_data <- tryCatch ({ read_sf ( url ) }, error = function ( e ) { stop ( \"Error reading GeoJSON data: \" , e $ message ) }) # Check for the existence of 'city' and 'state' columns if ( ! 
all ( c ( \"city\" , \"state\" ) %in% names ( redlining_data ))) { stop ( \"The required columns 'city' and/or 'state' do not exist in the data.\" ) } # Extract a unique list of city and state pairs without the geometries city_state_df <- redlining_data %>% select ( city , state ) %>% st_set_geometry ( NULL ) %>% # Drop the geometry to avoid issues with invalid shapes distinct ( city , state ) %>% arrange ( state , city ) # Arrange the list alphabetically by state, then by city # Return the dataframe of unique city-state pairs return ( city_state_df ) } Stream list of available HOLC cities #Retrieve the list of cities and states city_state_list <- get_city_state_list_from_redlining_data () knitr :: kable ( city_state_list , format = \"markdown\" ) | city | state | |:---------------------------------|:------| | Birmingham | AL | | Mobile | AL | | Montgomery | AL | | Arkadelphia | AR | | Batesville | AR | | Camden | AR | | Conway | AR | | El Dorado | AR | | Fort Smith | AR | | Little Rock | AR | | Russellville | AR | | Texarkana | AR | | Phoenix | AZ | | Fresno | CA | | Los Angeles | CA | | Oakland | CA | | Sacramento | CA | | San Diego | CA | | San Francisco | CA | | San Jose | CA | | Stockton | CA | | Boulder | CO | | Colorado Springs | CO | | Denver | CO | | Fort Collins | CO | | Fort Morgan | CO | | Grand Junction | CO | | Greeley | CO | | Longmont | CO | | Pueblo | CO | | Bridgeport and Fairfield | CT | | Hartford | CT | | New Britain | CT | | New Haven | CT | | Stamford, Darien, and New Canaan | CT | | Waterbury | CT | | Crestview | FL | | Daytona Beach | FL | | DeFuniak Springs | FL | | DeLand | FL | | Jacksonville | FL | | Miami | FL | | New Smyrna | FL | | Orlando | FL | | Pensacola | FL | | St. Petersburg | FL | | Tampa | FL | | Atlanta | GA | | Augusta | GA | | Columbus | GA | | Macon | GA | | Savannah | GA | | Boone | IA | | Cedar Rapids | IA | | Council Bluffs | IA | | Davenport | IA | | Des Moines | IA | | Dubuque | IA | | Sioux City | IA | | Waterloo | IA | | Aurora | IL | | Chicago | IL | | Decatur | IL | | East St. Louis | IL | | Joliet | IL | | Peoria | IL | | Rockford | IL | | Springfield | IL | | Evansville | IN | | Fort Wayne | IN | | Indianapolis | IN | | Lake Co. Gary | IN | | Muncie | IN | | South Bend | IN | | Terre Haute | IN | | Atchison | KS | | Junction City | KS | | Topeka | KS | | Wichita | KS | | Covington | KY | | Lexington | KY | | Louisville | KY | | New Orleans | LA | | Shreveport | LA | | Arlington | MA | | Belmont | MA | | Boston | MA | | Braintree | MA | | Brockton | MA | | Brookline | MA | | Cambridge | MA | | Chelsea | MA | | Dedham | MA | | Everett | MA | | Fall River | MA | | Fitchburg | MA | | Haverhill | MA | | Holyoke Chicopee | MA | | Lawrence | MA | | Lexington | MA | | Lowell | MA | | Lynn | MA | | Malden | MA | | Medford | MA | | Melrose | MA | | Milton | MA | | Needham | MA | | New Bedford | MA | | Newton | MA | | Pittsfield | MA | | Quincy | MA | | Revere | MA | | Salem | MA | | Saugus | MA | | Somerville | MA | | Springfield | MA | | Waltham | MA | | Watertown | MA | | Winchester | MA | | Winthrop | MA | | Worcester | MA | | Baltimore | MD | | Augusta | ME | | Boothbay | ME | | Portland | ME | | Sanford | ME | | Waterville | ME | | Battle Creek | MI | | Bay City | MI | | Detroit | MI | | Flint | MI | | Grand Rapids | MI | | Jackson | MI | | Kalamazoo | MI | | Lansing | MI | | Muskegon | MI | | Pontiac | MI | | Saginaw | MI | | Austin | MN | | Duluth | MN | | Mankato | MN | | Minneapolis | MN | | Rochester | MN | | St. Cloud | MN | | St. 
Paul | MN | | Staples | MN | | Cape Girardeau | MO | | Carthage | MO | | Greater Kansas City | MO | | Joplin | MO | | Springfield | MO | | St. Joseph | MO | | St. Louis | MO | | Jackson | MS | | Asheville | NC | | Charlotte | NC | | Durham | NC | | Elizabeth City | NC | | Fayetteville | NC | | Goldsboro | NC | | Greensboro | NC | | Hendersonville | NC | | High Point | NC | | New Bern | NC | | Rocky Mount | NC | | Statesville | NC | | Winston-Salem | NC | | Fargo | ND | | Grand Forks | ND | | Minot | ND | | Williston | ND | | Lincoln | NE | | Omaha | NE | | Manchester | NH | | Atlantic City | NJ | | Bergen Co. | NJ | | Camden | NJ | | Essex Co. | NJ | | Hudson Co. | NJ | | Monmouth | NJ | | Passaic County | NJ | | Perth Amboy | NJ | | Trenton | NJ | | Union Co. | NJ | | Albany | NY | | Binghamton-Johnson City | NY | | Bronx | NY | | Brooklyn | NY | | Buffalo | NY | | Elmira | NY | | Jamestown | NY | | Lower Westchester Co. | NY | | Manhattan | NY | | Niagara Falls | NY | | Poughkeepsie | NY | | Queens | NY | | Rochester | NY | | Schenectady | NY | | Staten Island | NY | | Syracuse | NY | | Troy | NY | | Utica | NY | | Akron | OH | | Canton | OH | | Cleveland | OH | | Columbus | OH | | Dayton | OH | | Hamilton | OH | | Lima | OH | | Lorain | OH | | Portsmouth | OH | | Springfield | OH | | Toledo | OH | | Warren | OH | | Youngstown | OH | | Ada | OK | | Alva | OK | | Enid | OK | | Miami Ottawa County | OK | | Muskogee | OK | | Norman | OK | | Oklahoma City | OK | | South McAlester | OK | | Tulsa | OK | | Portland | OR | | Allentown | PA | | Altoona | PA | | Bethlehem | PA | | Chester | PA | | Erie | PA | | Harrisburg | PA | | Johnstown | PA | | Lancaster | PA | | McKeesport | PA | | New Castle | PA | | Philadelphia | PA | | Pittsburgh | PA | | Wilkes-Barre | PA | | York | PA | | Pawtucket & Central Falls | RI | | Providence | RI | | Woonsocket | RI | | Aiken | SC | | Charleston | SC | | Columbia | SC | | Greater Anderson | SC | | Greater Greenville | SC | | Orangeburg | SC | | Rock Hill | SC | | Spartanburg | SC | | Sumter | SC | | Aberdeen | SD | | Huron | SD | | Milbank | SD | | Mitchell | SD | | Rapid City | SD | | Sioux Falls | SD | | Vermillion | SD | | Watertown | SD | | Chattanooga | TN | | Elizabethton | TN | | Erwin | TN | | Greenville | TN | | Johnson City | TN | | Knoxville | TN | | Memphis | TN | | Nashville | TN | | Amarillo | TX | | Austin | TX | | Beaumont | TX | | Dallas | TX | | El Paso | TX | | Fort Worth | TX | | Galveston | TX | | Houston | TX | | Port Arthur | TX | | San Antonio | TX | | Waco | TX | | Wichita Falls | TX | | Ogden | UT | | Salt Lake City | UT | | Bristol | VA | | Danville | VA | | Harrisonburg | VA | | Lynchburg | VA | | Newport News | VA | | Norfolk | VA | | Petersburg | VA | | Phoebus | VA | | Richmond | VA | | Roanoke | VA | | Staunton | VA | | Bennington | VT | | Brattleboro | VT | | Burlington | VT | | Montpelier | VT | | Newport City | VT | | Poultney | VT | | Rutland | VT | | Springfield | VT | | St. Albans | VT | | St. Johnsbury | VT | | Windsor | VT | | Seattle | WA | | Spokane | WA | | Tacoma | WA | | Kenosha | WI | | Madison | WI | | Milwaukee Co. 
| WI | | Oshkosh | WI | | Racine | WI | | Charleston | WV | | Huntington | WV | | Wheeling | WV | FUNCTION: Stream HOLC data from a city # Function to load and filter redlining data by city load_city_redlining_data <- function ( city_name ) { # URL to the GeoJSON data url <- \"https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson\" # Read the GeoJSON file into an sf object redlining_data <- read_sf ( url ) # Filter the data for the specified city and non-empty grades city_redline <- redlining_data %>% filter ( city == city_name ) # Return the filtered data return ( city_redline ) } Stream HOLC data for Denver, CO # Load redlining data for Denver denver_redlining <- load_city_redlining_data ( \"Denver\" ) knitr :: kable ( head ( denver_redlining ), format = \"markdown\" ) | area_id | city | state | city_survey | cat | grade | label | res | com | ind | fill | GEOID10 | GISJOIN | calc_area | pct_tract | geometry | |--------:|:-------|:------|:------------|:-----|:------|:------|:-----|:------|:------|:---------|:------------|:---------------|-------------:|----------:|:-----------------------------| | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004104 | G0800310004104 | 1.525535e+01 | 0.00001 | MULTIPOLYGON (((-104.9125 3\u2026 | | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004201 | G0800310004201 | 3.987458e+05 | 0.20900 | MULTIPOLYGON (((-104.9246 3\u2026 | | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004304 | G0800310004304 | 1.554195e+05 | 0.05927 | MULTIPOLYGON (((-104.9125 3\u2026 | | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004202 | G0800310004202 | 1.117770e+06 | 0.57245 | MULTIPOLYGON (((-104.9125 3\u2026 | | 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \\#76a865 | 08031004302 | G0800310004302 | 3.133415e+05 | 0.28381 | MULTIPOLYGON (((-104.928 39\u2026 | | 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \\#76a865 | 08031004301 | G0800310004301 | 1.221218e+05 | 0.08622 | MULTIPOLYGON (((-104.9305 3\u2026 | FUNCTION: Get Points-of-Interest from city of interest get_places <- function ( polygon_layer , type = \"food\" ) { # Check if the input is an sf object if ( ! 
inherits ( polygon_layer , \"sf\" )) { stop ( \"The provided object is not an sf object.\" ) } # Create a bounding box from the input sf object bbox_here <- st_bbox ( polygon_layer ) |> st_as_sfc () if ( type == \"food\" ){ my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( shop IN ('supermarket', 'bodega', 'market', 'other_market', 'farm', 'garden_centre', 'doityourself', 'farm_supply', 'compost', 'mulch', 'fertilizer') OR amenity IN ('social_facility', 'market', 'restaurant', 'coffee') OR leisure = 'garden' OR landuse IN ('farm', 'farmland', 'row_crops', 'orchard_plantation', 'dairy_grazing') OR building IN ('brewery', 'winery', 'distillery') OR shop = 'greengrocer' OR amenity = 'marketplace' )\" title <- \"food\" } if ( type == \"processed_food\" ) { my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( amenity IN ('fast_food', 'cafe', 'pub') OR shop IN ('convenience', 'supermarket') OR shop = 'kiosk' )\" title <- \"Processed Food Locations\" } if ( type == \"natural_habitats\" ){ my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( boundary = 'protected_area' OR natural IN ('tree', 'wood') OR landuse = 'forest' OR leisure = 'park' )\" title <- \"Natural habitats or City owned trees\" } if ( type == \"roads\" ){ my_layer <- \"lines\" my_query <- \"SELECT * FROM lines WHERE ( highway IN ('motorway', 'trunk', 'primary', 'secondary', 'tertiary') )\" title <- \"Major roads\" } if ( type == \"rivers\" ){ my_layer <- \"lines\" my_query <- \"SELECT * FROM lines WHERE ( waterway IN ('river'))\" title <- \"Major rivers\" } if ( type == \"internet_access\" ) { my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( amenity IN ('library', 'cafe', 'community_centre', 'public_building') AND internet_access = 'yes' )\" title <- \"Internet Access Locations\" } if ( type == \"water_bodies\" ) { my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( natural IN ('water', 'lake', 'pond') OR water IN ('lake', 'pond') OR landuse = 'reservoir' )\" title <- \"Water Bodies\" } if ( type == \"government_buildings\" ) { my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( amenity IN ('townhall', 'courthouse', 'embassy', 'police', 'fire_station') OR building IN ('capitol', 'government') )\" title <- \"Government Buildings\" } # Use the bbox to get data with oe_get(), specifying the desired layer and a custom SQL query for fresh food places tryCatch ({ places <- oe_get ( place = bbox_here , layer = my_layer , # Adjusted layer; change as per actual data availability query = my_query , quiet = TRUE ) places <- st_make_valid ( places ) # Crop the data to the bounding box cropped_places <- st_crop ( places , bbox_here ) # Plotting the cropped fresh food places plot <- ggplot ( data = cropped_places ) + geom_sf ( fill = \"cornflowerblue\" , color = \"cornflowerblue\" ) + ggtitle ( title ) + theme_tufte () + theme ( legend.position = \"none\" , # Optionally hide the legend axis.text = element_blank (), # Remove axis text axis.title = element_blank (), # Remove axis titles axis.ticks = element_blank (), # Remove axis ticks plot.background = element_rect ( fill = \"white\" , color = NA ), # Set the plot background to white panel.background = element_rect ( fill = \"white\" , color = NA ), # Set the panel background to white panel.grid.major = element_blank (), # Remove major grid lines panel.grid.minor = element_blank (), ) # Save the plot as a PNG file png_filename 
<- paste0 ( title , \"_\" , Sys.Date (), \".png\" ) ggsave ( png_filename , plot , width = 10 , height = 8 , units = \"in\" ) # Return the cropped dataset return ( cropped_places ) }, error = function ( e ) { stop ( \"Failed to retrieve or plot data: \" , e $ message ) }) } FUNCTION: Plot POI over HOLC grades plot_city_redlining <- function ( redlining_data , filename = \"redlining_plot.png\" ) { # Fetch additional geographic data based on redlining data roads <- get_places ( redlining_data , type = \"roads\" ) rivers <- get_places ( redlining_data , type = \"rivers\" ) # Filter residential zones with valid grades and where city survey is TRUE residential_zones <- redlining_data %>% filter ( city_survey == TRUE & grade != \"\" ) # Colors for the grades colors <- c ( \"#76a865\" , \"#7cb5bd\" , \"#ffff00\" , \"#d9838d\" ) # Plot the data using ggplot2 plot <- ggplot () + geom_sf ( data = roads , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.5 , lwd = 1.1 ) + geom_sf ( data = residential_zones , aes ( fill = grade ), alpha = 0.5 ) + theme_tufte () + scale_fill_manual ( values = colors ) + labs ( fill = 'HOLC Categories' ) + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), panel.grid.major = element_blank (), panel.grid.minor = element_blank (), legend.position = \"right\" ) # Save the plot as a high-resolution PNG file ggsave ( filename , plot , width = 10 , height = 8 , units = \"in\" , dpi = 600 ) # Return the plot object if needed for further manipulation or checking return ( plot ) } Plot Denver Redlining denver_plot <- plot_city_redlining ( denver_redlining ) Stream amenities by category food <- get_places ( denver_redlining , type = \"food\" ) food_processed <- get_places ( denver_redlining , type = \"processed_food\" ) natural_habitats <- get_places ( denver_redlining , type = \"natural_habitats\" ) roads <- get_places ( denver_redlining , type = \"roads\" ) rivers <- get_places ( denver_redlining , type = \"rivers\" ) #water_bodies <- get_places(denver_redlining, type=\"water_bodies\") government_buildings <- get_places ( denver_redlining , type = \"government_buildings\" ) FUNCTION: Plot the HOLC grades individually split_plot <- function ( sf_data , roads , rivers ) { # Filter for grades A, B, C, and D sf_data_filtered <- sf_data %>% filter ( grade %in% c ( 'A' , 'B' , 'C' , 'D' )) # Define a color for each grade grade_colors <- c ( \"A\" = \"#76a865\" , \"B\" = \"#7cb5bd\" , \"C\" = \"#ffff00\" , \"D\" = \"#d9838d\" ) # Create the plot with panels for each grade plot <- ggplot ( data = sf_data_filtered ) + geom_sf ( data = roads , alpha = 0.1 , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.1 , lwd = 1.1 ) + geom_sf ( aes ( fill = grade )) + facet_wrap ( ~ grade , nrow = 1 ) + # Free scales for different zoom levels if needed scale_fill_manual ( values = grade_colors ) + theme_minimal () + labs ( fill = 'HOLC Grade' ) + theme_tufte () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"none\" , # Optionally hide the legend axis.text = element_blank (), # Remove axis text axis.title = element_blank (), # Remove axis titles axis.ticks = element_blank (), # Remove axis ticks panel.grid.major = element_blank (), # Remove major grid lines panel.grid.minor = element_blank ()) ggsave ( plot , filename = \"HOLC_grades_individually.png\" , width 
= 10 , height = 4 , units = \"in\" , dpi = 1200 ) return ( plot ) } Plot 4 HOLC grades individually plot_row <- split_plot ( denver_redlining , roads , rivers ) FUNCTION: Map an amenity over each grade individually process_and_plot_sf_layers <- function ( layer1 , layer2 , output_file = \"output_plot.png\" ) { # Make geometries valid layer1 <- st_make_valid ( layer1 ) layer2 <- st_make_valid ( layer2 ) # Optionally, simplify geometries to remove duplicate vertices layer1 <- st_simplify ( layer1 , preserveTopology = TRUE ) |> filter ( grade != \"\" ) # Prepare a list to store results results <- list () # Loop through each grade and perform operations for ( grade in c ( \"A\" , \"B\" , \"C\" , \"D\" )) { # Filter layer1 for current grade layer1_grade <- layer1 [ layer1 $ grade == grade , ] # Buffer the geometries of the current grade buffered_layer1_grade <- st_buffer ( layer1_grade , dist = 500 ) # Intersect with the second layer intersections <- st_intersects ( layer2 , buffered_layer1_grade , sparse = FALSE ) selected_polygons <- layer2 [ rowSums ( intersections ) > 0 , ] # Add a new column to store the grade information selected_polygons $ grade <- grade # Store the result results [[ grade ]] <- selected_polygons } # Combine all selected polygons from different grades into one sf object final_selected_polygons <- do.call ( rbind , results ) # Define colors for the grades grade_colors <- c ( \"A\" = \"grey\" , \"B\" = \"grey\" , \"C\" = \"grey\" , \"D\" = \"grey\" ) # Create the plot plot <- ggplot () + geom_sf ( data = roads , alpha = 0.05 , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.1 , lwd = 1.1 ) + geom_sf ( data = layer1 , fill = \"grey\" , color = \"grey\" , size = 0.1 ) + facet_wrap ( ~ grade , nrow = 1 ) + geom_sf ( data = final_selected_polygons , fill = \"green\" , color = \"green\" , size = 0.1 ) + facet_wrap ( ~ grade , nrow = 1 ) + #scale_fill_manual(values = grade_colors) + #scale_color_manual(values = grade_colors) + theme_minimal () + labs ( fill = 'HOLC Grade' ) + theme_tufte () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"none\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot as a high-resolution PNG file ggsave ( output_file , plot , width = 10 , height = 4 , units = \"in\" , dpi = 1200 ) # Return the plot for optional further use return ( list ( plot = plot , sf = final_selected_polygons )) } FUNCTION: Create word cloud per grade create_wordclouds_by_grade <- function ( sf_object , output_file = \"food_word_cloud_per_grade.png\" , title = \"Healthy food place names word cloud\" , max_size = 25 , col_select = \"name\" ) { # Extract relevant data and prepare text data text_data <- sf_object %>% select ( grade , col_select ) %>% filter ( ! is.na ( col_select )) %>% unnest_tokens ( output = \"word\" , input = col_select , token = \"words\" ) %>% count ( grade , word , sort = TRUE ) %>% ungroup () %>% filter ( n () > 1 ) # Filter to remove overly common or single-occurrence words # Ensure there are no NA values in the 'word' column text_data <- text_data %>% filter ( ! 
is.na ( word )) # Handle cases where text_data might be empty if ( nrow ( text_data ) == 0 ) { stop ( \"No data available for creating word clouds.\" ) } # Create a word cloud using ggplot2 and ggwordcloud p <- ggplot ( ) + geom_text_wordcloud_area ( data = text_data , aes ( label = word , size = n ), rm_outside = TRUE ) + scale_size_area ( max_size = max_size ) + facet_wrap ( ~ grade , nrow = 1 ) + scale_color_gradient ( low = \"darkred\" , high = \"red\" ) + theme_minimal () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), panel.spacing = unit ( 0.5 , \"lines\" ), plot.title = element_text ( size = 16 , face = \"bold\" ), legend.position = \"none\" ) + labs ( title = title ) # Attempt to save the plot and handle any errors tryCatch ({ ggsave ( output_file , p , width = 10 , height = 4 , units = \"in\" , dpi = 600 ) }, error = function ( e ) { cat ( \"Error in saving the plot: \" , e $ message , \"\\n\" ) }) return ( p ) } Map food over each grade individually layer1 <- denver_redlining layer2 <- food food_match <- process_and_plot_sf_layers ( layer1 , layer2 , \"food_match.png\" ) WORD CLOUD: Names of places with fresh food food_word_cloud <- create_wordclouds_by_grade ( food_match $ sf , output_file = \"food_word_cloud_per_grade.png\" ) Warning: Using an external vector in selections was deprecated in tidyselect 1.1.0. \u2139 Please use `all_of()` or `any_of()` instead. # Was: data %>% select(col_select) # Now: data %>% select(all_of(col_select)) See . Warning in wordcloud_boxes(data_points = points_valid_first, boxes = boxes, : Some words could not fit on page. They have been removed. Map processed food over each grade individually layer1 <- denver_redlining layer2 <- food_processed processed_food_match <- process_and_plot_sf_layers ( layer1 , layer2 , \"processed_food_match.png\" ) WORD CLOUD: Names of places with processed food processed_food_cloud <- create_wordclouds_by_grade ( processed_food_match $ sf , output_file = \"processed_food_word_cloud_per_grade.png\" , title = \"Processed food place names where larger text is more frequent\" , max_size = 17 ) Part 2: Integrating Environmental Data \u00b6 Data Processing \u00b6 Use satellite data from 2010 to analyze greenspace using NDVI, an index that measures the quantity of vegetation in an area. Apply methods to adjust for potential confounders as described in the study, ensuring that comparisons of greenspace across HOLC grades are valid and not biased by historical or socio-demographic factors. 
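Before the streaming workflow that follows, it may help to see the NDVI calculation on its own: NDVI is simply the normalized difference of the near-infrared and red bands. The minimal sketch below assumes two local single-band GeoTIFFs whose file names are hypothetical placeholders (any Sentinel-2 B04/B08 rasters would do); the full cloud-based version used in this worksheet appears further down under "FUNCTION: Stream NDVI data".

```r
# Minimal NDVI sketch with terra; file paths are hypothetical placeholders,
# not files produced by the pipeline in this worksheet.
library(terra)

red <- rast("B04_red.tif")   # red band (e.g., Sentinel-2 B04)
nir <- rast("B08_nir.tif")   # near-infrared band (e.g., Sentinel-2 B08)

ndvi <- (nir - red) / (nir + red)   # values fall roughly between -1 and 1
plot(ndvi, main = "NDVI")
```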
Map natural habitats over each grade individually layer1 <- denver_redlining layer2 <- natural_habitats natural_habitats_match <- process_and_plot_sf_layers ( layer1 , layer2 , \"natural_habitats_match.png\" ) print ( natural_habitats_match $ plot ) ![](worksheet_redlining_files/figure-gfm/unnamed-chunk-18-1.png) WORD CLOUD: Name of natural habitat area natural_habitats_cloud <- create_wordclouds_by_grade ( natural_habitats_match $ sf , output_file = \"natural_habitats_word_cloud_per_grade.png\" , title = \"Natural habitats place names where larger text is more frequent\" , max_size = 35 ) FUNCTION: Stream NDVI data polygon_layer <- denver_redlining # Function to process satellite data based on an SF polygon's extent process_satellite_data <- function ( polygon_layer , start_date , end_date , assets , fps = 1 , output_file = \"anim.gif\" ) { # Record start time start_time <- Sys.time () # Calculate the bbox from the polygon layer bbox <- st_bbox ( polygon_layer ) s = stac ( \"https://earth-search.aws.element84.com/v0\" ) # Use stacR to search for Sentinel-2 images within the bbox and date range items = s |> stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( bbox [ \"xmin\" ], bbox [ \"ymin\" ], bbox [ \"xmax\" ], bbox [ \"ymax\" ]), datetime = paste ( start_date , end_date , sep = \"/\" ), limit = 500 ) %>% post_request () # Define mask for Sentinel-2 image quality #S2.mask <- image_mask(\"SCL\", values = c(3, 8, 9)) # Create a collection of images filtering by cloud cover col <- stac_image_collection ( items $ features , asset_names = assets , property_filter = function ( x ) { x [[ \"eo:cloud_cover\" ]] < 30 }) # Define a view for processing the data v <- cube_view ( srs = \"EPSG:4326\" , extent = list ( t0 = start_date , t1 = end_date , left = bbox [ \"xmin\" ], right = bbox [ \"xmax\" ], top = bbox [ \"ymax\" ], bottom = bbox [ \"ymin\" ]), dx = 0.001 , dy = 0.001 , dt = \"P1M\" , aggregation = \"median\" , resampling = \"bilinear\" ) # Calculate NDVI and create an animation ndvi_col <- function ( n ) { rev ( sequential_hcl ( n , \"Green-Yellow\" )) } #raster_cube(col, v, mask = S2.mask) %>% raster_cube ( col , v ) %>% select_bands ( c ( \"B04\" , \"B08\" )) %>% apply_pixel ( \"(B08-B04)/(B08+B04)\" , \"NDVI\" ) %>% gdalcubes :: animate ( col = ndvi_col , zlim = c ( -0.2 , 1 ), key.pos = 1 , save_as = output_file , fps = fps ) # Calculate processing time end_time <- Sys.time () processing_time <- difftime ( end_time , start_time ) # Return processing time return ( processing_time ) } Stream NDVI data: animation processing_time <- process_satellite_data ( denver_redlining , \"2022-05-31\" , \"2023-05-31\" , c ( \"B04\" , \"B08\" )) FUNCTION: Stream year average NDVI yearly_average_ndvi <- function ( polygon_layer , output_file = \"ndvi.png\" , dx = 0.01 , dy = 0.01 ) { # Record start time start_time <- Sys.time () # Calculate the bbox from the polygon layer bbox <- st_bbox ( polygon_layer ) s = stac ( \"https://earth-search.aws.element84.com/v0\" ) # Search for Sentinel-2 images within the bbox for June items <- s |> stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( bbox [ \"xmin\" ], bbox [ \"ymin\" ], bbox [ \"xmax\" ], bbox [ \"ymax\" ]), datetime = \"2023-01-01/2023-12-31\" , limit = 500 ) %>% post_request () # Create a collection of images filtering by cloud cover col <- stac_image_collection ( items $ features , asset_names = c ( \"B04\" , \"B08\" ), property_filter = function ( x ) { x [[ \"eo:cloud_cover\" ]] < 80 }) # Define a view for processing 
the data specifically for June v <- cube_view ( srs = \"EPSG:4326\" , extent = list ( t0 = \"2023-01-01\" , t1 = \"2023-12-31\" , left = bbox [ \"xmin\" ], right = bbox [ \"xmax\" ], top = bbox [ \"ymax\" ], bottom = bbox [ \"ymin\" ]), dx = dx , dy = dy , dt = \"P1Y\" , aggregation = \"median\" , resampling = \"bilinear\" ) # Process NDVI ndvi_rast <- raster_cube ( col , v ) %>% select_bands ( c ( \"B04\" , \"B08\" )) %>% apply_pixel ( \"(B08-B04)/(B08+B04)\" , \"NDVI\" ) %>% write_tif () |> terra :: rast () # Convert terra Raster to ggplot using tidyterra ndvi_plot <- ggplot () + geom_spatraster ( data = ndvi_rast , aes ( fill = NDVI )) + scale_fill_viridis_c ( option = \"viridis\" , direction = -1 , name = \"NDVI\" ) + labs ( title = \"NDVI mean for 2023\" ) + theme_minimal () + coord_sf () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"right\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot as a high-resolution PNG file ggsave ( output_file , ndvi_plot , width = 10 , height = 8 , dpi = 600 ) # Calculate processing time end_time <- Sys.time () processing_time <- difftime ( end_time , start_time ) # Return the plot and processing time return ( list ( plot = ndvi_plot , processing_time = processing_time , raster = ndvi_rast )) } Stream NDVI: high resolution ndvi_background <- yearly_average_ndvi ( denver_redlining , dx = 0.0001 , dy = 0.0001 ) FUNCTION: Map NDVI per HOLC grade individually create_mask_and_plot <- function ( redlining_sf , background_raster = ndvi $ raster , roads = NULL , rivers = NULL ){ start_time <- Sys.time () # Start timing # Validate and prepare the redlining data redlining_sf <- redlining_sf %>% filter ( grade != \"\" ) %>% st_make_valid () bbox <- st_bbox ( redlining_sf ) # Get original bounding box expanded_bbox <- expand_bbox ( bbox , 6000 , 1000 ) # expanded_bbox_poly <- st_as_sfc ( expanded_bbox , crs = st_crs ( redlining_sf )) %>% st_make_valid () # Initialize an empty list to store masks masks <- list () # Iterate over each grade to create masks unique_grades <- unique ( redlining_sf $ grade ) for ( grade in unique_grades ) { # Filter polygons by grade grade_polygons <- redlining_sf [ redlining_sf $ grade == grade , ] # Create an \"inverted\" mask by subtracting these polygons from the background mask <- st_difference ( expanded_bbox_poly , st_union ( grade_polygons )) # Store the mask in the list with the grade as the name masks [[ grade ]] <- st_sf ( geometry = mask , grade = grade ) } # Combine all masks into a single sf object mask_sf <- do.call ( rbind , masks ) # Normalize the grades so that C.2 becomes C, but correctly handle other grades mask_sf $ grade <- ifelse ( mask_sf $ grade == \"C.2\" , \"C\" , mask_sf $ grade ) # Prepare the plot plot <- ggplot () + geom_spatraster ( data = background_raster , aes ( fill = NDVI )) + scale_fill_viridis_c ( name = \"NDVI\" , option = \"viridis\" , direction = -1 ) + geom_sf ( data = mask_sf , aes ( color = grade ), fill = \"white\" , size = 0.1 , show.legend = FALSE ) + scale_color_manual ( values = c ( \"A\" = \"white\" , \"B\" = \"white\" , \"C\" = \"white\" , \"D\" = \"white\" ), name = \"Grade\" ) + facet_wrap ( ~ grade , nrow = 1 ) + geom_sf ( data = roads , alpha = 1 , lwd = 0.1 , color = \"white\" ) + geom_sf ( data = rivers , color = \"white\" , alpha = 
0.5 , lwd = 1.1 ) + labs ( title = \"NDVI: Normalized Difference Vegetation Index\" ) + theme_minimal () + coord_sf ( xlim = c ( bbox [ \"xmin\" ], bbox [ \"xmax\" ]), ylim = c ( bbox [ \"ymin\" ], bbox [ \"ymax\" ]), expand = FALSE ) + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"bottom\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot ggsave ( \"redlining_mask_ndvi.png\" , plot , width = 10 , height = 4 , dpi = 600 ) end_time <- Sys.time () # End timing runtime <- end_time - start_time # Return the plot and runtime return ( list ( plot = plot , runtime = runtime , mask_sf = mask_sf )) } Stream NDVI: low resolution ndvi_background_low <- yearly_average_ndvi ( denver_redlining ) Map low resolution NDVI per HOLC grade ndvi <- create_mask_and_plot ( denver_redlining , background_raster = ndvi_background_low $ raster , roads = roads , rivers = rivers ) FUNCTION: Map Denver City provided data per HOLC grade process_city_inventory_data <- function ( address , inner_file , polygon_layer , output_filename , variable_label = 'Tree Density' ) { # Download and read the shapefile full_path <- glue ( \"/vsizip/vsicurl/{address}/{inner_file}\" ) shape_data <- st_read ( full_path , quiet = TRUE ) |> st_as_sf () # Process the shape data with the provided polygon layer processed_data <- process_and_plot_sf_layers ( polygon_layer , shape_data , paste0 ( output_filename , \".png\" )) # Extract trees from the processed data trees <- processed_data $ sf denver_redlining_residential <- polygon_layer |> filter ( grade != \"\" ) # Generate the density plot plot <- ggplot () + geom_sf ( data = roads , alpha = 0.05 , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.1 , lwd = 1.1 ) + geom_sf ( data = denver_redlining_residential , fill = \"grey\" , color = \"grey\" , size = 0.1 ) + facet_wrap ( ~ grade , nrow = 1 ) + stat_density_2d ( data = trees , mapping = aes ( x = map_dbl ( geometry , ~ . [ 1 ]), y = map_dbl ( geometry , ~ . [ 2 ]), fill = stat ( density )), geom = 'tile' , contour = FALSE , alpha = 0.9 ) + scale_fill_gradientn ( colors = c ( \"transparent\" , \"white\" , \"limegreen\" ), values = scales :: rescale ( c ( 0 , 0.1 , 1 )), # Adjust these based on your density range guide = \"colourbar\" ) + theme_minimal () + labs ( fill = variable_label ) + theme_tufte () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"bottom\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot ggsave ( paste0 ( output_filename , \"_density_plot.png\" ), plot , width = 10 , height = 4 , units = \"in\" , dpi = 600 ) # Return the plot and the tree layer return ( list ( plot = plot , layer = trees )) } Map tree inventory per HOLC grade result <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/tree_inventory/shape/tree_inventory.zip\" , \"tree_inventory.shp\" , denver_redlining , \"Denver_tree_inventory_2023\" ) Warning: `stat(density)` was deprecated in ggplot2 3.4.0. \u2139 Please use `after_stat(density)` instead. 
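The deprecation warning above comes from the `fill = stat(density)` mapping inside `process_city_inventory_data()`. On ggplot2 >= 3.4.0 the same layer can be written with `after_stat()`, which is the only change the warning asks for; a minimal drop-in replacement for just that mapping (everything else in the function stays as written) might look like:

stat_density_2d(
  data = trees,
  mapping = aes(
    x = map_dbl(geometry, ~ .[1]),
    y = map_dbl(geometry, ~ .[2]),
    fill = after_stat(density)  # replaces the deprecated stat(density)
  ),
  geom = "tile",
  contour = FALSE,
  alpha = 0.9
)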
Map traffic accidents per HOLC grade result <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/traffic_accidents/shape/traffic_accidents.zip\" , \"traffic_accidents.shp\" , denver_redlining , \"Denver_traffic_accidents\" , variable_label = 'Traffic accidents density' ) Map stream sampling effort per HOLC grade instream_sampling_sites <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/instream_sampling_sites/shape/instream_sampling_sites.zip\" , \"instream_sampling_sites.shp\" , denver_redlining , \"instream_sampling_sites\" , variable_label = 'Instream sampling sites density' ) Map soil sampling effort per HOLC grade soil_samples <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/soil_samples/shape/soil_samples.zip\" , \"soil_samples.shp\" , denver_redlining , \"Soil samples\" , variable_label = 'soil samples density' ) Map public art density per HOLC grade public_art <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/public_art/shape/public_art.zip\" , \"public_art.shp\" , denver_redlining , \"Public art \" , variable_label = 'Public art density' ) Map liquor licenses density per HOLC grade liquor_licenses <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/liquor_licenses/shape/liquor_licenses.zip\" , \"liquor_licenses.shp\" , denver_redlining , \"liquor licenses \" , variable_label = 'liquor licenses density' ) Map crime density per HOLC grade Crime <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/crime/shape/crime.zip\" , \"crime.shp\" , denver_redlining , \"crime\" , variable_label = 'Crime density' ) WORD CLOUD: Types of crimes crime_cloud <- create_wordclouds_by_grade ( Crime $ layer , output_file = \"Crime_word_cloud_per_grade.png\" , title = \"Crime type where larger text is more frequent\" , max_size = 25 , col_select = \"OFFENSE_TY\" ) Warning: Using an external vector in selections was deprecated in tidyselect 1.1.0. \u2139 Please use `all_of()` or `any_of()` instead. # Was: data %>% select(col_select) # Now: data %>% select(all_of(col_select)) See . Map police shooting density per HOLC grade Denver_police_shootings <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/denver_police_officer_involved_shootings/shape/denver_police_officer_involved_shootings.zip\" , \"denver_police_officer_involved_shootings.shp\" , denver_redlining , \"Police shootings\" , variable_label = 'Police shootings density' ) Not enough data for density across all 4 WORD CLOUD: Police involved shootings Denver_police_shootings_cloud <- create_wordclouds_by_grade ( Denver_police_shootings $ layer , output_file = \"police_shootings_word_cloud_per_grade.png\" , title = \"police involved shooting per crime type where larger text is more frequent\" , max_size = 35 , col_select = \"SHOOT_ACTI\" ) Part 3: Comparative Analysis and Visualization \u00b6 Statistical Analysis \u00b6 Conduct a detailed statistical analysis to compare greenspace across different HOLC grades, using techniques like Targeted Maximum Likelihood Estimation (TMLE) to assess the association between historical redlining and current greenspace levels. Visualize the disparities in greenspace distribution using GIS tools, highlighting how redlining has shaped urban ecological landscapes. 
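Before reaching for TMLE, it can help to look at the raw pattern first. The sketch below is illustrative and not part of the worksheet code above: it summarises the yearly NDVI raster returned by `yearly_average_ndvi()` within each HOLC polygon and fits an unadjusted linear model of mean NDVI on grade. A full TMLE analysis would additionally require covariate data and a package such as tmle.

# Rough, unadjusted comparison of greenspace across HOLC grades
holc <- denver_redlining |> dplyr::filter(grade %in% c("A", "B", "C", "D"))

# Mean NDVI per polygon, using the raster produced by yearly_average_ndvi()
ndvi_means <- terra::extract(
  ndvi_background$raster,     # SpatRaster with a layer named "NDVI"
  terra::vect(holc),
  fun = mean, na.rm = TRUE
)
holc$mean_ndvi <- ndvi_means$NDVI

# Unadjusted difference in mean NDVI by grade (A is the reference level)
summary(lm(mean_ndvi ~ grade, data = holc))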
Conclusion \u00b6 This tutorial provides tools and methodologies to explore the lingering effects of historic redlining on urban greenspace, offering insights into the intersection of urban planning, environmental justice, and public health. References \u00b6 Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. (2021). Redlines and Greenspace: The Relationship between Historical Redlining and 2010 Greenspace across the United States. Environmental Health Perspectives , 129(1), 017006. DOI:10.1289/EHP7495. Available online","title":"Explore an example (teacher edition)"},{"location":"worksheets/worksheet_redlining/#redlining","text":"","title":"Redlining"},{"location":"worksheets/worksheet_redlining/#exploring-the-impact-of-historical-redlining-on-urban-greenspace-a-collaborative-examination-of-maps-justice-and-resilience","text":"","title":"Exploring the Impact of Historical Redlining on Urban Greenspace: A Collaborative Examination of Maps, Justice, and Resilience"},{"location":"worksheets/worksheet_redlining/#introduction","text":"This group exploration delves into the long-term impacts of historical redlining on urban greenspace, emphasizing the powerful role of maps in shaping environmental and social landscapes. By drawing on the research by Nardone et al. (2021), you will collaboratively investigate how discriminatory practices encoded in maps have led to persistent disparities in urban settings. This exploration aims to uncover the resilience of communities in adapting to these entrenched injustices and to foster a deeper understanding of how mapping can serve both as a tool of exclusion and as a means for promoting social equity. )","title":"Introduction"},{"location":"worksheets/worksheet_redlining/#understanding-redlining-as-a-systemic-disturbance","text":"Redlining originated in the 1930s as a discriminatory practice where the Home Owners\u2019 Loan Corporation (HOLC) systematically denied mortgages or offered unfavorable terms based on racial and ethnic compositions. This methodical exclusion, executed through maps that color-coded \u201crisky\u201d investment areas in red, marked minority-populated areas, denying them crucial investment and development opportunities and initiating a profound and lasting disturbance in the urban fabric. Maps serve as powerful tools beyond navigation; they communicate and enforce control. By defining neighborhood boundaries through redlining, HOLC maps not only mirrored societal biases but also perpetuated and embedded them into the urban landscape. This manipulation of geographic data set a trajectory that limited economic growth, dictated the allocation of services, and influenced the development or deterioration of community infrastructure. Figure 1: 1938 Map of Atlanta uses colors as grades for neighborhoods. The red swaths identify each area with large African-American populations that were deemed \u201cless safe.\u201d ArcGIS Story Map Explore the Story Map: Click on the image above to explore the interactive story map about [subject of the story map].","title":"Understanding Redlining as a Systemic Disturbance"},{"location":"worksheets/worksheet_redlining/#resilience-and-adaptation-in-urban-environments","text":"The legacy of redlining presents both a challenge and an opportunity for resilience and adaptation. Economically and socially, redlining entrenched cycles of poverty and racial segregation, creating a resilient wealth gap that has been difficult to dismantle. 
Environmentally, the neighborhoods targeted by redlining continue to face significant challenges\u2014they generally feature less greenspace, suffer from higher pollution levels, and are more vulnerable to the impacts of climate change. These factors compound the health and wellness challenges faced by residents. Despite these adversities, urban communities have continually demonstrated remarkable resilience. Adaptation strategies, such as community-led green initiatives, urban agriculture, and grassroots activism, have emerged as responses to these systemic disturbances. By enhancing green infrastructure and advocating for equitable environmental policies, these communities strive to increase their resilience against both historical inequities and environmental challenges. Watch the video Video Title: Exploring the Impacts of Historical Redlining on Urban Development Description: Click on the image above to watch a video that delves into the consequences of historical redlining and its ongoing impact on urban environments. This educational piece offers insights into how such discriminatory practices have shaped cities and what can be learned from them. The following group exercise will not only uncover the impact of redlining on urban greenspace but also highlight the adaptive strategies developed in response to this enduring disturbance. Through mapping and analysis, we aim to illustrate the powerful role that geographic data can play in understanding and fostering urban resilience and social equity.","title":"Resilience and Adaptation in Urban Environments"},{"location":"worksheets/worksheet_redlining/#references","text":"Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. (2021). Redlines and Greenspace: The Relationship between Historical Redlining and 2010 Greenspace across the United States. Environmental Health Perspectives , 129(1), 017006. DOI:10.1289/EHP7495. Hoffman, J. S., Shandas, V., & Pendleton, N. (2020). The Effects of Historical Housing Policies on Resident Exposure to Intra-Urban Heat: A Study of 108 US Urban Areas. Climate , 8(1), 12. DOI:10.3390/cli8010012.","title":"References"},{"location":"worksheets/worksheet_redlining/#goals-of-this-group-activity","text":"The primary objectives of this tutorial are: 1. To practice coding in CyVerse. 2. To analyze the relationship between HOLC grades and the presence of urban greenspace. 3. To understand how historic policies continue to affect the spatial distribution of environmental amenities.","title":"Goals of this group activity"},{"location":"worksheets/worksheet_redlining/#part-1-accessing-and-visualizing-historic-redlining-data","text":"We will begin by accessing HOLC maps from the Mapping Inequality project and overlaying this data with modern geographic datasets to visualize the historical impact on contemporary urban landscapes.","title":"Part 1: Accessing and Visualizing Historic Redlining Data"},{"location":"worksheets/worksheet_redlining/#data-acquisition","text":"Download HOLC map shapefiles from the University of Richmond\u2019s Mapping Inequality Project. Utilize satellite imagery and other geospatial data to map current greenspace using the normalized difference vegetation index (NDVI).","title":"Data Acquisition"},{"location":"worksheets/worksheet_redlining/#analysis-methodology","text":"Replicate the approach used by Nardone et al. to calculate NDVI values for each HOLC neighborhood, assessing greenspace as a health-promoting resource. 
Employ statistical methods such as propensity score matching to control for confounding variables and estimate the true impact of HOLC grades on urban greenspace. img { width: 100%; } details summary { color: black; background-color: white; } details[open] summary { color: black; } R libraries we use in this analysis if ( ! requireNamespace ( \"tidytext\" , quietly = TRUE )) { install.packages ( \"tidytext\" ) } library ( tidytext ) ## Warning: package 'tidytext' was built under R version 4.3.2 library ( sf ) ## Warning: package 'sf' was built under R version 4.3.2 ## Linking to GEOS 3.11.0, GDAL 3.5.3, PROJ 9.1.0; sf_use_s2() is TRUE library ( ggplot2 ) ## Warning: package 'ggplot2' was built under R version 4.3.2 library ( ggthemes ) ## Warning: package 'ggthemes' was built under R version 4.3.2 library ( dplyr ) ## ## Attaching package: 'dplyr' ## The following objects are masked from 'package:stats': ## ## filter, lag ## The following objects are masked from 'package:base': ## ## intersect, setdiff, setequal, union library ( rstac ) ## Warning: package 'rstac' was built under R version 4.3.2 library ( gdalcubes ) ## Warning: package 'gdalcubes' was built under R version 4.3.2 library ( gdalUtils ) ## Please note that rgdal will be retired during October 2023, ## plan transition to sf/stars/terra functions using GDAL and PROJ ## at your earliest convenience. ## See https://r-spatial.org/r/2023/05/15/evolution4.html and https://github.com/r-spatial/evolution ## rgdal: version: 1.6-7, (SVN revision 1203) ## Geospatial Data Abstraction Library extensions to R successfully loaded ## Loaded GDAL runtime: GDAL 3.5.3, released 2022/10/21 ## Path to GDAL shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/rgdal/gdal ## GDAL does not use iconv for recoding strings. ## GDAL binary built with GEOS: TRUE ## Loaded PROJ runtime: Rel. 9.1.0, September 1st, 2022, [PJ_VERSION: 910] ## Path to PROJ shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/gdalcubes/proj ## PROJ CDN enabled: FALSE ## Linking to sp version:1.6-1 ## To mute warnings of possible GDAL/OSR exportToProj4() degradation, ## use options(\"rgdal_show_exportToProj4_warnings\"=\"none\") before loading sp or rgdal. ## ## Attaching package: 'gdalUtils' ## The following object is masked from 'package:sf': ## ## gdal_rasterize library ( gdalcubes ) library ( colorspace ) library ( terra ) ## Warning: package 'terra' was built under R version 4.3.2 ## terra 1.7.71 ## ## Attaching package: 'terra' ## The following object is masked from 'package:colorspace': ## ## RGB ## The following objects are masked from 'package:gdalcubes': ## ## animate, crop, size library ( tidyterra ) ## ## Attaching package: 'tidyterra' ## The following object is masked from 'package:stats': ## ## filter library ( basemapR ) library ( tidytext ) library ( ggwordcloud ) library ( osmextract ) ## Data (c) OpenStreetMap contributors, ODbL 1.0. https://www.openstreetmap.org/copyright. ## Check the package website, https://docs.ropensci.org/osmextract/, for more details. 
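# Note on the rgdal message above: rgdal (which gdalUtils depends on) was
# retired in late 2023, and no gdalUtils function appears to be called in this
# worksheet, so on a current R install the library(gdalUtils) line can likely
# be dropped. If a GDAL command-line utility is ever needed, sf exposes them
# directly, e.g. (the file name here is just an illustration):
# sf::gdal_utils(util = "info", source = "ndvi.png")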
library ( sf ) library ( ggplot2 ) library ( ggthemes ) library ( glue ) ## ## Attaching package: 'glue' ## The following object is masked from 'package:terra': ## ## trim library ( purrr ) FUNCTION: List cities where HOLC data are available # Function to get a list of unique cities and states from the redlining data get_city_state_list_from_redlining_data <- function () { # URL to the GeoJSON data url <- \"https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson\" # Read the GeoJSON file into an sf object redlining_data <- tryCatch ({ read_sf ( url ) }, error = function ( e ) { stop ( \"Error reading GeoJSON data: \" , e $ message ) }) # Check for the existence of 'city' and 'state' columns if ( ! all ( c ( \"city\" , \"state\" ) %in% names ( redlining_data ))) { stop ( \"The required columns 'city' and/or 'state' do not exist in the data.\" ) } # Extract a unique list of city and state pairs without the geometries city_state_df <- redlining_data %>% select ( city , state ) %>% st_set_geometry ( NULL ) %>% # Drop the geometry to avoid issues with invalid shapes distinct ( city , state ) %>% arrange ( state , city ) # Arrange the list alphabetically by state, then by city # Return the dataframe of unique city-state pairs return ( city_state_df ) } Stream list of available HOLC cities #Retrieve the list of cities and states city_state_list <- get_city_state_list_from_redlining_data () knitr :: kable ( city_state_list , format = \"markdown\" ) | city | state | |:---------------------------------|:------| | Birmingham | AL | | Mobile | AL | | Montgomery | AL | | Arkadelphia | AR | | Batesville | AR | | Camden | AR | | Conway | AR | | El Dorado | AR | | Fort Smith | AR | | Little Rock | AR | | Russellville | AR | | Texarkana | AR | | Phoenix | AZ | | Fresno | CA | | Los Angeles | CA | | Oakland | CA | | Sacramento | CA | | San Diego | CA | | San Francisco | CA | | San Jose | CA | | Stockton | CA | | Boulder | CO | | Colorado Springs | CO | | Denver | CO | | Fort Collins | CO | | Fort Morgan | CO | | Grand Junction | CO | | Greeley | CO | | Longmont | CO | | Pueblo | CO | | Bridgeport and Fairfield | CT | | Hartford | CT | | New Britain | CT | | New Haven | CT | | Stamford, Darien, and New Canaan | CT | | Waterbury | CT | | Crestview | FL | | Daytona Beach | FL | | DeFuniak Springs | FL | | DeLand | FL | | Jacksonville | FL | | Miami | FL | | New Smyrna | FL | | Orlando | FL | | Pensacola | FL | | St. Petersburg | FL | | Tampa | FL | | Atlanta | GA | | Augusta | GA | | Columbus | GA | | Macon | GA | | Savannah | GA | | Boone | IA | | Cedar Rapids | IA | | Council Bluffs | IA | | Davenport | IA | | Des Moines | IA | | Dubuque | IA | | Sioux City | IA | | Waterloo | IA | | Aurora | IL | | Chicago | IL | | Decatur | IL | | East St. Louis | IL | | Joliet | IL | | Peoria | IL | | Rockford | IL | | Springfield | IL | | Evansville | IN | | Fort Wayne | IN | | Indianapolis | IN | | Lake Co. 
Gary | IN | | Muncie | IN | | South Bend | IN | | Terre Haute | IN | | Atchison | KS | | Junction City | KS | | Topeka | KS | | Wichita | KS | | Covington | KY | | Lexington | KY | | Louisville | KY | | New Orleans | LA | | Shreveport | LA | | Arlington | MA | | Belmont | MA | | Boston | MA | | Braintree | MA | | Brockton | MA | | Brookline | MA | | Cambridge | MA | | Chelsea | MA | | Dedham | MA | | Everett | MA | | Fall River | MA | | Fitchburg | MA | | Haverhill | MA | | Holyoke Chicopee | MA | | Lawrence | MA | | Lexington | MA | | Lowell | MA | | Lynn | MA | | Malden | MA | | Medford | MA | | Melrose | MA | | Milton | MA | | Needham | MA | | New Bedford | MA | | Newton | MA | | Pittsfield | MA | | Quincy | MA | | Revere | MA | | Salem | MA | | Saugus | MA | | Somerville | MA | | Springfield | MA | | Waltham | MA | | Watertown | MA | | Winchester | MA | | Winthrop | MA | | Worcester | MA | | Baltimore | MD | | Augusta | ME | | Boothbay | ME | | Portland | ME | | Sanford | ME | | Waterville | ME | | Battle Creek | MI | | Bay City | MI | | Detroit | MI | | Flint | MI | | Grand Rapids | MI | | Jackson | MI | | Kalamazoo | MI | | Lansing | MI | | Muskegon | MI | | Pontiac | MI | | Saginaw | MI | | Austin | MN | | Duluth | MN | | Mankato | MN | | Minneapolis | MN | | Rochester | MN | | St. Cloud | MN | | St. Paul | MN | | Staples | MN | | Cape Girardeau | MO | | Carthage | MO | | Greater Kansas City | MO | | Joplin | MO | | Springfield | MO | | St. Joseph | MO | | St. Louis | MO | | Jackson | MS | | Asheville | NC | | Charlotte | NC | | Durham | NC | | Elizabeth City | NC | | Fayetteville | NC | | Goldsboro | NC | | Greensboro | NC | | Hendersonville | NC | | High Point | NC | | New Bern | NC | | Rocky Mount | NC | | Statesville | NC | | Winston-Salem | NC | | Fargo | ND | | Grand Forks | ND | | Minot | ND | | Williston | ND | | Lincoln | NE | | Omaha | NE | | Manchester | NH | | Atlantic City | NJ | | Bergen Co. | NJ | | Camden | NJ | | Essex Co. | NJ | | Hudson Co. | NJ | | Monmouth | NJ | | Passaic County | NJ | | Perth Amboy | NJ | | Trenton | NJ | | Union Co. | NJ | | Albany | NY | | Binghamton-Johnson City | NY | | Bronx | NY | | Brooklyn | NY | | Buffalo | NY | | Elmira | NY | | Jamestown | NY | | Lower Westchester Co. 
| NY | | Manhattan | NY | | Niagara Falls | NY | | Poughkeepsie | NY | | Queens | NY | | Rochester | NY | | Schenectady | NY | | Staten Island | NY | | Syracuse | NY | | Troy | NY | | Utica | NY | | Akron | OH | | Canton | OH | | Cleveland | OH | | Columbus | OH | | Dayton | OH | | Hamilton | OH | | Lima | OH | | Lorain | OH | | Portsmouth | OH | | Springfield | OH | | Toledo | OH | | Warren | OH | | Youngstown | OH | | Ada | OK | | Alva | OK | | Enid | OK | | Miami Ottawa County | OK | | Muskogee | OK | | Norman | OK | | Oklahoma City | OK | | South McAlester | OK | | Tulsa | OK | | Portland | OR | | Allentown | PA | | Altoona | PA | | Bethlehem | PA | | Chester | PA | | Erie | PA | | Harrisburg | PA | | Johnstown | PA | | Lancaster | PA | | McKeesport | PA | | New Castle | PA | | Philadelphia | PA | | Pittsburgh | PA | | Wilkes-Barre | PA | | York | PA | | Pawtucket & Central Falls | RI | | Providence | RI | | Woonsocket | RI | | Aiken | SC | | Charleston | SC | | Columbia | SC | | Greater Anderson | SC | | Greater Greenville | SC | | Orangeburg | SC | | Rock Hill | SC | | Spartanburg | SC | | Sumter | SC | | Aberdeen | SD | | Huron | SD | | Milbank | SD | | Mitchell | SD | | Rapid City | SD | | Sioux Falls | SD | | Vermillion | SD | | Watertown | SD | | Chattanooga | TN | | Elizabethton | TN | | Erwin | TN | | Greenville | TN | | Johnson City | TN | | Knoxville | TN | | Memphis | TN | | Nashville | TN | | Amarillo | TX | | Austin | TX | | Beaumont | TX | | Dallas | TX | | El Paso | TX | | Fort Worth | TX | | Galveston | TX | | Houston | TX | | Port Arthur | TX | | San Antonio | TX | | Waco | TX | | Wichita Falls | TX | | Ogden | UT | | Salt Lake City | UT | | Bristol | VA | | Danville | VA | | Harrisonburg | VA | | Lynchburg | VA | | Newport News | VA | | Norfolk | VA | | Petersburg | VA | | Phoebus | VA | | Richmond | VA | | Roanoke | VA | | Staunton | VA | | Bennington | VT | | Brattleboro | VT | | Burlington | VT | | Montpelier | VT | | Newport City | VT | | Poultney | VT | | Rutland | VT | | Springfield | VT | | St. Albans | VT | | St. Johnsbury | VT | | Windsor | VT | | Seattle | WA | | Spokane | WA | | Tacoma | WA | | Kenosha | WI | | Madison | WI | | Milwaukee Co. 
| WI | | Oshkosh | WI | | Racine | WI | | Charleston | WV | | Huntington | WV | | Wheeling | WV | FUNCTION: Stream HOLC data from a city # Function to load and filter redlining data by city load_city_redlining_data <- function ( city_name ) { # URL to the GeoJSON data url <- \"https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson\" # Read the GeoJSON file into an sf object redlining_data <- read_sf ( url ) # Filter the data for the specified city and non-empty grades city_redline <- redlining_data %>% filter ( city == city_name ) # Return the filtered data return ( city_redline ) } Stream HOLC data for Denver, CO # Load redlining data for Denver denver_redlining <- load_city_redlining_data ( \"Denver\" ) knitr :: kable ( head ( denver_redlining ), format = \"markdown\" ) | area_id | city | state | city_survey | cat | grade | label | res | com | ind | fill | GEOID10 | GISJOIN | calc_area | pct_tract | geometry | |--------:|:-------|:------|:------------|:-----|:------|:------|:-----|:------|:------|:---------|:------------|:---------------|-------------:|----------:|:-----------------------------| | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004104 | G0800310004104 | 1.525535e+01 | 0.00001 | MULTIPOLYGON (((-104.9125 3\u2026 | | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004201 | G0800310004201 | 3.987458e+05 | 0.20900 | MULTIPOLYGON (((-104.9246 3\u2026 | | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004304 | G0800310004304 | 1.554195e+05 | 0.05927 | MULTIPOLYGON (((-104.9125 3\u2026 | | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004202 | G0800310004202 | 1.117770e+06 | 0.57245 | MULTIPOLYGON (((-104.9125 3\u2026 | | 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \\#76a865 | 08031004302 | G0800310004302 | 3.133415e+05 | 0.28381 | MULTIPOLYGON (((-104.928 39\u2026 | | 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \\#76a865 | 08031004301 | G0800310004301 | 1.221218e+05 | 0.08622 | MULTIPOLYGON (((-104.9305 3\u2026 | FUNCTION: Get Points-of-Interest from city of interest get_places <- function ( polygon_layer , type = \"food\" ) { # Check if the input is an sf object if ( ! 
inherits ( polygon_layer , \"sf\" )) { stop ( \"The provided object is not an sf object.\" ) } # Create a bounding box from the input sf object bbox_here <- st_bbox ( polygon_layer ) |> st_as_sfc () if ( type == \"food\" ){ my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( shop IN ('supermarket', 'bodega', 'market', 'other_market', 'farm', 'garden_centre', 'doityourself', 'farm_supply', 'compost', 'mulch', 'fertilizer') OR amenity IN ('social_facility', 'market', 'restaurant', 'coffee') OR leisure = 'garden' OR landuse IN ('farm', 'farmland', 'row_crops', 'orchard_plantation', 'dairy_grazing') OR building IN ('brewery', 'winery', 'distillery') OR shop = 'greengrocer' OR amenity = 'marketplace' )\" title <- \"food\" } if ( type == \"processed_food\" ) { my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( amenity IN ('fast_food', 'cafe', 'pub') OR shop IN ('convenience', 'supermarket') OR shop = 'kiosk' )\" title <- \"Processed Food Locations\" } if ( type == \"natural_habitats\" ){ my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( boundary = 'protected_area' OR natural IN ('tree', 'wood') OR landuse = 'forest' OR leisure = 'park' )\" title <- \"Natural habitats or City owned trees\" } if ( type == \"roads\" ){ my_layer <- \"lines\" my_query <- \"SELECT * FROM lines WHERE ( highway IN ('motorway', 'trunk', 'primary', 'secondary', 'tertiary') )\" title <- \"Major roads\" } if ( type == \"rivers\" ){ my_layer <- \"lines\" my_query <- \"SELECT * FROM lines WHERE ( waterway IN ('river'))\" title <- \"Major rivers\" } if ( type == \"internet_access\" ) { my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( amenity IN ('library', 'cafe', 'community_centre', 'public_building') AND internet_access = 'yes' )\" title <- \"Internet Access Locations\" } if ( type == \"water_bodies\" ) { my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( natural IN ('water', 'lake', 'pond') OR water IN ('lake', 'pond') OR landuse = 'reservoir' )\" title <- \"Water Bodies\" } if ( type == \"government_buildings\" ) { my_layer <- \"multipolygons\" my_query <- \"SELECT * FROM multipolygons WHERE ( amenity IN ('townhall', 'courthouse', 'embassy', 'police', 'fire_station') OR building IN ('capitol', 'government') )\" title <- \"Government Buildings\" } # Use the bbox to get data with oe_get(), specifying the desired layer and a custom SQL query for fresh food places tryCatch ({ places <- oe_get ( place = bbox_here , layer = my_layer , # Adjusted layer; change as per actual data availability query = my_query , quiet = TRUE ) places <- st_make_valid ( places ) # Crop the data to the bounding box cropped_places <- st_crop ( places , bbox_here ) # Plotting the cropped fresh food places plot <- ggplot ( data = cropped_places ) + geom_sf ( fill = \"cornflowerblue\" , color = \"cornflowerblue\" ) + ggtitle ( title ) + theme_tufte () + theme ( legend.position = \"none\" , # Optionally hide the legend axis.text = element_blank (), # Remove axis text axis.title = element_blank (), # Remove axis titles axis.ticks = element_blank (), # Remove axis ticks plot.background = element_rect ( fill = \"white\" , color = NA ), # Set the plot background to white panel.background = element_rect ( fill = \"white\" , color = NA ), # Set the panel background to white panel.grid.major = element_blank (), # Remove major grid lines panel.grid.minor = element_blank (), ) # Save the plot as a PNG file png_filename 
<- paste0 ( title , \"_\" , Sys.Date (), \".png\" ) ggsave ( png_filename , plot , width = 10 , height = 8 , units = \"in\" ) # Return the cropped dataset return ( cropped_places ) }, error = function ( e ) { stop ( \"Failed to retrieve or plot data: \" , e $ message ) }) } FUNCTION: Plot POI over HOLC grades plot_city_redlining <- function ( redlining_data , filename = \"redlining_plot.png\" ) { # Fetch additional geographic data based on redlining data roads <- get_places ( redlining_data , type = \"roads\" ) rivers <- get_places ( redlining_data , type = \"rivers\" ) # Filter residential zones with valid grades and where city survey is TRUE residential_zones <- redlining_data %>% filter ( city_survey == TRUE & grade != \"\" ) # Colors for the grades colors <- c ( \"#76a865\" , \"#7cb5bd\" , \"#ffff00\" , \"#d9838d\" ) # Plot the data using ggplot2 plot <- ggplot () + geom_sf ( data = roads , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.5 , lwd = 1.1 ) + geom_sf ( data = residential_zones , aes ( fill = grade ), alpha = 0.5 ) + theme_tufte () + scale_fill_manual ( values = colors ) + labs ( fill = 'HOLC Categories' ) + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), panel.grid.major = element_blank (), panel.grid.minor = element_blank (), legend.position = \"right\" ) # Save the plot as a high-resolution PNG file ggsave ( filename , plot , width = 10 , height = 8 , units = \"in\" , dpi = 600 ) # Return the plot object if needed for further manipulation or checking return ( plot ) } Plot Denver Redlining denver_plot <- plot_city_redlining ( denver_redlining ) Stream amenities by category food <- get_places ( denver_redlining , type = \"food\" ) food_processed <- get_places ( denver_redlining , type = \"processed_food\" ) natural_habitats <- get_places ( denver_redlining , type = \"natural_habitats\" ) roads <- get_places ( denver_redlining , type = \"roads\" ) rivers <- get_places ( denver_redlining , type = \"rivers\" ) #water_bodies <- get_places(denver_redlining, type=\"water_bodies\") government_buildings <- get_places ( denver_redlining , type = \"government_buildings\" ) FUNCTION: Plot the HOLC grades individually split_plot <- function ( sf_data , roads , rivers ) { # Filter for grades A, B, C, and D sf_data_filtered <- sf_data %>% filter ( grade %in% c ( 'A' , 'B' , 'C' , 'D' )) # Define a color for each grade grade_colors <- c ( \"A\" = \"#76a865\" , \"B\" = \"#7cb5bd\" , \"C\" = \"#ffff00\" , \"D\" = \"#d9838d\" ) # Create the plot with panels for each grade plot <- ggplot ( data = sf_data_filtered ) + geom_sf ( data = roads , alpha = 0.1 , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.1 , lwd = 1.1 ) + geom_sf ( aes ( fill = grade )) + facet_wrap ( ~ grade , nrow = 1 ) + # Free scales for different zoom levels if needed scale_fill_manual ( values = grade_colors ) + theme_minimal () + labs ( fill = 'HOLC Grade' ) + theme_tufte () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"none\" , # Optionally hide the legend axis.text = element_blank (), # Remove axis text axis.title = element_blank (), # Remove axis titles axis.ticks = element_blank (), # Remove axis ticks panel.grid.major = element_blank (), # Remove major grid lines panel.grid.minor = element_blank ()) ggsave ( plot , filename = \"HOLC_grades_individually.png\" , width 
= 10 , height = 4 , units = \"in\" , dpi = 1200 ) return ( plot ) } Plot 4 HOLC grades individually plot_row <- split_plot ( denver_redlining , roads , rivers ) FUNCTION: Map an amenity over each grade individually process_and_plot_sf_layers <- function ( layer1 , layer2 , output_file = \"output_plot.png\" ) { # Make geometries valid layer1 <- st_make_valid ( layer1 ) layer2 <- st_make_valid ( layer2 ) # Optionally, simplify geometries to remove duplicate vertices layer1 <- st_simplify ( layer1 , preserveTopology = TRUE ) |> filter ( grade != \"\" ) # Prepare a list to store results results <- list () # Loop through each grade and perform operations for ( grade in c ( \"A\" , \"B\" , \"C\" , \"D\" )) { # Filter layer1 for current grade layer1_grade <- layer1 [ layer1 $ grade == grade , ] # Buffer the geometries of the current grade buffered_layer1_grade <- st_buffer ( layer1_grade , dist = 500 ) # Intersect with the second layer intersections <- st_intersects ( layer2 , buffered_layer1_grade , sparse = FALSE ) selected_polygons <- layer2 [ rowSums ( intersections ) > 0 , ] # Add a new column to store the grade information selected_polygons $ grade <- grade # Store the result results [[ grade ]] <- selected_polygons } # Combine all selected polygons from different grades into one sf object final_selected_polygons <- do.call ( rbind , results ) # Define colors for the grades grade_colors <- c ( \"A\" = \"grey\" , \"B\" = \"grey\" , \"C\" = \"grey\" , \"D\" = \"grey\" ) # Create the plot plot <- ggplot () + geom_sf ( data = roads , alpha = 0.05 , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.1 , lwd = 1.1 ) + geom_sf ( data = layer1 , fill = \"grey\" , color = \"grey\" , size = 0.1 ) + facet_wrap ( ~ grade , nrow = 1 ) + geom_sf ( data = final_selected_polygons , fill = \"green\" , color = \"green\" , size = 0.1 ) + facet_wrap ( ~ grade , nrow = 1 ) + #scale_fill_manual(values = grade_colors) + #scale_color_manual(values = grade_colors) + theme_minimal () + labs ( fill = 'HOLC Grade' ) + theme_tufte () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"none\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot as a high-resolution PNG file ggsave ( output_file , plot , width = 10 , height = 4 , units = \"in\" , dpi = 1200 ) # Return the plot for optional further use return ( list ( plot = plot , sf = final_selected_polygons )) } FUNCTION: Create word cloud per grade create_wordclouds_by_grade <- function ( sf_object , output_file = \"food_word_cloud_per_grade.png\" , title = \"Healthy food place names word cloud\" , max_size = 25 , col_select = \"name\" ) { # Extract relevant data and prepare text data text_data <- sf_object %>% select ( grade , col_select ) %>% filter ( ! is.na ( col_select )) %>% unnest_tokens ( output = \"word\" , input = col_select , token = \"words\" ) %>% count ( grade , word , sort = TRUE ) %>% ungroup () %>% filter ( n () > 1 ) # Filter to remove overly common or single-occurrence words # Ensure there are no NA values in the 'word' column text_data <- text_data %>% filter ( ! 
is.na ( word )) # Handle cases where text_data might be empty if ( nrow ( text_data ) == 0 ) { stop ( \"No data available for creating word clouds.\" ) } # Create a word cloud using ggplot2 and ggwordcloud p <- ggplot ( ) + geom_text_wordcloud_area ( data = text_data , aes ( label = word , size = n ), rm_outside = TRUE ) + scale_size_area ( max_size = max_size ) + facet_wrap ( ~ grade , nrow = 1 ) + scale_color_gradient ( low = \"darkred\" , high = \"red\" ) + theme_minimal () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), panel.spacing = unit ( 0.5 , \"lines\" ), plot.title = element_text ( size = 16 , face = \"bold\" ), legend.position = \"none\" ) + labs ( title = title ) # Attempt to save the plot and handle any errors tryCatch ({ ggsave ( output_file , p , width = 10 , height = 4 , units = \"in\" , dpi = 600 ) }, error = function ( e ) { cat ( \"Error in saving the plot: \" , e $ message , \"\\n\" ) }) return ( p ) } Map food over each grade individually layer1 <- denver_redlining layer2 <- food food_match <- process_and_plot_sf_layers ( layer1 , layer2 , \"food_match.png\" ) WORD CLOUD: Names of places with fresh food food_word_cloud <- create_wordclouds_by_grade ( food_match $ sf , output_file = \"food_word_cloud_per_grade.png\" ) Warning: Using an external vector in selections was deprecated in tidyselect 1.1.0. \u2139 Please use `all_of()` or `any_of()` instead. # Was: data %>% select(col_select) # Now: data %>% select(all_of(col_select)) See . Warning in wordcloud_boxes(data_points = points_valid_first, boxes = boxes, : Some words could not fit on page. They have been removed. Map processed food over each grade individually layer1 <- denver_redlining layer2 <- food_processed processed_food_match <- process_and_plot_sf_layers ( layer1 , layer2 , \"processed_food_match.png\" ) WORD CLOUD: Names of places with processed food processed_food_cloud <- create_wordclouds_by_grade ( processed_food_match $ sf , output_file = \"processed_food_word_cloud_per_grade.png\" , title = \"Processed food place names where larger text is more frequent\" , max_size = 17 )","title":"Analysis Methodology"},{"location":"worksheets/worksheet_redlining/#part-2-integrating-environmental-data","text":"","title":"Part 2: Integrating Environmental Data"},{"location":"worksheets/worksheet_redlining/#data-processing","text":"Use satellite data from 2010 to analyze greenspace using NDVI, an index that measures the quantity of vegetation in an area. Apply methods to adjust for potential confounders as described in the study, ensuring that comparisons of greenspace across HOLC grades are valid and not biased by historical or socio-demographic factors. 
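The confounder adjustment mentioned above can be prototyped with propensity-score matching via the MatchIt package. The sketch below is illustrative only and is not part of the worksheet's code: `holc_covariates`, `pop_density`, and `median_income` are hypothetical placeholders for a per-polygon table of mean NDVI joined to census covariates, and the comparison is restricted to A- versus D-graded polygons.

library(MatchIt)

holc_ab <- holc_covariates |>
  sf::st_drop_geometry() |>
  dplyr::filter(grade %in% c("A", "D")) |>
  dplyr::mutate(treat = as.integer(grade == "D"))

# Nearest-neighbour matching on the hypothetical confounders
m <- MatchIt::matchit(treat ~ pop_density + median_income,
                      data = holc_ab, method = "nearest")
matched <- MatchIt::match.data(m)

# Difference in mean NDVI between matched D- and A-graded polygons
summary(lm(mean_ndvi ~ treat, data = matched))

match.data() also returns matching weights, so a slightly more careful follow-up is to pass them to the model, e.g. lm(mean_ndvi ~ treat, data = matched, weights = weights).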
Map natural habitats over each grade individually layer1 <- denver_redlining layer2 <- natural_habitats natural_habitats_match <- process_and_plot_sf_layers ( layer1 , layer2 , \"natural_habitats_match.png\" ) print ( natural_habitats_match $ plot ) ![](worksheet_redlining_files/figure-gfm/unnamed-chunk-18-1.png) WORD CLOUD: Name of natural habitat area natural_habitats_cloud <- create_wordclouds_by_grade ( natural_habitats_match $ sf , output_file = \"natural_habitats_word_cloud_per_grade.png\" , title = \"Natural habitats place names where larger text is more frequent\" , max_size = 35 ) FUNCTION: Stream NDVI data polygon_layer <- denver_redlining # Function to process satellite data based on an SF polygon's extent process_satellite_data <- function ( polygon_layer , start_date , end_date , assets , fps = 1 , output_file = \"anim.gif\" ) { # Record start time start_time <- Sys.time () # Calculate the bbox from the polygon layer bbox <- st_bbox ( polygon_layer ) s = stac ( \"https://earth-search.aws.element84.com/v0\" ) # Use stacR to search for Sentinel-2 images within the bbox and date range items = s |> stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( bbox [ \"xmin\" ], bbox [ \"ymin\" ], bbox [ \"xmax\" ], bbox [ \"ymax\" ]), datetime = paste ( start_date , end_date , sep = \"/\" ), limit = 500 ) %>% post_request () # Define mask for Sentinel-2 image quality #S2.mask <- image_mask(\"SCL\", values = c(3, 8, 9)) # Create a collection of images filtering by cloud cover col <- stac_image_collection ( items $ features , asset_names = assets , property_filter = function ( x ) { x [[ \"eo:cloud_cover\" ]] < 30 }) # Define a view for processing the data v <- cube_view ( srs = \"EPSG:4326\" , extent = list ( t0 = start_date , t1 = end_date , left = bbox [ \"xmin\" ], right = bbox [ \"xmax\" ], top = bbox [ \"ymax\" ], bottom = bbox [ \"ymin\" ]), dx = 0.001 , dy = 0.001 , dt = \"P1M\" , aggregation = \"median\" , resampling = \"bilinear\" ) # Calculate NDVI and create an animation ndvi_col <- function ( n ) { rev ( sequential_hcl ( n , \"Green-Yellow\" )) } #raster_cube(col, v, mask = S2.mask) %>% raster_cube ( col , v ) %>% select_bands ( c ( \"B04\" , \"B08\" )) %>% apply_pixel ( \"(B08-B04)/(B08+B04)\" , \"NDVI\" ) %>% gdalcubes :: animate ( col = ndvi_col , zlim = c ( -0.2 , 1 ), key.pos = 1 , save_as = output_file , fps = fps ) # Calculate processing time end_time <- Sys.time () processing_time <- difftime ( end_time , start_time ) # Return processing time return ( processing_time ) } Stream NDVI data: animation processing_time <- process_satellite_data ( denver_redlining , \"2022-05-31\" , \"2023-05-31\" , c ( \"B04\" , \"B08\" )) FUNCTION: Stream year average NDVI yearly_average_ndvi <- function ( polygon_layer , output_file = \"ndvi.png\" , dx = 0.01 , dy = 0.01 ) { # Record start time start_time <- Sys.time () # Calculate the bbox from the polygon layer bbox <- st_bbox ( polygon_layer ) s = stac ( \"https://earth-search.aws.element84.com/v0\" ) # Search for Sentinel-2 images within the bbox for June items <- s |> stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( bbox [ \"xmin\" ], bbox [ \"ymin\" ], bbox [ \"xmax\" ], bbox [ \"ymax\" ]), datetime = \"2023-01-01/2023-12-31\" , limit = 500 ) %>% post_request () # Create a collection of images filtering by cloud cover col <- stac_image_collection ( items $ features , asset_names = c ( \"B04\" , \"B08\" ), property_filter = function ( x ) { x [[ \"eo:cloud_cover\" ]] < 80 }) # Define a view for processing 
the data specifically for June v <- cube_view ( srs = \"EPSG:4326\" , extent = list ( t0 = \"2023-01-01\" , t1 = \"2023-12-31\" , left = bbox [ \"xmin\" ], right = bbox [ \"xmax\" ], top = bbox [ \"ymax\" ], bottom = bbox [ \"ymin\" ]), dx = dx , dy = dy , dt = \"P1Y\" , aggregation = \"median\" , resampling = \"bilinear\" ) # Process NDVI ndvi_rast <- raster_cube ( col , v ) %>% select_bands ( c ( \"B04\" , \"B08\" )) %>% apply_pixel ( \"(B08-B04)/(B08+B04)\" , \"NDVI\" ) %>% write_tif () |> terra :: rast () # Convert terra Raster to ggplot using tidyterra ndvi_plot <- ggplot () + geom_spatraster ( data = ndvi_rast , aes ( fill = NDVI )) + scale_fill_viridis_c ( option = \"viridis\" , direction = -1 , name = \"NDVI\" ) + labs ( title = \"NDVI mean for 2023\" ) + theme_minimal () + coord_sf () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"right\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot as a high-resolution PNG file ggsave ( output_file , ndvi_plot , width = 10 , height = 8 , dpi = 600 ) # Calculate processing time end_time <- Sys.time () processing_time <- difftime ( end_time , start_time ) # Return the plot and processing time return ( list ( plot = ndvi_plot , processing_time = processing_time , raster = ndvi_rast )) } Stream NDVI: high resolution ndvi_background <- yearly_average_ndvi ( denver_redlining , dx = 0.0001 , dy = 0.0001 ) FUNCTION: Map NDVI per HOLC grade individually create_mask_and_plot <- function ( redlining_sf , background_raster = ndvi $ raster , roads = NULL , rivers = NULL ){ start_time <- Sys.time () # Start timing # Validate and prepare the redlining data redlining_sf <- redlining_sf %>% filter ( grade != \"\" ) %>% st_make_valid () bbox <- st_bbox ( redlining_sf ) # Get original bounding box expanded_bbox <- expand_bbox ( bbox , 6000 , 1000 ) # expanded_bbox_poly <- st_as_sfc ( expanded_bbox , crs = st_crs ( redlining_sf )) %>% st_make_valid () # Initialize an empty list to store masks masks <- list () # Iterate over each grade to create masks unique_grades <- unique ( redlining_sf $ grade ) for ( grade in unique_grades ) { # Filter polygons by grade grade_polygons <- redlining_sf [ redlining_sf $ grade == grade , ] # Create an \"inverted\" mask by subtracting these polygons from the background mask <- st_difference ( expanded_bbox_poly , st_union ( grade_polygons )) # Store the mask in the list with the grade as the name masks [[ grade ]] <- st_sf ( geometry = mask , grade = grade ) } # Combine all masks into a single sf object mask_sf <- do.call ( rbind , masks ) # Normalize the grades so that C.2 becomes C, but correctly handle other grades mask_sf $ grade <- ifelse ( mask_sf $ grade == \"C.2\" , \"C\" , mask_sf $ grade ) # Prepare the plot plot <- ggplot () + geom_spatraster ( data = background_raster , aes ( fill = NDVI )) + scale_fill_viridis_c ( name = \"NDVI\" , option = \"viridis\" , direction = -1 ) + geom_sf ( data = mask_sf , aes ( color = grade ), fill = \"white\" , size = 0.1 , show.legend = FALSE ) + scale_color_manual ( values = c ( \"A\" = \"white\" , \"B\" = \"white\" , \"C\" = \"white\" , \"D\" = \"white\" ), name = \"Grade\" ) + facet_wrap ( ~ grade , nrow = 1 ) + geom_sf ( data = roads , alpha = 1 , lwd = 0.1 , color = \"white\" ) + geom_sf ( data = rivers , color = \"white\" , alpha = 
0.5 , lwd = 1.1 ) + labs ( title = \"NDVI: Normalized Difference Vegetation Index\" ) + theme_minimal () + coord_sf ( xlim = c ( bbox [ \"xmin\" ], bbox [ \"xmax\" ]), ylim = c ( bbox [ \"ymin\" ], bbox [ \"ymax\" ]), expand = FALSE ) + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"bottom\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot ggsave ( \"redlining_mask_ndvi.png\" , plot , width = 10 , height = 4 , dpi = 600 ) end_time <- Sys.time () # End timing runtime <- end_time - start_time # Return the plot and runtime return ( list ( plot = plot , runtime = runtime , mask_sf = mask_sf )) } Stream NDVI: low resolution ndvi_background_low <- yearly_average_ndvi ( denver_redlining ) Map low resolution NDVI per HOLC grade ndvi <- create_mask_and_plot ( denver_redlining , background_raster = ndvi_background_low $ raster , roads = roads , rivers = rivers ) FUNCTION: Map Denver City provided data per HOLC grade process_city_inventory_data <- function ( address , inner_file , polygon_layer , output_filename , variable_label = 'Tree Density' ) { # Download and read the shapefile full_path <- glue ( \"/vsizip/vsicurl/{address}/{inner_file}\" ) shape_data <- st_read ( full_path , quiet = TRUE ) |> st_as_sf () # Process the shape data with the provided polygon layer processed_data <- process_and_plot_sf_layers ( polygon_layer , shape_data , paste0 ( output_filename , \".png\" )) # Extract trees from the processed data trees <- processed_data $ sf denver_redlining_residential <- polygon_layer |> filter ( grade != \"\" ) # Generate the density plot plot <- ggplot () + geom_sf ( data = roads , alpha = 0.05 , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.1 , lwd = 1.1 ) + geom_sf ( data = denver_redlining_residential , fill = \"grey\" , color = \"grey\" , size = 0.1 ) + facet_wrap ( ~ grade , nrow = 1 ) + stat_density_2d ( data = trees , mapping = aes ( x = map_dbl ( geometry , ~ . [ 1 ]), y = map_dbl ( geometry , ~ . [ 2 ]), fill = stat ( density )), geom = 'tile' , contour = FALSE , alpha = 0.9 ) + scale_fill_gradientn ( colors = c ( \"transparent\" , \"white\" , \"limegreen\" ), values = scales :: rescale ( c ( 0 , 0.1 , 1 )), # Adjust these based on your density range guide = \"colourbar\" ) + theme_minimal () + labs ( fill = variable_label ) + theme_tufte () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"bottom\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot ggsave ( paste0 ( output_filename , \"_density_plot.png\" ), plot , width = 10 , height = 4 , units = \"in\" , dpi = 600 ) # Return the plot and the tree layer return ( list ( plot = plot , layer = trees )) } Map tree inventory per HOLC grade result <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/tree_inventory/shape/tree_inventory.zip\" , \"tree_inventory.shp\" , denver_redlining , \"Denver_tree_inventory_2023\" ) Warning: `stat(density)` was deprecated in ggplot2 3.4.0. \u2139 Please use `after_stat(density)` instead. 
Map traffic accidents per HOLC grade result <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/traffic_accidents/shape/traffic_accidents.zip\" , \"traffic_accidents.shp\" , denver_redlining , \"Denver_traffic_accidents\" , variable_label = 'Traffic accidents density' ) Map stream sampling effort per HOLC grade instream_sampling_sites <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/instream_sampling_sites/shape/instream_sampling_sites.zip\" , \"instream_sampling_sites.shp\" , denver_redlining , \"instream_sampling_sites\" , variable_label = 'Instream sampling sites density' ) Map soil sampling effort per HOLC grade soil_samples <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/soil_samples/shape/soil_samples.zip\" , \"soil_samples.shp\" , denver_redlining , \"Soil samples\" , variable_label = 'soil samples density' ) Map public art density per HOLC grade public_art <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/public_art/shape/public_art.zip\" , \"public_art.shp\" , denver_redlining , \"Public art \" , variable_label = 'Public art density' ) Map liquor licenses density per HOLC grade liquor_licenses <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/liquor_licenses/shape/liquor_licenses.zip\" , \"liquor_licenses.shp\" , denver_redlining , \"liquor licenses \" , variable_label = 'liquor licenses density' ) Map crime density per HOLC grade Crime <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/crime/shape/crime.zip\" , \"crime.shp\" , denver_redlining , \"crime\" , variable_label = 'Crime density' ) WORD CLOUD: Types of crimes crime_cloud <- create_wordclouds_by_grade ( Crime $ layer , output_file = \"Crime_word_cloud_per_grade.png\" , title = \"Crime type where larger text is more frequent\" , max_size = 25 , col_select = \"OFFENSE_TY\" ) Warning: Using an external vector in selections was deprecated in tidyselect 1.1.0. \u2139 Please use `all_of()` or `any_of()` instead. # Was: data %>% select(col_select) # Now: data %>% select(all_of(col_select)) See . Map police shooting density per HOLC grade Denver_police_shootings <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/denver_police_officer_involved_shootings/shape/denver_police_officer_involved_shootings.zip\" , \"denver_police_officer_involved_shootings.shp\" , denver_redlining , \"Police shootings\" , variable_label = 'Police shootings density' ) Not enough data for density across all 4 WORD CLOUD: Police involved shootings Denver_police_shootings_cloud <- create_wordclouds_by_grade ( Denver_police_shootings $ layer , output_file = \"police_shootings_word_cloud_per_grade.png\" , title = \"police involved shooting per crime type where larger text is more frequent\" , max_size = 35 , col_select = \"SHOOT_ACTI\" )","title":"Data Processing"},{"location":"worksheets/worksheet_redlining/#part-3-comparative-analysis-and-visualization","text":"","title":"Part 3: Comparative Analysis and Visualization"},{"location":"worksheets/worksheet_redlining/#statistical-analysis","text":"Conduct a detailed statistical analysis to compare greenspace across different HOLC grades, using techniques like Targeted Maximum Likelihood Estimation (TMLE) to assess the association between historical redlining and current greenspace levels. 
Visualize the disparities in greenspace distribution using GIS tools, highlighting how redlining has shaped urban ecological landscapes.","title":"Statistical Analysis"},{"location":"worksheets/worksheet_redlining/#conclusion","text":"This tutorial provides tools and methodologies to explore the lingering effects of historic redlining on urban greenspace, offering insights into the intersection of urban planning, environmental justice, and public health.","title":"Conclusion"},{"location":"worksheets/worksheet_redlining/#references_1","text":"Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. (2021). Redlines and Greenspace: The Relationship between Historical Redlining and 2010 Greenspace across the United States. Environmental Health Perspectives , 129(1), 017006. DOI:10.1289/EHP7495. Available online","title":"References"},{"location":"worksheets/worksheet_redlining_student_edition/","text":"student edition \u00b6 img { width: 100%; } details summary { color: black; background-color: white; } details[open] summary { color: black; } R libraries we use in this analysis if ( ! requireNamespace ( \"tidytext\" , quietly = TRUE )) { install.packages ( \"tidytext\" ) } library ( tidytext ) ## Warning: package 'tidytext' was built under R version 4.3.2 library ( sf ) ## Warning: package 'sf' was built under R version 4.3.2 ## Linking to GEOS 3.11.0, GDAL 3.5.3, PROJ 9.1.0; sf_use_s2() is TRUE library ( ggplot2 ) ## Warning: package 'ggplot2' was built under R version 4.3.2 library ( ggthemes ) ## Warning: package 'ggthemes' was built under R version 4.3.2 library ( dplyr ) ## ## Attaching package: 'dplyr' ## The following objects are masked from 'package:stats': ## ## filter, lag ## The following objects are masked from 'package:base': ## ## intersect, setdiff, setequal, union library ( rstac ) ## Warning: package 'rstac' was built under R version 4.3.2 library ( gdalcubes ) ## Warning: package 'gdalcubes' was built under R version 4.3.2 library ( gdalUtils ) ## Please note that rgdal will be retired during October 2023, ## plan transition to sf/stars/terra functions using GDAL and PROJ ## at your earliest convenience. ## See https://r-spatial.org/r/2023/05/15/evolution4.html and https://github.com/r-spatial/evolution ## rgdal: version: 1.6-7, (SVN revision 1203) ## Geospatial Data Abstraction Library extensions to R successfully loaded ## Loaded GDAL runtime: GDAL 3.5.3, released 2022/10/21 ## Path to GDAL shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/rgdal/gdal ## GDAL does not use iconv for recoding strings. ## GDAL binary built with GEOS: TRUE ## Loaded PROJ runtime: Rel. 9.1.0, September 1st, 2022, [PJ_VERSION: 910] ## Path to PROJ shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/gdalcubes/proj ## PROJ CDN enabled: FALSE ## Linking to sp version:1.6-1 ## To mute warnings of possible GDAL/OSR exportToProj4() degradation, ## use options(\"rgdal_show_exportToProj4_warnings\"=\"none\") before loading sp or rgdal. 
## ## Attaching package: 'gdalUtils' ## The following object is masked from 'package:sf': ## ## gdal_rasterize library ( gdalcubes ) library ( colorspace ) library ( terra ) ## Warning: package 'terra' was built under R version 4.3.2 ## terra 1.7.71 ## ## Attaching package: 'terra' ## The following object is masked from 'package:colorspace': ## ## RGB ## The following objects are masked from 'package:gdalcubes': ## ## animate, crop, size library ( tidyterra ) ## ## Attaching package: 'tidyterra' ## The following object is masked from 'package:stats': ## ## filter library ( basemapR ) library ( tidytext ) library ( ggwordcloud ) library ( osmextract ) ## Data (c) OpenStreetMap contributors, ODbL 1.0. https://www.openstreetmap.org/copyright. ## Check the package website, https://docs.ropensci.org/osmextract/, for more details. library ( sf ) library ( ggplot2 ) library ( ggthemes ) library ( glue ) ## ## Attaching package: 'glue' ## The following object is masked from 'package:terra': ## ## trim library ( purrr ) FUNCTION: Stream HOLC data from a city # Function to load and filter redlining data by city load_city_redlining_data <- function ( city_name ) { # URL to the GeoJSON data url <- \"https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson\" # Read the GeoJSON file into an sf object redlining_data <- read_sf ( url ) # Filter the data for the specified city and non-empty grades city_redline <- redlining_data %>% filter ( city == city_name ) # Return the filtered data return ( city_redline ) } Stream HOLC data for Denver, CO # Load redlining data for Denver denver_redlining <- load_city_redlining_data ( \"Denver\" ) knitr :: kable ( head ( denver_redlining ), format = \"markdown\" ) | area_id | city | state | city_survey | cat | grade | label | res | com | ind | fill | GEOID10 | GISJOIN | calc_area | pct_tract | geometry | |--------:|:-------|:------|:------------|:-----|:------|:------|:-----|:------|:------|:---------|:------------|:---------------|-------------:|----------:|:-----------------------------| | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004104 | G0800310004104 | 1.525535e+01 | 0.00001 | MULTIPOLYGON (((-104.9125 3\u2026 | | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004201 | G0800310004201 | 3.987458e+05 | 0.20900 | MULTIPOLYGON (((-104.9246 3\u2026 | | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004304 | G0800310004304 | 1.554195e+05 | 0.05927 | MULTIPOLYGON (((-104.9125 3\u2026 | | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004202 | G0800310004202 | 1.117770e+06 | 0.57245 | MULTIPOLYGON (((-104.9125 3\u2026 | | 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \\#76a865 | 08031004302 | G0800310004302 | 3.133415e+05 | 0.28381 | MULTIPOLYGON (((-104.928 39\u2026 | | 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \\#76a865 | 08031004301 | G0800310004301 | 1.221218e+05 | 0.08622 | MULTIPOLYGON (((-104.9305 3\u2026 | FUNCTION: Get Points-of-Interest from city of interest get_places <- function ( polygon_layer , type = \"food\" ) { # Check if the input is an sf object if ( ! 
inherits ( polygon_layer , \"sf\" )) { stop ( \"The provided object is not an sf object.\" ) } # Create a bounding box from the input sf object bbox_here <- st_bbox ( polygon_layer ) |> st_as_sfc () if ( type == \"roads\" ){ my_layer <- \"lines\" my_query <- \"SELECT * FROM lines WHERE ( highway IN ('motorway', 'trunk', 'primary', 'secondary', 'tertiary') )\" title <- \"Major roads\" } if ( type == \"rivers\" ){ my_layer <- \"lines\" my_query <- \"SELECT * FROM lines WHERE ( waterway IN ('river'))\" title <- \"Major rivers\" } # Use the bbox to get data with oe_get(), specifying the desired layer and a custom SQL query for fresh food places tryCatch ({ places <- oe_get ( place = bbox_here , layer = my_layer , # Adjusted layer; change as per actual data availability query = my_query , quiet = TRUE ) places <- st_make_valid ( places ) # Crop the data to the bounding box cropped_places <- st_crop ( places , bbox_here ) # Plotting the cropped fresh food places plot <- ggplot ( data = cropped_places ) + geom_sf ( fill = \"cornflowerblue\" , color = \"cornflowerblue\" ) + ggtitle ( title ) + theme_tufte () + theme ( legend.position = \"none\" , # Optionally hide the legend axis.text = element_blank (), # Remove axis text axis.title = element_blank (), # Remove axis titles axis.ticks = element_blank (), # Remove axis ticks plot.background = element_rect ( fill = \"white\" , color = NA ), # Set the plot background to white panel.background = element_rect ( fill = \"white\" , color = NA ), # Set the panel background to white panel.grid.major = element_blank (), # Remove major grid lines panel.grid.minor = element_blank (), ) # Save the plot as a PNG file png_filename <- paste0 ( title , \"_\" , Sys.Date (), \".png\" ) ggsave ( png_filename , plot , width = 10 , height = 8 , units = \"in\" ) # Return the cropped dataset return ( cropped_places ) }, error = function ( e ) { stop ( \"Failed to retrieve or plot data: \" , e $ message ) }) } Stream amenities by category roads <- get_places ( denver_redlining , type = \"roads\" ) rivers <- get_places ( denver_redlining , type = \"rivers\" ) FUNCTION: Plot POI over HOLC grades plot_city_redlining <- function ( redlining_data , filename = \"redlining_plot.png\" ) { # Fetch additional geographic data based on redlining data roads <- get_places ( redlining_data , type = \"roads\" ) rivers <- get_places ( redlining_data , type = \"rivers\" ) # Filter residential zones with valid grades and where city survey is TRUE residential_zones <- redlining_data %>% filter ( city_survey == TRUE & grade != \"\" ) # Colors for the grades colors <- c ( \"#76a865\" , \"#7cb5bd\" , \"#ffff00\" , \"#d9838d\" ) # Plot the data using ggplot2 plot <- ggplot () + geom_sf ( data = roads , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.5 , lwd = 1.1 ) + geom_sf ( data = residential_zones , aes ( fill = grade ), alpha = 0.5 ) + theme_tufte () + scale_fill_manual ( values = colors ) + labs ( fill = 'HOLC Categories' ) + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), panel.grid.major = element_blank (), panel.grid.minor = element_blank (), legend.position = \"right\" ) # Save the plot as a high-resolution PNG file ggsave ( filename , plot , width = 10 , height = 8 , units = \"in\" , dpi = 600 ) # Return the plot object if needed for further manipulation or checking return ( plot ) } FUNCTION: Plot the HOLC grades individually split_plot <- function ( sf_data , roads , 
rivers ) { # Filter for grades A, B, C, and D sf_data_filtered <- sf_data %>% filter ( grade %in% c ( 'A' , 'B' , 'C' , 'D' )) # Define a color for each grade grade_colors <- c ( \"A\" = \"#76a865\" , \"B\" = \"#7cb5bd\" , \"C\" = \"#ffff00\" , \"D\" = \"#d9838d\" ) # Create the plot with panels for each grade plot <- ggplot ( data = sf_data_filtered ) + geom_sf ( data = roads , alpha = 0.1 , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.1 , lwd = 1.1 ) + geom_sf ( aes ( fill = grade )) + facet_wrap ( ~ grade , nrow = 1 ) + # Free scales for different zoom levels if needed scale_fill_manual ( values = grade_colors ) + theme_minimal () + labs ( fill = 'HOLC Grade' ) + theme_tufte () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"none\" , # Optionally hide the legend axis.text = element_blank (), # Remove axis text axis.title = element_blank (), # Remove axis titles axis.ticks = element_blank (), # Remove axis ticks panel.grid.major = element_blank (), # Remove major grid lines panel.grid.minor = element_blank ()) ggsave ( plot , filename = \"HOLC_grades_individually.png\" , width = 10 , height = 4 , units = \"in\" , dpi = 1200 ) return ( plot ) } Plot Denver Redlining denver_plot <- plot_city_redlining ( denver_redlining ) denver_plot ![](worksheet_redlining_student_edition_files/figure-gfm/unnamed-chunk-8-1.png) Plot 4 HOLC grades individually plot_row <- split_plot ( denver_redlining , roads , rivers ) plot_row ![](worksheet_redlining_student_edition_files/figure-gfm/unnamed-chunk-9-1.png) FUNCTION: Map an amenity over each grade individually process_and_plot_sf_layers <- function ( layer1 , layer2 , output_file = \"output_plot.png\" ) { # Make geometries valid layer1 <- st_make_valid ( layer1 ) layer2 <- st_make_valid ( layer2 ) # Optionally, simplify geometries to remove duplicate vertices layer1 <- st_simplify ( layer1 , preserveTopology = TRUE ) |> filter ( grade != \"\" ) # Prepare a list to store results results <- list () # Loop through each grade and perform operations for ( grade in c ( \"A\" , \"B\" , \"C\" , \"D\" )) { # Filter layer1 for current grade layer1_grade <- layer1 [ layer1 $ grade == grade , ] # Buffer the geometries of the current grade buffered_layer1_grade <- st_buffer ( layer1_grade , dist = 500 ) # Intersect with the second layer intersections <- st_intersects ( layer2 , buffered_layer1_grade , sparse = FALSE ) selected_polygons <- layer2 [ rowSums ( intersections ) > 0 , ] # Add a new column to store the grade information selected_polygons $ grade <- grade # Store the result results [[ grade ]] <- selected_polygons } # Combine all selected polygons from different grades into one sf object final_selected_polygons <- do.call ( rbind , results ) # Define colors for the grades grade_colors <- c ( \"A\" = \"grey\" , \"B\" = \"grey\" , \"C\" = \"grey\" , \"D\" = \"grey\" ) # Create the plot plot <- ggplot () + geom_sf ( data = roads , alpha = 0.05 , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.1 , lwd = 1.1 ) + geom_sf ( data = layer1 , fill = \"grey\" , color = \"grey\" , size = 0.1 ) + facet_wrap ( ~ grade , nrow = 1 ) + geom_sf ( data = final_selected_polygons , fill = \"green\" , color = \"green\" , size = 0.1 ) + facet_wrap ( ~ grade , nrow = 1 ) + #scale_fill_manual(values = grade_colors) + #scale_color_manual(values = grade_colors) + theme_minimal () + labs ( fill = 'HOLC Grade' ) + theme_tufte () + 
theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"none\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot as a high-resolution PNG file ggsave ( output_file , plot , width = 10 , height = 4 , units = \"in\" , dpi = 1200 ) # Return the plot for optional further use return ( list ( plot = plot , sf = final_selected_polygons )) } Part 2: Integrating Environmental Data \u00b6 Data Processing \u00b6 Use satellite data from 2010 to analyze greenspace using NDVI, an index that measures the quantity of vegetation in an area. Apply methods to adjust for potential confounders as described in the study, ensuring that comparisons of greenspace across HOLC grades are valid and not biased by historical or socio-demographic factors. FUNCTION: Stream NDVI data polygon_layer <- denver_redlining # Function to process satellite data based on an SF polygon's extent process_satellite_data <- function ( polygon_layer , start_date , end_date , assets , fps = 1 , output_file = \"anim.gif\" ) { # Record start time start_time <- Sys.time () # Calculate the bbox from the polygon layer bbox <- st_bbox ( polygon_layer ) s = stac ( \"https://earth-search.aws.element84.com/v0\" ) # Use stacR to search for Sentinel-2 images within the bbox and date range items = s |> stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( bbox [ \"xmin\" ], bbox [ \"ymin\" ], bbox [ \"xmax\" ], bbox [ \"ymax\" ]), datetime = paste ( start_date , end_date , sep = \"/\" ), limit = 500 ) %>% post_request () # Define mask for Sentinel-2 image quality #S2.mask <- image_mask(\"SCL\", values = c(3, 8, 9)) # Create a collection of images filtering by cloud cover col <- stac_image_collection ( items $ features , asset_names = assets , property_filter = function ( x ) { x [[ \"eo:cloud_cover\" ]] < 30 }) # Define a view for processing the data v <- cube_view ( srs = \"EPSG:4326\" , extent = list ( t0 = start_date , t1 = end_date , left = bbox [ \"xmin\" ], right = bbox [ \"xmax\" ], top = bbox [ \"ymax\" ], bottom = bbox [ \"ymin\" ]), dx = 0.001 , dy = 0.001 , dt = \"P1M\" , aggregation = \"median\" , resampling = \"bilinear\" ) # Calculate NDVI and create an animation ndvi_col <- function ( n ) { rev ( sequential_hcl ( n , \"Green-Yellow\" )) } #raster_cube(col, v, mask = S2.mask) %>% raster_cube ( col , v ) %>% select_bands ( c ( \"B04\" , \"B08\" )) %>% apply_pixel ( \"(B08-B04)/(B08+B04)\" , \"NDVI\" ) %>% gdalcubes :: animate ( col = ndvi_col , zlim = c ( -0.2 , 1 ), key.pos = 1 , save_as = output_file , fps = fps ) # Calculate processing time end_time <- Sys.time () processing_time <- difftime ( end_time , start_time ) # Return processing time return ( processing_time ) } Stream NDVI data: animation processing_time <- process_satellite_data ( denver_redlining , \"2022-05-31\" , \"2023-05-31\" , c ( \"B04\" , \"B08\" )) FUNCTION: Map NDVI per HOLC grade individually create_mask_and_plot <- function ( redlining_sf , background_raster = ndvi $ raster , roads = NULL , rivers = NULL ){ start_time <- Sys.time () # Start timing # Validate and prepare the redlining data redlining_sf <- redlining_sf %>% filter ( grade != \"\" ) %>% st_make_valid () bbox <- st_bbox ( redlining_sf ) # Get original bounding box expanded_bbox <- expand_bbox ( bbox , 6000 , 1000 ) # expanded_bbox_poly <- st_as_sfc ( 
expanded_bbox , crs = st_crs ( redlining_sf )) %>% st_make_valid () # Initialize an empty list to store masks masks <- list () # Iterate over each grade to create masks unique_grades <- unique ( redlining_sf $ grade ) for ( grade in unique_grades ) { # Filter polygons by grade grade_polygons <- redlining_sf [ redlining_sf $ grade == grade , ] # Create an \"inverted\" mask by subtracting these polygons from the background mask <- st_difference ( expanded_bbox_poly , st_union ( grade_polygons )) # Store the mask in the list with the grade as the name masks [[ grade ]] <- st_sf ( geometry = mask , grade = grade ) } # Combine all masks into a single sf object mask_sf <- do.call ( rbind , masks ) # Normalize the grades so that C.2 becomes C, but correctly handle other grades mask_sf $ grade <- ifelse ( mask_sf $ grade == \"C.2\" , \"C\" , mask_sf $ grade ) # Prepare the plot plot <- ggplot () + geom_spatraster ( data = background_raster , aes ( fill = NDVI )) + scale_fill_viridis_c ( name = \"NDVI\" , option = \"viridis\" , direction = -1 ) + geom_sf ( data = mask_sf , aes ( color = grade ), fill = \"white\" , size = 0.1 , show.legend = FALSE ) + scale_color_manual ( values = c ( \"A\" = \"white\" , \"B\" = \"white\" , \"C\" = \"white\" , \"D\" = \"white\" ), name = \"Grade\" ) + facet_wrap ( ~ grade , nrow = 1 ) + geom_sf ( data = roads , alpha = 1 , lwd = 0.1 , color = \"white\" ) + geom_sf ( data = rivers , color = \"white\" , alpha = 0.5 , lwd = 1.1 ) + labs ( title = \"NDVI: Normalized Difference Vegetation Index\" ) + theme_minimal () + coord_sf ( xlim = c ( bbox [ \"xmin\" ], bbox [ \"xmax\" ]), ylim = c ( bbox [ \"ymin\" ], bbox [ \"ymax\" ]), expand = FALSE ) + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"bottom\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot ggsave ( \"redlining_mask_ndvi.png\" , plot , width = 10 , height = 4 , dpi = 600 ) end_time <- Sys.time () # End timing runtime <- end_time - start_time # Return the plot and runtime return ( list ( plot = plot , runtime = runtime , mask_sf = mask_sf )) } FUNCTION: Stream year average NDVI yearly_average_ndvi <- function ( polygon_layer , output_file = \"ndvi.png\" , dx = 0.01 , dy = 0.01 ) { # Record start time start_time <- Sys.time () # Calculate the bbox from the polygon layer bbox <- st_bbox ( polygon_layer ) s = stac ( \"https://earth-search.aws.element84.com/v0\" ) # Search for Sentinel-2 images within the bbox for June items <- s |> stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( bbox [ \"xmin\" ], bbox [ \"ymin\" ], bbox [ \"xmax\" ], bbox [ \"ymax\" ]), datetime = \"2023-01-01/2023-12-31\" , limit = 500 ) %>% post_request () # Create a collection of images filtering by cloud cover col <- stac_image_collection ( items $ features , asset_names = c ( \"B04\" , \"B08\" ), property_filter = function ( x ) { x [[ \"eo:cloud_cover\" ]] < 80 }) # Define a view for processing the data specifically for June v <- cube_view ( srs = \"EPSG:4326\" , extent = list ( t0 = \"2023-01-01\" , t1 = \"2023-12-31\" , left = bbox [ \"xmin\" ], right = bbox [ \"xmax\" ], top = bbox [ \"ymax\" ], bottom = bbox [ \"ymin\" ]), dx = dx , dy = dy , dt = \"P1Y\" , aggregation = \"median\" , resampling = \"bilinear\" ) # Process NDVI ndvi_rast <- raster_cube ( col , v ) %>% 
select_bands ( c ( \"B04\" , \"B08\" )) %>% apply_pixel ( \"(B08-B04)/(B08+B04)\" , \"NDVI\" ) %>% write_tif () |> terra :: rast () # Convert terra Raster to ggplot using tidyterra ndvi_plot <- ggplot () + geom_spatraster ( data = ndvi_rast , aes ( fill = NDVI )) + scale_fill_viridis_c ( option = \"viridis\" , direction = -1 , name = \"NDVI\" ) + labs ( title = \"NDVI mean for 2023\" ) + theme_minimal () + coord_sf () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"right\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot as a high-resolution PNG file ggsave ( output_file , ndvi_plot , width = 10 , height = 8 , dpi = 600 ) # Calculate processing time end_time <- Sys.time () processing_time <- difftime ( end_time , start_time ) # Return the plot and processing time return ( list ( plot = ndvi_plot , processing_time = processing_time , raster = ndvi_rast )) } Stream NDVI: low resolution ndvi_background_low <- yearly_average_ndvi ( denver_redlining ) Map low resolution NDVI per HOLC grade ndvi <- create_mask_and_plot ( denver_redlining , background_raster = ndvi_background_low $ raster , roads = roads , rivers = rivers ) FUNCTION: Map Denver City provided data per HOLC grade process_city_inventory_data <- function ( address , inner_file , polygon_layer , output_filename , variable_label = 'Tree Density' ) { # Download and read the shapefile full_path <- glue ( \"/vsizip/vsicurl/{address}/{inner_file}\" ) shape_data <- st_read ( full_path , quiet = TRUE ) |> st_as_sf () # Process the shape data with the provided polygon layer processed_data <- process_and_plot_sf_layers ( polygon_layer , shape_data , paste0 ( output_filename , \".png\" )) # Extract trees from the processed data trees <- processed_data $ sf denver_redlining_residential <- polygon_layer |> filter ( grade != \"\" ) # Generate the density plot plot <- ggplot () + geom_sf ( data = roads , alpha = 0.05 , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.1 , lwd = 1.1 ) + geom_sf ( data = denver_redlining_residential , fill = \"grey\" , color = \"grey\" , size = 0.1 ) + facet_wrap ( ~ grade , nrow = 1 ) + stat_density_2d ( data = trees , mapping = aes ( x = map_dbl ( geometry , ~ . [ 1 ]), y = map_dbl ( geometry , ~ . 
[ 2 ]), fill = stat ( density )), geom = 'tile' , contour = FALSE , alpha = 0.9 ) + scale_fill_gradientn ( colors = c ( \"transparent\" , \"white\" , \"limegreen\" ), values = scales :: rescale ( c ( 0 , 0.1 , 1 )), # Adjust these based on your density range guide = \"colourbar\" ) + theme_minimal () + labs ( fill = variable_label ) + theme_tufte () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"bottom\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot ggsave ( paste0 ( output_filename , \"_density_plot.png\" ), plot , width = 10 , height = 4 , units = \"in\" , dpi = 600 ) # Return the plot and the tree layer return ( list ( plot = plot , layer = trees )) } Map tree inventory per HOLC grade result <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/tree_inventory/shape/tree_inventory.zip\" , \"tree_inventory.shp\" , denver_redlining , \"Denver_tree_inventory_2023\" ) Warning: `stat(density)` was deprecated in ggplot2 3.4.0. \u2139 Please use `after_stat(density)` instead. Map traffic accidents per HOLC grade result <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/traffic_accidents/shape/traffic_accidents.zip\" , \"traffic_accidents.shp\" , denver_redlining , \"Denver_traffic_accidents\" , variable_label = 'Traffic accidents density' ) Map stream sampling effort per HOLC grade instream_sampling_sites <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/instream_sampling_sites/shape/instream_sampling_sites.zip\" , \"instream_sampling_sites.shp\" , denver_redlining , \"instream_sampling_sites\" , variable_label = 'Instream sampling sites density' ) Map soil sampling effort per HOLC grade soil_samples <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/soil_samples/shape/soil_samples.zip\" , \"soil_samples.shp\" , denver_redlining , \"Soil samples\" , variable_label = 'soil samples density' ) Map public art density per HOLC grade public_art <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/public_art/shape/public_art.zip\" , \"public_art.shp\" , denver_redlining , \"Public art \" , variable_label = 'Public art density' ) Map liquor licenses density per HOLC grade liquor_licenses <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/liquor_licenses/shape/liquor_licenses.zip\" , \"liquor_licenses.shp\" , denver_redlining , \"liquor licenses \" , variable_label = 'liquor licenses density' ) Map crime density per HOLC grade Crime <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/crime/shape/crime.zip\" , \"crime.shp\" , denver_redlining , \"crime\" , variable_label = 'Crime density' ) Map police shooting density per HOLC grade Denver_police_shootings <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/denver_police_officer_involved_shootings/shape/denver_police_officer_involved_shootings.zip\" , \"denver_police_officer_involved_shootings.shp\" , denver_redlining , \"Police shootings\" , variable_label = 'Police shootings density' ) Part 3: Comparative Analysis and Visualization \u00b6 Statistical Analysis \u00b6 Conduct a detailed statistical analysis to compare greenspace across different HOLC grades, 
using techniques like Targeted Maximum Likelihood Estimation (TMLE) to assess the association between historical redlining and current greenspace levels. Visualize the disparities in greenspace distribution using GIS tools, highlighting how redlining has shaped urban ecological landscapes. Conclusion \u00b6 This tutorial provides tools and methodologies to explore the lingering effects of historic redlining on urban greenspace, offering insights into the intersection of urban planning, environmental justice, and public health. References \u00b6 Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. (2021). Redlines and Greenspace: The Relationship between Historical Redlining and 2010 Greenspace across the United States. Environmental Health Perspectives , 129(1), 017006. DOI:10.1289/EHP7495. Available online","title":"student edition"},{"location":"worksheets/worksheet_redlining_student_edition/#student-edition","text":"img { width: 100%; } details summary { color: black; background-color: white; } details[open] summary { color: black; } R libraries we use in this analysis if ( ! requireNamespace ( \"tidytext\" , quietly = TRUE )) { install.packages ( \"tidytext\" ) } library ( tidytext ) ## Warning: package 'tidytext' was built under R version 4.3.2 library ( sf ) ## Warning: package 'sf' was built under R version 4.3.2 ## Linking to GEOS 3.11.0, GDAL 3.5.3, PROJ 9.1.0; sf_use_s2() is TRUE library ( ggplot2 ) ## Warning: package 'ggplot2' was built under R version 4.3.2 library ( ggthemes ) ## Warning: package 'ggthemes' was built under R version 4.3.2 library ( dplyr ) ## ## Attaching package: 'dplyr' ## The following objects are masked from 'package:stats': ## ## filter, lag ## The following objects are masked from 'package:base': ## ## intersect, setdiff, setequal, union library ( rstac ) ## Warning: package 'rstac' was built under R version 4.3.2 library ( gdalcubes ) ## Warning: package 'gdalcubes' was built under R version 4.3.2 library ( gdalUtils ) ## Please note that rgdal will be retired during October 2023, ## plan transition to sf/stars/terra functions using GDAL and PROJ ## at your earliest convenience. ## See https://r-spatial.org/r/2023/05/15/evolution4.html and https://github.com/r-spatial/evolution ## rgdal: version: 1.6-7, (SVN revision 1203) ## Geospatial Data Abstraction Library extensions to R successfully loaded ## Loaded GDAL runtime: GDAL 3.5.3, released 2022/10/21 ## Path to GDAL shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/rgdal/gdal ## GDAL does not use iconv for recoding strings. ## GDAL binary built with GEOS: TRUE ## Loaded PROJ runtime: Rel. 9.1.0, September 1st, 2022, [PJ_VERSION: 910] ## Path to PROJ shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/gdalcubes/proj ## PROJ CDN enabled: FALSE ## Linking to sp version:1.6-1 ## To mute warnings of possible GDAL/OSR exportToProj4() degradation, ## use options(\"rgdal_show_exportToProj4_warnings\"=\"none\") before loading sp or rgdal. 
## ## Attaching package: 'gdalUtils' ## The following object is masked from 'package:sf': ## ## gdal_rasterize library ( gdalcubes ) library ( colorspace ) library ( terra ) ## Warning: package 'terra' was built under R version 4.3.2 ## terra 1.7.71 ## ## Attaching package: 'terra' ## The following object is masked from 'package:colorspace': ## ## RGB ## The following objects are masked from 'package:gdalcubes': ## ## animate, crop, size library ( tidyterra ) ## ## Attaching package: 'tidyterra' ## The following object is masked from 'package:stats': ## ## filter library ( basemapR ) library ( tidytext ) library ( ggwordcloud ) library ( osmextract ) ## Data (c) OpenStreetMap contributors, ODbL 1.0. https://www.openstreetmap.org/copyright. ## Check the package website, https://docs.ropensci.org/osmextract/, for more details. library ( sf ) library ( ggplot2 ) library ( ggthemes ) library ( glue ) ## ## Attaching package: 'glue' ## The following object is masked from 'package:terra': ## ## trim library ( purrr ) FUNCTION: Stream HOLC data from a city # Function to load and filter redlining data by city load_city_redlining_data <- function ( city_name ) { # URL to the GeoJSON data url <- \"https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson\" # Read the GeoJSON file into an sf object redlining_data <- read_sf ( url ) # Filter the data for the specified city and non-empty grades city_redline <- redlining_data %>% filter ( city == city_name ) # Return the filtered data return ( city_redline ) } Stream HOLC data for Denver, CO # Load redlining data for Denver denver_redlining <- load_city_redlining_data ( \"Denver\" ) knitr :: kable ( head ( denver_redlining ), format = \"markdown\" ) | area_id | city | state | city_survey | cat | grade | label | res | com | ind | fill | GEOID10 | GISJOIN | calc_area | pct_tract | geometry | |--------:|:-------|:------|:------------|:-----|:------|:------|:-----|:------|:------|:---------|:------------|:---------------|-------------:|----------:|:-----------------------------| | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004104 | G0800310004104 | 1.525535e+01 | 0.00001 | MULTIPOLYGON (((-104.9125 3\u2026 | | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004201 | G0800310004201 | 3.987458e+05 | 0.20900 | MULTIPOLYGON (((-104.9246 3\u2026 | | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004304 | G0800310004304 | 1.554195e+05 | 0.05927 | MULTIPOLYGON (((-104.9125 3\u2026 | | 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \\#76a865 | 08031004202 | G0800310004202 | 1.117770e+06 | 0.57245 | MULTIPOLYGON (((-104.9125 3\u2026 | | 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \\#76a865 | 08031004302 | G0800310004302 | 3.133415e+05 | 0.28381 | MULTIPOLYGON (((-104.928 39\u2026 | | 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \\#76a865 | 08031004301 | G0800310004301 | 1.221218e+05 | 0.08622 | MULTIPOLYGON (((-104.9305 3\u2026 | FUNCTION: Get Points-of-Interest from city of interest get_places <- function ( polygon_layer , type = \"food\" ) { # Check if the input is an sf object if ( ! 
inherits ( polygon_layer , \"sf\" )) { stop ( \"The provided object is not an sf object.\" ) } # Create a bounding box from the input sf object bbox_here <- st_bbox ( polygon_layer ) |> st_as_sfc () if ( type == \"roads\" ){ my_layer <- \"lines\" my_query <- \"SELECT * FROM lines WHERE ( highway IN ('motorway', 'trunk', 'primary', 'secondary', 'tertiary') )\" title <- \"Major roads\" } if ( type == \"rivers\" ){ my_layer <- \"lines\" my_query <- \"SELECT * FROM lines WHERE ( waterway IN ('river'))\" title <- \"Major rivers\" } # Use the bbox to get data with oe_get(), specifying the desired layer and a custom SQL query for fresh food places tryCatch ({ places <- oe_get ( place = bbox_here , layer = my_layer , # Adjusted layer; change as per actual data availability query = my_query , quiet = TRUE ) places <- st_make_valid ( places ) # Crop the data to the bounding box cropped_places <- st_crop ( places , bbox_here ) # Plotting the cropped fresh food places plot <- ggplot ( data = cropped_places ) + geom_sf ( fill = \"cornflowerblue\" , color = \"cornflowerblue\" ) + ggtitle ( title ) + theme_tufte () + theme ( legend.position = \"none\" , # Optionally hide the legend axis.text = element_blank (), # Remove axis text axis.title = element_blank (), # Remove axis titles axis.ticks = element_blank (), # Remove axis ticks plot.background = element_rect ( fill = \"white\" , color = NA ), # Set the plot background to white panel.background = element_rect ( fill = \"white\" , color = NA ), # Set the panel background to white panel.grid.major = element_blank (), # Remove major grid lines panel.grid.minor = element_blank (), ) # Save the plot as a PNG file png_filename <- paste0 ( title , \"_\" , Sys.Date (), \".png\" ) ggsave ( png_filename , plot , width = 10 , height = 8 , units = \"in\" ) # Return the cropped dataset return ( cropped_places ) }, error = function ( e ) { stop ( \"Failed to retrieve or plot data: \" , e $ message ) }) } Stream amenities by category roads <- get_places ( denver_redlining , type = \"roads\" ) rivers <- get_places ( denver_redlining , type = \"rivers\" ) FUNCTION: Plot POI over HOLC grades plot_city_redlining <- function ( redlining_data , filename = \"redlining_plot.png\" ) { # Fetch additional geographic data based on redlining data roads <- get_places ( redlining_data , type = \"roads\" ) rivers <- get_places ( redlining_data , type = \"rivers\" ) # Filter residential zones with valid grades and where city survey is TRUE residential_zones <- redlining_data %>% filter ( city_survey == TRUE & grade != \"\" ) # Colors for the grades colors <- c ( \"#76a865\" , \"#7cb5bd\" , \"#ffff00\" , \"#d9838d\" ) # Plot the data using ggplot2 plot <- ggplot () + geom_sf ( data = roads , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.5 , lwd = 1.1 ) + geom_sf ( data = residential_zones , aes ( fill = grade ), alpha = 0.5 ) + theme_tufte () + scale_fill_manual ( values = colors ) + labs ( fill = 'HOLC Categories' ) + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), panel.grid.major = element_blank (), panel.grid.minor = element_blank (), legend.position = \"right\" ) # Save the plot as a high-resolution PNG file ggsave ( filename , plot , width = 10 , height = 8 , units = \"in\" , dpi = 600 ) # Return the plot object if needed for further manipulation or checking return ( plot ) } FUNCTION: Plot the HOLC grades individually split_plot <- function ( sf_data , roads , 
rivers ) { # Filter for grades A, B, C, and D sf_data_filtered <- sf_data %>% filter ( grade %in% c ( 'A' , 'B' , 'C' , 'D' )) # Define a color for each grade grade_colors <- c ( \"A\" = \"#76a865\" , \"B\" = \"#7cb5bd\" , \"C\" = \"#ffff00\" , \"D\" = \"#d9838d\" ) # Create the plot with panels for each grade plot <- ggplot ( data = sf_data_filtered ) + geom_sf ( data = roads , alpha = 0.1 , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.1 , lwd = 1.1 ) + geom_sf ( aes ( fill = grade )) + facet_wrap ( ~ grade , nrow = 1 ) + # Free scales for different zoom levels if needed scale_fill_manual ( values = grade_colors ) + theme_minimal () + labs ( fill = 'HOLC Grade' ) + theme_tufte () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"none\" , # Optionally hide the legend axis.text = element_blank (), # Remove axis text axis.title = element_blank (), # Remove axis titles axis.ticks = element_blank (), # Remove axis ticks panel.grid.major = element_blank (), # Remove major grid lines panel.grid.minor = element_blank ()) ggsave ( plot , filename = \"HOLC_grades_individually.png\" , width = 10 , height = 4 , units = \"in\" , dpi = 1200 ) return ( plot ) } Plot Denver Redlining denver_plot <- plot_city_redlining ( denver_redlining ) denver_plot ![](worksheet_redlining_student_edition_files/figure-gfm/unnamed-chunk-8-1.png) Plot 4 HOLC grades individually plot_row <- split_plot ( denver_redlining , roads , rivers ) plot_row ![](worksheet_redlining_student_edition_files/figure-gfm/unnamed-chunk-9-1.png) FUNCTION: Map an amenity over each grade individually process_and_plot_sf_layers <- function ( layer1 , layer2 , output_file = \"output_plot.png\" ) { # Make geometries valid layer1 <- st_make_valid ( layer1 ) layer2 <- st_make_valid ( layer2 ) # Optionally, simplify geometries to remove duplicate vertices layer1 <- st_simplify ( layer1 , preserveTopology = TRUE ) |> filter ( grade != \"\" ) # Prepare a list to store results results <- list () # Loop through each grade and perform operations for ( grade in c ( \"A\" , \"B\" , \"C\" , \"D\" )) { # Filter layer1 for current grade layer1_grade <- layer1 [ layer1 $ grade == grade , ] # Buffer the geometries of the current grade buffered_layer1_grade <- st_buffer ( layer1_grade , dist = 500 ) # Intersect with the second layer intersections <- st_intersects ( layer2 , buffered_layer1_grade , sparse = FALSE ) selected_polygons <- layer2 [ rowSums ( intersections ) > 0 , ] # Add a new column to store the grade information selected_polygons $ grade <- grade # Store the result results [[ grade ]] <- selected_polygons } # Combine all selected polygons from different grades into one sf object final_selected_polygons <- do.call ( rbind , results ) # Define colors for the grades grade_colors <- c ( \"A\" = \"grey\" , \"B\" = \"grey\" , \"C\" = \"grey\" , \"D\" = \"grey\" ) # Create the plot plot <- ggplot () + geom_sf ( data = roads , alpha = 0.05 , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.1 , lwd = 1.1 ) + geom_sf ( data = layer1 , fill = \"grey\" , color = \"grey\" , size = 0.1 ) + facet_wrap ( ~ grade , nrow = 1 ) + geom_sf ( data = final_selected_polygons , fill = \"green\" , color = \"green\" , size = 0.1 ) + facet_wrap ( ~ grade , nrow = 1 ) + #scale_fill_manual(values = grade_colors) + #scale_color_manual(values = grade_colors) + theme_minimal () + labs ( fill = 'HOLC Grade' ) + theme_tufte () + 
theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"none\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot as a high-resolution PNG file ggsave ( output_file , plot , width = 10 , height = 4 , units = \"in\" , dpi = 1200 ) # Return the plot for optional further use return ( list ( plot = plot , sf = final_selected_polygons )) }","title":"student edition"},{"location":"worksheets/worksheet_redlining_student_edition/#part-2-integrating-environmental-data","text":"","title":"Part 2: Integrating Environmental Data"},{"location":"worksheets/worksheet_redlining_student_edition/#data-processing","text":"Use satellite data from 2010 to analyze greenspace using NDVI, an index that measures the quantity of vegetation in an area. Apply methods to adjust for potential confounders as described in the study, ensuring that comparisons of greenspace across HOLC grades are valid and not biased by historical or socio-demographic factors. FUNCTION: Stream NDVI data polygon_layer <- denver_redlining # Function to process satellite data based on an SF polygon's extent process_satellite_data <- function ( polygon_layer , start_date , end_date , assets , fps = 1 , output_file = \"anim.gif\" ) { # Record start time start_time <- Sys.time () # Calculate the bbox from the polygon layer bbox <- st_bbox ( polygon_layer ) s = stac ( \"https://earth-search.aws.element84.com/v0\" ) # Use stacR to search for Sentinel-2 images within the bbox and date range items = s |> stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( bbox [ \"xmin\" ], bbox [ \"ymin\" ], bbox [ \"xmax\" ], bbox [ \"ymax\" ]), datetime = paste ( start_date , end_date , sep = \"/\" ), limit = 500 ) %>% post_request () # Define mask for Sentinel-2 image quality #S2.mask <- image_mask(\"SCL\", values = c(3, 8, 9)) # Create a collection of images filtering by cloud cover col <- stac_image_collection ( items $ features , asset_names = assets , property_filter = function ( x ) { x [[ \"eo:cloud_cover\" ]] < 30 }) # Define a view for processing the data v <- cube_view ( srs = \"EPSG:4326\" , extent = list ( t0 = start_date , t1 = end_date , left = bbox [ \"xmin\" ], right = bbox [ \"xmax\" ], top = bbox [ \"ymax\" ], bottom = bbox [ \"ymin\" ]), dx = 0.001 , dy = 0.001 , dt = \"P1M\" , aggregation = \"median\" , resampling = \"bilinear\" ) # Calculate NDVI and create an animation ndvi_col <- function ( n ) { rev ( sequential_hcl ( n , \"Green-Yellow\" )) } #raster_cube(col, v, mask = S2.mask) %>% raster_cube ( col , v ) %>% select_bands ( c ( \"B04\" , \"B08\" )) %>% apply_pixel ( \"(B08-B04)/(B08+B04)\" , \"NDVI\" ) %>% gdalcubes :: animate ( col = ndvi_col , zlim = c ( -0.2 , 1 ), key.pos = 1 , save_as = output_file , fps = fps ) # Calculate processing time end_time <- Sys.time () processing_time <- difftime ( end_time , start_time ) # Return processing time return ( processing_time ) } Stream NDVI data: animation processing_time <- process_satellite_data ( denver_redlining , \"2022-05-31\" , \"2023-05-31\" , c ( \"B04\" , \"B08\" )) FUNCTION: Map NDVI per HOLC grade individually create_mask_and_plot <- function ( redlining_sf , background_raster = ndvi $ raster , roads = NULL , rivers = NULL ){ start_time <- Sys.time () # Start timing # Validate and prepare the redlining data redlining_sf <- 
redlining_sf %>% filter ( grade != \"\" ) %>% st_make_valid () bbox <- st_bbox ( redlining_sf ) # Get original bounding box expanded_bbox <- expand_bbox ( bbox , 6000 , 1000 ) # expanded_bbox_poly <- st_as_sfc ( expanded_bbox , crs = st_crs ( redlining_sf )) %>% st_make_valid () # Initialize an empty list to store masks masks <- list () # Iterate over each grade to create masks unique_grades <- unique ( redlining_sf $ grade ) for ( grade in unique_grades ) { # Filter polygons by grade grade_polygons <- redlining_sf [ redlining_sf $ grade == grade , ] # Create an \"inverted\" mask by subtracting these polygons from the background mask <- st_difference ( expanded_bbox_poly , st_union ( grade_polygons )) # Store the mask in the list with the grade as the name masks [[ grade ]] <- st_sf ( geometry = mask , grade = grade ) } # Combine all masks into a single sf object mask_sf <- do.call ( rbind , masks ) # Normalize the grades so that C.2 becomes C, but correctly handle other grades mask_sf $ grade <- ifelse ( mask_sf $ grade == \"C.2\" , \"C\" , mask_sf $ grade ) # Prepare the plot plot <- ggplot () + geom_spatraster ( data = background_raster , aes ( fill = NDVI )) + scale_fill_viridis_c ( name = \"NDVI\" , option = \"viridis\" , direction = -1 ) + geom_sf ( data = mask_sf , aes ( color = grade ), fill = \"white\" , size = 0.1 , show.legend = FALSE ) + scale_color_manual ( values = c ( \"A\" = \"white\" , \"B\" = \"white\" , \"C\" = \"white\" , \"D\" = \"white\" ), name = \"Grade\" ) + facet_wrap ( ~ grade , nrow = 1 ) + geom_sf ( data = roads , alpha = 1 , lwd = 0.1 , color = \"white\" ) + geom_sf ( data = rivers , color = \"white\" , alpha = 0.5 , lwd = 1.1 ) + labs ( title = \"NDVI: Normalized Difference Vegetation Index\" ) + theme_minimal () + coord_sf ( xlim = c ( bbox [ \"xmin\" ], bbox [ \"xmax\" ]), ylim = c ( bbox [ \"ymin\" ], bbox [ \"ymax\" ]), expand = FALSE ) + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"bottom\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot ggsave ( \"redlining_mask_ndvi.png\" , plot , width = 10 , height = 4 , dpi = 600 ) end_time <- Sys.time () # End timing runtime <- end_time - start_time # Return the plot and runtime return ( list ( plot = plot , runtime = runtime , mask_sf = mask_sf )) } FUNCTION: Stream year average NDVI yearly_average_ndvi <- function ( polygon_layer , output_file = \"ndvi.png\" , dx = 0.01 , dy = 0.01 ) { # Record start time start_time <- Sys.time () # Calculate the bbox from the polygon layer bbox <- st_bbox ( polygon_layer ) s = stac ( \"https://earth-search.aws.element84.com/v0\" ) # Search for Sentinel-2 images within the bbox for June items <- s |> stac_search ( collections = \"sentinel-s2-l2a-cogs\" , bbox = c ( bbox [ \"xmin\" ], bbox [ \"ymin\" ], bbox [ \"xmax\" ], bbox [ \"ymax\" ]), datetime = \"2023-01-01/2023-12-31\" , limit = 500 ) %>% post_request () # Create a collection of images filtering by cloud cover col <- stac_image_collection ( items $ features , asset_names = c ( \"B04\" , \"B08\" ), property_filter = function ( x ) { x [[ \"eo:cloud_cover\" ]] < 80 }) # Define a view for processing the data specifically for June v <- cube_view ( srs = \"EPSG:4326\" , extent = list ( t0 = \"2023-01-01\" , t1 = \"2023-12-31\" , left = bbox [ \"xmin\" ], right = bbox [ \"xmax\" 
], top = bbox [ \"ymax\" ], bottom = bbox [ \"ymin\" ]), dx = dx , dy = dy , dt = \"P1Y\" , aggregation = \"median\" , resampling = \"bilinear\" ) # Process NDVI ndvi_rast <- raster_cube ( col , v ) %>% select_bands ( c ( \"B04\" , \"B08\" )) %>% apply_pixel ( \"(B08-B04)/(B08+B04)\" , \"NDVI\" ) %>% write_tif () |> terra :: rast () # Convert terra Raster to ggplot using tidyterra ndvi_plot <- ggplot () + geom_spatraster ( data = ndvi_rast , aes ( fill = NDVI )) + scale_fill_viridis_c ( option = \"viridis\" , direction = -1 , name = \"NDVI\" ) + labs ( title = \"NDVI mean for 2023\" ) + theme_minimal () + coord_sf () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"right\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot as a high-resolution PNG file ggsave ( output_file , ndvi_plot , width = 10 , height = 8 , dpi = 600 ) # Calculate processing time end_time <- Sys.time () processing_time <- difftime ( end_time , start_time ) # Return the plot and processing time return ( list ( plot = ndvi_plot , processing_time = processing_time , raster = ndvi_rast )) } Stream NDVI: low resolution ndvi_background_low <- yearly_average_ndvi ( denver_redlining ) Map low resolution NDVI per HOLC grade ndvi <- create_mask_and_plot ( denver_redlining , background_raster = ndvi_background_low $ raster , roads = roads , rivers = rivers ) FUNCTION: Map Denver City provided data per HOLC grade process_city_inventory_data <- function ( address , inner_file , polygon_layer , output_filename , variable_label = 'Tree Density' ) { # Download and read the shapefile full_path <- glue ( \"/vsizip/vsicurl/{address}/{inner_file}\" ) shape_data <- st_read ( full_path , quiet = TRUE ) |> st_as_sf () # Process the shape data with the provided polygon layer processed_data <- process_and_plot_sf_layers ( polygon_layer , shape_data , paste0 ( output_filename , \".png\" )) # Extract trees from the processed data trees <- processed_data $ sf denver_redlining_residential <- polygon_layer |> filter ( grade != \"\" ) # Generate the density plot plot <- ggplot () + geom_sf ( data = roads , alpha = 0.05 , lwd = 0.1 ) + geom_sf ( data = rivers , color = \"blue\" , alpha = 0.1 , lwd = 1.1 ) + geom_sf ( data = denver_redlining_residential , fill = \"grey\" , color = \"grey\" , size = 0.1 ) + facet_wrap ( ~ grade , nrow = 1 ) + stat_density_2d ( data = trees , mapping = aes ( x = map_dbl ( geometry , ~ . [ 1 ]), y = map_dbl ( geometry , ~ . 
[ 2 ]), fill = stat ( density )), geom = 'tile' , contour = FALSE , alpha = 0.9 ) + scale_fill_gradientn ( colors = c ( \"transparent\" , \"white\" , \"limegreen\" ), values = scales :: rescale ( c ( 0 , 0.1 , 1 )), # Adjust these based on your density range guide = \"colourbar\" ) + theme_minimal () + labs ( fill = variable_label ) + theme_tufte () + theme ( plot.background = element_rect ( fill = \"white\" , color = NA ), panel.background = element_rect ( fill = \"white\" , color = NA ), legend.position = \"bottom\" , axis.text = element_blank (), axis.title = element_blank (), axis.ticks = element_blank (), panel.grid.major = element_blank (), panel.grid.minor = element_blank ()) # Save the plot ggsave ( paste0 ( output_filename , \"_density_plot.png\" ), plot , width = 10 , height = 4 , units = \"in\" , dpi = 600 ) # Return the plot and the tree layer return ( list ( plot = plot , layer = trees )) } Map tree inventory per HOLC grade result <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/tree_inventory/shape/tree_inventory.zip\" , \"tree_inventory.shp\" , denver_redlining , \"Denver_tree_inventory_2023\" ) Warning: `stat(density)` was deprecated in ggplot2 3.4.0. \u2139 Please use `after_stat(density)` instead. Map traffic accidents per HOLC grade result <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/traffic_accidents/shape/traffic_accidents.zip\" , \"traffic_accidents.shp\" , denver_redlining , \"Denver_traffic_accidents\" , variable_label = 'Traffic accidents density' ) Map stream sampling effort per HOLC grade instream_sampling_sites <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/instream_sampling_sites/shape/instream_sampling_sites.zip\" , \"instream_sampling_sites.shp\" , denver_redlining , \"instream_sampling_sites\" , variable_label = 'Instream sampling sites density' ) Map soil sampling effort per HOLC grade soil_samples <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/soil_samples/shape/soil_samples.zip\" , \"soil_samples.shp\" , denver_redlining , \"Soil samples\" , variable_label = 'soil samples density' ) Map public art density per HOLC grade public_art <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/public_art/shape/public_art.zip\" , \"public_art.shp\" , denver_redlining , \"Public art \" , variable_label = 'Public art density' ) Map liquor licenses density per HOLC grade liquor_licenses <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/liquor_licenses/shape/liquor_licenses.zip\" , \"liquor_licenses.shp\" , denver_redlining , \"liquor licenses \" , variable_label = 'liquor licenses density' ) Map crime density per HOLC grade Crime <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/crime/shape/crime.zip\" , \"crime.shp\" , denver_redlining , \"crime\" , variable_label = 'Crime density' ) Map police shooting density per HOLC grade Denver_police_shootings <- process_city_inventory_data ( \"https://www.denvergov.org/media/gis/DataCatalog/denver_police_officer_involved_shootings/shape/denver_police_officer_involved_shootings.zip\" , \"denver_police_officer_involved_shootings.shp\" , denver_redlining , \"Police shootings\" , variable_label = 'Police shootings density' )","title":"Data Processing"},{"location":"worksheets/worksheet_redlining_student_edition/#part-3-comparative-analysis-and-visualization","text":"","title":"Part 3: Comparative 
Analysis and Visualization"},{"location":"worksheets/worksheet_redlining_student_edition/#statistical-analysis","text":"Conduct a detailed statistical analysis to compare greenspace across different HOLC grades, using techniques like Targeted Maximum Likelihood Estimation (TMLE) to assess the association between historical redlining and current greenspace levels. Visualize the disparities in greenspace distribution using GIS tools, highlighting how redlining has shaped urban ecological landscapes.","title":"Statistical Analysis"},{"location":"worksheets/worksheet_redlining_student_edition/#conclusion","text":"This tutorial provides tools and methodologies to explore the lingering effects of historic redlining on urban greenspace, offering insights into the intersection of urban planning, environmental justice, and public health.","title":"Conclusion"},{"location":"worksheets/worksheet_redlining_student_edition/#references","text":"Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. (2021). Redlines and Greenspace: The Relationship between Historical Redlining and 2010 Greenspace across the United States. Environmental Health Perspectives , 129(1), 017006. DOI:10.1289/EHP7495. Available online","title":"References"}]} \ No newline at end of file diff --git a/sitemap.xml b/sitemap.xml new file mode 100644 index 0000000..84e2cd6 --- /dev/null +++ b/sitemap.xml @@ -0,0 +1,338 @@ + + + + https://cu-esiil.github.io/innovation-summit-2024-4b/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/agenda/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/breakout/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/teams/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/virtual-meetings/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/worksheet_redlining/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/additional-resources/bilingualism_md/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/additional-resources/code-of-conduct/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/additional-resources/cyverse_hacks/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/additional-resources/participant_agreement/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/additional-resources/useful_links/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/collaborating-on-the-cloud/cyverse-instructions/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/collaborating-on-the-cloud/cyverse_data_management/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/collaborating-on-the-cloud/github-basics/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/collaborating-on-the-cloud/markdown_basics/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/data-library/Pull_Sentinal2_l2_data/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/data-library/disturbance-stack/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/data-library/drought/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/data-library/epa-ecoregions/ + 2024-09-23 + daily + + + 
https://cu-esiil.github.io/innovation-summit-2024-4b/data-library/esiil-data-library/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/data-library/fia/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/data-library/fire-cbi/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/data-library/gedi/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/data-library/landfire-events/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/data-library/lcmap/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/data-library/modis-vcf/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/data-library/mounting-via-vsi/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/data-library/move-data-to-instance/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/data-library/stac_mount_save/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/data-library/stac_simple/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/data-library/treemap/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/project-documentation/methods/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/project-documentation/project-notes/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/project-documentation/project-presentation/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/art%20gallery/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/citations/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/cyverse_basics/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/cyverse_hacks/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/cyverse_move_and_save_data/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/cyverse_shutdown/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/cyverse_startup/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/cyverse_stream_data/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/data_analysis/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/data_processing/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/esiil_training/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/first_meeting_notes/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/github_basics/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/manuscript/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/markdown_basics/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/notes_from_readings/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/post_meeting_notes/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/pre_meeting_notes/ + 2024-09-23 + daily + 
+ + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/second_meeting_notes/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/third_meeting_notes/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/visualizations/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/resources/working_groups_and_postdocs/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/trainings/training_2_code/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/trainings/training_one/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/trainings/training_three/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/trainings/training_two/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/worksheets/worksheet_0/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/worksheets/worksheet_2/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/worksheets/worksheet_3/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/worksheets/worksheet_4/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/worksheets/worksheet_5/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/worksheets/worksheet_redlining/ + 2024-09-23 + daily + + + https://cu-esiil.github.io/innovation-summit-2024-4b/worksheets/worksheet_redlining_student_edition/ + 2024-09-23 + daily + + \ No newline at end of file diff --git a/sitemap.xml.gz b/sitemap.xml.gz new file mode 100644 index 0000000..9e285e3 Binary files /dev/null and b/sitemap.xml.gz differ diff --git a/states_plot.png b/states_plot.png new file mode 100644 index 0000000..c15964f Binary files /dev/null and b/states_plot.png differ diff --git a/teams/index.html b/teams/index.html new file mode 100644 index 0000000..f83458d --- /dev/null +++ b/teams/index.html @@ -0,0 +1,1353 @@ + + + + + + + + + + + + + + + + + + + + + + Event Logistics - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Event Logistics

    +

    Venue Information

    +

The ESIIL Innovation Summit will be held at the University of Colorado Boulder East Campus SEEC Building (4001 Discovery Dr, Boulder, CO 80303). Directions to the SEEC Building are available here.

    +

    (Building Maps to follow)

    + +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/teams/teams.md b/teams/teams.md new file mode 100644 index 0000000..207baf1 --- /dev/null +++ b/teams/teams.md @@ -0,0 +1,11 @@ +# Event Logistics + +## Venue Information +The ESIIL Innovation Summit will be held at the University of Colorado Boulder East Campus SEEC Building (4001 Discovery Dr, Boulder, CO 80303). Directions to the SEEC Building here. + +(Building Maps to follow) + + + + + diff --git a/temp_plot.html b/temp_plot.html new file mode 100644 index 0000000..17ac638 --- /dev/null +++ b/temp_plot.html @@ -0,0 +1,1961 @@ + + + + +plotly + + + + + + + + + + + + +
    + + + + diff --git a/trainings/Denver_redline.png b/trainings/Denver_redline.png new file mode 100644 index 0000000..bd9b6d6 Binary files /dev/null and b/trainings/Denver_redline.png differ diff --git a/trainings/anim.gif b/trainings/anim.gif new file mode 100644 index 0000000..9fa4996 Binary files /dev/null and b/trainings/anim.gif differ diff --git a/trainings/training_2_code.qmd b/trainings/training_2_code.qmd new file mode 100644 index 0000000..344b4ad --- /dev/null +++ b/trainings/training_2_code.qmd @@ -0,0 +1,215 @@ +--- +title: "Pre-summit training" +format: gfm +--- + + +```{r, collapse=TRUE} +if (!requireNamespace("tidytext", quietly = TRUE)) { + install.packages("tidytext") +} +library(tidytext) +library(sf) +library(ggplot2) +library(ggthemes) +library(dplyr) +library(rstac) +library(gdalcubes) +library(gdalUtils) +library(gdalcubes) +library(colorspace) +library(terra) +library(tidyterra) +library(basemapR) +library(tidytext) +library(ggwordcloud) +library(osmextract) +library(sf) +library(ggplot2) +library(ggthemes) +library(glue) + +library(purrr) +``` + + + +```{r} + address <- "https://www.denvergov.org/media/gis/DataCatalog/tree_inventory/shape/tree_inventory.zip" +inner_file <- "tree_inventory.shp" + full_path <- glue("/vsizip/vsicurl/{address}/{inner_file}") + shape_data <- st_read(full_path, quiet = TRUE) |> st_as_sf() + shape_data +``` + + +```{r} + url <- "https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson" + +city_state_df <- redlining_data |> + select(city, state) |> + st_set_geometry(NULL) |> # Drop the geometry to avoid issues with invalid shapes + distinct(city, state) |> + arrange(state, city ) # Arrange the list alphabetically by state, then by city + +city_state_df + + +``` + + + +```{r} +redlining_data <- read_sf(url) + city_redline <- redlining_data |> + filter(city == "Denver" & city_survey == "TRUE" & grade != "") + +city_redline +``` + + + +```{r} +bbox_here <- st_bbox(city_redline) |> + st_as_sfc() + + + +my_layer <- "lines" +my_query <- "SELECT * FROM lines WHERE ( + highway IN ('motorway', 'trunk', 'primary', 'secondary', 'tertiary') )" +title <- "Major roads" + +places <- oe_get( + place = bbox_here, + layer = my_layer, # Adjusted layer; change as per actual data availability + query = my_query, + quiet = TRUE + ) + +places <- st_make_valid(places) + + # Crop the data to the bounding box +roads <- st_crop(places, bbox_here) + +roads + + +my_layer <- "lines" +my_query <- "SELECT * FROM lines WHERE ( + waterway IN ('river'))" +title <- "Major rivers" + +places <- oe_get( + place = bbox_here, + layer = my_layer, # Adjusted layer; change as per actual data availability + query = my_query, + quiet = TRUE + ) + +places <- st_make_valid(places) + + # Crop the data to the bounding box +rivers <- st_crop(places, bbox_here) + +rivers +``` + + +```{r} +# Colors for the grades + colors <- c("#76a865", "#7cb5bd", "#ffff00", "#d9838d") + + # Plot the data using ggplot2 + plot <- ggplot() + + geom_sf(data = roads, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.5, lwd = 1.1) + + geom_sf(data = city_redline, aes(fill = grade), alpha = 0.5) + + theme_tufte() + + scale_fill_manual(values = colors) + + labs(fill = 'HOLC Categories') + + theme( + plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank(), + legend.position = "right" + ) 
+ + # Save the plot as a high-resolution PNG file + ggsave("Denver_redline.png", plot, width = 10, height = 8, units = "in", dpi = 600) + + plot +``` + + + + +```{r, collapse=TRUE} + +process_satellite_data <- function(polygon_layer, start_date, end_date, assets, fps = 1, output_file = "anim.gif") { + # Record start time + start_time <- Sys.time() + + # Calculate the bbox from the polygon layer + bbox <- st_bbox(polygon_layer) + + s = stac("https://earth-search.aws.element84.com/v0") + + + # Use stacR to search for Sentinel-2 images within the bbox and date range + items = s |> stac_search( + collections = "sentinel-s2-l2a-cogs", + bbox = c(bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]), + datetime = paste(start_date, end_date, sep = "/"), + limit = 500 + ) %>% + post_request() + + # Define mask for Sentinel-2 image quality + #S2.mask <- image_mask("SCL", values = c(3, 8, 9)) + + # Create a collection of images filtering by cloud cover + col <- stac_image_collection(items$features, asset_names = assets, property_filter = function(x) {x[["eo:cloud_cover"]] < 30}) + + # Define a view for processing the data + v <- cube_view(srs = "EPSG:4326", + extent = list(t0 = start_date, t1 = end_date, + left = bbox["xmin"], right = bbox["xmax"], + top = bbox["ymax"], bottom = bbox["ymin"]), + dx = 0.001, dy = 0.001, dt = "P1M", + aggregation = "median", resampling = "bilinear") + + # Calculate NDVI and create an animation + ndvi_col <- function(n) { + rev(sequential_hcl(n, "Green-Yellow")) + } + + #raster_cube(col, v, mask = S2.mask) %>% + raster_cube(col, v) %>% + select_bands(c("B04", "B08")) %>% + apply_pixel("(B08-B04)/(B08+B04)", "NDVI") %>% + gdalcubes::animate(col = ndvi_col, zlim = c(-0.2, 1), key.pos = 1, save_as = output_file, fps = fps) + + # Calculate processing time + end_time <- Sys.time() + processing_time <- difftime(end_time, start_time) + + # Return processing time + return(processing_time) +} + + +``` + + + +```{r, cache=TRUE, warning=FALSE, message=FALSE} +processing_time <- process_satellite_data(city_redline, "2022-05-31", "2023-05-31", c("B04", "B08")) + +``` + +![](anim.gif) + + + diff --git a/trainings/training_2_code/index.html b/trainings/training_2_code/index.html new file mode 100644 index 0000000..f330e07 --- /dev/null +++ b/trainings/training_2_code/index.html @@ -0,0 +1,1350 @@ + + + + + + + + + + + + + + + + + + + + + + Pre-summit training - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Pre-summit training

    + + + + + + + + + + \ No newline at end of file diff --git a/trainings/training_2_code/training_2_code.md b/trainings/training_2_code/training_2_code.md new file mode 100644 index 0000000..11359e3 --- /dev/null +++ b/trainings/training_2_code/training_2_code.md @@ -0,0 +1,3 @@ +Pre-summit training +================ + diff --git a/trainings/training_one/index.html b/trainings/training_one/index.html new file mode 100644 index 0000000..6917249 --- /dev/null +++ b/trainings/training_one/index.html @@ -0,0 +1,2099 @@ + + + + + + + + + + + + + + + + + + + + + + Markdown for the Modern Researcher at ESIIL - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Markdown for the Modern Researcher at ESIIL

    +

    Introduction

    +
      +
    • Overview of Markdown's relevance and utility in modern research.
    • +
    • How Markdown streamlines documentation in diverse scientific and coding environments.
    • +
    +

    Section 1: Mastering Markdown Syntax

    +
      +
    • Objective: Equip researchers with a thorough understanding of Markdown syntax and its diverse applications.
    • +
    • Topics Covered:
    • +
    • Fundamentals of Text Formatting (headings, lists, bold, italics)
    • +
    • Advanced Structures (tables, blockquotes)
    • +
    • Integrating Multimedia (image and video links)
    • +
    • Diagrams with Mermaid (creating flowcharts, mind maps, timelines)
    • +
    • Interactive Elements (hyperlinks, embedding interactive content)
    • +
    • Activities:
    • +
    • Crafting a Markdown document with various formatting elements.
    • +
    • Developing diagrams using Mermaid for research presentations.
    • +
    • Embedding multimedia elements in a Markdown document for enhanced communication.
    • +
    +

    Section 2: Markdown in Research Tools

    +
      +
    • Objective: Showcase the integration of Markdown in RStudio and Jupyter Notebooks for scientific documentation.
    • +
    • Topics Covered:
    • +
    • Implementing Markdown in RStudio (R Markdown, knitting to HTML/PDF)
    • +
    • Utilizing Markdown in Jupyter Notebooks (code and Markdown cells)
    • +
    • Best practices for documenting research code
    • +
    • Including code outputs and visualizations in documentation
    • +
    • Activities:
    • +
    • Creating and sharing an R Markdown document with annotated research data.
    • +
    • Building a comprehensive Jupyter Notebook with integrated Markdown annotations.
    • +
    +

    Section 3: Disseminating Research with Markdown and GitHub Pages

    +
      +
    • Objective: Teach researchers how to publish and manage Markdown-based documentation as web pages.
    • +
    • Topics Covered:
    • +
    • Setting up a GitHub repository for hosting documentation
    • +
    • Transforming Markdown files into web-friendly formats
    • +
    • Customizing web page layouts and themes
    • +
    • Advanced features using Jekyll
    • +
    • Version control and content management for documentation
    • +
    • Activities:
    • +
    • Publishing a research project documentation on GitHub Pages.
    • +
    • Applying custom themes and layouts to enhance online documentation.
    • +
    +

    Conclusion

    +
      +
    • Review of Markdown's role in enhancing research efficiency and clarity.
    • +
    • Encouraging the integration of Markdown into daily research activities for improved documentation and dissemination.
    • +
    +

    Additional Resources

    +
      +
    • Curated list of advanced Markdown tutorials, guides for GitHub Pages, and Jekyll resources for researchers.
    • +
    +

    Section 1: Mastering Markdown Syntax

    +

    1. Fundamentals of Text Formatting

    +
      +
    • Headings: Use # for different levels of headings.
    • +
    • +

      Heading Level 1

      +
    • +
    • +

      Heading Level 2

      +
    • +
    • +

      Heading Level 3

      +
    • +
    • +

Lists: Use asterisks for bulleted lists and numbers for ordered lists.

      +
    • +
    • Item 1
    • +
    • Item 2
        +
      • Subitem 2.1
      • +
      • Subitem 2.2
      • +
      +
    • +
    • +
        +
      1. First item
      2. +
      +
    • +
    • +
        +
      1. Second item
      2. +
      +
    • +
    • +

      Bold and Italics: Use asterisks or underscores.

      +
    • +
    • Bold Text
    • +
    • Italic Text
    • +
    +

    2. Advanced Structures

    +
      +
    • Tables: Create tables using dashes and pipes.
    • +
    • + + + + + + + + + + + + + + + + + + + + +
      Header 1Header 2Header 3
      Row 1DataData
      Row 2DataData
      +
    • +
    • +

      Add a ":"" to change text justification. Here the : is added on the left for left justification. + | Header 1 | Header 2 | Header 3 | + |---------:|--------- |----------| + | Row 1 | Data | Data | + | Row 2 | Data | Data |

      +
    • +
    • + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
      ANALYTICSENR
      EINVIRONMENT
      VELOPMOCOMUN
      EGAGELLAHCNE
      RATADEVELOPW
      EITSITNEICSR
      SOIGOLOIBHTL
      AHTLAEWEGNEL
      TITSITNEICSN
      IEESREHTOENI
      CSLLAHCEGLAN
      EGALLEHCNEIC
      +
    • +
    • +

If you hit the boundaries of Markdown's capabilities, you can add HTML directly. Remember, this entire exercise ultimately translates to HTML.

      +
    • +
    +

    Sudoku Puzzle +Fill in the blank cells with numbers from 1 to 9, such that each row, column, and 3x3 subgrid contains all the numbers from 1 to 9 without repetition.

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    537
    6195
    986
    863
    4831
    726
    628
    4195
    879
    + + + + + + + + + + +
    534678912
    672195348
    198342567
    859761423
    426853791
    713924856
    961537284
    287419635
    345286179
    + +
      +
    • Blockquotes: Use > for blockquotes.
    • +
    • +
      +

      This is a blockquote.

      +
      +
    • +
    • +
      +

      It can span multiple lines.

      +
      +
    • +
    +

    3. Integrating Multimedia

    +
      +
    • Images: Add images using the format ![alt text](image_url).
    • +
    • +

      Markdown Logo

      +
    • +
    • +

      Videos: Embed videos using HTML in Markdown.

      +
    • +
    • <iframe width="560" height="315" src="https://www.youtube.com/embed/dQw4w9WgXcQ" frameborder="0" allowfullscreen></iframe>
    • +
    +

    4. Diagrams with Mermaid

    +
      +
    • Flowcharts:
    • +
    +
        graph TD
    +    A[Start] --> B[Analyze Data]
    +    B --> C{Is Data Large?}
    +    C -->|Yes| D[Apply Big Data Solutions]
    +    C -->|No| E[Use Traditional Methods]
    +    D --> F[Machine Learning]
    +    E --> G[Statistical Analysis]
    +    F --> H{Model Accurate?}
    +    G --> I[Report Results]
    +    H -->|Yes| J[Deploy Model]
    +    H -->|No| K[Refine Model]
    +    J --> L[Monitor Performance]
    +    K --> F
    +    L --> M[End: Success]
    +    I --> N[End: Report Generated]
    +    style A fill:#f9f,stroke:#333,stroke-width:2px
    +    style M fill:#9f9,stroke:#333,stroke-width:2px
    +    style N fill:#9f9,stroke:#333,stroke-width:2px
    +
      +
    • +

      Mind Maps: +

          mindmap
      +  root((ESIIL))
      +    section Data Sources
      +      Satellite Imagery
      +        ::icon(fa fa-satellite)
      +      Remote Sensing Data
      +        Drones
      +        Aircraft
      +      On-ground Sensors
      +        Weather Stations
      +        IoT Devices
      +      Open Environmental Data
      +        Public Datasets
      +        ::icon(fa fa-database)
      +    section Research Focus
      +      Climate Change Analysis
      +        Ice Melt Patterns
      +        Sea Level Rise
      +      Biodiversity Monitoring
      +        Species Distribution
      +        Habitat Fragmentation
      +      Geospatial Analysis Techniques
      +        Machine Learning Models
      +        Predictive Analytics
      +    section Applications
      +      Conservation Strategies
      +        ::icon(fa fa-leaf)
      +      Urban Planning
      +        Green Spaces
      +      Disaster Response
      +        Flood Mapping
      +        Wildfire Tracking
      +    section Tools and Technologies
      +      GIS Software
      +        QGIS
      +        ArcGIS
      +      Programming Languages
      +        Python
      +        R
      +      Cloud Computing Platforms
      +        AWS
      +        Google Earth Engine
      +      Data Visualization
      +        D3.js
      +        Tableau

      +
    • +
    • +

      Timelines:

      +
    • +
    +
    gantt
    +    title ESIIL Year 2 Project Schedule
    +    dateFormat  YYYY-MM-DD
    +    section CI
+    Sovereign OASIS via private JupyterHubs :2024-08-01, 2024-10-30
    +    OASIS documentation                    :2024-09-15, 70d
    +    Data cube OASIS via cyverse account    :2024-09-15, 100d
    +    Integrate with ESIIL User Management system :2024-08-01, 2024-11-30
    +    Build badges to deploy DE from mkdoc   :2024-09-01, 2024-12-15
    +    Streamline Github ssh key management   :2024-10-01, 2024-12-31
    +    Cyverse support (R proxy link)         :2024-11-01, 2024-12-31
    +    Cyverse use summary and statistics     :2024-08-01, 2024-12-15
    +
    +    section CI Consultation and Education
    +    Conferences/Invited talks              :2024-08-01, 2024-12-31
    +    Office hours                           :2024-08-15, 2024-12-15
    +    Proposals                              :2024-09-01, 2024-11-15
    +    Private lessons                        :2024-09-15, 2024-11-30
    +    Pre-event trainings                    :2024-10-01, 2024-12-15
    +    Textbook development w/ education team :2024-08-01, 2024-12-15
    +    Train the trainers / group lessons     :2024-08-15, 2024-11-30
    +    Tribal engagement                      :2024-09-01, 2024-12-15
    +    Ethical Space training                 :2024-09-15, 2024-12-31
    +
    +    section CI Design and Build
    +    Data library (repository)              :2024-08-01, 2024-10-30
    +    Analytics library (repository)         :2024-08-15, 2024-11-15
    +    Containers (repository)                :2024-09-01, 2024-11-30
    +    Cloud infrastructure templates (repository) :2024-09-15, 2024-12-15
    +    Tribal resilience Data Cube            :2024-10-01, 2024-12-31
    +
    
    +%%{init: { 'logLevel': 'debug', 'theme': 'base', 'gitGraph': {'rotateCommitLabel': true}} }%%
    +gitGraph
    +  commit id: "Start from template"
    +  branch c1
    +  commit id: "Set up SSH key pair"
    +  commit id: "Modify _config.yml for GitHub Pages"
    +  commit id: "Initial website structure"
    +  commit id: "Add new markdown pages"
    +  commit id: "Update navigation tree"
    +  commit id: "Edit existing pages"
    +  commit id: "Delete old markdown pages"
    +  commit id: "Finalize website updates"
    +  commit id: "Add new markdown pages"
    +  commit id: "Update navigation tree"
    +checkout c1
    +
    +  branch b1
    +
    +  commit
    +  commit
    +  checkout c1
    +  merge b1
    +
    %%{init: {"quadrantChart": {"chartWidth": 400, "chartHeight": 400}, "themeVariables": {"quadrant1TextFill": "#ff0000"} }}%%
    +quadrantChart
    +  x-axis Urgent --> Not Urgent
    +  y-axis Not Important --> "Important ❤"
    +  quadrant-1 Plan
    +  quadrant-2 Do
    +  quadrant-3 Delegate
    +  quadrant-4 Delete
    +
    timeline
    +    title Major Events in Environmental Science and Data Science
    +    section Environmental Science
    +        19th century : Foundations in Ecology and Conservation
    +        1962 : Publication of 'Silent Spring' by Rachel Carson
    +        1970 : First Earth Day
    +        1987 : Brundtland Report introduces Sustainable Development
    +        1992 : Rio Earth Summit
    +        2015 : Paris Agreement on Climate Change
    +    section Data Science
    +        1960s-1970s : Development of Database Management Systems
    +        1980s : Emergence of Data Warehousing
    +        1990s : Growth of the World Wide Web and Data Mining
    +        2000s : Big Data and Predictive Analytics
    +        2010s : AI and Machine Learning Revolution
    +        2020s : Integration of AI in Environmental Research
    +
    erDiagram
    +    CAR ||--o{ NAMED-DRIVER : allows
    +    CAR {
    +        string registrationNumber
    +        string make
    +        string model
    +    }
    +    PERSON ||--o{ NAMED-DRIVER : is
    +    PERSON {
    +        string firstName
    +        string lastName
    +        int age
    +    }
    +
    ---
    +config:
    +  sankey:
    +    showValues: false
    +---
    +sankey-beta
    +
    +NASA Data,Big Data Harmonization,100
    +    Satellite Imagery,Big Data Harmonization,80
    +    Open Environmental Data,Big Data Harmonization,70
    +    Remote Sensing Data,Big Data Harmonization,90
    +    Big Data Harmonization, Data Analysis and Integration,340
    +    Data Analysis and Integration,Climate Change Research,100
    +    Data Analysis and Integration,Biodiversity Monitoring,80
    +    Data Analysis and Integration,Geospatial Mapping,60
    +    Data Analysis and Integration,Urban Planning,50
    +    Data Analysis and Integration,Disaster Response,50
    +
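A note on embedding: in the Markdown source files each of the diagrams above sits inside a fenced code block tagged `mermaid`, and Mermaid-aware renderers (for example GitHub, or MkDocs with a Mermaid extension enabled) draw the figure from that block. A minimal sketch, using an illustrative flow that is not taken from the summit materials:

````markdown
```mermaid
graph LR
    A[Raw environmental data] --> B[Harmonize]
    B --> C[Analyze]
    C --> D[Share results]
```
````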

    5. Interactive Elements

    +
      +
    • Hyperlinks: Use the format [link text](URL).
    • +
    • Google
    • +
    • +

      Play Tetris

      +
    • +
    • +

      Embedding Interactive Content: Use HTML tags or specific platform embed codes.

      +
    • +
    • <iframe src="https://example.com/interactive-content" width="600" height="400"></iframe>
    • +
    +

    6. Math Notation

    +

    Markdown can be combined with LaTeX for mathematical notation, useful in environmental data science for expressing statistical distributions, coordinate systems, and more. This requires a Markdown renderer with LaTeX support (like MathJax or KaTeX).

    +
      +
• Inline Math: Use single dollar signs for inline math expressions, for example to represent the normal distribution.
    • +
    +

Example: The probability density function of the normal distribution is given by \(f(x|\mu,\sigma) = \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}\).

    +
      +
    • Display Math: Use double dollar signs for standalone equations.
    • +
    +

Example: + $$ + f(x|\mu,\sigma) = \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2} + $$

    +
      +
    • Common LaTeX Elements for Environmental Data Science:
    • +
    • Statistical Distributions:
        +
      • Normal Distribution: \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2} for \(\frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}\)
      • +
      • Poisson Distribution: P(k; \lambda) = \frac{\lambda^k e^{-\lambda}}{k!} for \(P(k; \lambda) = \frac{\lambda^k e^{-\lambda}}{k!}\)
      • +
      +
    • +
    • Coordinate Systems:
        +
      • Spherical Coordinates: (r, \theta, \phi) for \((r, \theta, \phi)\)
      • +
      • Cartesian Coordinates: (x, y, z) for \((x, y, z)\)
      • +
      +
    • +
    • Geospatial Equations:
        +
      • Haversine Formula for Distance: a = \sin^2\left(\frac{\Delta\phi}{2}\right) + \cos(\phi_1)\cos(\phi_2)\sin^2\left(\frac{\Delta\lambda}{2}\right) for \(a = \sin^2\left(\frac{\Delta\phi}{2}\right) + \cos(\phi_1)\cos(\phi_2)\sin^2\left(\frac{\Delta\lambda}{2}\right)\)
      • +
      +
    • +
    +

    Note: The rendering of these equations as formatted math will depend on your Markdown viewer's LaTeX capabilities.
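As a small worked extension of the haversine term above (a sketch, assuming the standard great-circle formula and a mean Earth radius of roughly 6371 km), the intermediate value \(a\) converts to a distance \(d\) as

$$
d = 2r\,\arcsin\!\left(\sqrt{a}\right), \qquad r \approx 6371\ \text{km},
$$

so differences in latitude (\(\Delta\phi\)) and longitude (\(\Delta\lambda\)) can be compared directly in kilometres.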

    +

    7. Effective Citations in Markdown

    +

    Inline Citations

    +
      +
    • Objective: Learn how to use inline citations in Markdown.
    • +
    • Example Usage:
    • +
    • Inline citation of a single work:
        +
      • Some text with an inline citation. [@jones:envstudy:2020]
      • +
      +
    • +
    • Inline citation with specific page or section:
        +
      • More text with a specific section cited. [See @jones:envstudy:2020, §4.2]
      • +
      +
    • +
    • Contrasting views:
        +
      • Discussion of a topic with a contrasting view. [Contra @smith:climatechange:2019, p. 78]
      • +
      +
    • +
    +

    Footnote Citations

    +
      +
    • Objective: Understand how to use footnote citations in Markdown.
    • +
    • Example Usage:
    • +
    • Citing with a footnote:
        +
      • Some statement in the text.1
      • +
      +
    • +
    • Multiple references to the same footnote:
        +
      • Another statement referring to the same source.1
      • +
      +
    • +
    • A different citation:
        +
      • Additional comment with a new citation.2
      • +
      +
    • +
    +

    Creating Footnotes

    +
      +
• Example Syntax (the raw footnote markup is not shown on this rendered page; see the sketch after this list):
    • +
    • +
    • +
    +
    +
    +
      +
    1. +

      First reference details. Example: Emma Jones, "Environmental Study," Nature Journal, May 2020, https://nature-journal.com/envstudy2020

      +
    2. +
    3. +

      Second reference details. Example: David Smith, "Climate Change Controversies," Science Daily, August 2019, https://sciencedaily.com/climatechange2019

      +
    4. +
    +
    + +
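Because the rendered page swallows the raw footnote markup, here is a minimal sketch of the underlying syntax, mirroring the examples above (the reference text is illustrative):

```markdown
Some statement in the text.[^1]

Another statement referring to the same source.[^1]

Additional comment with a new citation.[^2]

[^1]: First reference details. Example: Emma Jones, "Environmental Study," Nature Journal, May 2020.
[^2]: Second reference details. Example: David Smith, "Climate Change Controversies," Science Daily, August 2019.
```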
    + + + + + + + + + + \ No newline at end of file diff --git a/trainings/training_one/training_one.md b/trainings/training_one/training_one.md new file mode 100644 index 0000000..fa69ea4 --- /dev/null +++ b/trainings/training_one/training_one.md @@ -0,0 +1,434 @@ +# Markdown for the Modern Researcher at ESIIL + +## Introduction + +- Overview of Markdown's relevance and utility in modern research. +- How Markdown streamlines documentation in diverse scientific and coding environments. + +## Section 1: Mastering Markdown Syntax + +- **Objective:** Equip researchers with a thorough understanding of Markdown syntax and its diverse applications. +- **Topics Covered:** + - Fundamentals of Text Formatting (headings, lists, bold, italics) + - Advanced Structures (tables, blockquotes) + - Integrating Multimedia (image and video links) + - Diagrams with Mermaid (creating flowcharts, mind maps, timelines) + - Interactive Elements (hyperlinks, embedding interactive content) +- **Activities:** + - Crafting a Markdown document with various formatting elements. + - Developing diagrams using Mermaid for research presentations. + - Embedding multimedia elements in a Markdown document for enhanced communication. + +## Section 2: Markdown in Research Tools + +- **Objective:** Showcase the integration of Markdown in RStudio and Jupyter Notebooks for scientific documentation. +- **Topics Covered:** + - Implementing Markdown in RStudio (R Markdown, knitting to HTML/PDF) + - Utilizing Markdown in Jupyter Notebooks (code and Markdown cells) + - Best practices for documenting research code + - Including code outputs and visualizations in documentation +- **Activities:** + - Creating and sharing an R Markdown document with annotated research data. + - Building a comprehensive Jupyter Notebook with integrated Markdown annotations. + +## Section 3: Disseminating Research with Markdown and GitHub Pages + +- **Objective:** Teach researchers how to publish and manage Markdown-based documentation as web pages. +- **Topics Covered:** + - Setting up a GitHub repository for hosting documentation + - Transforming Markdown files into web-friendly formats + - Customizing web page layouts and themes + - Advanced features using Jekyll + - Version control and content management for documentation +- **Activities:** + - Publishing a research project documentation on GitHub Pages. + - Applying custom themes and layouts to enhance online documentation. + +## Conclusion + +- Review of Markdown's role in enhancing research efficiency and clarity. +- Encouraging the integration of Markdown into daily research activities for improved documentation and dissemination. + +## Additional Resources + +- Curated list of advanced Markdown tutorials, guides for GitHub Pages, and Jekyll resources for researchers. + + + + +## Section 1: Mastering Markdown Syntax + +### 1. Fundamentals of Text Formatting + +- **Headings**: Use `#` for different levels of headings. + - # Heading Level 1 + - ## Heading Level 2 + - ### Heading Level 3 + +- **Lists**: Bulleted lists use asterisks, numbers for ordered lists. + - Item 1 + - Item 2 + - Subitem 2.1 + - Subitem 2.2 + - 1. First item + - 2. Second item + +- **Bold and Italics**: Use asterisks or underscores. + - **Bold Text** + - *Italic Text* + +### 2. Advanced Structures + +- **Tables**: Create tables using dashes and pipes. + - | Header 1 | Header 2 | Header 3 | + |----------|----------|----------| + | Row 1 | Data | Data | + | Row 2 | Data | Data | + - Add a ":"" to change text justification. 
Here the : is added on the left for left justification. + | Header 1 | Header 2 | Header 3 | + |---------:|--------- |----------| + | Row 1 | Data | Data | + | Row 2 | Data | Data | + + - | | | | | | | | | | | | | + |---|---|---|---|---|---|---|---|---|---|---|---| + | A | N | A | L | Y | T | I | C | S | E | N | R | + | E | I | N | V | I | R | O | N | M | E | N | T | + | V | E | L | O | P | M | O | C | O | M | U | N | + | E | G | A | G | E | L | L | A | H | C | N | E | + | R | A | T | A | D | E | V | E | L | O | P | W | + | E | I | T | S | I | T | N | E | I | C | S | R | + | S | O | I | G | O | L | O | I | B | H | T | L | + | A | H | T | L | A | E | W | E | G | N | E | L | + | T | I | T | S | I | T | N | E | I | C | S | N | + | I | E | E | S | R | E | H | T | O | E | N | I | + | C | S | L | L | A | H | C | E | G | L | A | N | + | E | G | A | L | L | E | H | C | N | E | I | C | + + - If you hit the boundaries of Markdown's capabilities, you can start to add html directly. Remember, this entire exercisse is to translate to html. + +**Sudoku Puzzle** +Fill in the blank cells with numbers from 1 to 9, such that each row, column, and 3x3 subgrid contains all the numbers from 1 to 9 without repetition. + +| | | | | | | | | | +|---|---|---|---|---|---|---|---|---| +| 5 | 3 | | | 7 | | | | | +| 6 | | | 1 | 9 | 5 | | | | +| | 9 | 8 | | | | | 6 | | +| 8 | | | | 6 | | | | 3 | +| 4 | | | 8 | | 3 | | | 1 | +| 7 | | | | 2 | | | | 6 | +| | 6 | | | | | 2 | 8 | | +| | | | 4 | 1 | 9 | | | 5 | +| | | | | 8 | | | 7 | 9 | + + + + + + + + + + + + + + +
    534678912
    672195348
    198342567
    859761423
    426853791
    713924856
    961537284
    287419635
    345286179
    + + + + + +- **Blockquotes**: Use `>` for blockquotes. + - > This is a blockquote. + - > It can span multiple lines. + +### 3. Integrating Multimedia + +- **Images**: Add images using the format `![alt text](image_url)`. + - ![Markdown Logo](https://example.com/markdown-logo.png) + +- **Videos**: Embed videos using HTML in Markdown. + - `` + +### 4. Diagrams with Mermaid + +- **Flowcharts**: + +```mermaid + graph TD + A[Start] --> B[Analyze Data] + B --> C{Is Data Large?} + C -->|Yes| D[Apply Big Data Solutions] + C -->|No| E[Use Traditional Methods] + D --> F[Machine Learning] + E --> G[Statistical Analysis] + F --> H{Model Accurate?} + G --> I[Report Results] + H -->|Yes| J[Deploy Model] + H -->|No| K[Refine Model] + J --> L[Monitor Performance] + K --> F + L --> M[End: Success] + I --> N[End: Report Generated] + style A fill:#f9f,stroke:#333,stroke-width:2px + style M fill:#9f9,stroke:#333,stroke-width:2px + style N fill:#9f9,stroke:#333,stroke-width:2px +``` + +- **Mind Maps**: +```mermaid + mindmap + root((ESIIL)) + section Data Sources + Satellite Imagery + ::icon(fa fa-satellite) + Remote Sensing Data + Drones + Aircraft + On-ground Sensors + Weather Stations + IoT Devices + Open Environmental Data + Public Datasets + ::icon(fa fa-database) + section Research Focus + Climate Change Analysis + Ice Melt Patterns + Sea Level Rise + Biodiversity Monitoring + Species Distribution + Habitat Fragmentation + Geospatial Analysis Techniques + Machine Learning Models + Predictive Analytics + section Applications + Conservation Strategies + ::icon(fa fa-leaf) + Urban Planning + Green Spaces + Disaster Response + Flood Mapping + Wildfire Tracking + section Tools and Technologies + GIS Software + QGIS + ArcGIS + Programming Languages + Python + R + Cloud Computing Platforms + AWS + Google Earth Engine + Data Visualization + D3.js + Tableau +``` + +- **Timelines**: + +```mermaid +gantt + title ESIIL Year 2 Project Schedule + dateFormat YYYY-MM-DD + section CI + Sovereign OASIS via private jupiterhubs :2024-08-01, 2024-10-30 + OASIS documentation :2024-09-15, 70d + Data cube OASIS via cyverse account :2024-09-15, 100d + Integrate with ESIIL User Management system :2024-08-01, 2024-11-30 + Build badges to deploy DE from mkdoc :2024-09-01, 2024-12-15 + Streamline Github ssh key management :2024-10-01, 2024-12-31 + Cyverse support (R proxy link) :2024-11-01, 2024-12-31 + Cyverse use summary and statistics :2024-08-01, 2024-12-15 + + section CI Consultation and Education + Conferences/Invited talks :2024-08-01, 2024-12-31 + Office hours :2024-08-15, 2024-12-15 + Proposals :2024-09-01, 2024-11-15 + Private lessons :2024-09-15, 2024-11-30 + Pre-event trainings :2024-10-01, 2024-12-15 + Textbook development w/ education team :2024-08-01, 2024-12-15 + Train the trainers / group lessons :2024-08-15, 2024-11-30 + Tribal engagement :2024-09-01, 2024-12-15 + Ethical Space training :2024-09-15, 2024-12-31 + + section CI Design and Build + Data library (repository) :2024-08-01, 2024-10-30 + Analytics library (repository) :2024-08-15, 2024-11-15 + Containers (repository) :2024-09-01, 2024-11-30 + Cloud infrastructure templates (repository) :2024-09-15, 2024-12-15 + Tribal resilience Data Cube :2024-10-01, 2024-12-31 +``` + +```mermaid + +%%{init: { 'logLevel': 'debug', 'theme': 'base', 'gitGraph': {'rotateCommitLabel': true}} }%% +gitGraph + commit id: "Start from template" + branch c1 + commit id: "Set up SSH key pair" + commit id: "Modify _config.yml for GitHub Pages" + commit id: "Initial website 
structure" + commit id: "Add new markdown pages" + commit id: "Update navigation tree" + commit id: "Edit existing pages" + commit id: "Delete old markdown pages" + commit id: "Finalize website updates" + commit id: "Add new markdown pages" + commit id: "Update navigation tree" +checkout c1 + + branch b1 + + commit + commit + checkout c1 + merge b1 +``` + +```mermaid +%%{init: {"quadrantChart": {"chartWidth": 400, "chartHeight": 400}, "themeVariables": {"quadrant1TextFill": "#ff0000"} }}%% +quadrantChart + x-axis Urgent --> Not Urgent + y-axis Not Important --> "Important ❤" + quadrant-1 Plan + quadrant-2 Do + quadrant-3 Delegate + quadrant-4 Delete +``` + + +```mermaid +timeline + title Major Events in Environmental Science and Data Science + section Environmental Science + 19th century : Foundations in Ecology and Conservation + 1962 : Publication of 'Silent Spring' by Rachel Carson + 1970 : First Earth Day + 1987 : Brundtland Report introduces Sustainable Development + 1992 : Rio Earth Summit + 2015 : Paris Agreement on Climate Change + section Data Science + 1960s-1970s : Development of Database Management Systems + 1980s : Emergence of Data Warehousing + 1990s : Growth of the World Wide Web and Data Mining + 2000s : Big Data and Predictive Analytics + 2010s : AI and Machine Learning Revolution + 2020s : Integration of AI in Environmental Research +``` + + + + +```mermaid +erDiagram + CAR ||--o{ NAMED-DRIVER : allows + CAR { + string registrationNumber + string make + string model + } + PERSON ||--o{ NAMED-DRIVER : is + PERSON { + string firstName + string lastName + int age + } +``` + +```mermaid +--- +config: + sankey: + showValues: false +--- +sankey-beta + +NASA Data,Big Data Harmonization,100 + Satellite Imagery,Big Data Harmonization,80 + Open Environmental Data,Big Data Harmonization,70 + Remote Sensing Data,Big Data Harmonization,90 + Big Data Harmonization, Data Analysis and Integration,340 + Data Analysis and Integration,Climate Change Research,100 + Data Analysis and Integration,Biodiversity Monitoring,80 + Data Analysis and Integration,Geospatial Mapping,60 + Data Analysis and Integration,Urban Planning,50 + Data Analysis and Integration,Disaster Response,50 +``` + + +### 5. Interactive Elements + +- **Hyperlinks**: Use the format `[link text](URL)`. + - [Google](https://www.google.com) + - [Play Tetris](https://tetris.com/play-tetris) + +- **Embedding Interactive Content**: Use HTML tags or specific platform embed codes. + - `` + + + + +### 6. Math Notation + +Markdown can be combined with LaTeX for mathematical notation, useful in environmental data science for expressing statistical distributions, coordinate systems, and more. This requires a Markdown renderer with LaTeX support (like MathJax or KaTeX). + +- **Inline Math**: Use single dollar signs for inline math expressions. Representing the normal distribution. + + Example: The probability density function of the normal distribution is given by $f(x|\mu,\sigma) = \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}$.` + +- **Display Math**: Use double dollar signs for standalone equations. 
+ + Example: + $$ + f(x|\mu,\sigma) = \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2} + $$ + + +- **Common LaTeX Elements for Environmental Data Science**: + - **Statistical Distributions**: + - Normal Distribution: `\frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}` for $\frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}$ + - Poisson Distribution: `P(k; \lambda) = \frac{\lambda^k e^{-\lambda}}{k!}` for $P(k; \lambda) = \frac{\lambda^k e^{-\lambda}}{k!}$ + - **Coordinate Systems**: + - Spherical Coordinates: `(r, \theta, \phi)` for $(r, \theta, \phi)$ + - Cartesian Coordinates: `(x, y, z)` for $(x, y, z)$ + - **Geospatial Equations**: + - Haversine Formula for Distance: `a = \sin^2\left(\frac{\Delta\phi}{2}\right) + \cos(\phi_1)\cos(\phi_2)\sin^2\left(\frac{\Delta\lambda}{2}\right)` for $a = \sin^2\left(\frac{\Delta\phi}{2}\right) + \cos(\phi_1)\cos(\phi_2)\sin^2\left(\frac{\Delta\lambda}{2}\right)$ + +Note: The rendering of these equations as formatted math will depend on your Markdown viewer's LaTeX capabilities. + + + +### 7. Effective Citations in Markdown + +## Inline Citations + +- **Objective:** Learn how to use inline citations in Markdown. +- **Example Usage:** + - Inline citation of a single work: + - Some text with an inline citation. [@jones:envstudy:2020] + - Inline citation with specific page or section: + - More text with a specific section cited. [See @jones:envstudy:2020, §4.2] + - Contrasting views: + - Discussion of a topic with a contrasting view. [Contra @smith:climatechange:2019, p. 78] + +## Footnote Citations + +- **Objective:** Understand how to use footnote citations in Markdown. +- **Example Usage:** + - Citing with a footnote: + - Some statement in the text.[^1] + - Multiple references to the same footnote: + - Another statement referring to the same source.[^1] + - A different citation: + - Additional comment with a new citation.[^2] + +## Creating Footnotes + +- **Example Syntax:** + - [^1]: First reference details. Example: Emma Jones, "Environmental Study," Nature Journal, May 2020, https://nature-journal.com/envstudy2020. + - [^2]: Second reference details. Example: David Smith, "Climate Change Controversies," Science Daily, August 2019, https://sciencedaily.com/climatechange2019. + + diff --git a/trainings/training_three/index.html b/trainings/training_three/index.html new file mode 100644 index 0000000..d01a89d --- /dev/null +++ b/trainings/training_three/index.html @@ -0,0 +1,1414 @@ + + + + + + + + + + + + + + + + + + + + + + Voices in Concert - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + + + + + + + + + + \ No newline at end of file diff --git a/trainings/training_three/training_three.md b/trainings/training_three/training_three.md new file mode 100644 index 0000000..dcade80 --- /dev/null +++ b/trainings/training_three/training_three.md @@ -0,0 +1,15 @@ +Team Science + +Community Skills + +- [ESIIL Behavior Expectations Poster](https://drive.google.com/file/d/1sXnufOC1PbRUSSxgE0MX21wxwal1EHEz/view?usp=sharing) + +- [Calling People In Handout](https://drive.google.com/file/d/1xvtRWKiaC-OB6MgmXqJfJWci2z0-jj_T/view?usp=sharing) + +- [Tool: Interrupting Microagressions](https://drive.google.com/file/d/1QzV0PAokAHhDKt6Hdn96j1U8oWNrFFXq/view?usp=sharing) + +- [How to Apologize](https://hbr.org/2020/07/youve-been-called-out-for-a-microaggression-what-do-you-do) + +- [Voices in Concert Jamboard](https://jamboard.google.com/d/1-G44P6aE0pY2mDdanbqYr6o2ckOw0ezgrBbxk9Qfq6Y/edit?usp=sharing) + +Cultural Intelligence diff --git a/trainings/training_two/index.html b/trainings/training_two/index.html new file mode 100644 index 0000000..28eaaef --- /dev/null +++ b/trainings/training_two/index.html @@ -0,0 +1,2126 @@ + + + + + + + + + + + + + + + + + + + + + + Markdown for the Modern Researcher at ESIIL - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Markdown for the Modern Researcher at ESIIL

    +
    +Definitions +

    "Open Science is defined as an inclusive construct that combines various movements and practices aiming to make multilingual scientific knowledge openly available, accessible and reusable for everyone, to increase scientific collaborations and sharing of information for the benefits of science and society, and to open the processes of scientific knowledge creation, evaluation and communication to societal actors beyond the traditional scientific community." - UNESCO Definition

    + +

    "Open Science is the movement to make scientific research (including publications, data, physical samples, and software) and its dissemination accessible to all levels of society, amateur or professional..." Wikipedia definition

    +

    Open and Collaborative Science Network's Open Science Manifesto

    +
    +Six Pillars of Open Science +

    Open Access Publications

    +

    Open Data

    +

    Open Educational Resources

    +

    Open Methodology

    +

    Open Peer Review

    +

    Open Source Software

    +
+Wait, how many pillars of Open Science really are there? +

Depending on the source, the count ranges from four to eight pillars.

    +
    +
    +Foster Open Science Diagram +

    foster

    +

    Graphic by Foster Open Science

    +
    +
    +
    +

    Introduction

    +
      +
    • Overview of Markdown's relevance and utility in modern research.
    • +
    • How Markdown streamlines documentation in diverse scientific and coding environments.
    • +
    +

    Section 1: Mastering Markdown Syntax

    +
      +
    • Objective: Equip researchers with a thorough understanding of Markdown syntax and its diverse applications.
    • +
    • Topics Covered:
    • +
    • Fundamentals of Text Formatting (headings, lists, bold, italics)
    • +
    • Advanced Structures (tables, blockquotes)
    • +
    • Integrating Multimedia (image and video links)
    • +
    • Diagrams with Mermaid (creating flowcharts, mind maps, timelines)
    • +
    • Interactive Elements (hyperlinks, embedding interactive content)
    • +
    • Activities:
    • +
    • Crafting a Markdown document with various formatting elements.
    • +
    • Developing diagrams using Mermaid for research presentations.
    • +
    • Embedding multimedia elements in a Markdown document for enhanced communication.
    • +
    +

    Section 2: Markdown in Research Tools

    +
      +
    • Objective: Showcase the integration of Markdown in RStudio and Jupyter Notebooks for scientific documentation.
    • +
    • Topics Covered:
    • +
    • Implementing Markdown in RStudio (R Markdown, knitting to HTML/PDF)
    • +
    • Utilizing Markdown in Jupyter Notebooks (code and Markdown cells)
    • +
    • Best practices for documenting research code
    • +
    • Including code outputs and visualizations in documentation
    • +
    • Activities:
    • +
    • Creating and sharing an R Markdown document with annotated research data.
    • +
    • Building a comprehensive Jupyter Notebook with integrated Markdown annotations.
    • +
    +

    Section 3: Disseminating Research with Markdown and GitHub Pages

    +
      +
    • Objective: Teach researchers how to publish and manage Markdown-based documentation as web pages.
    • +
    • Topics Covered:
    • +
    • Setting up a GitHub repository for hosting documentation
    • +
    • Transforming Markdown files into web-friendly formats
    • +
    • Customizing web page layouts and themes
    • +
    • Advanced features using Jekyll
    • +
    • Version control and content management for documentation
    • +
    • Activities:
    • +
    • Publishing a research project documentation on GitHub Pages.
    • +
    • Applying custom themes and layouts to enhance online documentation.
    • +
    +

    Conclusion

    +
      +
    • Review of Markdown's role in enhancing research efficiency and clarity.
    • +
    • Encouraging the integration of Markdown into daily research activities for improved documentation and dissemination.
    • +
    +

    Additional Resources

    +
      +
    • Curated list of advanced Markdown tutorials, guides for GitHub Pages, and Jekyll resources for researchers.
    • +
    +

    Section 1: Mastering Markdown Syntax

    +

    1. Fundamentals of Text Formatting

    +
      +
    • Headings: Use # for different levels of headings.
    • +
    • +

      Heading Level 1

      +
    • +
    • +

      Heading Level 2

      +
    • +
    • +

      Heading Level 3

      +
    • +
    • +

Lists: Use asterisks for bulleted lists and numbers for ordered lists.

      +
    • +
    • Item 1
    • +
    • Item 2
        +
      • Subitem 2.1
      • +
      • Subitem 2.2
      • +
      +
    • +
    • +
        +
      1. First item
      2. +
      +
    • +
    • +
        +
      1. Second item
      2. +
      +
    • +
    • +

      Bold and Italics: Use asterisks or underscores.

      +
    • +
    • Bold Text
    • +
    • Italic Text
    • +
    +

    2. Advanced Structures

    +
      +
    • Tables: Create tables using dashes and pipes.
    • +
    • + + + + + + + + + + + + + + + + + + + + +
      Header 1Header 2Header 3
      Row 1DataData
      Row 2DataData
      +
    • +
    • +

      Add a ":"" to change text justification. Here the : is added on the left for left justification. + | Header 1 | Header 2 | Header 3 | + |---------:|--------- |----------| + | Row 1 | Data | Data | + | Row 2 | Data | Data |

      +
    • +
    • + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
      ANALYTICSENR
      EINVIRONMENT
      VELOPMOCOMUN
      EGAGELLAHCNE
      RATADEVELOPW
      EITSITNEICSR
      SOIGOLOIBHTL
      AHTLAEWEGNEL
      TITSITNEICSN
      IEESREHTOENI
      CSLLAHCEGLAN
      EGALLEHCNEIC
      +
    • +
    • +

If you hit the boundaries of Markdown's capabilities, you can add HTML directly. Remember, this entire exercise ultimately translates to HTML.

      +
    • +
    +

    Sudoku Puzzle +Fill in the blank cells with numbers from 1 to 9, such that each row, column, and 3x3 subgrid contains all the numbers from 1 to 9 without repetition.

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    537
    6195
    986
    863
    4831
    726
    628
    4195
    879
    + + + + + + + + + + +
    534678912
    672195348
    198342567
    859761423
    426853791
    713924856
    961537284
    287419635
    345286179
    + +
      +
    • Blockquotes: Use > for blockquotes.
    • +
    • +
      +

      This is a blockquote.

      +
      +
    • +
    • +
      +

      It can span multiple lines.

      +
      +
    • +
    +

    3. Integrating Multimedia

    +
      +
    • Images: Add images using the format ![alt text](image_url).
    • +
    • +

      Markdown Logo

      +
    • +
    • +

      Videos: Embed videos using HTML in Markdown.

      +
    • +
    • <iframe width="560" height="315" src="https://www.youtube.com/embed/dQw4w9WgXcQ" frameborder="0" allowfullscreen></iframe>
    • +
    +

    4. Diagrams with Mermaid

    +
      +
    • Flowcharts:
    • +
    +
        graph TD
    +    A[Start] --> B[Analyze Data]
    +    B --> C{Is Data Large?}
    +    C -->|Yes| D[Apply Big Data Solutions]
    +    C -->|No| E[Use Traditional Methods]
    +    D --> F[Machine Learning]
    +    E --> G[Statistical Analysis]
    +    F --> H{Model Accurate?}
    +    G --> I[Report Results]
    +    H -->|Yes| J[Deploy Model]
    +    H -->|No| K[Refine Model]
    +    J --> L[Monitor Performance]
    +    K --> F
    +    L --> M[End: Success]
    +    I --> N[End: Report Generated]
    +    style A fill:#f9f,stroke:#333,stroke-width:2px
    +    style M fill:#9f9,stroke:#333,stroke-width:2px
    +    style N fill:#9f9,stroke:#333,stroke-width:2px
    +
      +
    • +

      Mind Maps: +

          mindmap
      +  root((ESIIL))
      +    section Data Sources
      +      Satellite Imagery
      +        ::icon(fa fa-satellite)
      +      Remote Sensing Data
      +        Drones
      +        Aircraft
      +      On-ground Sensors
      +        Weather Stations
      +        IoT Devices
      +      Open Environmental Data
      +        Public Datasets
      +        ::icon(fa fa-database)
      +    section Research Focus
      +      Climate Change Analysis
      +        Ice Melt Patterns
      +        Sea Level Rise
      +      Biodiversity Monitoring
      +        Species Distribution
      +        Habitat Fragmentation
      +      Geospatial Analysis Techniques
      +        Machine Learning Models
      +        Predictive Analytics
      +    section Applications
      +      Conservation Strategies
      +        ::icon(fa fa-leaf)
      +      Urban Planning
      +        Green Spaces
      +      Disaster Response
      +        Flood Mapping
      +        Wildfire Tracking
      +    section Tools and Technologies
      +      GIS Software
      +        QGIS
      +        ArcGIS
      +      Programming Languages
      +        Python
      +        R
      +      Cloud Computing Platforms
      +        AWS
      +        Google Earth Engine
      +      Data Visualization
      +        D3.js
      +        Tableau

      +
    • +
    • +

      Timelines:

      +
    • +
    +
    gantt
    +    title ESIIL Year 2 Project Schedule
    +    dateFormat  YYYY-MM-DD
    +    section CI
+    Sovereign OASIS via private JupyterHubs :2024-08-01, 2024-10-30
    +    OASIS documentation                    :2024-09-15, 70d
    +    Data cube OASIS via cyverse account    :2024-09-15, 100d
    +    Integrate with ESIIL User Management system :2024-08-01, 2024-11-30
    +    Build badges to deploy DE from mkdoc   :2024-09-01, 2024-12-15
    +    Streamline Github ssh key management   :2024-10-01, 2024-12-31
    +    Cyverse support (R proxy link)         :2024-11-01, 2024-12-31
    +    Cyverse use summary and statistics     :2024-08-01, 2024-12-15
    +
    +    section CI Consultation and Education
    +    Conferences/Invited talks              :2024-08-01, 2024-12-31
    +    Office hours                           :2024-08-15, 2024-12-15
    +    Proposals                              :2024-09-01, 2024-11-15
    +    Private lessons                        :2024-09-15, 2024-11-30
    +    Pre-event trainings                    :2024-10-01, 2024-12-15
    +    Textbook development w/ education team :2024-08-01, 2024-12-15
    +    Train the trainers / group lessons     :2024-08-15, 2024-11-30
    +    Tribal engagement                      :2024-09-01, 2024-12-15
    +    Ethical Space training                 :2024-09-15, 2024-12-31
    +
    +    section CI Design and Build
    +    Data library (repository)              :2024-08-01, 2024-10-30
    +    Analytics library (repository)         :2024-08-15, 2024-11-15
    +    Containers (repository)                :2024-09-01, 2024-11-30
    +    Cloud infrastructure templates (repository) :2024-09-15, 2024-12-15
    +    Tribal resilience Data Cube            :2024-10-01, 2024-12-31
    +
    
    +%%{init: { 'logLevel': 'debug', 'theme': 'base', 'gitGraph': {'rotateCommitLabel': true}} }%%
    +gitGraph
    +  commit id: "Start from template"
    +  branch c1
    +  commit id: "Set up SSH key pair"
    +  commit id: "Modify _config.yml for GitHub Pages"
    +  commit id: "Initial website structure"
    +  commit id: "Add new markdown pages"
    +  commit id: "Update navigation tree"
    +  commit id: "Edit existing pages"
    +  commit id: "Delete old markdown pages"
    +  commit id: "Finalize website updates"
    +  commit id: "Add new markdown pages"
    +  commit id: "Update navigation tree"
    +checkout c1
    +
    +  branch b1
    +
    +  commit
    +  commit
    +  checkout c1
    +  merge b1
    +
    %%{init: {"quadrantChart": {"chartWidth": 400, "chartHeight": 400}, "themeVariables": {"quadrant1TextFill": "#ff0000"} }}%%
    +quadrantChart
    +  x-axis Urgent --> Not Urgent
    +  y-axis Not Important --> "Important ❤"
    +  quadrant-1 Plan
    +  quadrant-2 Do
    +  quadrant-3 Delegate
    +  quadrant-4 Delete
    +
    timeline
    +    title Major Events in Environmental Science and Data Science
    +    section Environmental Science
    +        19th century : Foundations in Ecology and Conservation
    +        1962 : Publication of 'Silent Spring' by Rachel Carson
    +        1970 : First Earth Day
    +        1987 : Brundtland Report introduces Sustainable Development
    +        1992 : Rio Earth Summit
    +        2015 : Paris Agreement on Climate Change
    +    section Data Science
    +        1960s-1970s : Development of Database Management Systems
    +        1980s : Emergence of Data Warehousing
    +        1990s : Growth of the World Wide Web and Data Mining
    +        2000s : Big Data and Predictive Analytics
    +        2010s : AI and Machine Learning Revolution
    +        2020s : Integration of AI in Environmental Research
    +
    erDiagram
    +    CAR ||--o{ NAMED-DRIVER : allows
    +    CAR {
    +        string registrationNumber
    +        string make
    +        string model
    +    }
    +    PERSON ||--o{ NAMED-DRIVER : is
    +    PERSON {
    +        string firstName
    +        string lastName
    +        int age
    +    }
    +
    ---
    +config:
    +  sankey:
    +    showValues: false
    +---
    +sankey-beta
    +
    +NASA Data,Big Data Harmonization,100
    +    Satellite Imagery,Big Data Harmonization,80
    +    Open Environmental Data,Big Data Harmonization,70
    +    Remote Sensing Data,Big Data Harmonization,90
    +    Big Data Harmonization, Data Analysis and Integration,340
    +    Data Analysis and Integration,Climate Change Research,100
    +    Data Analysis and Integration,Biodiversity Monitoring,80
    +    Data Analysis and Integration,Geospatial Mapping,60
    +    Data Analysis and Integration,Urban Planning,50
    +    Data Analysis and Integration,Disaster Response,50
    +

    5. Interactive Elements

    • Hyperlinks: Use the format [link text](URL).
      • Google
      • Play Tetris
    • Embedding Interactive Content: Use HTML tags or specific platform embed codes.
      • <iframe src="https://example.com/interactive-content" width="600" height="400"></iframe>

    6. Math Notation

    +

    Markdown can be combined with LaTeX for mathematical notation, useful in environmental data science for expressing statistical distributions, coordinate systems, and more. This requires a Markdown renderer with LaTeX support (like MathJax or KaTeX).

    • Inline Math: Use single dollar signs for inline math expressions; here we use it to represent the normal distribution.

      Example: The probability density function of the normal distribution is given by \(f(x|\mu,\sigma) = \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}\).

    • Display Math: Use double dollar signs for standalone equations.

      Example:

      $$
      f(x|\mu,\sigma) = \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}
      $$

    • Common LaTeX Elements for Environmental Data Science:
      • Statistical Distributions:
        • Normal Distribution: \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2} for \(\frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}\)
        • Poisson Distribution: P(k; \lambda) = \frac{\lambda^k e^{-\lambda}}{k!} for \(P(k; \lambda) = \frac{\lambda^k e^{-\lambda}}{k!}\)
      • Coordinate Systems:
        • Spherical Coordinates: (r, \theta, \phi) for \((r, \theta, \phi)\)
        • Cartesian Coordinates: (x, y, z) for \((x, y, z)\)
      • Geospatial Equations:
        • Haversine Formula for Distance: a = \sin^2\left(\frac{\Delta\phi}{2}\right) + \cos(\phi_1)\cos(\phi_2)\sin^2\left(\frac{\Delta\lambda}{2}\right) for \(a = \sin^2\left(\frac{\Delta\phi}{2}\right) + \cos(\phi_1)\cos(\phi_2)\sin^2\left(\frac{\Delta\lambda}{2}\right)\)

    Note: The rendering of these equations as formatted math will depend on your Markdown viewer's LaTeX capabilities.

    +

    7. Effective Citations in Markdown

    +

    Inline Citations

    • Objective: Learn how to use inline citations in Markdown.
    • Example Usage:
      • Inline citation of a single work:
        • Some text with an inline citation. [@jones:envstudy:2020]
      • Inline citation with specific page or section:
        • More text with a specific section cited. [See @jones:envstudy:2020, §4.2]
      • Contrasting views:
        • Discussion of a topic with a contrasting view. [Contra @smith:climatechange:2019, p. 78]

    Footnote Citations

    • Objective: Understand how to use footnote citations in Markdown.
    • Example Usage:
      • Citing with a footnote:
        • Some statement in the text.[^1]
      • Multiple references to the same footnote:
        • Another statement referring to the same source.[^1]
      • A different citation:
        • Additional comment with a new citation.[^2]

    Creating Footnotes

    • Example Syntax:
      • [^1]: First reference details. Example: Emma Jones, "Environmental Study," Nature Journal, May 2020, https://nature-journal.com/envstudy2020
      • [^2]: Second reference details. Example: David Smith, "Climate Change Controversies," Science Daily, August 2019, https://sciencedaily.com/climatechange2019
    + + + + + + + + + + \ No newline at end of file diff --git a/trainings/training_two/training_two.md b/trainings/training_two/training_two.md new file mode 100644 index 0000000..9dbc8e0 --- /dev/null +++ b/trainings/training_two/training_two.md @@ -0,0 +1,468 @@ +# Markdown for the Modern Researcher at ESIIL + +??? Quote "Definitions" + + "Open Science is defined as an inclusive construct that combines various movements and practices aiming to make multilingual scientific knowledge openly available, accessible and reusable for everyone, to increase scientific collaborations and sharing of information for the benefits of science and society, and to open the processes of scientific knowledge creation, evaluation and communication to societal actors beyond the traditional scientific community." - [UNESCO Definition](https://www.unesco.org/en/natural-sciences/open-science){target=_blank} + + - [UNESCO's Recommendation on Open Science](https://unesdoc.unesco.org/ark:/48223/pf0000379949.locale=en){target=_blank} + + "Open Science is the movement to make scientific research (including publications, data, physical samples, and software) and its dissemination accessible to all levels of society, amateur or professional..." [ :material-wikipedia: Wikipedia definition](https://en.wikipedia.org/wiki/Open_science){target=_blank} + + Open and Collaborative Science Network's [Open Science Manifesto](https://ocsdnet.org/manifesto/open-science-manifesto/){target=_blank} + + ??? Example "Six Pillars :material-pillar: of Open Science" + + **:material-pillar: Open Access Publications** + + **:material-pillar: Open Data** + + **:material-pillar: Open Educational Resources** + + **:material-pillar: Open Methodology** + + **:material-pillar: Open Peer Review** + + **:material-pillar: Open Source Software** + + ??? Question "Wait, how many pillars :material-pillar: of Open Science Really Are There?" + + The number can be from [4 :material-pillar:](https://narratives.insidehighered.com/four-pillars-of-open-science/){target=_blank} to [8 :material-pillar:](https://www.ucl.ac.uk/library/research-support/open-science/8-pillars-open-science){target=_blank} + + ??? Tip "Foster Open Science Diagram" + + [![foster](https://www.fosteropenscience.eu/themes/fosterstrap/images/taxonomies/os_taxonomy.png)](https://www.fosteropenscience.eu/resources){target=_blank} + + Graphic by [Foster Open Science](https://www.fosteropenscience.eu/){target=_blank} + +## Introduction + +- Overview of Markdown's relevance and utility in modern research. +- How Markdown streamlines documentation in diverse scientific and coding environments. + +## Section 1: Mastering Markdown Syntax + +- **Objective:** Equip researchers with a thorough understanding of Markdown syntax and its diverse applications. +- **Topics Covered:** + - Fundamentals of Text Formatting (headings, lists, bold, italics) + - Advanced Structures (tables, blockquotes) + - Integrating Multimedia (image and video links) + - Diagrams with Mermaid (creating flowcharts, mind maps, timelines) + - Interactive Elements (hyperlinks, embedding interactive content) +- **Activities:** + - Crafting a Markdown document with various formatting elements. + - Developing diagrams using Mermaid for research presentations. + - Embedding multimedia elements in a Markdown document for enhanced communication. + +## Section 2: Markdown in Research Tools + +- **Objective:** Showcase the integration of Markdown in RStudio and Jupyter Notebooks for scientific documentation. 
+- **Topics Covered:** + - Implementing Markdown in RStudio (R Markdown, knitting to HTML/PDF) + - Utilizing Markdown in Jupyter Notebooks (code and Markdown cells) + - Best practices for documenting research code + - Including code outputs and visualizations in documentation +- **Activities:** + - Creating and sharing an R Markdown document with annotated research data. + - Building a comprehensive Jupyter Notebook with integrated Markdown annotations. + +## Section 3: Disseminating Research with Markdown and GitHub Pages + +- **Objective:** Teach researchers how to publish and manage Markdown-based documentation as web pages. +- **Topics Covered:** + - Setting up a GitHub repository for hosting documentation + - Transforming Markdown files into web-friendly formats + - Customizing web page layouts and themes + - Advanced features using Jekyll + - Version control and content management for documentation +- **Activities:** + - Publishing a research project documentation on GitHub Pages. + - Applying custom themes and layouts to enhance online documentation. + +## Conclusion + +- Review of Markdown's role in enhancing research efficiency and clarity. +- Encouraging the integration of Markdown into daily research activities for improved documentation and dissemination. + +## Additional Resources + +- Curated list of advanced Markdown tutorials, guides for GitHub Pages, and Jekyll resources for researchers. + + + + +## Section 1: Mastering Markdown Syntax + +### 1. Fundamentals of Text Formatting + +- **Headings**: Use `#` for different levels of headings. + - # Heading Level 1 + - ## Heading Level 2 + - ### Heading Level 3 + +- **Lists**: Bulleted lists use asterisks, numbers for ordered lists. + - Item 1 + - Item 2 + - Subitem 2.1 + - Subitem 2.2 + - 1. First item + - 2. Second item + +- **Bold and Italics**: Use asterisks or underscores. + - **Bold Text** + - *Italic Text* + +### 2. Advanced Structures + +- **Tables**: Create tables using dashes and pipes. + - | Header 1 | Header 2 | Header 3 | + |----------|----------|----------| + | Row 1 | Data | Data | + | Row 2 | Data | Data | + - Add a ":"" to change text justification. Here the : is added on the left for left justification. + | Header 1 | Header 2 | Header 3 | + |---------:|--------- |----------| + | Row 1 | Data | Data | + | Row 2 | Data | Data | + + - | | | | | | | | | | | | | + |---|---|---|---|---|---|---|---|---|---|---|---| + | A | N | A | L | Y | T | I | C | S | E | N | R | + | E | I | N | V | I | R | O | N | M | E | N | T | + | V | E | L | O | P | M | O | C | O | M | U | N | + | E | G | A | G | E | L | L | A | H | C | N | E | + | R | A | T | A | D | E | V | E | L | O | P | W | + | E | I | T | S | I | T | N | E | I | C | S | R | + | S | O | I | G | O | L | O | I | B | H | T | L | + | A | H | T | L | A | E | W | E | G | N | E | L | + | T | I | T | S | I | T | N | E | I | C | S | N | + | I | E | E | S | R | E | H | T | O | E | N | I | + | C | S | L | L | A | H | C | E | G | L | A | N | + | E | G | A | L | L | E | H | C | N | E | I | C | + + - If you hit the boundaries of Markdown's capabilities, you can start to add html directly. Remember, this entire exercisse is to translate to html. + +**Sudoku Puzzle** +Fill in the blank cells with numbers from 1 to 9, such that each row, column, and 3x3 subgrid contains all the numbers from 1 to 9 without repetition. 
+ +| | | | | | | | | | +|---|---|---|---|---|---|---|---|---| +| 5 | 3 | | | 7 | | | | | +| 6 | | | 1 | 9 | 5 | | | | +| | 9 | 8 | | | | | 6 | | +| 8 | | | | 6 | | | | 3 | +| 4 | | | 8 | | 3 | | | 1 | +| 7 | | | | 2 | | | | 6 | +| | 6 | | | | | 2 | 8 | | +| | | | 4 | 1 | 9 | | | 5 | +| | | | | 8 | | | 7 | 9 | + + + + + + + + + + + + + + +
    534678912
    672195348
    198342567
    859761423
    426853791
    713924856
    961537284
    287419635
    345286179
    + + + + + +- **Blockquotes**: Use `>` for blockquotes. + - > This is a blockquote. + - > It can span multiple lines. + +### 3. Integrating Multimedia + +- **Images**: Add images using the format `![alt text](image_url)`. + - ![Markdown Logo](https://example.com/markdown-logo.png) + +- **Videos**: Embed videos using HTML in Markdown. + - `` + +### 4. Diagrams with Mermaid + +- **Flowcharts**: + +```mermaid + graph TD + A[Start] --> B[Analyze Data] + B --> C{Is Data Large?} + C -->|Yes| D[Apply Big Data Solutions] + C -->|No| E[Use Traditional Methods] + D --> F[Machine Learning] + E --> G[Statistical Analysis] + F --> H{Model Accurate?} + G --> I[Report Results] + H -->|Yes| J[Deploy Model] + H -->|No| K[Refine Model] + J --> L[Monitor Performance] + K --> F + L --> M[End: Success] + I --> N[End: Report Generated] + style A fill:#f9f,stroke:#333,stroke-width:2px + style M fill:#9f9,stroke:#333,stroke-width:2px + style N fill:#9f9,stroke:#333,stroke-width:2px +``` + +- **Mind Maps**: +```mermaid + mindmap + root((ESIIL)) + section Data Sources + Satellite Imagery + ::icon(fa fa-satellite) + Remote Sensing Data + Drones + Aircraft + On-ground Sensors + Weather Stations + IoT Devices + Open Environmental Data + Public Datasets + ::icon(fa fa-database) + section Research Focus + Climate Change Analysis + Ice Melt Patterns + Sea Level Rise + Biodiversity Monitoring + Species Distribution + Habitat Fragmentation + Geospatial Analysis Techniques + Machine Learning Models + Predictive Analytics + section Applications + Conservation Strategies + ::icon(fa fa-leaf) + Urban Planning + Green Spaces + Disaster Response + Flood Mapping + Wildfire Tracking + section Tools and Technologies + GIS Software + QGIS + ArcGIS + Programming Languages + Python + R + Cloud Computing Platforms + AWS + Google Earth Engine + Data Visualization + D3.js + Tableau +``` + +- **Timelines**: + +```mermaid +gantt + title ESIIL Year 2 Project Schedule + dateFormat YYYY-MM-DD + section CI + Sovereign OASIS via private jupiterhubs :2024-08-01, 2024-10-30 + OASIS documentation :2024-09-15, 70d + Data cube OASIS via cyverse account :2024-09-15, 100d + Integrate with ESIIL User Management system :2024-08-01, 2024-11-30 + Build badges to deploy DE from mkdoc :2024-09-01, 2024-12-15 + Streamline Github ssh key management :2024-10-01, 2024-12-31 + Cyverse support (R proxy link) :2024-11-01, 2024-12-31 + Cyverse use summary and statistics :2024-08-01, 2024-12-15 + + section CI Consultation and Education + Conferences/Invited talks :2024-08-01, 2024-12-31 + Office hours :2024-08-15, 2024-12-15 + Proposals :2024-09-01, 2024-11-15 + Private lessons :2024-09-15, 2024-11-30 + Pre-event trainings :2024-10-01, 2024-12-15 + Textbook development w/ education team :2024-08-01, 2024-12-15 + Train the trainers / group lessons :2024-08-15, 2024-11-30 + Tribal engagement :2024-09-01, 2024-12-15 + Ethical Space training :2024-09-15, 2024-12-31 + + section CI Design and Build + Data library (repository) :2024-08-01, 2024-10-30 + Analytics library (repository) :2024-08-15, 2024-11-15 + Containers (repository) :2024-09-01, 2024-11-30 + Cloud infrastructure templates (repository) :2024-09-15, 2024-12-15 + Tribal resilience Data Cube :2024-10-01, 2024-12-31 +``` + +```mermaid + +%%{init: { 'logLevel': 'debug', 'theme': 'base', 'gitGraph': {'rotateCommitLabel': true}} }%% +gitGraph + commit id: "Start from template" + branch c1 + commit id: "Set up SSH key pair" + commit id: "Modify _config.yml for GitHub Pages" + commit id: "Initial website 
structure" + commit id: "Add new markdown pages" + commit id: "Update navigation tree" + commit id: "Edit existing pages" + commit id: "Delete old markdown pages" + commit id: "Finalize website updates" + commit id: "Add new markdown pages" + commit id: "Update navigation tree" +checkout c1 + + branch b1 + + commit + commit + checkout c1 + merge b1 +``` + +```mermaid +%%{init: {"quadrantChart": {"chartWidth": 400, "chartHeight": 400}, "themeVariables": {"quadrant1TextFill": "#ff0000"} }}%% +quadrantChart + x-axis Urgent --> Not Urgent + y-axis Not Important --> "Important ❤" + quadrant-1 Plan + quadrant-2 Do + quadrant-3 Delegate + quadrant-4 Delete +``` + + +```mermaid +timeline + title Major Events in Environmental Science and Data Science + section Environmental Science + 19th century : Foundations in Ecology and Conservation + 1962 : Publication of 'Silent Spring' by Rachel Carson + 1970 : First Earth Day + 1987 : Brundtland Report introduces Sustainable Development + 1992 : Rio Earth Summit + 2015 : Paris Agreement on Climate Change + section Data Science + 1960s-1970s : Development of Database Management Systems + 1980s : Emergence of Data Warehousing + 1990s : Growth of the World Wide Web and Data Mining + 2000s : Big Data and Predictive Analytics + 2010s : AI and Machine Learning Revolution + 2020s : Integration of AI in Environmental Research +``` + + + + +```mermaid +erDiagram + CAR ||--o{ NAMED-DRIVER : allows + CAR { + string registrationNumber + string make + string model + } + PERSON ||--o{ NAMED-DRIVER : is + PERSON { + string firstName + string lastName + int age + } +``` + +```mermaid +--- +config: + sankey: + showValues: false +--- +sankey-beta + +NASA Data,Big Data Harmonization,100 + Satellite Imagery,Big Data Harmonization,80 + Open Environmental Data,Big Data Harmonization,70 + Remote Sensing Data,Big Data Harmonization,90 + Big Data Harmonization, Data Analysis and Integration,340 + Data Analysis and Integration,Climate Change Research,100 + Data Analysis and Integration,Biodiversity Monitoring,80 + Data Analysis and Integration,Geospatial Mapping,60 + Data Analysis and Integration,Urban Planning,50 + Data Analysis and Integration,Disaster Response,50 +``` + + +### 5. Interactive Elements + +- **Hyperlinks**: Use the format `[link text](URL)`. + - [Google](https://www.google.com) + - [Play Tetris](https://tetris.com/play-tetris) + +- **Embedding Interactive Content**: Use HTML tags or specific platform embed codes. + - `` + + + + +### 6. Math Notation + +Markdown can be combined with LaTeX for mathematical notation, useful in environmental data science for expressing statistical distributions, coordinate systems, and more. This requires a Markdown renderer with LaTeX support (like MathJax or KaTeX). + +- **Inline Math**: Use single dollar signs for inline math expressions. Representing the normal distribution. + + Example: The probability density function of the normal distribution is given by $f(x|\mu,\sigma) = \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}$.` + +- **Display Math**: Use double dollar signs for standalone equations. 
+ + Example: + $$ + f(x|\mu,\sigma) = \frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2} + $$ + + +- **Common LaTeX Elements for Environmental Data Science**: + - **Statistical Distributions**: + - Normal Distribution: `\frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}` for $\frac{1}{\sigma\sqrt{2\pi}}e^{-\frac{1}{2}\left(\frac{x-\mu}{\sigma}\right)^2}$ + - Poisson Distribution: `P(k; \lambda) = \frac{\lambda^k e^{-\lambda}}{k!}` for $P(k; \lambda) = \frac{\lambda^k e^{-\lambda}}{k!}$ + - **Coordinate Systems**: + - Spherical Coordinates: `(r, \theta, \phi)` for $(r, \theta, \phi)$ + - Cartesian Coordinates: `(x, y, z)` for $(x, y, z)$ + - **Geospatial Equations**: + - Haversine Formula for Distance: `a = \sin^2\left(\frac{\Delta\phi}{2}\right) + \cos(\phi_1)\cos(\phi_2)\sin^2\left(\frac{\Delta\lambda}{2}\right)` for $a = \sin^2\left(\frac{\Delta\phi}{2}\right) + \cos(\phi_1)\cos(\phi_2)\sin^2\left(\frac{\Delta\lambda}{2}\right)$ + +Note: The rendering of these equations as formatted math will depend on your Markdown viewer's LaTeX capabilities. + + + +### 7. Effective Citations in Markdown + +## Inline Citations + +- **Objective:** Learn how to use inline citations in Markdown. +- **Example Usage:** + - Inline citation of a single work: + - Some text with an inline citation. [@jones:envstudy:2020] + - Inline citation with specific page or section: + - More text with a specific section cited. [See @jones:envstudy:2020, §4.2] + - Contrasting views: + - Discussion of a topic with a contrasting view. [Contra @smith:climatechange:2019, p. 78] + +## Footnote Citations + +- **Objective:** Understand how to use footnote citations in Markdown. +- **Example Usage:** + - Citing with a footnote: + - Some statement in the text.[^1] + - Multiple references to the same footnote: + - Another statement referring to the same source.[^1] + - A different citation: + - Additional comment with a new citation.[^2] + +## Creating Footnotes + +- **Example Syntax:** + - [^1]: First reference details. Example: Emma Jones, "Environmental Study," Nature Journal, May 2020, https://nature-journal.com/envstudy2020. + - [^2]: Second reference details. Example: David Smith, "Climate Change Controversies," Science Daily, August 2019, https://sciencedaily.com/climatechange2019. + + diff --git a/virtual-meetings/index.html b/virtual-meetings/index.html new file mode 100644 index 0000000..5088279 --- /dev/null +++ b/virtual-meetings/index.html @@ -0,0 +1,1464 @@ + + + + + + + + + + + + + + + + + + + + + + Overview - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Pre-Summit Virtual Meetings

    +

    There are three virtual meetings associated with the 2024 ESIIL Summit.

    +

    Virtual Meeting 1

    +

    Head in the Clouds: Navigating the Basics of Cloud Computing

    +

    Date: April 24, 2024

    +

    Time: 12:00-2:00 PM MST

    +

    Virtual Meeting 1 Recording: https://www.youtube.com/watch?v=JxVPjDtIBmU

    +

    Important Note: Please set up a GitHub account and a Cyverse account prior to this training.

    +

    Virtual Meeting 2

    +

    Feet on the ground: Collaborating with Other People Using Cloud Computing

    +

    Date: May 1, 2024

    +

    Time: 12:00-2:00 PM MST

    +

    Virtual Meeting 2 Recording: https://www.youtube.com/watch?v=213C7faZVFQ

    +

    Virtual Meeting 3

    +

    Voices in Concert: Cultural Intelligence, the Art of Team Science, and Community Skills

    +

    Date: May 6, 2024

    +

    Time: 9-11 AM MST

    +

    Virtual Meeting 3 Recording: https://youtu.be/Ea21i3do9sA

    +

    Science of Team Science Slides

    +

    Community Skills Slides

    + + + + + + + + + + \ No newline at end of file diff --git a/virtual-meetings/virtual-meetings.md b/virtual-meetings/virtual-meetings.md new file mode 100644 index 0000000..69828e6 --- /dev/null +++ b/virtual-meetings/virtual-meetings.md @@ -0,0 +1,42 @@ +# Pre-Summit Virtual Meetings + + +There are three virtual meetings associated with the 2024 ESIIL Summit. +## Virtual Meeting 1 + +**Head in the Clouds: Navigating the Basics of Cloud Computing** + +Date: April 24, 2024 + +Time: 12:00-2:00 PM MST + +**Virtual Meeting 1 Recording**: [https://www.youtube.com/watch?v=JxVPjDtIBmU](https://www.youtube.com/watch?v=JxVPjDtIBmU) + +**Important Note:** Please set up a GitHub account and a Cyverse account prior to this training. + +## Virtual Meeting 2 + +**Feet on the ground: Collaborating with Other People Using Cloud Computing** + +Date: May 1, 2024 + +Time: 12:00-2:00 PM MST + + +**Virtual Meeting 2 Recording**: [https://www.youtube.com/watch?v=213C7faZVFQ](https://www.youtube.com/watch?v=213C7faZVFQ) + + + +## Virtual Meeting 3 + +**Voices in Concert: Cultural Intelligence, the Art of Team Science, and Community Skills** + +Date: May 6, 2024 + +Time: 9-11 AM MST + +**Virtual Meeting 3 Recording**: [https://youtu.be/Ea21i3do9sA](https://youtu.be/Ea21i3do9sA) + +**[Science of Team Science Slides](https://o365coloradoedu.sharepoint.com/:b:/s/CIRES-EarthLab/ESBIjItMBfFIsl_ZELF1t5kBjEpK09UK8fiONZ-fnKuxuw?e=MDZdKg)** + +**[Community Skills Slides](https://o365coloradoedu.sharepoint.com/:b:/s/CIRES-EarthLab/ESBIjItMBfFIsl_ZELF1t5kBjEpK09UK8fiONZ-fnKuxuw?e=lcfRbJ)** diff --git a/worksheet_redlining/index.html b/worksheet_redlining/index.html new file mode 100644 index 0000000..b1b31c7 --- /dev/null +++ b/worksheet_redlining/index.html @@ -0,0 +1,2973 @@ + + + + + + + + + + + + + + + + + + + + + + Redlining - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Redlining

    +

    Exploring the Impact of Historical Redlining on Urban Greenspace: A Collaborative Examination of Maps, Justice, and Resilience

    +

    Introduction

    +

    This group exploration delves into the long-term impacts of historical redlining on urban greenspace, emphasizing the powerful role of maps in shaping environmental and social landscapes. By drawing on the research by Nardone et al. (2021), you will collaboratively investigate how discriminatory practices encoded in maps have led to persistent disparities in urban settings. This exploration aims to uncover the resilience of communities in adapting to these entrenched injustices and to foster a deeper understanding of how mapping can serve both as a tool of exclusion and as a means for promoting social equity.

    +

    [Image: 1938 Map of Atlanta uses colors as grades for neighborhoods. The red swaths identify each area with large African-American populations that were deemed “less safe.”]

    +

    Understanding Redlining as a Systemic Disturbance

    +

    Redlining originated in the 1930s as a discriminatory practice where the Home Owners’ Loan Corporation (HOLC) systematically denied mortgages or offered unfavorable terms based on racial and ethnic compositions. This methodical exclusion, executed through maps that color-coded “risky” investment areas in red, marked minority-populated areas, denying them crucial investment and development opportunities and initiating a profound and lasting disturbance in the urban fabric.

    +

    Maps serve as powerful tools beyond navigation; they communicate and enforce control. By defining neighborhood boundaries through redlining, HOLC maps not only mirrored societal biases but also perpetuated and embedded them into the urban landscape. This manipulation of geographic data set a trajectory that limited economic growth, dictated the allocation of services, and influenced the development or deterioration of community infrastructure.

    +

    Figure 1: 1938 Map of Atlanta uses colors as grades for neighborhoods. The red swaths identify each area with large African-American populations that were deemed “less safe.”

    +
    [![](../assets/redlining/georectified-thumbnail.png)](https://storymaps.arcgis.com/stories/0f58d49c566b486482b3e64e9e5f7ac9)

    ArcGIS Story Map

    Explore the Story Map: Click on the image above to explore the interactive story map about [subject of the story map].

    +

    Resilience and Adaptation in Urban Environments

    +

    The legacy of redlining presents both a challenge and an opportunity for resilience and adaptation. Economically and socially, redlining entrenched cycles of poverty and racial segregation, creating a resilient wealth gap that has been difficult to dismantle. Environmentally, the neighborhoods targeted by redlining continue to face significant challenges: they generally feature less greenspace, suffer from higher pollution levels, and are more vulnerable to the impacts of climate change. These factors compound the health and wellness challenges faced by residents.

    +

    Despite these adversities, urban communities have continually demonstrated remarkable resilience. Adaptation strategies, such as community-led green initiatives, urban agriculture, and grassroots activism, have emerged as responses to these systemic disturbances. By enhancing green infrastructure and advocating for equitable environmental policies, these communities strive to increase their resilience against both historical inequities and environmental challenges.

    +
    [![](https://img.youtube.com/vi/O5FBJyqfoLM/hqdefault.jpg)](https://youtu.be/O5FBJyqfoLM)

    Watch the video

    Video Title: Exploring the Impacts of Historical Redlining on Urban Development
    Description: Click on the image above to watch a video that delves into the consequences of historical redlining and its ongoing impact on urban environments. This educational piece offers insights into how such discriminatory practices have shaped cities and what can be learned from them.

    +

    The following group exercise will not only uncover the impact of redlining on urban greenspace but also highlight the adaptive strategies developed in response to this enduring disturbance. Through mapping and analysis, we aim to illustrate the powerful role that geographic data can play in understanding and fostering urban resilience and social equity.

    +

    References

    +
    • Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. (2021). Redlines and Greenspace: The Relationship between Historical Redlining and 2010 Greenspace across the United States. Environmental Health Perspectives, 129(1), 017006. DOI:10.1289/EHP7495.
    • Hoffman, J. S., Shandas, V., & Pendleton, N. (2020). The Effects of Historical Housing Policies on Resident Exposure to Intra-Urban Heat: A Study of 108 US Urban Areas. Climate, 8(1), 12. DOI:10.3390/cli8010012.

    Goals of this group activity

    +

    The primary objectives of this tutorial are:

    1. To practice coding in CyVerse.
    2. To analyze the relationship between HOLC grades and the presence of urban greenspace.
    3. To understand how historic policies continue to affect the spatial distribution of environmental amenities.

    +

    Part 1: Accessing and Visualizing Historic Redlining Data

    +

    We will begin by accessing HOLC maps from the Mapping Inequality project and overlaying this data with modern geographic datasets to visualize the historical impact on contemporary urban landscapes.

    +

    Data Acquisition

    +
    • Download HOLC map shapefiles from the University of Richmond’s Mapping Inequality Project.
    • Utilize satellite imagery and other geospatial data to map current greenspace using the normalized difference vegetation index (NDVI); a minimal NDVI sketch follows this list.
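    Below is a minimal sketch of the NDVI step, assuming you have already downloaded a red and a near-infrared band for your study area; the file names are placeholders rather than files provided by this tutorial.

```r
library(terra)

# Placeholder file names -- substitute the red and near-infrared bands you downloaded
red <- rast("landsat_red_band.tif")
nir <- rast("landsat_nir_band.tif")

# NDVI = (NIR - Red) / (NIR + Red); values range from -1 to 1,
# with higher values indicating denser green vegetation
ndvi <- (nir - red) / (nir + red)

plot(ndvi, main = "NDVI")
writeRaster(ndvi, "ndvi.tif", overwrite = TRUE)
```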

    Analysis Methodology

    • Replicate the approach used by Nardone et al. to calculate NDVI values for each HOLC neighborhood, assessing greenspace as a health-promoting resource (a zonal-summary sketch follows this list).
    • Employ statistical methods such as propensity score matching to control for confounding variables and estimate the true impact of HOLC grades on urban greenspace.
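    As a rough illustration of the NDVI-per-neighborhood step (not the tutorial's own workflow), the sketch below summarizes mean NDVI within each HOLC polygon. It assumes the `ndvi` raster from the previous sketch and the `denver_redlining` polygons streamed later in this worksheet, both in the same coordinate reference system; confounder adjustment is only noted in a comment, with packages such as MatchIt being one option.

```r
library(terra)
library(sf)
library(dplyr)

# Assumes `ndvi` (SpatRaster) and `denver_redlining` (sf polygons with a `grade` column),
# both sharing the same coordinate reference system
holc_vect <- vect(denver_redlining)

# Mean NDVI within each HOLC polygon
ndvi_means <- terra::extract(ndvi, holc_vect, fun = mean, na.rm = TRUE)

# Attach the polygon means and summarize greenspace by HOLC grade
ndvi_by_grade <- denver_redlining %>%
  st_drop_geometry() %>%
  mutate(mean_ndvi = ndvi_means[, 2]) %>%
  filter(grade != "") %>%
  group_by(grade) %>%
  summarise(mean_ndvi = mean(mean_ndvi, na.rm = TRUE))

ndvi_by_grade

# Confounder adjustment (e.g., propensity score matching with the MatchIt package)
# would follow here, matching neighborhoods on covariates before comparing grades.
```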
    + +R libraries we use in this analysis + + +
    if (!requireNamespace("tidytext", quietly = TRUE)) {
    +  install.packages("tidytext")
    +}
    +library(tidytext)
    +## Warning: package 'tidytext' was built under R version 4.3.2
    +library(sf)
    +## Warning: package 'sf' was built under R version 4.3.2
    +## Linking to GEOS 3.11.0, GDAL 3.5.3, PROJ 9.1.0; sf_use_s2() is TRUE
    +library(ggplot2)
    +## Warning: package 'ggplot2' was built under R version 4.3.2
    +library(ggthemes)
    +## Warning: package 'ggthemes' was built under R version 4.3.2
    +library(dplyr)
    +## 
    +## Attaching package: 'dplyr'
    +## The following objects are masked from 'package:stats':
    +## 
    +##     filter, lag
    +## The following objects are masked from 'package:base':
    +## 
    +##     intersect, setdiff, setequal, union
    +library(rstac)
    +## Warning: package 'rstac' was built under R version 4.3.2
    +library(gdalcubes)
    +## Warning: package 'gdalcubes' was built under R version 4.3.2
    +library(gdalUtils)
    +## Please note that rgdal will be retired during October 2023,
    +## plan transition to sf/stars/terra functions using GDAL and PROJ
    +## at your earliest convenience.
    +## See https://r-spatial.org/r/2023/05/15/evolution4.html and https://github.com/r-spatial/evolution
    +## rgdal: version: 1.6-7, (SVN revision 1203)
    +## Geospatial Data Abstraction Library extensions to R successfully loaded
    +## Loaded GDAL runtime: GDAL 3.5.3, released 2022/10/21
    +## Path to GDAL shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/rgdal/gdal
    +##  GDAL does not use iconv for recoding strings.
    +## GDAL binary built with GEOS: TRUE 
    +## Loaded PROJ runtime: Rel. 9.1.0, September 1st, 2022, [PJ_VERSION: 910]
    +## Path to PROJ shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/gdalcubes/proj
    +## PROJ CDN enabled: FALSE
    +## Linking to sp version:1.6-1
    +## To mute warnings of possible GDAL/OSR exportToProj4() degradation,
    +## use options("rgdal_show_exportToProj4_warnings"="none") before loading sp or rgdal.
    +## 
    +## Attaching package: 'gdalUtils'
    +## The following object is masked from 'package:sf':
    +## 
    +##     gdal_rasterize
    +library(gdalcubes)
    +library(colorspace)
    +library(terra)
    +## Warning: package 'terra' was built under R version 4.3.2
    +## terra 1.7.71
    +## 
    +## Attaching package: 'terra'
    +## The following object is masked from 'package:colorspace':
    +## 
    +##     RGB
    +## The following objects are masked from 'package:gdalcubes':
    +## 
    +##     animate, crop, size
    +library(tidyterra)
    +## 
    +## Attaching package: 'tidyterra'
    +## The following object is masked from 'package:stats':
    +## 
    +##     filter
    +library(basemapR)
    +library(tidytext)
    +library(ggwordcloud)
    +library(osmextract)
    +## Data (c) OpenStreetMap contributors, ODbL 1.0. https://www.openstreetmap.org/copyright.
    +## Check the package website, https://docs.ropensci.org/osmextract/, for more details.
    +library(sf)
    +library(ggplot2)
    +library(ggthemes)
    +library(glue)
    +## 
    +## Attaching package: 'glue'
    +## The following object is masked from 'package:terra':
    +## 
    +##     trim
    +
    +library(purrr)
    +
    + +
    +
    + +FUNCTION: List cities where HOLC data are available + + +
    # Function to get a list of unique cities and states from the redlining data
    +get_city_state_list_from_redlining_data <- function() {
    +  # URL to the GeoJSON data
    +  url <- "https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson"
    +
    +  # Read the GeoJSON file into an sf object
    +  redlining_data <- tryCatch({
    +    read_sf(url)
    +  }, error = function(e) {
    +    stop("Error reading GeoJSON data: ", e$message)
    +  })
    +
    +  # Check for the existence of 'city' and 'state' columns
    +  if (!all(c("city", "state") %in% names(redlining_data))) {
    +    stop("The required columns 'city' and/or 'state' do not exist in the data.")
    +  }
    +
    +  # Extract a unique list of city and state pairs without the geometries
    +  city_state_df <- redlining_data %>%
    +    select(city, state) %>%
    +    st_set_geometry(NULL) %>%  # Drop the geometry to avoid issues with invalid shapes
    +    distinct(city, state) %>%
    +    arrange(state, city )  # Arrange the list alphabetically by state, then by city
    +
    +  # Return the dataframe of unique city-state pairs
    +  return(city_state_df)
    +}
    +
    + +
    +
    + +Stream list of available HOLC cities + + +
    #Retrieve the list of cities and states
    +city_state_list <- get_city_state_list_from_redlining_data()
    +
    +knitr::kable(city_state_list, format = "markdown")
    +
    + +| city | state | +|:---------------------------------|:------| +| Birmingham | AL | +| Mobile | AL | +| Montgomery | AL | +| Arkadelphia | AR | +| Batesville | AR | +| Camden | AR | +| Conway | AR | +| El Dorado | AR | +| Fort Smith | AR | +| Little Rock | AR | +| Russellville | AR | +| Texarkana | AR | +| Phoenix | AZ | +| Fresno | CA | +| Los Angeles | CA | +| Oakland | CA | +| Sacramento | CA | +| San Diego | CA | +| San Francisco | CA | +| San Jose | CA | +| Stockton | CA | +| Boulder | CO | +| Colorado Springs | CO | +| Denver | CO | +| Fort Collins | CO | +| Fort Morgan | CO | +| Grand Junction | CO | +| Greeley | CO | +| Longmont | CO | +| Pueblo | CO | +| Bridgeport and Fairfield | CT | +| Hartford | CT | +| New Britain | CT | +| New Haven | CT | +| Stamford, Darien, and New Canaan | CT | +| Waterbury | CT | +| Crestview | FL | +| Daytona Beach | FL | +| DeFuniak Springs | FL | +| DeLand | FL | +| Jacksonville | FL | +| Miami | FL | +| New Smyrna | FL | +| Orlando | FL | +| Pensacola | FL | +| St. Petersburg | FL | +| Tampa | FL | +| Atlanta | GA | +| Augusta | GA | +| Columbus | GA | +| Macon | GA | +| Savannah | GA | +| Boone | IA | +| Cedar Rapids | IA | +| Council Bluffs | IA | +| Davenport | IA | +| Des Moines | IA | +| Dubuque | IA | +| Sioux City | IA | +| Waterloo | IA | +| Aurora | IL | +| Chicago | IL | +| Decatur | IL | +| East St. Louis | IL | +| Joliet | IL | +| Peoria | IL | +| Rockford | IL | +| Springfield | IL | +| Evansville | IN | +| Fort Wayne | IN | +| Indianapolis | IN | +| Lake Co. Gary | IN | +| Muncie | IN | +| South Bend | IN | +| Terre Haute | IN | +| Atchison | KS | +| Junction City | KS | +| Topeka | KS | +| Wichita | KS | +| Covington | KY | +| Lexington | KY | +| Louisville | KY | +| New Orleans | LA | +| Shreveport | LA | +| Arlington | MA | +| Belmont | MA | +| Boston | MA | +| Braintree | MA | +| Brockton | MA | +| Brookline | MA | +| Cambridge | MA | +| Chelsea | MA | +| Dedham | MA | +| Everett | MA | +| Fall River | MA | +| Fitchburg | MA | +| Haverhill | MA | +| Holyoke Chicopee | MA | +| Lawrence | MA | +| Lexington | MA | +| Lowell | MA | +| Lynn | MA | +| Malden | MA | +| Medford | MA | +| Melrose | MA | +| Milton | MA | +| Needham | MA | +| New Bedford | MA | +| Newton | MA | +| Pittsfield | MA | +| Quincy | MA | +| Revere | MA | +| Salem | MA | +| Saugus | MA | +| Somerville | MA | +| Springfield | MA | +| Waltham | MA | +| Watertown | MA | +| Winchester | MA | +| Winthrop | MA | +| Worcester | MA | +| Baltimore | MD | +| Augusta | ME | +| Boothbay | ME | +| Portland | ME | +| Sanford | ME | +| Waterville | ME | +| Battle Creek | MI | +| Bay City | MI | +| Detroit | MI | +| Flint | MI | +| Grand Rapids | MI | +| Jackson | MI | +| Kalamazoo | MI | +| Lansing | MI | +| Muskegon | MI | +| Pontiac | MI | +| Saginaw | MI | +| Austin | MN | +| Duluth | MN | +| Mankato | MN | +| Minneapolis | MN | +| Rochester | MN | +| St. Cloud | MN | +| St. Paul | MN | +| Staples | MN | +| Cape Girardeau | MO | +| Carthage | MO | +| Greater Kansas City | MO | +| Joplin | MO | +| Springfield | MO | +| St. Joseph | MO | +| St. 
Louis | MO | +| Jackson | MS | +| Asheville | NC | +| Charlotte | NC | +| Durham | NC | +| Elizabeth City | NC | +| Fayetteville | NC | +| Goldsboro | NC | +| Greensboro | NC | +| Hendersonville | NC | +| High Point | NC | +| New Bern | NC | +| Rocky Mount | NC | +| Statesville | NC | +| Winston-Salem | NC | +| Fargo | ND | +| Grand Forks | ND | +| Minot | ND | +| Williston | ND | +| Lincoln | NE | +| Omaha | NE | +| Manchester | NH | +| Atlantic City | NJ | +| Bergen Co. | NJ | +| Camden | NJ | +| Essex Co. | NJ | +| Hudson Co. | NJ | +| Monmouth | NJ | +| Passaic County | NJ | +| Perth Amboy | NJ | +| Trenton | NJ | +| Union Co. | NJ | +| Albany | NY | +| Binghamton-Johnson City | NY | +| Bronx | NY | +| Brooklyn | NY | +| Buffalo | NY | +| Elmira | NY | +| Jamestown | NY | +| Lower Westchester Co. | NY | +| Manhattan | NY | +| Niagara Falls | NY | +| Poughkeepsie | NY | +| Queens | NY | +| Rochester | NY | +| Schenectady | NY | +| Staten Island | NY | +| Syracuse | NY | +| Troy | NY | +| Utica | NY | +| Akron | OH | +| Canton | OH | +| Cleveland | OH | +| Columbus | OH | +| Dayton | OH | +| Hamilton | OH | +| Lima | OH | +| Lorain | OH | +| Portsmouth | OH | +| Springfield | OH | +| Toledo | OH | +| Warren | OH | +| Youngstown | OH | +| Ada | OK | +| Alva | OK | +| Enid | OK | +| Miami Ottawa County | OK | +| Muskogee | OK | +| Norman | OK | +| Oklahoma City | OK | +| South McAlester | OK | +| Tulsa | OK | +| Portland | OR | +| Allentown | PA | +| Altoona | PA | +| Bethlehem | PA | +| Chester | PA | +| Erie | PA | +| Harrisburg | PA | +| Johnstown | PA | +| Lancaster | PA | +| McKeesport | PA | +| New Castle | PA | +| Philadelphia | PA | +| Pittsburgh | PA | +| Wilkes-Barre | PA | +| York | PA | +| Pawtucket & Central Falls | RI | +| Providence | RI | +| Woonsocket | RI | +| Aiken | SC | +| Charleston | SC | +| Columbia | SC | +| Greater Anderson | SC | +| Greater Greenville | SC | +| Orangeburg | SC | +| Rock Hill | SC | +| Spartanburg | SC | +| Sumter | SC | +| Aberdeen | SD | +| Huron | SD | +| Milbank | SD | +| Mitchell | SD | +| Rapid City | SD | +| Sioux Falls | SD | +| Vermillion | SD | +| Watertown | SD | +| Chattanooga | TN | +| Elizabethton | TN | +| Erwin | TN | +| Greenville | TN | +| Johnson City | TN | +| Knoxville | TN | +| Memphis | TN | +| Nashville | TN | +| Amarillo | TX | +| Austin | TX | +| Beaumont | TX | +| Dallas | TX | +| El Paso | TX | +| Fort Worth | TX | +| Galveston | TX | +| Houston | TX | +| Port Arthur | TX | +| San Antonio | TX | +| Waco | TX | +| Wichita Falls | TX | +| Ogden | UT | +| Salt Lake City | UT | +| Bristol | VA | +| Danville | VA | +| Harrisonburg | VA | +| Lynchburg | VA | +| Newport News | VA | +| Norfolk | VA | +| Petersburg | VA | +| Phoebus | VA | +| Richmond | VA | +| Roanoke | VA | +| Staunton | VA | +| Bennington | VT | +| Brattleboro | VT | +| Burlington | VT | +| Montpelier | VT | +| Newport City | VT | +| Poultney | VT | +| Rutland | VT | +| Springfield | VT | +| St. Albans | VT | +| St. Johnsbury | VT | +| Windsor | VT | +| Seattle | WA | +| Spokane | WA | +| Tacoma | WA | +| Kenosha | WI | +| Madison | WI | +| Milwaukee Co. | WI | +| Oshkosh | WI | +| Racine | WI | +| Charleston | WV | +| Huntington | WV | +| Wheeling | WV | + +
    +
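    If you only need the cities for a particular state, the data frame returned above can be filtered directly; a small, optional example (Colorado shown here):

```r
# Show only Colorado cities with available HOLC maps
city_state_list %>%
  filter(state == "CO")
```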
    + +FUNCTION: Stream HOLC data from a city + + +
    # Function to load and filter redlining data by city
    +load_city_redlining_data <- function(city_name) {
    +  # URL to the GeoJSON data
    +  url <- "https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson"
    +
    +  # Read the GeoJSON file into an sf object
    +  redlining_data <- read_sf(url)
    +
    +  # Filter the data for the specified city and non-empty grades
    +
    +  city_redline <- redlining_data %>%
    +    filter(city == city_name )
    +
    +  # Return the filtered data
    +  return(city_redline)
    +}
    +
    + +
    +
    + +Stream HOLC data for Denver, CO + + +
    # Load redlining data for Denver
    +denver_redlining <- load_city_redlining_data("Denver")
    +knitr::kable(head(denver_redlining), format = "markdown")
    +
    + +| area_id | city | state | city_survey | cat | grade | label | res | com | ind | fill | GEOID10 | GISJOIN | calc_area | pct_tract | geometry | +|--------:|:-------|:------|:------------|:-----|:------|:------|:-----|:------|:------|:---------|:------------|:---------------|-------------:|----------:|:-----------------------------| +| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004104 | G0800310004104 | 1.525535e+01 | 0.00001 | MULTIPOLYGON (((-104.9125 3… | +| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004201 | G0800310004201 | 3.987458e+05 | 0.20900 | MULTIPOLYGON (((-104.9246 3… | +| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004304 | G0800310004304 | 1.554195e+05 | 0.05927 | MULTIPOLYGON (((-104.9125 3… | +| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004202 | G0800310004202 | 1.117770e+06 | 0.57245 | MULTIPOLYGON (((-104.9125 3… | +| 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \#76a865 | 08031004302 | G0800310004302 | 3.133415e+05 | 0.28381 | MULTIPOLYGON (((-104.928 39… | +| 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \#76a865 | 08031004301 | G0800310004301 | 1.221218e+05 | 0.08622 | MULTIPOLYGON (((-104.9305 3… | + +
    +
    + +FUNCTION: Get Points-of-Interest from city of interest + + +
    get_places <- function(polygon_layer, type = "food" ) {
    +  # Check if the input is an sf object
    +  if (!inherits(polygon_layer, "sf")) {
    +    stop("The provided object is not an sf object.")
    +  }
    +
    +  # Create a bounding box from the input sf object
    +  bbox_here <- st_bbox(polygon_layer) |>
    +    st_as_sfc()
    +
    +  if(type == "food"){
    +    my_layer <- "multipolygons"
    +    my_query <- "SELECT * FROM multipolygons WHERE (
    +                 shop IN ('supermarket', 'bodega', 'market', 'other_market', 'farm', 'garden_centre', 'doityourself', 'farm_supply', 'compost', 'mulch', 'fertilizer') OR
    +                 amenity IN ('social_facility', 'market', 'restaurant', 'coffee') OR
    +                 leisure = 'garden' OR
    +                 landuse IN ('farm', 'farmland', 'row_crops', 'orchard_plantation', 'dairy_grazing') OR
    +                 building IN ('brewery', 'winery', 'distillery') OR
    +                 shop = 'greengrocer' OR
    +                 amenity = 'marketplace'
    +               )"
    +    title <- "food"
    +  }
    +
    +  if (type == "processed_food") {
    +    my_layer <- "multipolygons"
    +    my_query <- "SELECT * FROM multipolygons WHERE (
    +                   amenity IN ('fast_food', 'cafe', 'pub') OR
    +                   shop IN ('convenience', 'supermarket') OR
    +                   shop = 'kiosk'
    +                 )"
    +    title <- "Processed Food Locations"
    +}
    +
    +  if(type == "natural_habitats"){
    +    my_layer <- "multipolygons"
    +    my_query <- "SELECT * FROM multipolygons WHERE (
    +             boundary = 'protected_area' OR
    +             natural IN ('tree', 'wood') OR
    +             landuse = 'forest' OR
    +             leisure = 'park'
    +           )"
    +    title <- "Natural habitats or City owned trees"
    +  }
    +
    +   if(type == "roads"){
    +    my_layer <- "lines"
    +    my_query <- "SELECT * FROM lines WHERE (
    +             highway IN ('motorway', 'trunk', 'primary', 'secondary', 'tertiary') )"
    +    title <- "Major roads"
    +   }
    +
    +  if(type == "rivers"){
    +    my_layer <- "lines"
    +    my_query <- "SELECT * FROM lines WHERE (
    +             waterway IN ('river'))"
    +    title <- "Major rivers"
    +  }
    +
    +  if(type == "internet_access") {
    +    my_layer <- "multipolygons"
    +    my_query <- "SELECT * FROM multipolygons WHERE (
    +                 amenity IN ('library', 'cafe', 'community_centre', 'public_building') AND
    +                 internet_access = 'yes' 
    +               )"
    +    title <- "Internet Access Locations"
    +}
    +
    +  if(type == "water_bodies") {
    +    my_layer <- "multipolygons"
    +    my_query <- "SELECT * FROM multipolygons WHERE (
    +                 natural IN ('water', 'lake', 'pond') OR
    +                 water IN ('lake', 'pond') OR
    +                 landuse = 'reservoir'
    +               )"
    +    title <- "Water Bodies"
    +}
    +
    + if(type == "government_buildings") {
    +    my_layer <- "multipolygons"
    +    my_query <- "SELECT * FROM multipolygons WHERE (
    +                 amenity IN ('townhall', 'courthouse', 'embassy', 'police', 'fire_station') OR
    +                 building IN ('capitol', 'government')
    +               )"
    +    title <- "Government Buildings"
    +}
    +
    +
    +
    +  # Use the bbox to get data with oe_get(), specifying the desired layer and a custom SQL query for fresh food places
    +  tryCatch({
    +    places <- oe_get(
    +      place = bbox_here,
    +      layer = my_layer,  # Adjusted layer; change as per actual data availability
    +      query = my_query,
    +      quiet = TRUE
    +    )
    +
    +  places <- st_make_valid(places)
    +
    +    # Crop the data to the bounding box
    +    cropped_places <- st_crop(places, bbox_here)
    +
    +    # Plotting the cropped fresh food places
    +    plot <- ggplot(data = cropped_places) +
    +      geom_sf(fill="cornflowerblue", color="cornflowerblue") +
    +      ggtitle(title) +
    +  theme_tufte()+
    +  theme(legend.position = "none",  # Optionally hide the legend
    +        axis.text = element_blank(),     # Remove axis text
    +        axis.title = element_blank(),    # Remove axis titles
    +        axis.ticks = element_blank(),    # Remove axis ticks
    +         plot.background = element_rect(fill = "white", color = NA),  # Set the plot background to white
    +        panel.background = element_rect(fill = "white", color = NA),  # Set the panel background to white
    +        panel.grid.major = element_blank(),  # Remove major grid lines
    +        panel.grid.minor = element_blank(),
    +        ) 
    +
    +    # Save the plot as a PNG file
    +    png_filename <- paste0(title,"_", Sys.Date(), ".png")
    +    ggsave(png_filename, plot, width = 10, height = 8, units = "in")
    +
    +    # Return the cropped dataset
    +    return(cropped_places)
    +  }, error = function(e) {
    +    stop("Failed to retrieve or plot data: ", e$message)
    +  })
    +}
    +
    + +
    +
    + +FUNCTION: Plot POI over HOLC grades + + +
    plot_city_redlining <- function(redlining_data, filename = "redlining_plot.png") {
    +  # Fetch additional geographic data based on redlining data
    +  roads <- get_places(redlining_data, type = "roads")
    +  rivers <- get_places(redlining_data, type = "rivers")
    +
    +  # Filter residential zones with valid grades and where city survey is TRUE
    +  residential_zones <- redlining_data %>%
    +    filter(city_survey == TRUE & grade != "") 
    +
    +  # Colors for the grades
    +  colors <- c("#76a865", "#7cb5bd", "#ffff00", "#d9838d")
    +
    +  # Plot the data using ggplot2
    +  plot <- ggplot() +
    +    geom_sf(data = roads, lwd = 0.1) +
    +    geom_sf(data = rivers, color = "blue", alpha = 0.5, lwd = 1.1) +
    +    geom_sf(data = residential_zones, aes(fill = grade), alpha = 0.5) +
    +    theme_tufte() +
    +    scale_fill_manual(values = colors) +
    +    labs(fill = 'HOLC Categories') +
    +    theme(
    +      plot.background = element_rect(fill = "white", color = NA),
    +      panel.background = element_rect(fill = "white", color = NA),
    +      panel.grid.major = element_blank(),
    +      panel.grid.minor = element_blank(),
    +      legend.position = "right"
    +    )
    +
    +  # Save the plot as a high-resolution PNG file
    +  ggsave(filename, plot, width = 10, height = 8, units = "in", dpi = 600)
    +
    +  # Return the plot object if needed for further manipulation or checking
    +  return(plot)
    +}
    +
    + +
    +
    + +Plot Denver Redlining + + +
    denver_plot <- plot_city_redlining(denver_redlining)
    +
    + +
    + +

    +
    + +Stream amenities by category + + +
    food <- get_places(denver_redlining, type="food")
    +
    +food_processed <- get_places(denver_redlining, type="processed_food")
    +
    +natural_habitats <- get_places(denver_redlining, type="natural_habitats")
    +
    +roads <- get_places(denver_redlining, type="roads")
    +
    +rivers <- get_places(denver_redlining, type="rivers")
    +
    +#water_bodies <- get_places(denver_redlining, type="water_bodies")
    +
    +government_buildings <- get_places(denver_redlining, type="government_buildings")
    +
    + +
    +
    + +FUNCTION: Plot the HOLC grades individually + + +
    split_plot <- function(sf_data, roads, rivers) {
    +  # Filter for grades A, B, C, and D
    +  sf_data_filtered <- sf_data %>% 
    +    filter(grade %in% c('A', 'B', 'C', 'D'))
    +
    +  # Define a color for each grade
    +  grade_colors <- c("A" = "#76a865", "B" = "#7cb5bd", "C" = "#ffff00", "D" = "#d9838d")
    +
    +  # Create the plot with panels for each grade
    +  plot <- ggplot(data = sf_data_filtered) +
    +    geom_sf(data = roads, alpha = 0.1, lwd = 0.1) +
    +    geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) +
    +    geom_sf(aes(fill = grade)) +
    +    facet_wrap(~ grade, nrow = 1) +  # Free scales for different zoom levels if needed
    +    scale_fill_manual(values = grade_colors) +
    +    theme_minimal() +
    +    labs(fill = 'HOLC Grade') +
    +    theme_tufte() +
    +    theme(plot.background = element_rect(fill = "white", color = NA),
    +          panel.background = element_rect(fill = "white", color = NA),
    +          legend.position = "none",  # Optionally hide the legend
    +          axis.text = element_blank(),     # Remove axis text
    +          axis.title = element_blank(),    # Remove axis titles
    +          axis.ticks = element_blank(),    # Remove axis ticks
    +          panel.grid.major = element_blank(),  # Remove major grid lines
    +          panel.grid.minor = element_blank())  
    +
    +  ggsave(plot, filename = "HOLC_grades_individually.png", width = 10, height = 4, units = "in", dpi = 1200)
    +  return(plot)
    +}
    +
    + +
    +
    + +Plot 4 HOLC grades individually + + +
    plot_row <- split_plot(denver_redlining, roads, rivers)
    +
    + +
    + +

    +
    + +FUNCTION: Map an amenity over each grade individually + + +
    process_and_plot_sf_layers <- function(layer1, layer2, output_file = "output_plot.png") {
    + # Make geometries valid
    +layer1 <- st_make_valid(layer1)
    +layer2 <- st_make_valid(layer2)
    +
    +# Optionally, simplify geometries to remove duplicate vertices
    +layer1 <- st_simplify(layer1, preserveTopology = TRUE) |>
    +  filter(grade != "")
    +
    +# Prepare a list to store results
    +results <- list()
    +
    +# Loop through each grade and perform operations
    +for (grade in c("A", "B", "C", "D")) {
    +  # Filter layer1 for current grade
    +  layer1_grade <- layer1[layer1$grade == grade, ]
    +
    +  # Buffer the geometries of the current grade
    +  buffered_layer1_grade <- st_buffer(layer1_grade, dist = 500)
    +
    +  # Intersect with the second layer
    +  intersections <- st_intersects(layer2, buffered_layer1_grade, sparse = FALSE)
    +  selected_polygons <- layer2[rowSums(intersections) > 0, ]
    +
    +  # Add a new column to store the grade information
    +  selected_polygons$grade <- grade
    +
    +  # Store the result
    +  results[[grade]] <- selected_polygons
    +}
    +
    +# Combine all selected polygons from different grades into one sf object
    +final_selected_polygons <- do.call(rbind, results)
    +
    +  # Define colors for the grades
    +  grade_colors <- c("A" = "grey", "B" = "grey", "C" = "grey", "D" = "grey")
    +
    +  # Create the plot
    +  plot <- ggplot() +
    +    geom_sf(data = roads, alpha = 0.05, lwd = 0.1) +
    +    geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) +
    +    geom_sf(data = layer1, fill = "grey", color = "grey", size = 0.1) +
+    facet_wrap(~ grade, nrow = 1) +
+    geom_sf(data = final_selected_polygons, fill = "green", color = "green", size = 0.1) +
    +    #scale_fill_manual(values = grade_colors) +
    +    #scale_color_manual(values = grade_colors) +
    +    theme_minimal() +
    +    labs(fill = 'HOLC Grade') +
    +    theme_tufte() +
    +    theme(plot.background = element_rect(fill = "white", color = NA),
    +      panel.background = element_rect(fill = "white", color = NA),
    +      legend.position = "none",
    +          axis.text = element_blank(),
    +          axis.title = element_blank(),
    +          axis.ticks = element_blank(),
    +          panel.grid.major = element_blank(),
    +          panel.grid.minor = element_blank())
    +
    +  # Save the plot as a high-resolution PNG file
    +  ggsave(output_file, plot, width = 10, height = 4, units = "in", dpi = 1200)
    +
    +  # Return the plot for optional further use
    +  return(list(plot=plot, sf = final_selected_polygons))
    +}
    +
+ FUNCTION: Create word cloud per grade
    create_wordclouds_by_grade <- function(sf_object, output_file = "food_word_cloud_per_grade.png",title = "Healthy food place names word cloud", max_size =25, col_select = "name") {
    +
    +
    +    # Extract relevant data and prepare text data
    +    text_data <- sf_object %>%
+        select(grade, all_of(col_select)) %>%
+        filter(!is.na(.data[[col_select]])) %>%
    +        unnest_tokens(output = "word", input = col_select, token = "words") %>%
    +        count(grade, word, sort = TRUE) %>%
    +        ungroup() %>%
+        filter(n > 1)  # Drop grade-word combinations that occur only once
    +
    +    # Ensure there are no NA values in the 'word' column
    +    text_data <- text_data %>% filter(!is.na(word))
    +
    +    # Handle cases where text_data might be empty
    +    if (nrow(text_data) == 0) {
    +        stop("No data available for creating word clouds.")
    +    }
    +
    +    # Create a word cloud using ggplot2 and ggwordcloud
    +    p <- ggplot( ) +
    +        geom_text_wordcloud_area(data=text_data, aes(label = word, size = n),rm_outside = TRUE) +
    +        scale_size_area(max_size = max_size) +
    +        facet_wrap(~ grade, nrow = 1) +
    +      scale_color_gradient(low = "darkred", high = "red") +
    +        theme_minimal() +
    +        theme(plot.background = element_rect(fill = "white", color = NA),
    +          panel.background = element_rect(fill = "white", color = NA),
    +          panel.spacing = unit(0.5, "lines"),
    +              plot.title = element_text(size = 16, face = "bold"),
    +              legend.position = "none") +
    +        labs(title = title)
    +
    +    # Attempt to save the plot and handle any errors
    +    tryCatch({
    +        ggsave(output_file, p, width = 10, height = 4, units = "in", dpi = 600)
    +    }, error = function(e) {
    +        cat("Error in saving the plot: ", e$message, "\n")
    +    })
    +
    +    return(p)
    +}
    +
+ Map food over each grade individually
     layer1 <- denver_redlining
    + layer2 <- food
    + food_match <- process_and_plot_sf_layers(layer1, layer2, "food_match.png")
    +
+ WORD CLOUD: Names of places with fresh food
    food_word_cloud <- create_wordclouds_by_grade(food_match$sf, output_file = "food_word_cloud_per_grade.png")
    +
+
+    Warning in wordcloud_boxes(data_points = points_valid_first, boxes = boxes, :
+    Some words could not fit on page. They have been removed.
+
+ Map processed food over each grade individually
     layer1 <- denver_redlining
    + layer2 <- food_processed
    + processed_food_match <- process_and_plot_sf_layers(layer1, layer2, "processed_food_match.png")
    +
+ WORD CLOUD: Names of places with processed food
    processed_food_cloud <- create_wordclouds_by_grade(processed_food_match$sf, output_file = "processed_food_word_cloud_per_grade.png",title = "Processed food place names where larger text is more frequent", max_size =17)
    +

    +

    Part 2: Integrating Environmental Data

    +

    Data Processing

    +
• Use satellite data to analyze greenspace using NDVI, an index that measures the quantity of vegetation in an area (Nardone et al. used 2010 imagery; the chunks below stream recent Sentinel-2 scenes).
• Apply methods to adjust for potential confounders as described in the study, ensuring that comparisons of greenspace across HOLC grades are valid and not biased by historical or socio-demographic factors. A per-polygon NDVI summary sketch follows this list.
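A per-polygon NDVI summary is not computed anywhere in the worksheet, so here is a minimal, hedged sketch of that zonal-statistics step. It assumes `denver_redlining` is already loaded and that `ndvi_background_low <- yearly_average_ndvi(denver_redlining)` (from the chunks below) has been run; `residential`, `zonal`, `ndvi_by_polygon`, and the `mean_ndvi` column are illustrative names introduced here, not objects created by the original code.

``` r
# Sketch: mean NDVI per HOLC polygon via zonal statistics
library(dplyr)
library(sf)
library(terra)

# Keep only the graded residential polygons
residential <- denver_redlining %>%
  filter(grade %in% c("A", "B", "C", "D"))

# terra::extract() averages the raster cells that fall inside each polygon.
# Column 1 is a polygon ID; column 2 holds the NDVI values
# (the layer name can vary with gdalcubes/terra versions, so index by position).
zonal <- terra::extract(ndvi_background_low$raster,
                        terra::vect(residential),
                        fun = mean, na.rm = TRUE)
residential$mean_ndvi <- zonal[[2]]

# Polygon-level table reused in Part 3 for the grade comparison
ndvi_by_polygon <- residential

ndvi_by_polygon %>%
  st_drop_geometry() %>%
  group_by(grade) %>%
  summarise(mean_ndvi = mean(mean_ndvi, na.rm = TRUE),
            n_polygons = n())
```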
    +
+ Map natural habitats over each grade individually
     layer1 <- denver_redlining
    + layer2 <- natural_habitats
    + natural_habitats_match <- process_and_plot_sf_layers(layer1, layer2, "natural_habitats_match.png")
    + print(natural_habitats_match$plot)
    +
+ ![](worksheet_redlining_files/figure-gfm/unnamed-chunk-18-1.png)
+
+ WORD CLOUD: Name of natural habitat area
    natural_habitats_cloud <- create_wordclouds_by_grade(natural_habitats_match$sf, output_file = "natural_habitats_word_cloud_per_grade.png",title = "Natural habitats place names where larger text is more frequent", max_size =35)
    +
+ FUNCTION: Stream NDVI data
    polygon_layer <- denver_redlining
    +# Function to process satellite data based on an SF polygon's extent
    +process_satellite_data <- function(polygon_layer, start_date, end_date, assets, fps = 1, output_file = "anim.gif") {
    +  # Record start time
    +  start_time <- Sys.time()
    +
    +  # Calculate the bbox from the polygon layer
    +  bbox <- st_bbox(polygon_layer)
    +
    +  s = stac("https://earth-search.aws.element84.com/v0")
    +
    +
+  # Use rstac to search for Sentinel-2 images within the bbox and date range
    +  items = s |> stac_search(
    +    collections = "sentinel-s2-l2a-cogs",
    +    bbox = c(bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]),
    +    datetime = paste(start_date, end_date, sep = "/"),
    +    limit = 500
    +  ) %>% 
    +  post_request()
    +
    +  # Define mask for Sentinel-2 image quality
    +  #S2.mask <- image_mask("SCL", values = c(3, 8, 9))
    +
    +  # Create a collection of images filtering by cloud cover
    +  col <- stac_image_collection(items$features, asset_names = assets, property_filter = function(x) {x[["eo:cloud_cover"]] < 30})
    +
    +  # Define a view for processing the data
    +  v <- cube_view(srs = "EPSG:4326", 
    +                 extent = list(t0 = start_date, t1 = end_date,
    +                               left = bbox["xmin"], right = bbox["xmax"], 
    +                               top = bbox["ymax"], bottom = bbox["ymin"]),
    +                 dx = 0.001, dy = 0.001, dt = "P1M", 
    +                 aggregation = "median", resampling = "bilinear")
    +
    +  # Calculate NDVI and create an animation
    +  ndvi_col <- function(n) {
    +    rev(sequential_hcl(n, "Green-Yellow"))
    +  }
    +
    +  #raster_cube(col, v, mask = S2.mask) %>%
    +  raster_cube(col, v) %>%
    +    select_bands(c("B04", "B08")) %>%
    +    apply_pixel("(B08-B04)/(B08+B04)", "NDVI") %>%
    +    gdalcubes::animate(col = ndvi_col, zlim = c(-0.2, 1), key.pos = 1, save_as = output_file, fps = fps)
    +
    +  # Calculate processing time
    +  end_time <- Sys.time()
    +  processing_time <- difftime(end_time, start_time)
    +
    +  # Return processing time
    +  return(processing_time)
    +}
    +
+ Stream NDVI data: animation
    processing_time <- process_satellite_data(denver_redlining, "2022-05-31", "2023-05-31", c("B04", "B08"))
    +
+ FUNCTION: Stream year average NDVI
    yearly_average_ndvi <- function(polygon_layer, output_file = "ndvi.png", dx = 0.01, dy = 0.01) {
    +  # Record start time
    +  start_time <- Sys.time()
    +
    +  # Calculate the bbox from the polygon layer
    +  bbox <- st_bbox(polygon_layer)
    +
    +  s = stac("https://earth-search.aws.element84.com/v0")
    +
+  # Search for Sentinel-2 images within the bbox across calendar year 2023
    +  items <- s |> stac_search(
    +    collections = "sentinel-s2-l2a-cogs",
    +    bbox = c(bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]),
    +    datetime = "2023-01-01/2023-12-31",
    +    limit = 500
    +  ) %>% 
    +  post_request()
    +
    +  # Create a collection of images filtering by cloud cover
    +  col <- stac_image_collection(items$features, asset_names = c("B04", "B08"), property_filter = function(x) {x[["eo:cloud_cover"]] < 80})
    +
+  # Define a view that aggregates the full 2023 year into a single time step
    +  v <- cube_view(srs = "EPSG:4326", 
    +                 extent = list(t0 = "2023-01-01", t1 = "2023-12-31",
    +                               left = bbox["xmin"], right = bbox["xmax"], 
    +                               top = bbox["ymax"], bottom = bbox["ymin"]),
    +                 dx = dx, dy = dy, dt = "P1Y", 
    +                 aggregation = "median", resampling = "bilinear")
    +
    +  # Process NDVI
    +  ndvi_rast <- raster_cube(col, v) %>%
    +    select_bands(c("B04", "B08")) %>%
    +    apply_pixel("(B08-B04)/(B08+B04)", "NDVI") %>%
    +    write_tif() |>
    +    terra::rast()
    +
    +
    +  # Convert terra Raster to ggplot using tidyterra
    +ndvi_plot <-   ggplot() +
    +    geom_spatraster(data = ndvi_rast, aes(fill = NDVI)) +
    +    scale_fill_viridis_c(option = "viridis", direction = -1, name = "NDVI") +
    +    labs(title = "NDVI mean for 2023") +
    +    theme_minimal() +
    +    coord_sf() +
    +    theme(plot.background = element_rect(fill = "white", color = NA),
    +      panel.background = element_rect(fill = "white", color = NA),
    +      legend.position = "right",
    +          axis.text = element_blank(),
    +          axis.title = element_blank(),
    +          axis.ticks = element_blank(),
    +          panel.grid.major = element_blank(),
    +          panel.grid.minor = element_blank()) 
    +
    +  # Save the plot as a high-resolution PNG file
    +  ggsave(output_file, ndvi_plot, width = 10, height = 8, dpi = 600)
    +
    +  # Calculate processing time
    +  end_time <- Sys.time()
    +  processing_time <- difftime(end_time, start_time)
    +
    +  # Return the plot and processing time
    +  return(list(plot = ndvi_plot, processing_time = processing_time, raster = ndvi_rast))
    +}
    +
+ Stream NDVI: high resolution
    ndvi_background <- yearly_average_ndvi(denver_redlining,dx = 0.0001, dy = 0.0001)
    +
+ FUNCTION: Map NDVI per HOLC grade individually
    create_mask_and_plot <- function(redlining_sf, background_raster = ndvi$raster, roads = NULL, rivers = NULL){
    +  start_time <- Sys.time()  # Start timing
    +
    +  # Validate and prepare the redlining data
    +  redlining_sf <- redlining_sf %>%
    +    filter(grade != "") %>%
    +    st_make_valid()
    +
    +
    +bbox <- st_bbox(redlining_sf)  # Get original bounding box
    +
    +
+expanded_bbox <- expand_bbox(bbox, 6000, 1000)  # Expand the bounding box so the grade masks extend beyond the HOLC polygons
    +
    +
    +expanded_bbox_poly <- st_as_sfc(expanded_bbox, crs = st_crs(redlining_sf)) %>%
    +    st_make_valid()
    +
    +  # Initialize an empty list to store masks
    +  masks <- list()
    +
    +  # Iterate over each grade to create masks
    +  unique_grades <- unique(redlining_sf$grade)
    +  for (grade in unique_grades) {
    +    # Filter polygons by grade
    +    grade_polygons <- redlining_sf[redlining_sf$grade == grade, ]
    +
    +    # Create an "inverted" mask by subtracting these polygons from the background
    +    mask <- st_difference(expanded_bbox_poly, st_union(grade_polygons))
    +
    +    # Store the mask in the list with the grade as the name
    +    masks[[grade]] <- st_sf(geometry = mask, grade = grade)
    +  }
    +
    +  # Combine all masks into a single sf object
    +  mask_sf <- do.call(rbind, masks)
    +
    +  # Normalize the grades so that C.2 becomes C, but correctly handle other grades
    +  mask_sf$grade <- ifelse(mask_sf$grade == "C.2", "C", mask_sf$grade)
    +
    +  # Prepare the plot
    +  plot <- ggplot() +
    +    geom_spatraster(data = background_raster, aes(fill = NDVI)) +
    +  scale_fill_viridis_c(name = "NDVI", option = "viridis", direction = -1) +
    +
    +    geom_sf(data = mask_sf, aes(color = grade), fill = "white", size = 0.1, show.legend = FALSE) +
    +    scale_color_manual(values = c("A" = "white", "B" = "white", "C" = "white", "D" = "white"), name = "Grade") +
    +    facet_wrap(~ grade, nrow = 1) +
    +     geom_sf(data = roads, alpha = 1, lwd = 0.1, color="white") +
    +    geom_sf(data = rivers, color = "white", alpha = 0.5, lwd = 1.1) +
    +    labs(title = "NDVI: Normalized Difference Vegetation Index") +
    +    theme_minimal() +
    +    coord_sf(xlim = c(bbox["xmin"], bbox["xmax"]), 
    +           ylim = c(bbox["ymin"], bbox["ymax"]), 
    +           expand = FALSE) + 
    +    theme(plot.background = element_rect(fill = "white", color = NA),
    +          panel.background = element_rect(fill = "white", color = NA),
    +          legend.position = "bottom",
    +          axis.text = element_blank(),
    +          axis.title = element_blank(),
    +          axis.ticks = element_blank(),
    +          panel.grid.major = element_blank(),
    +          panel.grid.minor = element_blank())
    +
    +  # Save the plot
    +  ggsave("redlining_mask_ndvi.png", plot, width = 10, height = 4, dpi = 600)
    +
    +  end_time <- Sys.time()  # End timing
    +  runtime <- end_time - start_time
    +
    +  # Return the plot and runtime
    +  return(list(plot = plot, runtime = runtime, mask_sf = mask_sf))
    +}
    +
+ Stream NDVI: low resolution
    ndvi_background_low <- yearly_average_ndvi(denver_redlining)
    +
+ Map low resolution NDVI per HOLC grade
    ndvi <- create_mask_and_plot(denver_redlining, background_raster = ndvi_background_low$raster, roads = roads, rivers = rivers)
    +
+ FUNCTION: Map Denver City provided data per HOLC grade
    process_city_inventory_data <- function(address, inner_file, polygon_layer, output_filename,variable_label= 'Tree Density') {
    +  # Download and read the shapefile
    +  full_path <- glue("/vsizip/vsicurl/{address}/{inner_file}")
    +  shape_data <- st_read(full_path, quiet = TRUE) |> st_as_sf()
    +
    +  # Process the shape data with the provided polygon layer
    +  processed_data <- process_and_plot_sf_layers(polygon_layer, shape_data, paste0(output_filename, ".png"))
    +
+  # Extract the matched features (named "trees" after the original tree-inventory use case)
    +  trees <- processed_data$sf
    +  denver_redlining_residential <- polygon_layer |> filter(grade != "")
    +
    +  # Generate the density plot
    +  plot <- ggplot() +
    +    geom_sf(data = roads, alpha = 0.05, lwd = 0.1) +
    +    geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) +
    +    geom_sf(data = denver_redlining_residential, fill = "grey", color = "grey", size = 0.1) +
    +    facet_wrap(~ grade, nrow = 1) +
    +    stat_density_2d(data = trees, 
    +                    mapping = aes(x = map_dbl(geometry, ~.[1]),
    +                                  y = map_dbl(geometry, ~.[2]),
+                                  fill = after_stat(density)),
    +                    geom = 'tile',
    +                    contour = FALSE,
    +                    alpha = 0.9) +
    +    scale_fill_gradientn(colors = c("transparent", "white", "limegreen"),
    +                         values = scales::rescale(c(0, 0.1, 1)),  # Adjust these based on your density range
    +                         guide = "colourbar") +
    +    theme_minimal() +
    +    labs(fill = variable_label) +
    +    theme_tufte() +
    +    theme(plot.background = element_rect(fill = "white", color = NA),
    +          panel.background = element_rect(fill = "white", color = NA),
    +          legend.position = "bottom",
    +          axis.text = element_blank(),
    +          axis.title = element_blank(),
    +          axis.ticks = element_blank(),
    +          panel.grid.major = element_blank(),
    +          panel.grid.minor = element_blank())
    +
    +  # Save the plot
    +  ggsave(paste0(output_filename, "_density_plot.png"), plot, width = 10, height = 4, units = "in", dpi = 600)
    +
    +  # Return the plot and the tree layer
    +  return(list(plot = plot, layer = trees))
    +}
    +
+ Map tree inventory per HOLC grade
    result <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/tree_inventory/shape/tree_inventory.zip",
    +  "tree_inventory.shp",
    +  denver_redlining,
    +  "Denver_tree_inventory_2023"
    +)
    +
+
+ Map traffic accidents per HOLC grade
    result <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/traffic_accidents/shape/traffic_accidents.zip",
    +  "traffic_accidents.shp",
    +  denver_redlining,
    +  "Denver_traffic_accidents",
    +  variable_label= 'Traffic accidents density'
    +)
    +
+ Map stream sampling effort per HOLC grade
    instream_sampling_sites <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/instream_sampling_sites/shape/instream_sampling_sites.zip",
    +  "instream_sampling_sites.shp",
    +  denver_redlining,
    +  "instream_sampling_sites",
    +  variable_label= 'Instream sampling sites density'
    +)
    +
+ Map soil sampling effort per HOLC grade
    soil_samples <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/soil_samples/shape/soil_samples.zip",
    +  "soil_samples.shp",
    +  denver_redlining,
    +  "Soil samples",
    +  variable_label= 'soil samples density'
    +)
    +
+ Map public art density per HOLC grade
    public_art <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/public_art/shape/public_art.zip",
    +  "public_art.shp",
    +  denver_redlining,
    +  "Public art ",
    +  variable_label= 'Public art density'
    +)
    +
+ Map liquor licenses density per HOLC grade
    liquor_licenses <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/liquor_licenses/shape/liquor_licenses.zip",
    +  "liquor_licenses.shp",
    +  denver_redlining,
    +  "liquor licenses ",
    +  variable_label= 'liquor licenses density'
    +)
    +
+ Map crime density per HOLC grade
    Crime <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/crime/shape/crime.zip",
    +  "crime.shp",
    +  denver_redlining,
    +  "crime",
    +  variable_label= 'Crime density'
    +)
    +
+ WORD CLOUD: Types of crimes
    crime_cloud <- create_wordclouds_by_grade(Crime$layer, output_file = "Crime_word_cloud_per_grade.png",title = "Crime type where larger text is more frequent", max_size =25, col_select = "OFFENSE_TY")
    +
+
+ Map police shooting density per HOLC grade
    Denver_police_shootings <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/denver_police_officer_involved_shootings/shape/denver_police_officer_involved_shootings.zip",
    +  "denver_police_officer_involved_shootings.shp",
    +  denver_redlining,
    +  "Police shootings",
    +  variable_label= 'Police shootings density'
    +)
    +
+
Not enough data for a density surface across all four HOLC grades, so the next chunk summarizes police-involved shootings with a word cloud instead.

    +
+ WORD CLOUD: Police involved shootings
    Denver_police_shootings_cloud <- create_wordclouds_by_grade(Denver_police_shootings$layer, output_file = "police_shootings_word_cloud_per_grade.png",title = "police involved shooting per crime type where larger text is more frequent", max_size =35, col_select = "SHOOT_ACTI")
    +

    +

    Part 3: Comparative Analysis and Visualization

    +

    Statistical Analysis

    +
• Conduct a detailed statistical analysis to compare greenspace across different HOLC grades, using techniques like Targeted Maximum Likelihood Estimation (TMLE) to assess the association between historical redlining and current greenspace levels (a hedged sketch of this comparison follows this list).
• Visualize the disparities in greenspace distribution using GIS tools, highlighting how redlining has shaped urban ecological landscapes.
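As a concrete starting point, here is a hedged sketch of that comparison, assuming the polygon-level `ndvi_by_polygon` table with a `mean_ndvi` column sketched in Part 2. The TMLE step is left as a commented template because the confounder columns (`confounder1`, `confounder2`) are hypothetical and are not assembled anywhere in this worksheet.

``` r
# Sketch: compare mean NDVI across HOLC grades
library(dplyr)
library(sf)

# 1. Descriptive comparison: average NDVI by grade
ndvi_by_polygon %>%
  st_drop_geometry() %>%
  group_by(grade) %>%
  summarise(mean_ndvi = mean(mean_ndvi, na.rm = TRUE))

# 2. Simple regression of NDVI on grade (grade "A" is the reference level)
fit_lm <- lm(mean_ndvi ~ grade, data = st_drop_geometry(ndvi_by_polygon))
summary(fit_lm)

# 3. Template for a TMLE-style contrast (grade D vs. grade A) with the tmle
#    package; the W columns below are hypothetical confounders.
# library(tmle)
# dat <- ndvi_by_polygon %>%
#   st_drop_geometry() %>%
#   filter(grade %in% c("A", "D")) %>%
#   mutate(redlined = as.integer(grade == "D"))
# fit_tmle <- tmle(Y = dat$mean_ndvi,
#                  A = dat$redlined,
#                  W = dat[, c("confounder1", "confounder2")],
#                  family = "gaussian")
# fit_tmle$estimates$ATE
```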
    +

    Conclusion

    +

This tutorial provides tools and methodologies to explore the lingering effects of historic redlining on urban greenspace, offering insights into the intersection of urban planning, environmental justice, and public health.

    +

    References

    +
• Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. (2021). Redlines and Greenspace: The Relationship between Historical Redlining and 2010 Greenspace across the United States. Environmental Health Perspectives, 129(1), 017006. DOI:10.1289/EHP7495.
    + +
    +
    + + + Last update: + 2024-09-23 + + +
    + + + + + + +
    +
    + + +
    + + + +
    + + + +
    +
    +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/worksheet_redlining/worksheet_redlining.md b/worksheet_redlining/worksheet_redlining.md new file mode 100644 index 0000000..fe8ad1a --- /dev/null +++ b/worksheet_redlining/worksheet_redlining.md @@ -0,0 +1,1715 @@ +Redlining +================ + +# Exploring the Impact of Historical Redlining on Urban Greenspace: A Collaborative Examination of Maps, Justice, and Resilience + +## Introduction + +This group exploration delves into the long-term impacts of historical +redlining on urban greenspace, emphasizing the powerful role of maps in +shaping environmental and social landscapes. By drawing on the research +by Nardone et al. (2021), you will collaboratively investigate how +discriminatory practices encoded in maps have led to persistent +disparities in urban settings. This exploration aims to uncover the +resilience of communities in adapting to these entrenched injustices and +to foster a deeper understanding of how mapping can serve both as a tool +of exclusion and as a means for promoting social equity. + +![1938 Map of Atlanta uses colors as grades for neighborhoods. The red +swaths identify each area with large African-American populations that +were deemed “less safe.”](../assets/redlining/redlining.png)) + +## Understanding Redlining as a Systemic Disturbance + +Redlining originated in the 1930s as a discriminatory practice where the +Home Owners’ Loan Corporation (HOLC) systematically denied mortgages or +offered unfavorable terms based on racial and ethnic compositions. This +methodical exclusion, executed through maps that color-coded “risky” +investment areas in red, marked minority-populated areas, denying them +crucial investment and development opportunities and initiating a +profound and lasting disturbance in the urban fabric. + +Maps serve as powerful tools beyond navigation; they communicate and +enforce control. By defining neighborhood boundaries through redlining, +HOLC maps not only mirrored societal biases but also perpetuated and +embedded them into the urban landscape. This manipulation of geographic +data set a trajectory that limited economic growth, dictated the +allocation of services, and influenced the development or deterioration +of community infrastructure. + +**Figure 1:** 1938 Map of Atlanta uses colors as grades for +neighborhoods. The red swaths identify each area with large +African-American populations that were deemed “less safe.” + +
    + +[![](../assets/redlining/georectified-thumbnail.png)](https://storymaps.arcgis.com/stories/0f58d49c566b486482b3e64e9e5f7ac9) + +ArcGIS Story Map + +
    + +**Explore the Story Map:** Click on the image above to explore the +interactive story map about \[subject of the story map\]. + +## Resilience and Adaptation in Urban Environments + +The legacy of redlining presents both a challenge and an opportunity for +resilience and adaptation. Economically and socially, redlining +entrenched cycles of poverty and racial segregation, creating a +resilient wealth gap that has been difficult to dismantle. +Environmentally, the neighborhoods targeted by redlining continue to +face significant challenges—they generally feature less greenspace, +suffer from higher pollution levels, and are more vulnerable to the +impacts of climate change. These factors compound the health and +wellness challenges faced by residents. + +Despite these adversities, urban communities have continually +demonstrated remarkable resilience. Adaptation strategies, such as +community-led green initiatives, urban agriculture, and grassroots +activism, have emerged as responses to these systemic disturbances. By +enhancing green infrastructure and advocating for equitable +environmental policies, these communities strive to increase their +resilience against both historical inequities and environmental +challenges. + +
    + +[![](https://img.youtube.com/vi/O5FBJyqfoLM/hqdefault.jpg)](https://youtu.be/O5FBJyqfoLM) + +Watch the video + +
    + +**Video Title:** Exploring the Impacts of Historical Redlining on Urban +Development +**Description:** Click on the image above to watch a video that delves +into the consequences of historical redlining and its ongoing impact on +urban environments. This educational piece offers insights into how such +discriminatory practices have shaped cities and what can be learned from +them. + +The following group exercise will not only uncover the impact of +redlining on urban greenspace but also highlight the adaptive strategies +developed in response to this enduring disturbance. Through mapping and +analysis, we aim to illustrate the powerful role that geographic data +can play in understanding and fostering urban resilience and social +equity. + +### References + +- Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. + (2021). Redlines and Greenspace: The Relationship between Historical + Redlining and 2010 Greenspace across the United States. + *Environmental Health Perspectives*, 129(1), 017006. + DOI:10.1289/EHP7495. +- Hoffman, J. S., Shandas, V., & Pendleton, N. (2020). The Effects of + Historical Housing Policies on Resident Exposure to Intra-Urban + Heat: A Study of 108 US Urban Areas. *Climate*, 8(1), 12. + DOI:10.3390/cli8010012. + +## Goals of this group activity + +The primary objectives of this tutorial are: 1. To practice coding in +CyVerse. 2. To analyze the relationship between HOLC grades and the +presence of urban greenspace. 3. To understand how historic policies +continue to affect the spatial distribution of environmental amenities. + +## Part 1: Accessing and Visualizing Historic Redlining Data + +We will begin by accessing HOLC maps from the Mapping Inequality project +and overlaying this data with modern geographic datasets to visualize +the historical impact on contemporary urban landscapes. + +### Data Acquisition + +- Download HOLC map shapefiles from the University of Richmond’s + Mapping Inequality Project. +- Utilize satellite imagery and other geospatial data to map current + greenspace using the normalized difference vegetation index (NDVI). + +### Analysis Methodology + +- Replicate the approach used by Nardone et al. to calculate NDVI + values for each HOLC neighborhood, assessing greenspace as a + health-promoting resource. +- Employ statistical methods such as propensity score matching to + control for confounding variables and estimate the true impact of + HOLC grades on urban greenspace. + + +
    + +R libraries we use in this analysis + + +``` r +if (!requireNamespace("tidytext", quietly = TRUE)) { + install.packages("tidytext") +} +library(tidytext) +## Warning: package 'tidytext' was built under R version 4.3.2 +library(sf) +## Warning: package 'sf' was built under R version 4.3.2 +## Linking to GEOS 3.11.0, GDAL 3.5.3, PROJ 9.1.0; sf_use_s2() is TRUE +library(ggplot2) +## Warning: package 'ggplot2' was built under R version 4.3.2 +library(ggthemes) +## Warning: package 'ggthemes' was built under R version 4.3.2 +library(dplyr) +## +## Attaching package: 'dplyr' +## The following objects are masked from 'package:stats': +## +## filter, lag +## The following objects are masked from 'package:base': +## +## intersect, setdiff, setequal, union +library(rstac) +## Warning: package 'rstac' was built under R version 4.3.2 +library(gdalcubes) +## Warning: package 'gdalcubes' was built under R version 4.3.2 +library(gdalUtils) +## Please note that rgdal will be retired during October 2023, +## plan transition to sf/stars/terra functions using GDAL and PROJ +## at your earliest convenience. +## See https://r-spatial.org/r/2023/05/15/evolution4.html and https://github.com/r-spatial/evolution +## rgdal: version: 1.6-7, (SVN revision 1203) +## Geospatial Data Abstraction Library extensions to R successfully loaded +## Loaded GDAL runtime: GDAL 3.5.3, released 2022/10/21 +## Path to GDAL shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/rgdal/gdal +## GDAL does not use iconv for recoding strings. +## GDAL binary built with GEOS: TRUE +## Loaded PROJ runtime: Rel. 9.1.0, September 1st, 2022, [PJ_VERSION: 910] +## Path to PROJ shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/gdalcubes/proj +## PROJ CDN enabled: FALSE +## Linking to sp version:1.6-1 +## To mute warnings of possible GDAL/OSR exportToProj4() degradation, +## use options("rgdal_show_exportToProj4_warnings"="none") before loading sp or rgdal. +## +## Attaching package: 'gdalUtils' +## The following object is masked from 'package:sf': +## +## gdal_rasterize +library(gdalcubes) +library(colorspace) +library(terra) +## Warning: package 'terra' was built under R version 4.3.2 +## terra 1.7.71 +## +## Attaching package: 'terra' +## The following object is masked from 'package:colorspace': +## +## RGB +## The following objects are masked from 'package:gdalcubes': +## +## animate, crop, size +library(tidyterra) +## +## Attaching package: 'tidyterra' +## The following object is masked from 'package:stats': +## +## filter +library(basemapR) +library(tidytext) +library(ggwordcloud) +library(osmextract) +## Data (c) OpenStreetMap contributors, ODbL 1.0. https://www.openstreetmap.org/copyright. +## Check the package website, https://docs.ropensci.org/osmextract/, for more details. +library(sf) +library(ggplot2) +library(ggthemes) +library(glue) +## +## Attaching package: 'glue' +## The following object is masked from 'package:terra': +## +## trim + +library(purrr) +``` + +
    +
    + +FUNCTION: List cities where HOLC data are available + + +``` r +# Function to get a list of unique cities and states from the redlining data +get_city_state_list_from_redlining_data <- function() { + # URL to the GeoJSON data + url <- "https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson" + + # Read the GeoJSON file into an sf object + redlining_data <- tryCatch({ + read_sf(url) + }, error = function(e) { + stop("Error reading GeoJSON data: ", e$message) + }) + + # Check for the existence of 'city' and 'state' columns + if (!all(c("city", "state") %in% names(redlining_data))) { + stop("The required columns 'city' and/or 'state' do not exist in the data.") + } + + # Extract a unique list of city and state pairs without the geometries + city_state_df <- redlining_data %>% + select(city, state) %>% + st_set_geometry(NULL) %>% # Drop the geometry to avoid issues with invalid shapes + distinct(city, state) %>% + arrange(state, city ) # Arrange the list alphabetically by state, then by city + + # Return the dataframe of unique city-state pairs + return(city_state_df) +} +``` + +
    +
    + +Stream list of available HOLC cities + + +``` r +#Retrieve the list of cities and states +city_state_list <- get_city_state_list_from_redlining_data() + +knitr::kable(city_state_list, format = "markdown") +``` + +| city | state | +|:---------------------------------|:------| +| Birmingham | AL | +| Mobile | AL | +| Montgomery | AL | +| Arkadelphia | AR | +| Batesville | AR | +| Camden | AR | +| Conway | AR | +| El Dorado | AR | +| Fort Smith | AR | +| Little Rock | AR | +| Russellville | AR | +| Texarkana | AR | +| Phoenix | AZ | +| Fresno | CA | +| Los Angeles | CA | +| Oakland | CA | +| Sacramento | CA | +| San Diego | CA | +| San Francisco | CA | +| San Jose | CA | +| Stockton | CA | +| Boulder | CO | +| Colorado Springs | CO | +| Denver | CO | +| Fort Collins | CO | +| Fort Morgan | CO | +| Grand Junction | CO | +| Greeley | CO | +| Longmont | CO | +| Pueblo | CO | +| Bridgeport and Fairfield | CT | +| Hartford | CT | +| New Britain | CT | +| New Haven | CT | +| Stamford, Darien, and New Canaan | CT | +| Waterbury | CT | +| Crestview | FL | +| Daytona Beach | FL | +| DeFuniak Springs | FL | +| DeLand | FL | +| Jacksonville | FL | +| Miami | FL | +| New Smyrna | FL | +| Orlando | FL | +| Pensacola | FL | +| St. Petersburg | FL | +| Tampa | FL | +| Atlanta | GA | +| Augusta | GA | +| Columbus | GA | +| Macon | GA | +| Savannah | GA | +| Boone | IA | +| Cedar Rapids | IA | +| Council Bluffs | IA | +| Davenport | IA | +| Des Moines | IA | +| Dubuque | IA | +| Sioux City | IA | +| Waterloo | IA | +| Aurora | IL | +| Chicago | IL | +| Decatur | IL | +| East St. Louis | IL | +| Joliet | IL | +| Peoria | IL | +| Rockford | IL | +| Springfield | IL | +| Evansville | IN | +| Fort Wayne | IN | +| Indianapolis | IN | +| Lake Co. Gary | IN | +| Muncie | IN | +| South Bend | IN | +| Terre Haute | IN | +| Atchison | KS | +| Junction City | KS | +| Topeka | KS | +| Wichita | KS | +| Covington | KY | +| Lexington | KY | +| Louisville | KY | +| New Orleans | LA | +| Shreveport | LA | +| Arlington | MA | +| Belmont | MA | +| Boston | MA | +| Braintree | MA | +| Brockton | MA | +| Brookline | MA | +| Cambridge | MA | +| Chelsea | MA | +| Dedham | MA | +| Everett | MA | +| Fall River | MA | +| Fitchburg | MA | +| Haverhill | MA | +| Holyoke Chicopee | MA | +| Lawrence | MA | +| Lexington | MA | +| Lowell | MA | +| Lynn | MA | +| Malden | MA | +| Medford | MA | +| Melrose | MA | +| Milton | MA | +| Needham | MA | +| New Bedford | MA | +| Newton | MA | +| Pittsfield | MA | +| Quincy | MA | +| Revere | MA | +| Salem | MA | +| Saugus | MA | +| Somerville | MA | +| Springfield | MA | +| Waltham | MA | +| Watertown | MA | +| Winchester | MA | +| Winthrop | MA | +| Worcester | MA | +| Baltimore | MD | +| Augusta | ME | +| Boothbay | ME | +| Portland | ME | +| Sanford | ME | +| Waterville | ME | +| Battle Creek | MI | +| Bay City | MI | +| Detroit | MI | +| Flint | MI | +| Grand Rapids | MI | +| Jackson | MI | +| Kalamazoo | MI | +| Lansing | MI | +| Muskegon | MI | +| Pontiac | MI | +| Saginaw | MI | +| Austin | MN | +| Duluth | MN | +| Mankato | MN | +| Minneapolis | MN | +| Rochester | MN | +| St. Cloud | MN | +| St. Paul | MN | +| Staples | MN | +| Cape Girardeau | MO | +| Carthage | MO | +| Greater Kansas City | MO | +| Joplin | MO | +| Springfield | MO | +| St. Joseph | MO | +| St. 
Louis | MO | +| Jackson | MS | +| Asheville | NC | +| Charlotte | NC | +| Durham | NC | +| Elizabeth City | NC | +| Fayetteville | NC | +| Goldsboro | NC | +| Greensboro | NC | +| Hendersonville | NC | +| High Point | NC | +| New Bern | NC | +| Rocky Mount | NC | +| Statesville | NC | +| Winston-Salem | NC | +| Fargo | ND | +| Grand Forks | ND | +| Minot | ND | +| Williston | ND | +| Lincoln | NE | +| Omaha | NE | +| Manchester | NH | +| Atlantic City | NJ | +| Bergen Co. | NJ | +| Camden | NJ | +| Essex Co. | NJ | +| Hudson Co. | NJ | +| Monmouth | NJ | +| Passaic County | NJ | +| Perth Amboy | NJ | +| Trenton | NJ | +| Union Co. | NJ | +| Albany | NY | +| Binghamton-Johnson City | NY | +| Bronx | NY | +| Brooklyn | NY | +| Buffalo | NY | +| Elmira | NY | +| Jamestown | NY | +| Lower Westchester Co. | NY | +| Manhattan | NY | +| Niagara Falls | NY | +| Poughkeepsie | NY | +| Queens | NY | +| Rochester | NY | +| Schenectady | NY | +| Staten Island | NY | +| Syracuse | NY | +| Troy | NY | +| Utica | NY | +| Akron | OH | +| Canton | OH | +| Cleveland | OH | +| Columbus | OH | +| Dayton | OH | +| Hamilton | OH | +| Lima | OH | +| Lorain | OH | +| Portsmouth | OH | +| Springfield | OH | +| Toledo | OH | +| Warren | OH | +| Youngstown | OH | +| Ada | OK | +| Alva | OK | +| Enid | OK | +| Miami Ottawa County | OK | +| Muskogee | OK | +| Norman | OK | +| Oklahoma City | OK | +| South McAlester | OK | +| Tulsa | OK | +| Portland | OR | +| Allentown | PA | +| Altoona | PA | +| Bethlehem | PA | +| Chester | PA | +| Erie | PA | +| Harrisburg | PA | +| Johnstown | PA | +| Lancaster | PA | +| McKeesport | PA | +| New Castle | PA | +| Philadelphia | PA | +| Pittsburgh | PA | +| Wilkes-Barre | PA | +| York | PA | +| Pawtucket & Central Falls | RI | +| Providence | RI | +| Woonsocket | RI | +| Aiken | SC | +| Charleston | SC | +| Columbia | SC | +| Greater Anderson | SC | +| Greater Greenville | SC | +| Orangeburg | SC | +| Rock Hill | SC | +| Spartanburg | SC | +| Sumter | SC | +| Aberdeen | SD | +| Huron | SD | +| Milbank | SD | +| Mitchell | SD | +| Rapid City | SD | +| Sioux Falls | SD | +| Vermillion | SD | +| Watertown | SD | +| Chattanooga | TN | +| Elizabethton | TN | +| Erwin | TN | +| Greenville | TN | +| Johnson City | TN | +| Knoxville | TN | +| Memphis | TN | +| Nashville | TN | +| Amarillo | TX | +| Austin | TX | +| Beaumont | TX | +| Dallas | TX | +| El Paso | TX | +| Fort Worth | TX | +| Galveston | TX | +| Houston | TX | +| Port Arthur | TX | +| San Antonio | TX | +| Waco | TX | +| Wichita Falls | TX | +| Ogden | UT | +| Salt Lake City | UT | +| Bristol | VA | +| Danville | VA | +| Harrisonburg | VA | +| Lynchburg | VA | +| Newport News | VA | +| Norfolk | VA | +| Petersburg | VA | +| Phoebus | VA | +| Richmond | VA | +| Roanoke | VA | +| Staunton | VA | +| Bennington | VT | +| Brattleboro | VT | +| Burlington | VT | +| Montpelier | VT | +| Newport City | VT | +| Poultney | VT | +| Rutland | VT | +| Springfield | VT | +| St. Albans | VT | +| St. Johnsbury | VT | +| Windsor | VT | +| Seattle | WA | +| Spokane | WA | +| Tacoma | WA | +| Kenosha | WI | +| Madison | WI | +| Milwaukee Co. | WI | +| Oshkosh | WI | +| Racine | WI | +| Charleston | WV | +| Huntington | WV | +| Wheeling | WV | + +
    +
    + +FUNCTION: Stream HOLC data from a city + + +``` r +# Function to load and filter redlining data by city +load_city_redlining_data <- function(city_name) { + # URL to the GeoJSON data + url <- "https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson" + + # Read the GeoJSON file into an sf object + redlining_data <- read_sf(url) + + # Filter the data for the specified city and non-empty grades + + city_redline <- redlining_data %>% + filter(city == city_name ) + + # Return the filtered data + return(city_redline) +} +``` + +
    +
    + +Stream HOLC data for Denver, CO + + +``` r +# Load redlining data for Denver +denver_redlining <- load_city_redlining_data("Denver") +knitr::kable(head(denver_redlining), format = "markdown") +``` + +| area_id | city | state | city_survey | cat | grade | label | res | com | ind | fill | GEOID10 | GISJOIN | calc_area | pct_tract | geometry | +|--------:|:-------|:------|:------------|:-----|:------|:------|:-----|:------|:------|:---------|:------------|:---------------|-------------:|----------:|:-----------------------------| +| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004104 | G0800310004104 | 1.525535e+01 | 0.00001 | MULTIPOLYGON (((-104.9125 3… | +| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004201 | G0800310004201 | 3.987458e+05 | 0.20900 | MULTIPOLYGON (((-104.9246 3… | +| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004304 | G0800310004304 | 1.554195e+05 | 0.05927 | MULTIPOLYGON (((-104.9125 3… | +| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004202 | G0800310004202 | 1.117770e+06 | 0.57245 | MULTIPOLYGON (((-104.9125 3… | +| 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \#76a865 | 08031004302 | G0800310004302 | 3.133415e+05 | 0.28381 | MULTIPOLYGON (((-104.928 39… | +| 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \#76a865 | 08031004301 | G0800310004301 | 1.221218e+05 | 0.08622 | MULTIPOLYGON (((-104.9305 3… | + +
    +
    + +FUNCTION: Get Points-of-Interest from city of interest + + +``` r + + +get_places <- function(polygon_layer, type = "food" ) { + # Check if the input is an sf object + if (!inherits(polygon_layer, "sf")) { + stop("The provided object is not an sf object.") + } + + # Create a bounding box from the input sf object + bbox_here <- st_bbox(polygon_layer) |> + st_as_sfc() + + if(type == "food"){ + my_layer <- "multipolygons" + my_query <- "SELECT * FROM multipolygons WHERE ( + shop IN ('supermarket', 'bodega', 'market', 'other_market', 'farm', 'garden_centre', 'doityourself', 'farm_supply', 'compost', 'mulch', 'fertilizer') OR + amenity IN ('social_facility', 'market', 'restaurant', 'coffee') OR + leisure = 'garden' OR + landuse IN ('farm', 'farmland', 'row_crops', 'orchard_plantation', 'dairy_grazing') OR + building IN ('brewery', 'winery', 'distillery') OR + shop = 'greengrocer' OR + amenity = 'marketplace' + )" + title <- "food" + } + + if (type == "processed_food") { + my_layer <- "multipolygons" + my_query <- "SELECT * FROM multipolygons WHERE ( + amenity IN ('fast_food', 'cafe', 'pub') OR + shop IN ('convenience', 'supermarket') OR + shop = 'kiosk' + )" + title <- "Processed Food Locations" +} + + if(type == "natural_habitats"){ + my_layer <- "multipolygons" + my_query <- "SELECT * FROM multipolygons WHERE ( + boundary = 'protected_area' OR + natural IN ('tree', 'wood') OR + landuse = 'forest' OR + leisure = 'park' + )" + title <- "Natural habitats or City owned trees" + } + + if(type == "roads"){ + my_layer <- "lines" + my_query <- "SELECT * FROM lines WHERE ( + highway IN ('motorway', 'trunk', 'primary', 'secondary', 'tertiary') )" + title <- "Major roads" + } + + if(type == "rivers"){ + my_layer <- "lines" + my_query <- "SELECT * FROM lines WHERE ( + waterway IN ('river'))" + title <- "Major rivers" + } + + if(type == "internet_access") { + my_layer <- "multipolygons" + my_query <- "SELECT * FROM multipolygons WHERE ( + amenity IN ('library', 'cafe', 'community_centre', 'public_building') AND + internet_access = 'yes' + )" + title <- "Internet Access Locations" +} + + if(type == "water_bodies") { + my_layer <- "multipolygons" + my_query <- "SELECT * FROM multipolygons WHERE ( + natural IN ('water', 'lake', 'pond') OR + water IN ('lake', 'pond') OR + landuse = 'reservoir' + )" + title <- "Water Bodies" +} + + if(type == "government_buildings") { + my_layer <- "multipolygons" + my_query <- "SELECT * FROM multipolygons WHERE ( + amenity IN ('townhall', 'courthouse', 'embassy', 'police', 'fire_station') OR + building IN ('capitol', 'government') + )" + title <- "Government Buildings" +} + + + + # Use the bbox to get data with oe_get(), specifying the desired layer and a custom SQL query for fresh food places + tryCatch({ + places <- oe_get( + place = bbox_here, + layer = my_layer, # Adjusted layer; change as per actual data availability + query = my_query, + quiet = TRUE + ) + + places <- st_make_valid(places) + + # Crop the data to the bounding box + cropped_places <- st_crop(places, bbox_here) + + # Plotting the cropped fresh food places + plot <- ggplot(data = cropped_places) + + geom_sf(fill="cornflowerblue", color="cornflowerblue") + + ggtitle(title) + + theme_tufte()+ + theme(legend.position = "none", # Optionally hide the legend + axis.text = element_blank(), # Remove axis text + axis.title = element_blank(), # Remove axis titles + axis.ticks = element_blank(), # Remove axis ticks + plot.background = element_rect(fill = "white", color = NA), # Set the plot background to white 
+ panel.background = element_rect(fill = "white", color = NA), # Set the panel background to white + panel.grid.major = element_blank(), # Remove major grid lines + panel.grid.minor = element_blank(), + ) + + # Save the plot as a PNG file + png_filename <- paste0(title,"_", Sys.Date(), ".png") + ggsave(png_filename, plot, width = 10, height = 8, units = "in") + + # Return the cropped dataset + return(cropped_places) + }, error = function(e) { + stop("Failed to retrieve or plot data: ", e$message) + }) +} +``` + +
    +
    + +FUNCTION: Plot POI over HOLC grades + + +``` r + + +plot_city_redlining <- function(redlining_data, filename = "redlining_plot.png") { + # Fetch additional geographic data based on redlining data + roads <- get_places(redlining_data, type = "roads") + rivers <- get_places(redlining_data, type = "rivers") + + # Filter residential zones with valid grades and where city survey is TRUE + residential_zones <- redlining_data %>% + filter(city_survey == TRUE & grade != "") + + # Colors for the grades + colors <- c("#76a865", "#7cb5bd", "#ffff00", "#d9838d") + + # Plot the data using ggplot2 + plot <- ggplot() + + geom_sf(data = roads, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.5, lwd = 1.1) + + geom_sf(data = residential_zones, aes(fill = grade), alpha = 0.5) + + theme_tufte() + + scale_fill_manual(values = colors) + + labs(fill = 'HOLC Categories') + + theme( + plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank(), + legend.position = "right" + ) + + # Save the plot as a high-resolution PNG file + ggsave(filename, plot, width = 10, height = 8, units = "in", dpi = 600) + + # Return the plot object if needed for further manipulation or checking + return(plot) +} +``` + +
    +
    + +Plot Denver Redlining + + +``` r +denver_plot <- plot_city_redlining(denver_redlining) +``` + +
    + +![](../worksheets/redlining_plot.png) + +
    + +Stream amenities by category + + +``` r +food <- get_places(denver_redlining, type="food") + +food_processed <- get_places(denver_redlining, type="processed_food") + +natural_habitats <- get_places(denver_redlining, type="natural_habitats") + +roads <- get_places(denver_redlining, type="roads") + +rivers <- get_places(denver_redlining, type="rivers") + +#water_bodies <- get_places(denver_redlining, type="water_bodies") + +government_buildings <- get_places(denver_redlining, type="government_buildings") +``` + +
    +
    + +FUNCTION: Plot the HOLC grades individually + + +``` r +split_plot <- function(sf_data, roads, rivers) { + # Filter for grades A, B, C, and D + sf_data_filtered <- sf_data %>% + filter(grade %in% c('A', 'B', 'C', 'D')) + + # Define a color for each grade + grade_colors <- c("A" = "#76a865", "B" = "#7cb5bd", "C" = "#ffff00", "D" = "#d9838d") + + # Create the plot with panels for each grade + plot <- ggplot(data = sf_data_filtered) + + geom_sf(data = roads, alpha = 0.1, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) + + geom_sf(aes(fill = grade)) + + facet_wrap(~ grade, nrow = 1) + # Free scales for different zoom levels if needed + scale_fill_manual(values = grade_colors) + + theme_minimal() + + labs(fill = 'HOLC Grade') + + theme_tufte() + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "none", # Optionally hide the legend + axis.text = element_blank(), # Remove axis text + axis.title = element_blank(), # Remove axis titles + axis.ticks = element_blank(), # Remove axis ticks + panel.grid.major = element_blank(), # Remove major grid lines + panel.grid.minor = element_blank()) + + ggsave(plot, filename = "HOLC_grades_individually.png", width = 10, height = 4, units = "in", dpi = 1200) + return(plot) +} +``` + +
    +
    + +Plot 4 HOLC grades individually + + +``` r +plot_row <- split_plot(denver_redlining, roads, rivers) +``` + +
    + +![](../worksheets/HOLC_grades_individually.png) + +
    + +FUNCTION: Map an amenity over each grade individually + + +``` r + +process_and_plot_sf_layers <- function(layer1, layer2, output_file = "output_plot.png") { + # Make geometries valid +layer1 <- st_make_valid(layer1) +layer2 <- st_make_valid(layer2) + +# Optionally, simplify geometries to remove duplicate vertices +layer1 <- st_simplify(layer1, preserveTopology = TRUE) |> + filter(grade != "") + +# Prepare a list to store results +results <- list() + +# Loop through each grade and perform operations +for (grade in c("A", "B", "C", "D")) { + # Filter layer1 for current grade + layer1_grade <- layer1[layer1$grade == grade, ] + + # Buffer the geometries of the current grade + buffered_layer1_grade <- st_buffer(layer1_grade, dist = 500) + + # Intersect with the second layer + intersections <- st_intersects(layer2, buffered_layer1_grade, sparse = FALSE) + selected_polygons <- layer2[rowSums(intersections) > 0, ] + + # Add a new column to store the grade information + selected_polygons$grade <- grade + + # Store the result + results[[grade]] <- selected_polygons +} + +# Combine all selected polygons from different grades into one sf object +final_selected_polygons <- do.call(rbind, results) + + # Define colors for the grades + grade_colors <- c("A" = "grey", "B" = "grey", "C" = "grey", "D" = "grey") + + # Create the plot + plot <- ggplot() + + geom_sf(data = roads, alpha = 0.05, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) + + geom_sf(data = layer1, fill = "grey", color = "grey", size = 0.1) + + facet_wrap(~ grade, nrow = 1) + + geom_sf(data = final_selected_polygons,fill = "green", color = "green", size = 0.1) + + facet_wrap(~ grade, nrow = 1) + + #scale_fill_manual(values = grade_colors) + + #scale_color_manual(values = grade_colors) + + theme_minimal() + + labs(fill = 'HOLC Grade') + + theme_tufte() + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "none", + axis.text = element_blank(), + axis.title = element_blank(), + axis.ticks = element_blank(), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank()) + + # Save the plot as a high-resolution PNG file + ggsave(output_file, plot, width = 10, height = 4, units = "in", dpi = 1200) + + # Return the plot for optional further use + return(list(plot=plot, sf = final_selected_polygons)) +} +``` + +
    +
    + +FUNCTION: Create word cloud per grade + + +``` r +create_wordclouds_by_grade <- function(sf_object, output_file = "food_word_cloud_per_grade.png",title = "Healthy food place names word cloud", max_size =25, col_select = "name") { + + + # Extract relevant data and prepare text data + text_data <- sf_object %>% + select(grade, col_select) %>% + filter(!is.na(col_select)) %>% + unnest_tokens(output = "word", input = col_select, token = "words") %>% + count(grade, word, sort = TRUE) %>% + ungroup() %>% + filter(n() > 1) # Filter to remove overly common or single-occurrence words + + # Ensure there are no NA values in the 'word' column + text_data <- text_data %>% filter(!is.na(word)) + + # Handle cases where text_data might be empty + if (nrow(text_data) == 0) { + stop("No data available for creating word clouds.") + } + + # Create a word cloud using ggplot2 and ggwordcloud + p <- ggplot( ) + + geom_text_wordcloud_area(data=text_data, aes(label = word, size = n),rm_outside = TRUE) + + scale_size_area(max_size = max_size) + + facet_wrap(~ grade, nrow = 1) + + scale_color_gradient(low = "darkred", high = "red") + + theme_minimal() + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + panel.spacing = unit(0.5, "lines"), + plot.title = element_text(size = 16, face = "bold"), + legend.position = "none") + + labs(title = title) + + # Attempt to save the plot and handle any errors + tryCatch({ + ggsave(output_file, p, width = 10, height = 4, units = "in", dpi = 600) + }, error = function(e) { + cat("Error in saving the plot: ", e$message, "\n") + }) + + return(p) +} +``` + +
    +
    + +Map food over each grade individually + + +``` r + layer1 <- denver_redlining + layer2 <- food + food_match <- process_and_plot_sf_layers(layer1, layer2, "food_match.png") +``` + +
    + +![](../worksheets/food_match.png) + +
    + +WORD CLOUD: Names of places with fresh food + + +``` r +food_word_cloud <- create_wordclouds_by_grade(food_match$sf, output_file = "food_word_cloud_per_grade.png") +``` + + Warning: Using an external vector in selections was deprecated in tidyselect 1.1.0. + ℹ Please use `all_of()` or `any_of()` instead. + # Was: + data %>% select(col_select) + + # Now: + data %>% select(all_of(col_select)) + + See . + + Warning in wordcloud_boxes(data_points = points_valid_first, boxes = boxes, : + Some words could not fit on page. They have been removed. + +
    + +![](../worksheets/food_word_cloud_per_grade.png) + +
    + +Map processed food over each grade individually + + +``` r + layer1 <- denver_redlining + layer2 <- food_processed + processed_food_match <- process_and_plot_sf_layers(layer1, layer2, "processed_food_match.png") +``` + +
    + +![](../worksheets/processed_food_match.png) + +
    + +WORD CLOUD: Names of places with processed food + + +``` r +processed_food_cloud <- create_wordclouds_by_grade(processed_food_match$sf, output_file = "processed_food_word_cloud_per_grade.png",title = "Processed food place names where larger text is more frequent", max_size =17) +``` + +
    + +![](../worksheets/processed_food_word_cloud_per_grade.png) + +## Part 2: Integrating Environmental Data + +### Data Processing + +- Use satellite data from 2010 to analyze greenspace using NDVI, an + index that measures the quantity of vegetation in an area. +- Apply methods to adjust for potential confounders as described in + the study, ensuring that comparisons of greenspace across HOLC + grades are valid and not biased by historical or socio-demographic + factors. + +
    + +Map natural habitats over each grade individually + + +``` r + layer1 <- denver_redlining + layer2 <- natural_habitats + natural_habitats_match <- process_and_plot_sf_layers(layer1, layer2, "natural_habitats_match.png") + print(natural_habitats_match$plot) +``` + +![](worksheet_redlining_files/figure-gfm/unnamed-chunk-18-1.png) + +
    + +![](../worksheets/natural_habitats_match.png) + +
    + +WORD CLOUD: Name of natural habitat area + + +``` r +natural_habitats_cloud <- create_wordclouds_by_grade(natural_habitats_match$sf, output_file = "natural_habitats_word_cloud_per_grade.png",title = "Natural habitats place names where larger text is more frequent", max_size =35) +``` + +
    + +![](../worksheets/natural_habitats_word_cloud_per_grade.png) + +
    + +FUNCTION: Stream NDVI data + + +``` r +polygon_layer <- denver_redlining +# Function to process satellite data based on an SF polygon's extent +process_satellite_data <- function(polygon_layer, start_date, end_date, assets, fps = 1, output_file = "anim.gif") { + # Record start time + start_time <- Sys.time() + + # Calculate the bbox from the polygon layer + bbox <- st_bbox(polygon_layer) + + s = stac("https://earth-search.aws.element84.com/v0") + + + # Use stacR to search for Sentinel-2 images within the bbox and date range + items = s |> stac_search( + collections = "sentinel-s2-l2a-cogs", + bbox = c(bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]), + datetime = paste(start_date, end_date, sep = "/"), + limit = 500 + ) %>% + post_request() + + # Define mask for Sentinel-2 image quality + #S2.mask <- image_mask("SCL", values = c(3, 8, 9)) + + # Create a collection of images filtering by cloud cover + col <- stac_image_collection(items$features, asset_names = assets, property_filter = function(x) {x[["eo:cloud_cover"]] < 30}) + + # Define a view for processing the data + v <- cube_view(srs = "EPSG:4326", + extent = list(t0 = start_date, t1 = end_date, + left = bbox["xmin"], right = bbox["xmax"], + top = bbox["ymax"], bottom = bbox["ymin"]), + dx = 0.001, dy = 0.001, dt = "P1M", + aggregation = "median", resampling = "bilinear") + + # Calculate NDVI and create an animation + ndvi_col <- function(n) { + rev(sequential_hcl(n, "Green-Yellow")) + } + + #raster_cube(col, v, mask = S2.mask) %>% + raster_cube(col, v) %>% + select_bands(c("B04", "B08")) %>% + apply_pixel("(B08-B04)/(B08+B04)", "NDVI") %>% + gdalcubes::animate(col = ndvi_col, zlim = c(-0.2, 1), key.pos = 1, save_as = output_file, fps = fps) + + # Calculate processing time + end_time <- Sys.time() + processing_time <- difftime(end_time, start_time) + + # Return processing time + return(processing_time) +} +``` + +
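The search above uses the Earth Search v0 STAC endpoint and its `sentinel-s2-l2a-cogs` collection. If that endpoint is retired or unreachable from your environment, a likely substitution (an assumption to verify, not something this worksheet was run against) is the v1 endpoint, where the collection is called `sentinel-2-l2a` and asset names may differ from `B04`/`B08`, so inspect the returned items before building the image collection:

``` r
# Hypothetical v1 query; confirm the collection and asset names before use.
s <- stac("https://earth-search.aws.element84.com/v1")

items <- s |>
  stac_search(
    collections = "sentinel-2-l2a",
    bbox = c(bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]),
    datetime = paste(start_date, end_date, sep = "/"),
    limit = 500
  ) |>
  post_request()

# Check how assets are named in this collection before passing asset_names
# to stac_image_collection().
names(items$features[[1]]$assets)
```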
    +
    + +Stream NDVI data: animation + + +``` r +processing_time <- process_satellite_data(denver_redlining, "2022-05-31", "2023-05-31", c("B04", "B08")) +``` + +
    + +![](../worksheets/anim.gif) + +
    + +FUNCTION: Stream year average NDVI + + +``` r + + + +yearly_average_ndvi <- function(polygon_layer, output_file = "ndvi.png", dx = 0.01, dy = 0.01) { + # Record start time + start_time <- Sys.time() + + # Calculate the bbox from the polygon layer + bbox <- st_bbox(polygon_layer) + + s = stac("https://earth-search.aws.element84.com/v0") + + # Search for Sentinel-2 images within the bbox for June + items <- s |> stac_search( + collections = "sentinel-s2-l2a-cogs", + bbox = c(bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]), + datetime = "2023-01-01/2023-12-31", + limit = 500 + ) %>% + post_request() + + # Create a collection of images filtering by cloud cover + col <- stac_image_collection(items$features, asset_names = c("B04", "B08"), property_filter = function(x) {x[["eo:cloud_cover"]] < 80}) + + # Define a view for processing the data specifically for June + v <- cube_view(srs = "EPSG:4326", + extent = list(t0 = "2023-01-01", t1 = "2023-12-31", + left = bbox["xmin"], right = bbox["xmax"], + top = bbox["ymax"], bottom = bbox["ymin"]), + dx = dx, dy = dy, dt = "P1Y", + aggregation = "median", resampling = "bilinear") + + # Process NDVI + ndvi_rast <- raster_cube(col, v) %>% + select_bands(c("B04", "B08")) %>% + apply_pixel("(B08-B04)/(B08+B04)", "NDVI") %>% + write_tif() |> + terra::rast() + + + # Convert terra Raster to ggplot using tidyterra +ndvi_plot <- ggplot() + + geom_spatraster(data = ndvi_rast, aes(fill = NDVI)) + + scale_fill_viridis_c(option = "viridis", direction = -1, name = "NDVI") + + labs(title = "NDVI mean for 2023") + + theme_minimal() + + coord_sf() + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "right", + axis.text = element_blank(), + axis.title = element_blank(), + axis.ticks = element_blank(), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank()) + + # Save the plot as a high-resolution PNG file + ggsave(output_file, ndvi_plot, width = 10, height = 8, dpi = 600) + + # Calculate processing time + end_time <- Sys.time() + processing_time <- difftime(end_time, start_time) + + # Return the plot and processing time + return(list(plot = ndvi_plot, processing_time = processing_time, raster = ndvi_rast)) +} +``` + +
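Because `yearly_average_ndvi()` streams a full year of Sentinel-2 scenes, it can take a while at fine resolution. One optional pattern, not part of the original worksheet, is to cache the returned raster to disk so later chunks (such as the per-grade masking below) can reload it without re-streaming:

``` r
# Sketch: cache the computed annual NDVI raster and reload it later.
library(terra)

ndvi_background <- yearly_average_ndvi(denver_redlining, dx = 0.0001, dy = 0.0001)
writeRaster(ndvi_background$raster, "ndvi_2023_cached.tif", overwrite = TRUE)

# In a later session (or after a restart):
ndvi_cached <- rast("ndvi_2023_cached.tif")
```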
    +
    + +Stream NDVI: high resolution + + +``` r +ndvi_background <- yearly_average_ndvi(denver_redlining,dx = 0.0001, dy = 0.0001) +``` + +
    + +![](../worksheets/ndvi_00001.png) + +
    + +FUNCTION: Map NDVI per HOLC grade individually + + +``` r + + +create_mask_and_plot <- function(redlining_sf, background_raster = ndvi$raster, roads = NULL, rivers = NULL){ + start_time <- Sys.time() # Start timing + + # Validate and prepare the redlining data + redlining_sf <- redlining_sf %>% + filter(grade != "") %>% + st_make_valid() + + +bbox <- st_bbox(redlining_sf) # Get original bounding box + + +expanded_bbox <- expand_bbox(bbox, 6000, 1000) # + + +expanded_bbox_poly <- st_as_sfc(expanded_bbox, crs = st_crs(redlining_sf)) %>% + st_make_valid() + + # Initialize an empty list to store masks + masks <- list() + + # Iterate over each grade to create masks + unique_grades <- unique(redlining_sf$grade) + for (grade in unique_grades) { + # Filter polygons by grade + grade_polygons <- redlining_sf[redlining_sf$grade == grade, ] + + # Create an "inverted" mask by subtracting these polygons from the background + mask <- st_difference(expanded_bbox_poly, st_union(grade_polygons)) + + # Store the mask in the list with the grade as the name + masks[[grade]] <- st_sf(geometry = mask, grade = grade) + } + + # Combine all masks into a single sf object + mask_sf <- do.call(rbind, masks) + + # Normalize the grades so that C.2 becomes C, but correctly handle other grades + mask_sf$grade <- ifelse(mask_sf$grade == "C.2", "C", mask_sf$grade) + + # Prepare the plot + plot <- ggplot() + + geom_spatraster(data = background_raster, aes(fill = NDVI)) + + scale_fill_viridis_c(name = "NDVI", option = "viridis", direction = -1) + + + geom_sf(data = mask_sf, aes(color = grade), fill = "white", size = 0.1, show.legend = FALSE) + + scale_color_manual(values = c("A" = "white", "B" = "white", "C" = "white", "D" = "white"), name = "Grade") + + facet_wrap(~ grade, nrow = 1) + + geom_sf(data = roads, alpha = 1, lwd = 0.1, color="white") + + geom_sf(data = rivers, color = "white", alpha = 0.5, lwd = 1.1) + + labs(title = "NDVI: Normalized Difference Vegetation Index") + + theme_minimal() + + coord_sf(xlim = c(bbox["xmin"], bbox["xmax"]), + ylim = c(bbox["ymin"], bbox["ymax"]), + expand = FALSE) + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "bottom", + axis.text = element_blank(), + axis.title = element_blank(), + axis.ticks = element_blank(), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank()) + + # Save the plot + ggsave("redlining_mask_ndvi.png", plot, width = 10, height = 4, dpi = 600) + + end_time <- Sys.time() # End timing + runtime <- end_time - start_time + + # Return the plot and runtime + return(list(plot = plot, runtime = runtime, mask_sf = mask_sf)) +} +``` + +
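The key step in `create_mask_and_plot()` is the "inverted" mask: for each grade, everything inside an expanded bounding box except that grade's polygons is drawn in white, so the NDVI raster shows through only within the grade. A toy sketch of that geometry operation with sf (made-up shapes, purely to illustrate the idea):

``` r
library(sf)

# A square "frame" standing in for the expanded bounding-box polygon.
frame <- st_as_sfc(st_bbox(c(xmin = 0, ymin = 0, xmax = 10, ymax = 10)))

# A blob standing in for one grade's polygons.
grade_poly <- st_buffer(st_sfc(st_point(c(5, 5))), dist = 2)

# Frame minus polygons: plotting this in white over a raster hides everything
# except the interior of grade_poly.
inverted_mask <- st_difference(frame, st_union(grade_poly))

plot(inverted_mask, col = "white", border = "grey40")
```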
    +
    + +Stream NDVI: low resolution + + +``` r +ndvi_background_low <- yearly_average_ndvi(denver_redlining) +``` + +
    +![](../worksheets/ndvi.png) +
    + +Map low resolution NDVI per HOLC grade + + +``` r +ndvi <- create_mask_and_plot(denver_redlining, background_raster = ndvi_background_low$raster, roads = roads, rivers = rivers) +``` + +
    + +![](../worksheets/redlining_mask_ndvi.png) + +
    + +FUNCTION: Map Denver City provided data per HOLC grade + + +``` r +process_city_inventory_data <- function(address, inner_file, polygon_layer, output_filename,variable_label= 'Tree Density') { + # Download and read the shapefile + full_path <- glue("/vsizip/vsicurl/{address}/{inner_file}") + shape_data <- st_read(full_path, quiet = TRUE) |> st_as_sf() + + # Process the shape data with the provided polygon layer + processed_data <- process_and_plot_sf_layers(polygon_layer, shape_data, paste0(output_filename, ".png")) + + # Extract trees from the processed data + trees <- processed_data$sf + denver_redlining_residential <- polygon_layer |> filter(grade != "") + + # Generate the density plot + plot <- ggplot() + + geom_sf(data = roads, alpha = 0.05, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) + + geom_sf(data = denver_redlining_residential, fill = "grey", color = "grey", size = 0.1) + + facet_wrap(~ grade, nrow = 1) + + stat_density_2d(data = trees, + mapping = aes(x = map_dbl(geometry, ~.[1]), + y = map_dbl(geometry, ~.[2]), + fill = stat(density)), + geom = 'tile', + contour = FALSE, + alpha = 0.9) + + scale_fill_gradientn(colors = c("transparent", "white", "limegreen"), + values = scales::rescale(c(0, 0.1, 1)), # Adjust these based on your density range + guide = "colourbar") + + theme_minimal() + + labs(fill = variable_label) + + theme_tufte() + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "bottom", + axis.text = element_blank(), + axis.title = element_blank(), + axis.ticks = element_blank(), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank()) + + # Save the plot + ggsave(paste0(output_filename, "_density_plot.png"), plot, width = 10, height = 4, units = "in", dpi = 600) + + # Return the plot and the tree layer + return(list(plot = plot, layer = trees)) +} +``` + +
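A quick note on the `/vsizip/vsicurl/` prefix used above: these are GDAL virtual file systems, chained so that `st_read()` can open a shapefile inside a remote zip archive without downloading and unpacking it first. A minimal standalone sketch of the pattern (the URL and file name below are placeholders, not real data):

``` r
library(sf)
library(glue)

# Placeholders only; substitute a real zipped-shapefile URL and its inner path.
address    <- "https://example.org/data/some_layer.zip"
inner_file <- "some_layer.shp"

shape_data <- st_read(glue("/vsizip/vsicurl/{address}/{inner_file}"), quiet = TRUE)
```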
    +
    + +Map tree inventory per HOLC grade + + +``` r +result <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/tree_inventory/shape/tree_inventory.zip", + "tree_inventory.shp", + denver_redlining, + "Denver_tree_inventory_2023" +) +``` + + Warning: `stat(density)` was deprecated in ggplot2 3.4.0. + ℹ Please use `after_stat(density)` instead. + +
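The warning above comes from the `fill = stat(density)` mapping inside `process_city_inventory_data()`; since ggplot2 3.4.0 the preferred spelling is `after_stat()`. The function is kept above as it was run, but if you want to silence the warning, that one mapping could be rewritten as follows (a sketch assuming ggplot2 >= 3.4):

``` r
# Drop-in replacement for the stat_density_2d() layer inside the function.
stat_density_2d(
  data = trees,
  mapping = aes(
    x = map_dbl(geometry, ~ .[1]),
    y = map_dbl(geometry, ~ .[2]),
    fill = after_stat(density)   # replaces the deprecated stat(density)
  ),
  geom = "tile",
  contour = FALSE,
  alpha = 0.9
)
```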
    + +![](../worksheets/Denver_tree_inventory_2023.png) +![](../worksheets/Denver_tree_inventory_2023_density_plot.png) + +
    + +Map traffic accidents per HOLC grade + + +``` r +result <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/traffic_accidents/shape/traffic_accidents.zip", + "traffic_accidents.shp", + denver_redlining, + "Denver_traffic_accidents", + variable_label= 'Traffic accidents density' +) +``` + +
    + +![](../worksheets/Denver_traffic_accidents.png) +![](../worksheets/Denver_traffic_accidents_density_plot.png) + +
    + +Map stream sampling effort per HOLC grade + + +``` r +instream_sampling_sites <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/instream_sampling_sites/shape/instream_sampling_sites.zip", + "instream_sampling_sites.shp", + denver_redlining, + "instream_sampling_sites", + variable_label= 'Instream sampling sites density' +) +``` + +
    + +![](../worksheets/instream_sampling_sites.png) +![](../worksheets/instream_sampling_sites_density_plot.png) + +
    + +Map soil sampling effort per HOLC grade + + +``` r +soil_samples <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/soil_samples/shape/soil_samples.zip", + "soil_samples.shp", + denver_redlining, + "Soil samples", + variable_label= 'soil samples density' +) +``` + +
    + +![](../worksheets/Soil%20samples.png) +![](../worksheets/Soil%20samples_density_plot.png) + +
    + +Map public art density per HOLC grade + + +``` r +public_art <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/public_art/shape/public_art.zip", + "public_art.shp", + denver_redlining, + "Public art ", + variable_label= 'Public art density' +) +``` + +
    + +![](../worksheets/Public%20art%20.png) +![](../worksheets/Public%20art%20_density_plot.png) + +
    + +Map liquor licenses density per HOLC grade + + +``` r +liquor_licenses <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/liquor_licenses/shape/liquor_licenses.zip", + "liquor_licenses.shp", + denver_redlining, + "liquor licenses ", + variable_label= 'liquor licenses density' +) +``` + +
    + +![](../worksheets/liquor%20licenses%20.png) +![](../worksheets/liquor%20licenses%20_density_plot.png) + +
    + +Map crime density per HOLC grade + + +``` r +Crime <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/crime/shape/crime.zip", + "crime.shp", + denver_redlining, + "crime", + variable_label= 'Crime density' +) +``` + +
    +![](../worksheets/crime.png) ![](../worksheets/crime_density_plot.png) +
    + +WORD CLOUD: Types of crimes + + +``` r +crime_cloud <- create_wordclouds_by_grade(Crime$layer, output_file = "Crime_word_cloud_per_grade.png",title = "Crime type where larger text is more frequent", max_size =25, col_select = "OFFENSE_TY") +``` + + Warning: Using an external vector in selections was deprecated in tidyselect 1.1.0. + ℹ Please use `all_of()` or `any_of()` instead. + # Was: + data %>% select(col_select) + + # Now: + data %>% select(all_of(col_select)) + + See . + +
    + +![](../worksheets/Crime_word_cloud_per_grade.png) + +
    + +Map police shooting density per HOLC grade + + +``` r +Denver_police_shootings <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/denver_police_officer_involved_shootings/shape/denver_police_officer_involved_shootings.zip", + "denver_police_officer_involved_shootings.shp", + denver_redlining, + "Police shootings", + variable_label= 'Police shootings density' +) +``` + +
+ +![](../worksheets/Police%20shootings.png) + +**Not enough data to estimate a density surface for all four HOLC grades** + +
    + +WORD CLOUD: Police involved shootings + + +``` r +Denver_police_shootings_cloud <- create_wordclouds_by_grade(Denver_police_shootings$layer, output_file = "police_shootings_word_cloud_per_grade.png",title = "police involved shooting per crime type where larger text is more frequent", max_size =35, col_select = "SHOOT_ACTI") +``` + +
    + +![](../worksheets/police_shootings_word_cloud_per_grade.png) + +## Part 3: Comparative Analysis and Visualization + +### Statistical Analysis + +- Conduct a detailed statistical analysis to compare greenspace across + different HOLC grades, using techniques like Targeted Maximum + Likelihood Estimation (TMLE) to assess the association between + historical redlining and current greenspace levels. +- Visualize the disparities in greenspace distribution using GIS + tools, highlighting how redlining has shaped urban ecological + landscapes. + +## Conclusion + +This tutorial provides tools and methodologies to explore the lingering +effects of historic redlining on urban greenspace, offering insights +into the intersection of urban planning, environmental justice, and +public health. + +### References + +- Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. + (2021). Redlines and Greenspace: The Relationship between Historical + Redlining and 2010 Greenspace across the United States. + *Environmental Health Perspectives*, 129(1), 017006. + DOI:10.1289/EHP7495. [Available + online](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7839347/pdf/ehp7495.pdf) diff --git a/worksheets/Adult diabetes.png b/worksheets/Adult diabetes.png new file mode 100644 index 0000000..0c2e481 Binary files /dev/null and b/worksheets/Adult diabetes.png differ diff --git a/worksheets/Adult diabetes_density_plot.png b/worksheets/Adult diabetes_density_plot.png new file mode 100644 index 0000000..ef3d3f9 Binary files /dev/null and b/worksheets/Adult diabetes_density_plot.png differ diff --git a/worksheets/Crime_word_cloud_per_grade.png b/worksheets/Crime_word_cloud_per_grade.png new file mode 100644 index 0000000..ef3d3f9 Binary files /dev/null and b/worksheets/Crime_word_cloud_per_grade.png differ diff --git a/worksheets/Denver_traffic_accidents.png b/worksheets/Denver_traffic_accidents.png new file mode 100644 index 0000000..2f7fce6 Binary files /dev/null and b/worksheets/Denver_traffic_accidents.png differ diff --git a/worksheets/Denver_traffic_accidents_density_plot.png b/worksheets/Denver_traffic_accidents_density_plot.png new file mode 100644 index 0000000..ed59d7a Binary files /dev/null and b/worksheets/Denver_traffic_accidents_density_plot.png differ diff --git a/worksheets/Denver_tree_inventory_2023.png b/worksheets/Denver_tree_inventory_2023.png new file mode 100644 index 0000000..65b1623 Binary files /dev/null and b/worksheets/Denver_tree_inventory_2023.png differ diff --git a/worksheets/Denver_tree_inventory_2023_density_plot.png b/worksheets/Denver_tree_inventory_2023_density_plot.png new file mode 100644 index 0000000..7ee9380 Binary files /dev/null and b/worksheets/Denver_tree_inventory_2023_density_plot.png differ diff --git a/worksheets/Denver_tree_inventory_2023_word_cloud_per_grade.png b/worksheets/Denver_tree_inventory_2023_word_cloud_per_grade.png new file mode 100644 index 0000000..339c3d2 Binary files /dev/null and b/worksheets/Denver_tree_inventory_2023_word_cloud_per_grade.png differ diff --git a/worksheets/Government Buildings_2024-04-22.png b/worksheets/Government Buildings_2024-04-22.png new file mode 100644 index 0000000..fc45a90 Binary files /dev/null and b/worksheets/Government Buildings_2024-04-22.png differ diff --git a/worksheets/Government Buildings_2024-04-23.png b/worksheets/Government Buildings_2024-04-23.png new file mode 100644 index 0000000..a627384 Binary files /dev/null and b/worksheets/Government Buildings_2024-04-23.png differ diff --git a/worksheets/Government 
Buildings_2024-04-29.png b/worksheets/Government Buildings_2024-04-29.png new file mode 100644 index 0000000..fc45a90 Binary files /dev/null and b/worksheets/Government Buildings_2024-04-29.png differ diff --git a/worksheets/Government Buildings_2024-04-30.png b/worksheets/Government Buildings_2024-04-30.png new file mode 100644 index 0000000..fc45a90 Binary files /dev/null and b/worksheets/Government Buildings_2024-04-30.png differ diff --git a/worksheets/HOLC_grades_individually.png b/worksheets/HOLC_grades_individually.png new file mode 100644 index 0000000..6385ccf Binary files /dev/null and b/worksheets/HOLC_grades_individually.png differ diff --git a/worksheets/Major rivers_2024-04-18.png b/worksheets/Major rivers_2024-04-18.png new file mode 100644 index 0000000..8149989 Binary files /dev/null and b/worksheets/Major rivers_2024-04-18.png differ diff --git a/worksheets/Major rivers_2024-04-19.png b/worksheets/Major rivers_2024-04-19.png new file mode 100644 index 0000000..53bc730 Binary files /dev/null and b/worksheets/Major rivers_2024-04-19.png differ diff --git a/worksheets/Major rivers_2024-04-22.png b/worksheets/Major rivers_2024-04-22.png new file mode 100644 index 0000000..53bc730 Binary files /dev/null and b/worksheets/Major rivers_2024-04-22.png differ diff --git a/worksheets/Major rivers_2024-04-23.png b/worksheets/Major rivers_2024-04-23.png new file mode 100644 index 0000000..bad035e Binary files /dev/null and b/worksheets/Major rivers_2024-04-23.png differ diff --git a/worksheets/Major rivers_2024-04-29.png b/worksheets/Major rivers_2024-04-29.png new file mode 100644 index 0000000..53bc730 Binary files /dev/null and b/worksheets/Major rivers_2024-04-29.png differ diff --git a/worksheets/Major rivers_2024-04-30.png b/worksheets/Major rivers_2024-04-30.png new file mode 100644 index 0000000..53bc730 Binary files /dev/null and b/worksheets/Major rivers_2024-04-30.png differ diff --git a/worksheets/Major rivers_2024-05-12.png b/worksheets/Major rivers_2024-05-12.png new file mode 100644 index 0000000..53bc730 Binary files /dev/null and b/worksheets/Major rivers_2024-05-12.png differ diff --git a/worksheets/Major roads_2024-04-18.png b/worksheets/Major roads_2024-04-18.png new file mode 100644 index 0000000..a16993d Binary files /dev/null and b/worksheets/Major roads_2024-04-18.png differ diff --git a/worksheets/Major roads_2024-04-19.png b/worksheets/Major roads_2024-04-19.png new file mode 100644 index 0000000..a16993d Binary files /dev/null and b/worksheets/Major roads_2024-04-19.png differ diff --git a/worksheets/Major roads_2024-04-22.png b/worksheets/Major roads_2024-04-22.png new file mode 100644 index 0000000..b976a88 Binary files /dev/null and b/worksheets/Major roads_2024-04-22.png differ diff --git a/worksheets/Major roads_2024-04-23.png b/worksheets/Major roads_2024-04-23.png new file mode 100644 index 0000000..23f7c33 Binary files /dev/null and b/worksheets/Major roads_2024-04-23.png differ diff --git a/worksheets/Major roads_2024-04-29.png b/worksheets/Major roads_2024-04-29.png new file mode 100644 index 0000000..e429727 Binary files /dev/null and b/worksheets/Major roads_2024-04-29.png differ diff --git a/worksheets/Major roads_2024-04-30.png b/worksheets/Major roads_2024-04-30.png new file mode 100644 index 0000000..e429727 Binary files /dev/null and b/worksheets/Major roads_2024-04-30.png differ diff --git a/worksheets/Major roads_2024-05-12.png b/worksheets/Major roads_2024-05-12.png new file mode 100644 index 0000000..d1a2c09 Binary files /dev/null and 
b/worksheets/Major roads_2024-05-12.png differ diff --git a/worksheets/Natural habitats or City owned trees_2024-04-18.png b/worksheets/Natural habitats or City owned trees_2024-04-18.png new file mode 100644 index 0000000..efc6125 Binary files /dev/null and b/worksheets/Natural habitats or City owned trees_2024-04-18.png differ diff --git a/worksheets/Natural habitats or City owned trees_2024-04-19.png b/worksheets/Natural habitats or City owned trees_2024-04-19.png new file mode 100644 index 0000000..8454f37 Binary files /dev/null and b/worksheets/Natural habitats or City owned trees_2024-04-19.png differ diff --git a/worksheets/Natural habitats or City owned trees_2024-04-22.png b/worksheets/Natural habitats or City owned trees_2024-04-22.png new file mode 100644 index 0000000..3a4be9c Binary files /dev/null and b/worksheets/Natural habitats or City owned trees_2024-04-22.png differ diff --git a/worksheets/Natural habitats or City owned trees_2024-04-23.png b/worksheets/Natural habitats or City owned trees_2024-04-23.png new file mode 100644 index 0000000..4f766db Binary files /dev/null and b/worksheets/Natural habitats or City owned trees_2024-04-23.png differ diff --git a/worksheets/Natural habitats or City owned trees_2024-04-29.png b/worksheets/Natural habitats or City owned trees_2024-04-29.png new file mode 100644 index 0000000..3f042f6 Binary files /dev/null and b/worksheets/Natural habitats or City owned trees_2024-04-29.png differ diff --git a/worksheets/Natural habitats or City owned trees_2024-04-30.png b/worksheets/Natural habitats or City owned trees_2024-04-30.png new file mode 100644 index 0000000..3f042f6 Binary files /dev/null and b/worksheets/Natural habitats or City owned trees_2024-04-30.png differ diff --git a/worksheets/Police shootings.png b/worksheets/Police shootings.png new file mode 100644 index 0000000..07e1f9b Binary files /dev/null and b/worksheets/Police shootings.png differ diff --git a/worksheets/Police shootings_density_plot.png b/worksheets/Police shootings_density_plot.png new file mode 100644 index 0000000..cebcc12 Binary files /dev/null and b/worksheets/Police shootings_density_plot.png differ diff --git a/worksheets/Processed Food Locations_2024-04-22.png b/worksheets/Processed Food Locations_2024-04-22.png new file mode 100644 index 0000000..bdfbb84 Binary files /dev/null and b/worksheets/Processed Food Locations_2024-04-22.png differ diff --git a/worksheets/Processed Food Locations_2024-04-23.png b/worksheets/Processed Food Locations_2024-04-23.png new file mode 100644 index 0000000..4431169 Binary files /dev/null and b/worksheets/Processed Food Locations_2024-04-23.png differ diff --git a/worksheets/Processed Food Locations_2024-04-29.png b/worksheets/Processed Food Locations_2024-04-29.png new file mode 100644 index 0000000..bdfbb84 Binary files /dev/null and b/worksheets/Processed Food Locations_2024-04-29.png differ diff --git a/worksheets/Processed Food Locations_2024-04-30.png b/worksheets/Processed Food Locations_2024-04-30.png new file mode 100644 index 0000000..bdfbb84 Binary files /dev/null and b/worksheets/Processed Food Locations_2024-04-30.png differ diff --git a/worksheets/Public art .png b/worksheets/Public art .png new file mode 100644 index 0000000..b24227b Binary files /dev/null and b/worksheets/Public art .png differ diff --git a/worksheets/Public art _density_plot.png b/worksheets/Public art _density_plot.png new file mode 100644 index 0000000..78824c7 Binary files /dev/null and b/worksheets/Public art _density_plot.png differ 
diff --git a/worksheets/R_run_all.png b/worksheets/R_run_all.png new file mode 100644 index 0000000..66ae2ff Binary files /dev/null and b/worksheets/R_run_all.png differ diff --git a/worksheets/Soil samples.png b/worksheets/Soil samples.png new file mode 100644 index 0000000..9742592 Binary files /dev/null and b/worksheets/Soil samples.png differ diff --git a/worksheets/Soil samples_density_plot.png b/worksheets/Soil samples_density_plot.png new file mode 100644 index 0000000..4e419ac Binary files /dev/null and b/worksheets/Soil samples_density_plot.png differ diff --git a/worksheets/anim.gif b/worksheets/anim.gif new file mode 100644 index 0000000..9fa4996 Binary files /dev/null and b/worksheets/anim.gif differ diff --git a/worksheets/crime.png b/worksheets/crime.png new file mode 100644 index 0000000..8c0bac4 Binary files /dev/null and b/worksheets/crime.png differ diff --git a/worksheets/crime_density_plot.png b/worksheets/crime_density_plot.png new file mode 100644 index 0000000..59f433d Binary files /dev/null and b/worksheets/crime_density_plot.png differ diff --git a/worksheets/final_redlining_plot.png b/worksheets/final_redlining_plot.png new file mode 100644 index 0000000..75372af Binary files /dev/null and b/worksheets/final_redlining_plot.png differ diff --git a/worksheets/food_2024-04-18.png b/worksheets/food_2024-04-18.png new file mode 100644 index 0000000..ddc4f73 Binary files /dev/null and b/worksheets/food_2024-04-18.png differ diff --git a/worksheets/food_2024-04-19.png b/worksheets/food_2024-04-19.png new file mode 100644 index 0000000..ddc4f73 Binary files /dev/null and b/worksheets/food_2024-04-19.png differ diff --git a/worksheets/food_2024-04-22.png b/worksheets/food_2024-04-22.png new file mode 100644 index 0000000..6c6987e Binary files /dev/null and b/worksheets/food_2024-04-22.png differ diff --git a/worksheets/food_2024-04-23.png b/worksheets/food_2024-04-23.png new file mode 100644 index 0000000..796dffb Binary files /dev/null and b/worksheets/food_2024-04-23.png differ diff --git a/worksheets/food_2024-04-29.png b/worksheets/food_2024-04-29.png new file mode 100644 index 0000000..6c6987e Binary files /dev/null and b/worksheets/food_2024-04-29.png differ diff --git a/worksheets/food_2024-04-30.png b/worksheets/food_2024-04-30.png new file mode 100644 index 0000000..ddc4f73 Binary files /dev/null and b/worksheets/food_2024-04-30.png differ diff --git a/worksheets/food_match.png b/worksheets/food_match.png new file mode 100644 index 0000000..94a3263 Binary files /dev/null and b/worksheets/food_match.png differ diff --git a/worksheets/food_word_cloud_per_grade.png b/worksheets/food_word_cloud_per_grade.png new file mode 100644 index 0000000..c0dc508 Binary files /dev/null and b/worksheets/food_word_cloud_per_grade.png differ diff --git a/worksheets/instream_sampling_sites.png b/worksheets/instream_sampling_sites.png new file mode 100644 index 0000000..68fe6c9 Binary files /dev/null and b/worksheets/instream_sampling_sites.png differ diff --git a/worksheets/instream_sampling_sites_density_plot.png b/worksheets/instream_sampling_sites_density_plot.png new file mode 100644 index 0000000..ff7f43b Binary files /dev/null and b/worksheets/instream_sampling_sites_density_plot.png differ diff --git a/worksheets/june_ndvi.png b/worksheets/june_ndvi.png new file mode 100644 index 0000000..b60cbd3 Binary files /dev/null and b/worksheets/june_ndvi.png differ diff --git a/worksheets/june_ndvi.tif/cube_92485f6df4e02021-06-01.tif 
b/worksheets/june_ndvi.tif/cube_92485f6df4e02021-06-01.tif new file mode 100644 index 0000000..db61892 Binary files /dev/null and b/worksheets/june_ndvi.tif/cube_92485f6df4e02021-06-01.tif differ diff --git a/worksheets/june_ndvi.tif/cube_92485ff64ac32021-06-01.tif b/worksheets/june_ndvi.tif/cube_92485ff64ac32021-06-01.tif new file mode 100644 index 0000000..db61892 Binary files /dev/null and b/worksheets/june_ndvi.tif/cube_92485ff64ac32021-06-01.tif differ diff --git a/worksheets/june_ndvi.tif/cube_924870061d012021-06-01.tif b/worksheets/june_ndvi.tif/cube_924870061d012021-06-01.tif new file mode 100644 index 0000000..db61892 Binary files /dev/null and b/worksheets/june_ndvi.tif/cube_924870061d012021-06-01.tif differ diff --git a/worksheets/liquor licenses .png b/worksheets/liquor licenses .png new file mode 100644 index 0000000..c81fb0c Binary files /dev/null and b/worksheets/liquor licenses .png differ diff --git a/worksheets/liquor licenses _density_plot.png b/worksheets/liquor licenses _density_plot.png new file mode 100644 index 0000000..f248d1f Binary files /dev/null and b/worksheets/liquor licenses _density_plot.png differ diff --git a/worksheets/love_gradient-of-agreement.png b/worksheets/love_gradient-of-agreement.png new file mode 100644 index 0000000..128081b Binary files /dev/null and b/worksheets/love_gradient-of-agreement.png differ diff --git a/worksheets/mask_and_raster_plot.png b/worksheets/mask_and_raster_plot.png new file mode 100644 index 0000000..ee47435 Binary files /dev/null and b/worksheets/mask_and_raster_plot.png differ diff --git a/worksheets/natural_habitats_match.png b/worksheets/natural_habitats_match.png new file mode 100644 index 0000000..780df3c Binary files /dev/null and b/worksheets/natural_habitats_match.png differ diff --git a/worksheets/natural_habitats_word_cloud_per_grade.png b/worksheets/natural_habitats_word_cloud_per_grade.png new file mode 100644 index 0000000..651431a Binary files /dev/null and b/worksheets/natural_habitats_word_cloud_per_grade.png differ diff --git a/worksheets/ndvi.png b/worksheets/ndvi.png new file mode 100644 index 0000000..4aa9df6 Binary files /dev/null and b/worksheets/ndvi.png differ diff --git a/worksheets/ndvi_00001.png b/worksheets/ndvi_00001.png new file mode 100644 index 0000000..97d3cfe Binary files /dev/null and b/worksheets/ndvi_00001.png differ diff --git a/worksheets/police_shootings.png b/worksheets/police_shootings.png new file mode 100644 index 0000000..a62b52e Binary files /dev/null and b/worksheets/police_shootings.png differ diff --git a/worksheets/police_shootings_word_cloud_per_grade.png b/worksheets/police_shootings_word_cloud_per_grade.png new file mode 100644 index 0000000..ef3d3f9 Binary files /dev/null and b/worksheets/police_shootings_word_cloud_per_grade.png differ diff --git a/worksheets/processed_food_match.png b/worksheets/processed_food_match.png new file mode 100644 index 0000000..292946d Binary files /dev/null and b/worksheets/processed_food_match.png differ diff --git a/worksheets/processed_food_word_cloud_per_grade.png b/worksheets/processed_food_word_cloud_per_grade.png new file mode 100644 index 0000000..71e863f Binary files /dev/null and b/worksheets/processed_food_word_cloud_per_grade.png differ diff --git a/worksheets/redlining_mask_ndvi.png b/worksheets/redlining_mask_ndvi.png new file mode 100644 index 0000000..722920b Binary files /dev/null and b/worksheets/redlining_mask_ndvi.png differ diff --git a/worksheets/redlining_plot.png b/worksheets/redlining_plot.png new file mode 
100644 index 0000000..03cbfbf Binary files /dev/null and b/worksheets/redlining_plot.png differ diff --git a/worksheets/table.html b/worksheets/table.html new file mode 100644 index 0000000..2c801d9 --- /dev/null +++ b/worksheets/table.html @@ -0,0 +1,132 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
| area_id | city | state | city_survey | cat | grade | label | res | com | ind | fill | GEOID10 | GISJOIN | calc_area | pct_tract | geometry |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | #76a865 | 08031004104 | G0800310004104 | 1.525535e+01 | 0.00001 | MULTIPOLYGON (((-104.9125 3... |
| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | #76a865 | 08031004201 | G0800310004201 | 3.987458e+05 | 0.20900 | MULTIPOLYGON (((-104.9246 3... |
| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | #76a865 | 08031004304 | G0800310004304 | 1.554195e+05 | 0.05927 | MULTIPOLYGON (((-104.9125 3... |
| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | #76a865 | 08031004202 | G0800310004202 | 1.117770e+06 | 0.57245 | MULTIPOLYGON (((-104.9125 3... |
| 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | #76a865 | 08031004302 | G0800310004302 | 3.133415e+05 | 0.28381 | MULTIPOLYGON (((-104.928 39... |
| 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | #76a865 | 08031004301 | G0800310004301 | 1.221218e+05 | 0.08622 | MULTIPOLYGON (((-104.9305 3... |
    diff --git a/worksheets/table.png b/worksheets/table.png new file mode 100644 index 0000000..4b03f4c Binary files /dev/null and b/worksheets/table.png differ diff --git a/worksheets/word_cloud_per_grade.png b/worksheets/word_cloud_per_grade.png new file mode 100644 index 0000000..c667b8e Binary files /dev/null and b/worksheets/word_cloud_per_grade.png differ diff --git a/worksheets/worksheet_0/index.html b/worksheets/worksheet_0/index.html new file mode 100644 index 0000000..0bd88fa --- /dev/null +++ b/worksheets/worksheet_0/index.html @@ -0,0 +1,1540 @@ + + + + + + + + + + + + + + + + + + + + + + Explore an example (student edition) - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    + + + + + + +
    + + + + + + + +
    + +
    + + + + +
    +
    + + + +
    +
    +
    + + + + + + + + +
    +
    +
    + + + + +
    +
    + + + + + + + + + +

    Exploring Resilience with Data in your Third Space (CyVerse)

    +

    Instructions

    +

    Work through the prompts below with the people at your table. Please use a decision-making method to decide before moving to a new section of the activity.

    +

    Introductions

    +

    Please share the following information with your team: +- Name +- Pronouns +- Where did you travel from? +- Reflecting back on the polarities exercise, share one thing you observed about yourself.

    +

    Objectives of this group activity

    +
      +
1. Increase comfort with Cyverse
2. Practice decision-making with a group
3. Get to know other Summit participants
4. Explore how historic policies continue to affect the spatial distribution of environmental amenities.
    +

    Background

    +

    Introduction to Redlining

    +

    This group exploration delves into the long-term impacts of historical redlining on urban greenspace, emphasizing the powerful role of maps in shaping environmental and social landscapes. By drawing on the research by Nardone et al. (2021), you will collaboratively investigate how discriminatory practices encoded in maps have led to persistent disparities in urban settings. This exploration aims to uncover the resilience of communities in adapting to these entrenched injustices and to foster a deeper understanding of how mapping can serve both as a tool of exclusion and as a means for promoting social equity.

    +

    1938 Map of Atlanta uses colors as grades for neighborhoods. The red swaths identify each area with large African-American populations that were deemed “less safe.”

    +

    Understanding Redlining as a Systemic Disturbance

    +

    Redlining originated in the 1930s as a discriminatory practice where the Home Owners' Loan Corporation (HOLC) systematically denied mortgages or offered unfavorable terms based on racial and ethnic compositions. This methodical exclusion, executed through maps that color-coded "risky" investment areas in red, marked minority-populated areas, denying them crucial investment and development opportunities and initiating a profound and lasting disturbance in the urban fabric.

    +

    Maps serve as powerful tools beyond navigation; they communicate and enforce control. By defining neighborhood boundaries through redlining, HOLC maps not only mirrored societal biases but also perpetuated and embedded them into the urban landscape. This manipulation of geographic data set a trajectory that limited economic growth, dictated the allocation of services, and influenced the development or deterioration of community infrastructure.

    +

    Figure 1: 1938 Map of Atlanta uses colors as grades for neighborhoods. The red swaths identify each area with large African-American populations that were deemed “less safe.”

    +

    ArcGIS Story Map

    +

    Explore the Story Map: Click on the image above to explore the interactive story map about [subject of the story map].

    +

    Resilience and Adaptation in Urban Environments

    +

    The legacy of redlining presents both a challenge and an opportunity for resilience and adaptation. Economically and socially, redlining entrenched cycles of poverty and racial segregation, creating a resilient wealth gap that has been difficult to dismantle. Environmentally, the neighborhoods targeted by redlining continue to face significant challenges—they generally feature less greenspace, suffer from higher pollution levels, and are more vulnerable to the impacts of climate change. These factors compound the health and wellness challenges faced by residents.

    +

    Despite these adversities, urban communities have continually demonstrated remarkable resilience. Adaptation strategies, such as community-led green initiatives, urban agriculture, and grassroots activism, have emerged as responses to these systemic disturbances. By enhancing green infrastructure and advocating for equitable environmental policies, these communities strive to increase their resilience against both historical inequities and environmental challenges.

    +

    The following group exercise will uncover the impact of redlining on urban greenspace and highlight the adaptive strategies developed in response to this enduring disturbance. Through mapping and analysis, we aim to illustrate the powerful role that geographic data can play in understanding and fostering urban resilience and social equity.

    +

    References

    +
      +
- Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. (2021). Redlines and Greenspace: The Relationship between Historical Redlining and 2010 Greenspace across the United States. *Environmental Health Perspectives*, 129(1), 017006. DOI:10.1289/EHP7495.
- Hoffman, J. S., Shandas, V., & Pendleton, N. (2020). The Effects of Historical Housing Policies on Resident Exposure to Intra-Urban Heat: A Study of 108 US Urban Areas. *Climate*, 8(1), 12. DOI:10.3390/cli8010012.
    +
    +

    Group Activity

    +

    Setting up CyVerse

    +
      +
- Log into CyVerse
- Use the startup procedure to start an instance on CyVerse. Be sure to:
  - Create an SSH key and add it to your Github account (2nd half of start-up procedure instructions)
  - Clone the Innovation-Summit-2024 repository
  - Complete the R Studio hack
- Please raise your hand if you have questions or run into technical issues. ESIIL representatives will be walking around to help.
- Once you initiate your CyVerse instance, DO NOT close it. You can keep this instance running the entire Summit so you don't have to do the start-up procedure again.
    +

    Create a Map

    +

    We'll be using pre-developed code to visualize redlining impacts on Denver, CO. Please follow these steps:

    +
      +
- Open R Studio in CyVerse
- Use "files" (lower right) to navigate to this markdown document:
  - innovation-summit-2024/code/worksheet_redlining_student_edition.qmd
- Start at the beginning of the code and complete the following:
  - Create a map of historically redlined districts in Denver
  - Overlay current-day NDVI (vegetation greenness) data onto your map
- You can choose "Run All" to run all the code at once. Note: It will take about 5 minutes to run.
    +

    +
      +
    • Now, it's your turn to choose a variable to observe. Use the provided code to select the variable you want to add to your map. More detailed instructions are included in the code.
    • +
    +

    Variable Options:

    +
      +
1. Tree inventory
2. Traffic accidents
3. Stream sampling effort
4. Soil sampling effort
5. Public art density
6. Liquor license density
7. Crime density
    +

    Decision-Making

    +

    Use the gradient of agreement (Kaner 2014) to make a decision as a team about which variable you want to explore.

    +

    Gradients of agreement

    +

    Unique Title

    +

Come up with a unique title for your analysis. Write it down on a sticky note at your table.

    +

    Discussion Questions

    +

After completing your analysis, discuss these questions with your group:

    +
      +
1. What patterns do you notice? What are the immediate questions that come to mind?
2. How does big data help illustrate resilience?
3. Redlining has a long-term impact. How is the impact of redlining still evident today?
    +

    Still have time?

    +

    As a group, choose another variable to explore and then discuss your findings.

    +

    Look through all the variables:

    +

    Once you're done, you can see all the code and variable maps on the "Teacher Edition" version of the activity: https://cu-esiil.github.io/Innovation-Summit-2024/worksheets/worksheet_redlining/

    + +
    +
    + + + Last update: + 2024-09-23 + + +
    + + + + + + +
    +
    + + +
    + + + +
    + + + +
    +
    +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/worksheets/worksheet_0/worksheet_0.md b/worksheets/worksheet_0/worksheet_0.md new file mode 100644 index 0000000..6c5de76 --- /dev/null +++ b/worksheets/worksheet_0/worksheet_0.md @@ -0,0 +1,117 @@ +# Exploring Resilience with Data in your Third Space (CyVerse) + +## Instructions + +Work through the prompts below with the people at your table. Please use a decision-making method *to decide* before moving to a new section of the activity. + +## Introductions + +Please share the following information with your team: +- Name +- Pronouns +- Where did you travel from? +- Reflecting back on the polarities exercise, share one thing you observed about yourself. + +## Objectives of this group activity + +1. Increase comfort with Cyverse +2. Practice decision-making with a group +3. Get to know other Summit participants +4. Explore how historic policies continue to affect the spatial distribution of environmental amenities. + +# Background + +## Introduction to Redlining + +This group exploration delves into the long-term impacts of historical redlining on urban greenspace, emphasizing the powerful role of maps in shaping environmental and social landscapes. By drawing on the research by Nardone et al. (2021), you will collaboratively investigate how discriminatory practices encoded in maps have led to persistent disparities in urban settings. This exploration aims to uncover the resilience of communities in adapting to these entrenched injustices and to foster a deeper understanding of how mapping can serve both as a tool of exclusion and as a means for promoting social equity. + +![1938 Map of Atlanta uses colors as grades for neighborhoods. The red swaths identify each area with large African-American populations that were deemed “less safe.”](../assets/redlining/redlining.png) + +## Understanding Redlining as a Systemic Disturbance + +Redlining originated in the 1930s as a discriminatory practice where the Home Owners' Loan Corporation (HOLC) systematically denied mortgages or offered unfavorable terms based on racial and ethnic compositions. This methodical exclusion, executed through maps that color-coded "risky" investment areas in red, marked minority-populated areas, denying them crucial investment and development opportunities and initiating a profound and lasting disturbance in the urban fabric. + +Maps serve as powerful tools beyond navigation; they communicate and enforce control. By defining neighborhood boundaries through redlining, HOLC maps not only mirrored societal biases but also perpetuated and embedded them into the urban landscape. This manipulation of geographic data set a trajectory that limited economic growth, dictated the allocation of services, and influenced the development or deterioration of community infrastructure. + +**Figure 1:** 1938 Map of Atlanta uses colors as grades for neighborhoods. The red swaths identify each area with large African-American populations that were deemed “less safe.” + + +[![ArcGIS Story Map](../assets/redlining/georectified-thumbnail.png)](https://storymaps.arcgis.com/stories/0f58d49c566b486482b3e64e9e5f7ac9) + +**Explore the Story Map:** Click on the image above to explore the interactive story map about [subject of the story map]. + + +## Resilience and Adaptation in Urban Environments + +The legacy of redlining presents both a challenge and an opportunity for resilience and adaptation. 
Economically and socially, redlining entrenched cycles of poverty and racial segregation, creating a resilient wealth gap that has been difficult to dismantle. Environmentally, the neighborhoods targeted by redlining continue to face significant challenges—they generally feature less greenspace, suffer from higher pollution levels, and are more vulnerable to the impacts of climate change. These factors compound the health and wellness challenges faced by residents. + +Despite these adversities, urban communities have continually demonstrated remarkable resilience. Adaptation strategies, such as community-led green initiatives, urban agriculture, and grassroots activism, have emerged as responses to these systemic disturbances. By enhancing green infrastructure and advocating for equitable environmental policies, these communities strive to increase their resilience against both historical inequities and environmental challenges. + +The following group exercise will uncover the impact of redlining on urban greenspace and highlight the adaptive strategies developed in response to this enduring disturbance. Through mapping and analysis, we aim to illustrate the powerful role that geographic data can play in understanding and fostering urban resilience and social equity. + +### References + +- Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. (2021). Redlines and Greenspace: The Relationship between Historical Redlining and 2010 Greenspace across the United States. *Environmental Health Perspectives*, 129(1), 017006. DOI:10.1289/EHP7495. +- Hoffman, J. S., Shandas, V., & Pendleton, N. (2020). The Effects of Historical Housing Policies on Resident Exposure to Intra-Urban Heat: A Study of 108 US Urban Areas. *Climate*, 8(1), 12. DOI:10.3390/cli8010012. +**************************************** + +# Group Activity + +## Setting up CyVerse +- Log into [CyVerse](https://de.cyverse.org/) +- Use the [startup procedure](https://cu-esiil.github.io/Innovation-Summit-2024/resources/cyverse_startup/) to start an instance on CyVerse. Be sure to: + - Create an SSH key and add it to your Github account (2nd half of start-up procedure instructions) + - Clone the [Innovation-Summit-2024 repository](https://github.com/CU-ESIIL/Innovation-Summit-2024) + - Complete the [R Studio hack](https://cu-esiil.github.io/Innovation-Summit-2024/additional-resources/cyverse_hacks/) +- Please raise your hand if you have questions or run into technical issues. ESIIL represenatives will be walking around to help. +- Once you initiate your CyVerse instance, **DO NOT close it**. You can keep this instance running the entire Summit so you don't have to do the start-up procedure again. + +## Create a Map + +We'll be using pre-developed code to visualize redlining impacts on Denver, CO. Please follow these steps: + +- Open R Studio in CyVerse +- Use "files" (lower right) to navigate to this markdown document: + - **innovation-summit-2024/code/worksheet_redlining_student_edition.qmd** +- Start at the beginning of the code and complete the following: + - Create a map of historically redlined districts in Denver + - Overlay current-day NDVI (vegetation greenness) data onto your map +- You can choose "Run All" to run all the code at once. Note: It will take about **5 minutes** to run. + +![](../worksheets/R_run_all.png) + +- Now, it's your turn to choose a variable to observe. Use the provided code to select the variable you want to add to your map. More detailed instructions are included in the code. 
+ +**Variable Options:** + +1. Tree inventory +2. Traffic accidents +3. Stream sampling effort +4. Soil sampling effort +5. Public art density +6. Liquor license density +7. Crime density + +### Decision-Making +Use the gradient of agreement (Kaner 2014) to make a decision as a team about which variable you want to explore. + +![Gradients of agreement](../worksheets/love_gradient-of-agreement.png) + +## Unique Title +Come up with a unique title for your anaylysis. Write it down on a sticky note at your table. + +## Discussion Questions + +After completing your anaylysis, discuss these questions with your group: + +1. What patterns do you notice? What are the immediate questions that come to mind? +2. How does big data help illustrate resilience? +3. Redlining has a long-term impact. How is the impact of redlining still evident today? + +## Still have time? + +As a group, choose another variable to explore and then discuss your findings. + +### Look through all the variables: + +Once you're done, you can see all the code and variable maps on the "Teacher Edition" version of the activity: diff --git a/worksheets/worksheet_2/index.html b/worksheets/worksheet_2/index.html new file mode 100644 index 0000000..9f64228 --- /dev/null +++ b/worksheets/worksheet_2/index.html @@ -0,0 +1,1610 @@ + + + + + + + + + + + + + + + + + + + + + + Make a plan - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    + + + + + + +
    + + + + + + + +
    + +
    + + + + +
    +
    + + + +
    +
    +
    + + + + + + + + +
    +
    +
    + + + + +
    +
    + + + + + + + + + +

    TEAM ACTIVITY Day 1: Make a plan

    +

    Instructions

    +

    Work through the prompts in order. Please use a decision-making method “to decide” before moving to a new section of the activity.

    +

    Day 1 Objectives

    +
      +
1. Get to know your group members.
2. Decide on a research question and project title.
3. Start exploring potential datasets.
    +

    Introductions (approx. time: 10 mins total or "1-2 breaths" per prompt)

    +

    Please share the following information about yourself. Each team member should type their response in the space below (create more as needed).

    +
      +
- Name: [Your Name]
- Pronouns: [Your Pronouns]
- Expertise: [Your Expertise]
- Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science]
- Reflection on Polarities Exercise: [Share one thing you observed about yourself]

---

- Name: [Your Name]
- Pronouns: [Your Pronouns]
- Expertise: [Your Expertise]
- Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science]
- Reflection on Polarities Exercise: [Share one thing you observed about yourself]

---

- Name: [Your Name]
- Pronouns: [Your Pronouns]
- Expertise: [Your Expertise]
- Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science]
- Reflection on Polarities Exercise: [Share one thing you observed about yourself]

---

- Name: [Your Name]
- Pronouns: [Your Pronouns]
- Expertise: [Your Expertise]
- Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science]
- Reflection on Polarities Exercise: [Share one thing you observed about yourself]

---

- Continue adding more team members following the same format, with a line break after each person.
    +
    +

    Research Question: Innovation for Inclusion or Computation (approx. time: 5-10 mins)

    +

    Write the research question your team selected in the space below. Feel free to revise the original question.

    +
      +
    • [Insert research question here]
    • +
    +

    Project Title (approx. time: 5-10 mins)

    +

    Craft a catchy title for your team’s project. Think of something that would grab attention at a conference or in a headline.

    +
      +
    • [Insert title here]
    • +
    +

    Promoting Resilience and Adaptation

    +

    Describe how your proposed project aligns with the Summit's themes of resilience and adaptation. Please provide 1-2 sentences that clearly connect your project's goals or methods to these themes.

    +
      +
    • [Insert your response here]
    • +
    +

    Choosing Big Data Sets

    +

    Explore potential data sets for your project's topic from the data library. List your options below, organizing them by whether they represent the system you're studying (e.g., deciduous forests) or the disruption to it (e.g., wildfire). Then discuss your choices and indicate your final selections.

    +

    Draft Potential Data Sets

    +
      +
- System Being Perturbed/Disrupted:
  - [List all potential data sets here]
- Perturbator/Disrupter:
  - [List all potential data sets here]
    +

    Final Choice

    +
      +
- System Being Perturbed/Disrupted (Final Choice):
  - [Indicate your final selected data set here]
- Perturbator/Disrupter (Final Choice):
  - [Indicate your final selected data set here]
    +

    Brief Check-in: Definition of Resilience (approx. 5 mins)

    +

    Below is a working definition of the word "Resilience" for the Summit. Please edit the definition below based on your earlier discussion and chosen project.

    +

    "Resilience is the capacity of a system, community, organization, or individual to absorb stress, recover from disruptions, adapt to change, and continue to develop and thrive."

    +
      +
    • [Edit or reaffirm this definition here]
    • +
    +

    Day 1 Report Back

    +

    Select one representative from your group to present your proposed project. For the report back, each group will have 30-60 seconds to present their responses to the questions below. Keep it concise and focused. This is just a quick oral presentation - you will not be able to use slides/images.

    +
      +
- Project Title:
  - [Insert your team's project title here]
- Research Question:
  - [Insert your team's refined research question here]
- Selected Data Sets:
  - [List the data sets your team has chosen to use here]
    + +
    +
    + + + Last update: + 2024-09-23 + + +
    + + + + + + +
    +
    + + +
    + + + +
    + + + +
    +
    +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/worksheets/worksheet_2/worksheet_2.md b/worksheets/worksheet_2/worksheet_2.md new file mode 100644 index 0000000..8216112 --- /dev/null +++ b/worksheets/worksheet_2/worksheet_2.md @@ -0,0 +1,96 @@ +# TEAM ACTIVITY Day 1: Make a plan + +## Instructions +Work through the prompts in order. Please use a decision-making method “to decide” before moving to a new section of the activity. + +## Day 1 Objectives +1. Get to know your group members. +2. Decide on a research question and project title. +3. Start exploring potential datasets. + +## Introductions (approx. time: 10 mins total or "1-2 breaths" per prompt) +Please share the following information about yourself. Each team member should type their response in the space below (create more as needed). + + - **Name:** [Your Name] + - **Pronouns:** [Your Pronouns] + - **Expertise:** [Your Expertise] + - **Environmental Data Science Superpower:** [Describe your unique skill or interest in environmental data science] + - **Reflection on Polarities Exercise:** [Share one thing you observed about yourself] + + --- + - **Name:** [Your Name] + - **Pronouns:** [Your Pronouns] + - **Expertise:** [Your Expertise] + - **Environmental Data Science Superpower:** [Describe your unique skill or interest in environmental data science] + - **Reflection on Polarities Exercise:** [Share one thing you observed about yourself] + + --- + - **Name:** [Your Name] + - **Pronouns:** [Your Pronouns] + - **Expertise:** [Your Expertise] + - **Environmental Data Science Superpower:** [Describe your unique skill or interest in environmental data science] + - **Reflection on Polarities Exercise:** [Share one thing you observed about yourself] + + --- + - **Name:** [Your Name] + - **Pronouns:** [Your Pronouns] + - **Expertise:** [Your Expertise] + - **Environmental Data Science Superpower:** [Describe your unique skill or interest in environmental data science] + - **Reflection on Polarities Exercise:** [Share one thing you observed about yourself] + + --- + - Continue adding more team members following the same format, with a line break after each person. + --- +## Research Question: Innovation for Inclusion or Computation (approx. time: 5-10 mins) +Write the research question your team selected in the space below. Feel free to revise the original question. + +- [Insert research question here] + + +## Project Title (approx. time: 5-10 mins) +Craft a catchy title for your team’s project. Think of something that would grab attention at a conference or in a headline. + +- [Insert title here] + + +## Promoting Resilience and Adaptation +Describe how your proposed project aligns with the Summit's themes of resilience and adaptation. Please provide 1-2 sentences that clearly connect your project's goals or methods to these themes. + +- [Insert your response here] + + +## Choosing Big Data Sets +Explore potential data sets for your project's topic from the [data library](https://cu-esiil.github.io/data-library/). List your options below, organizing them by whether they represent the system you're studying (e.g., deciduous forests) or the disruption to it (e.g., wildfire). Then discuss your choices and indicate your final selections. 
+ +### Draft Potential Data Sets + - **System Being Perturbed/Disrupted:** + - [List all potential data sets here] + - **Perturbator/Disrupter:** + - [List all potential data sets here] + +### Final Choice + - **System Being Perturbed/Disrupted (Final Choice):** + - [Indicate your final selected data set here] + - **Perturbator/Disrupter (Final Choice):** + - [Indicate your final selected data set here] + + + +## Brief Check-in: Definition of Resilience (approx. 5 mins) +Below is a working definition of the word "Resilience" for the Summit. Please edit the definition below based on your earlier discussion and chosen project. + +*"Resilience is the capacity of a system, community, organization, or individual to absorb stress, recover from disruptions, adapt to change, and continue to develop and thrive."* + +- [Edit or reaffirm this definition here] + + +## Day 1 Report Back +Select one representative from your group to present your proposed project. For the report back, each group will have 30-60 seconds to present their responses to the questions below. Keep it concise and focused. This is just a quick oral presentation - you will not be able to use slides/images. + +- **Project Title:** + - [Insert your team's project title here] +- **Research Question:** + - [Insert your team's refined research question here] +- **Selected Data Sets:** + - [List the data sets your team has chosen to use here] + diff --git a/worksheets/worksheet_3/index.html b/worksheets/worksheet_3/index.html new file mode 100644 index 0000000..b552544 --- /dev/null +++ b/worksheets/worksheet_3/index.html @@ -0,0 +1,1522 @@ + + + + + + + + + + + + + + + + + + + + + + Innovate as a team - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    TEAM ACTIVITY 2: Innovate as a Team

    +

    Welcome back! We hope today is a productive day getting to know your team and coding.

    +

    Day 2 summary:

    +

    Please complete the warm-up with your team, briefly review today’s objectives, and carefully read the Day 2 and Day 3 report out items to guide your efforts.

    +

    Objectives for Day 2

    +
    1. Work together to decide on the data sets you will use. Reminder: Use a decision-making technique discussed during Day 1. Kaner’s Gradient of Agreement is below for reference.
    2. Practice joining your datasets together (a short example sketch follows this list).
    3. Discuss and try creating interesting graphics.
    4. Report back on your results at the end of the day. Today’s report back is short and focused on your team process. The Day 3 report back is more detailed.
    +
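    As a concrete starting point for objective 2, here is a minimal, illustrative R sketch of joining two datasets on a shared key and making a quick plot. The object names (`system_data`, `disturbance_data`) and the join key (`site_id`) are hypothetical placeholders, not Summit-provided data; substitute the data sets your team selected from the data library.

    ```r
    # Minimal sketch: join two hypothetical team datasets on a shared key,
    # then make a quick exploratory graphic. All object names are placeholders.
    library(dplyr)
    library(ggplot2)

    system_data <- data.frame(site_id = 1:5, canopy_cover = c(80, 65, 70, 40, 55))
    disturbance_data <- data.frame(site_id = 1:5, burn_severity = c(0.1, 0.4, 0.2, 0.9, 0.6))

    # Keep every row of the "system" table and attach matching disturbance values
    joined <- left_join(system_data, disturbance_data, by = "site_id")

    # One possible "interesting graphic": disturbance vs. system response
    ggplot(joined, aes(x = burn_severity, y = canopy_cover)) +
      geom_point() +
      geom_smooth(method = "lm", se = FALSE) +
      theme_minimal()
    ```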

    Morning Warm-up

    +

    Please share the following information with your team. (No need to write down your responses this time.)
    - Name
    - Pronouns
    - Reflecting on Day 1, what is something that surprised you?

    +

    Decision-Making

    +

    Use the gradient of agreement (Kaner 2014) to make decisions as a team.

    +

    Gradients of agreement

    +

    Day 2 Report Back

    +

    Day 2 report-back questions are about the team process. We are interested in your team’s unique experience. Below are some prompts you might consider. You don't need to address all of them - choose which ones you want to present. Please limit your reflection to 2-3 mins.

    +
    1. What worked well for your team?
    2. What’s one thing you would change?
    3. Did your group ever have an “ah-ha” moment? What led up to that moment?
    4. Did your group experience the groan zone? What is one tip you want to share with future groups at the Summit about getting through the groan zone?
       • [insert your group reflection responses here]
    +
    +

    Looking Ahead: Day 3 Report Back

    +

    These are the prompts for the final Report Back tomorrow (Day 3) - start thinking about these questions as you work today. Each group will share their Day 3 GitHub page on the screen and give a 4 minute presentation.

    +
    • Project Title:
    • Research Question:
    • One interesting graphic/finding:
    • What are you thinking about doing next with your team? Long-term, short-term?
    • What’s missing: what resources, people, data sets, etc. does your team need?
    +

    Reminder

    +

    There is the opportunity for groups to continue working on their projects as an ESIIL Working Group. If you love your team and want to continue working together, consider submitting a Working Group Application this fall. See the ESIIL website for more information: https://esiil.org/working-groups.

    + + + + + + + + + + \ No newline at end of file diff --git a/worksheets/worksheet_3/worksheet_3.md b/worksheets/worksheet_3/worksheet_3.md new file mode 100644 index 0000000..7542d54 --- /dev/null +++ b/worksheets/worksheet_3/worksheet_3.md @@ -0,0 +1,49 @@ +# TEAM ACTIVITY 2: Innovate as a Team + +Welcome back! We hope today is a productive day getting to know your team and coding. + +## Day 2 summary: +Please complete the warm-up with your team, briefly review today’s objectives, and carefully read the Day 2 and Day 3 report out items to guide your efforts. + +## Objectives for Day 2 +1. Work together to decide on the data sets you will use. Reminder: Use a decision-making technique discussed during Day 1. Kaner’s Gradient of Agreement is below for reference. +2. Practice joining your datasets together. +3. Discuss and try creating interesting graphics. +4. Report back on your results at the end of the day. Today’s report back is short and focused on your team process. The Day 3 report back is more detailed. + + +## Morning Warm-up +Please share the following informaton with your team. (No need to write down your responses this time) +- Name +- Pronouns +- Reflecting on Day 1, what is something that surprised you? + +## Decision-Making +Use the gradient of agreement (Kaner 20214) to make decisions as a team. + +![Gradients of agreement](../worksheets/love_gradient-of-agreement.png) + +## Day 2 Report Back +Day 2 report-back questions are about the team *process*. We are interested in your team’s unique experience. Below are some prompts you might consider. You don't need to address all of them - choose which ones you want to present. Please limit your reflection to 2-3 mins. + +1. **What worked well for your team?** +3. **What’s one thing you would change?** +4. **Did your group ever have an “ah-ha” moment? What led up to that moment?** +5. **Did your group experience the groan zone? What is one tip you want to share with future groups at the Summit about getting through the groan zone?** + - [insert your group reflection responses here] + +************************************************************** + +### Looking Ahead: Day 3 Report Back +*These are the prompts for the final Report Back **tomorrow (Day 3)** - start thinking about these questions as you work today. Each group will share their Day 3 GitHub page on the screen and give a 4 minute presentation.* + +- **Project Title:** +- **Research Question:** +- **One interesting graphic/finding:** +- **What are you thinking about doing next with your team? Long-term, short-term?** +- **What’s missing: what resources, people, data sets, etc. does your team need?** + +### Reminder +There is the opportunity for groups to continue working on their projects as an ESIIL Working Group. If you love your team and want to continue working together, considering submitting a Working Group Application this fall. See the ESIIL website for more information: . + + diff --git a/worksheets/worksheet_4/index.html b/worksheets/worksheet_4/index.html new file mode 100644 index 0000000..a798fd5 --- /dev/null +++ b/worksheets/worksheet_4/index.html @@ -0,0 +1,1474 @@ + + + + + + + + + + + + + + + + + + + + + + Share your progress - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    TEAM ACTIVITY 3: Share Your Progress

    +

    Use this time to prepare for the final report back, where you'll share an interesting result/outcome from your project and discuss potential future plans.

    +

    Day 3 Report Back

    +

    Select one or more people from your group to give a final report back. You will share this page on the screen as your presentation. Presentations should be no longer than 4 minutes.

    +
    • Project Title:
      • [Insert project title here]
    • Research Question:
      • [Insert research question here]
    • One interesting graphic/finding:
      • [Insert graphic/finding here]
    • What are you thinking about doing next with your team? Long-term, short-term?
      • [Insert response here]
    • What’s missing: what resources, people, data sets, etc. does your team need?
      • [Insert response here]
    +

    Reminder

    +

    There is the opportunity for groups to continue working on their projects as an ESIIL Working Group. If you love your team and want to continue working together, consider submitting a Working Group Application this fall. See the ESIIL website for more information: https://esiil.org/working-groups.

    +

    Thank you for participating in the 2024 ESIIL Innovation Summit!!

    + + + + + + + + + + \ No newline at end of file diff --git a/worksheets/worksheet_4/worksheet_4.md b/worksheets/worksheet_4/worksheet_4.md new file mode 100644 index 0000000..ab4834d --- /dev/null +++ b/worksheets/worksheet_4/worksheet_4.md @@ -0,0 +1,23 @@ +# TEAM ACTIVITY 3: Share Your Progress + +Use this time to prepare for the final report back, where you'll share an interesting result/outcome from your project and discuss potential future plans. + +## Day 3 Report Back +Select one or more people from your group to give a final report back. You will share this page on the screen as your presentation. Presentations should be no longer than 4 minutes. + +- **Project Title:** + - [Insert project title here] +- **Research Question:** + - [Insert research question here] +- **One interesting graphic/finding:** + - [Insert graphic/finding here] +- **What are you thinking about doing next with your team? Long-term, short-term?** + - [Insert response here] +- **What’s missing: what resources, people, data sets, etc. does your team need?** + - [Insert response here] + + +### Reminder +There is the opportunity for groups to continue working on their projects as an ESIIL Working Group. If you love your team and want to continue working together, considering submitting a Working Group Application this fall. See the ESIIL website for more information: . + +**Thank you for participating in the 2024 ESIIL Innovation Summit!!** diff --git a/worksheets/worksheet_5/index.html b/worksheets/worksheet_5/index.html new file mode 100644 index 0000000..6633003 --- /dev/null +++ b/worksheets/worksheet_5/index.html @@ -0,0 +1,1474 @@ + + + + + + + + + + + + + + + + + + + + + + TEAM ACTIVITY 2: Make a plan - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    TEAM ACTIVITY 2: Make a plan

    +

    Instructions:

    +
    • Follow the Prompts Sequentially:
      • Work through the prompts in the order they are presented.
    • Decision-Making Process:
      • Before advancing to the next section of the handout, use a structured decision-making method. Ensure that all team members agree on the decisions made. This approach helps in maintaining coherence and collective agreement throughout the activities.
    +

    Introductions (approx. time: 10 mins total or "2 breaths" per person)

    +
    • Each team member please share the following information about yourself:

    • Name: [Your Name]
    • Preferred Pronouns: [Your Pronouns]
    • Expertise: [Your Expertise]
    • Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science]
    • Reflection on Polarities Exercise: [Share one thing you observed about yourself]

    • Name: [Your Name]
    • Preferred Pronouns: [Your Pronouns]
    • Expertise: [Your Expertise]
    • Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science]
    • Reflection on Polarities Exercise: [Share one thing you observed about yourself]

    • Name: [Your Name]
    • Preferred Pronouns: [Your Pronouns]
    • Expertise: [Your Expertise]
    • Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science]
    • Reflection on Polarities Exercise: [Share one thing you observed about yourself]

    • Name: [Your Name]
    • Preferred Pronouns: [Your Pronouns]
    • Expertise: [Your Expertise]
    • Environmental Data Science Superpower: [Describe your unique skill or interest in environmental data science]
    • Reflection on Polarities Exercise: [Share one thing you observed about yourself]

    • Continue adding more team members following the same format, with a line break after each person.
    +

    Research Question: Innovation for Inclusion or Computation (approx. time: 5-10 mins)

    +
    • Refine the initial research question your team developed earlier. Please make any necessary edits or adjustments below:
      • [Edit or refine your team's previously selected research question here]
    +

    Title: Innovation for Inclusion or Computation (approx. time: 5-10 mins)

    +
    • Craft a catchy and public-facing title for your team’s project. Think of something that would grab attention at a conference or in a headline:
      • [Create an engaging title that captures the essence of your project here]
    +

    Promoting Resilience and Adaptation

    +
    • Describe how your proposed project aligns with the summit's themes of resilience and adaptation. Please provide 1-2 sentences that clearly connect your project's goals or methods to these themes:
      • [Insert your response here]
    +

    Which Big Data Sets

    +
    • Explore potential data sets for your project's topic from the data library. List your options below, and after discussion and review, indicate your final choice for both the system being perturbed/disrupted and the perturbator/disrupter.
    +

    Draft Potential Data Sets

    +
    • System Being Perturbed/Disrupted:
      • [List all potential data sets here]
    • Perturbator/Disrupter:
      • [List all potential data sets here]
    +

    Final Choice

    +
    • System Being Perturbed/Disrupted (Final Choice):
      • [Indicate your final selected data set here]
    • Perturbator/Disrupter (Final Choice):
      • [Indicate your final selected data set here]
    +

    Brief Check-in: Definition of Resilience (approx. 5 mins)

    +
    • Review and refine the working definition of 'Resilience' provided below, based on your discussions and insights from earlier sections of this worksheet. Adjust the definition to better align with your team’s understanding or reaffirm it if it resonates with your views:
      • "Resilience is the capacity of a system, community, organization, or individual to absorb stress, recover from disruptions, adapt to change, and continue to develop and thrive."
      • [Edit or reaffirm this definition here]
    +

    Day 1 Report Back

    +
    • Select one representative from your group to present your proposed project to all Summit attendees (~125 people). This is an opportunity for your breakout group to summarize your project’s approach as it relates to the Summit themes of adaptation and resilience.

    • Presentation Content:
      • Project Title:
        • [Insert your team's project title here]
      • Research Question:
        • [Insert your team's refined research question here]
      • Selected Data Sets:
        • [List the data sets your team has chosen to use here]

    • Presentation Guidelines:
      • Duration: Your presentation should last between 30-60 seconds. Keep it concise and focused. This is just a quick oral presentation - you will not be able to use slides/images.
      • Objective: Clearly communicate how your project aligns with the conference themes and highlight actionable insights that can aid decision makers.
    + + + + + + + + + + \ No newline at end of file diff --git a/worksheets/worksheet_5/worksheet_5.md b/worksheets/worksheet_5/worksheet_5.md new file mode 100644 index 0000000..55e6309 --- /dev/null +++ b/worksheets/worksheet_5/worksheet_5.md @@ -0,0 +1,91 @@ +# TEAM ACTIVITY 2: Make a plan +### Instructions: +- **Follow the Prompts Sequentially:** + - Work through the prompts in the order they are presented. +- **Decision-Making Process:** + - Before advancing to the next section of the handout, use a structured decision-making method. Ensure that all team members agree on the decisions made. This approach helps in maintaining coherence and collective agreement throughout the activities. + +### Introductions (approx. time: 10 mins total or "2 breaths" per person) +- **Each team member please share the following information about yourself:** + + - **Name:** [Your Name] + - **Preferred Pronouns:** [Your Pronouns] + - **Expertise:** [Your Expertise] + - **Environmental Data Science Superpower:** [Describe your unique skill or interest in environmental data science] + - **Reflection on Polarities Exercise:** [Share one thing you observed about yourself] + + --- + - **Name:** [Your Name] + - **Preferred Pronouns:** [Your Pronouns] + - **Expertise:** [Your Expertise] + - **Environmental Data Science Superpower:** [Describe your unique skill or interest in environmental data science] + - **Reflection on Polarities Exercise:** [Share one thing you observed about yourself] + + --- + - **Name:** [Your Name] + - **Preferred Pronouns:** [Your Pronouns] + - **Expertise:** [Your Expertise] + - **Environmental Data Science Superpower:** [Describe your unique skill or interest in environmental data science] + - **Reflection on Polarities Exercise:** [Share one thing you observed about yourself] + + --- + - **Name:** [Your Name] + - **Preferred Pronouns:** [Your Pronouns] + - **Expertise:** [Your Expertise] + - **Environmental Data Science Superpower:** [Describe your unique skill or interest in environmental data science] + - **Reflection on Polarities Exercise:** [Share one thing you observed about yourself] + + --- + - **Continue adding more team members following the same format, with a line break after each person.** + --- +### Research Question: Innovation for Inclusion or Computation (approx. time: 5-10 mins) +- **Refine the initial research question your team developed earlier. Please make any necessary edits or adjustments below:** + - [Edit or refine your team's previously selected research question here] + + +### Title: Innovation for Inclusion or Computation (approx. time: 5-10 mins) +- **Craft a catchy and public-facing title for your team’s project. Think of something that would grab attention at a conference or in a headline:** + - [Create an engaging title that captures the essence of your project here] + + +### Promoting Resilience and Adaptation +- **Describe how your proposed project aligns with the summit's themes of resilience and adaptation. Please provide 1-2 sentences that clearly connect your project's goals or methods to these themes:** + - [Insert your response here] + + +### Which Big Data Sets +- **Explore potential data sets for your project's topic from the [data library](https://cu-esiil.github.io/data-library/). 
List your options below, and after discussion and review, indicate your final choice for both the system being perturbed/disrupted and the perturbator/disrupter.** + +#### Draft Potential Data Sets + - **System Being Perturbed/Disrupted:** + - [List all potential data sets here] + - **Perturbator/Disrupter:** + - [List all potential data sets here] + +#### Final Choice + - **System Being Perturbed/Disrupted (Final Choice):** + - [Indicate your final selected data set here] + - **Perturbator/Disrupter (Final Choice):** + - [Indicate your final selected data set here] + + + +### Brief Check-in: Definition of Resilience (approx. 5 mins) +- **Review and refine the working definition of 'Resilience' provided below, based on your discussions and insights from earlier sections of this worksheet. Adjust the definition to better align with your team’s understanding or reaffirm it if it resonates with your views:** + - "Resilience is the capacity of a system, community, organization, or individual to absorb stress, recover from disruptions, adapt to change, and continue to develop and thrive." + - [Edit or reaffirm this definition here] + + +### Day 1 Report Back +- **Select one representative from your group to present your proposed project to all Summit attendees (~125 people). This is an opportunity for your breakout group to summarize your project’s approach as it relates to the Summit themes of adaptation and resilience.** + - **Presentation Content:** + - **Project Title:** + - [Insert your team's project title here] + - **Research Question:** + - [Insert your team's refined research question here] + - **Selected Data Sets:** + - [List the data sets your team has chosen to use here] + + - **Presentation Guidelines:** + - **Duration:** Your presentation should last between 30-60 seconds. Keep it concise and focused. This is just a quick oral presentation -you will not be able to use slides/images. + - **Objective:** Clearly communicate how your project aligns with the conference themes and highlight actionable insights that can aid decision makers. diff --git a/worksheets/worksheet_redlining.html b/worksheets/worksheet_redlining.html new file mode 100644 index 0000000..ea7f6f8 --- /dev/null +++ b/worksheets/worksheet_redlining.html @@ -0,0 +1,1058 @@ + + + + + + + + + +Redlining + + + + + + + + + + + + + + + + + + + +

    Redlining

    +
    +
    if (!requireNamespace("tidytext", quietly = TRUE)) {
    +  install.packages("tidytext")
    +}
    +library(tidytext)
    +## Warning: package 'tidytext' was built under R version 4.3.2
    +library(sf)
    +## Warning: package 'sf' was built under R version 4.3.2
    +## Linking to GEOS 3.11.0, GDAL 3.5.3, PROJ 9.1.0; sf_use_s2() is TRUE
    +library(ggplot2)
    +## Warning: package 'ggplot2' was built under R version 4.3.2
    +library(ggthemes)
    +## Warning: package 'ggthemes' was built under R version 4.3.2
    +library(dplyr)
    +## 
    +## Attaching package: 'dplyr'
    +## The following objects are masked from 'package:stats':
    +## 
    +##     filter, lag
    +## The following objects are masked from 'package:base':
    +## 
    +##     intersect, setdiff, setequal, union
    +library(rstac)
    +## Warning: package 'rstac' was built under R version 4.3.2
    +library(gdalcubes)
    +## Warning: package 'gdalcubes' was built under R version 4.3.2
    +library(gdalUtils)
    +## Please note that rgdal will be retired during October 2023,
    +## plan transition to sf/stars/terra functions using GDAL and PROJ
    +## at your earliest convenience.
    +## See https://r-spatial.org/r/2023/05/15/evolution4.html and https://github.com/r-spatial/evolution
    +## rgdal: version: 1.6-7, (SVN revision 1203)
    +## Geospatial Data Abstraction Library extensions to R successfully loaded
    +## Loaded GDAL runtime: GDAL 3.5.3, released 2022/10/21
    +## Path to GDAL shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/rgdal/gdal
    +##  GDAL does not use iconv for recoding strings.
    +## GDAL binary built with GEOS: TRUE 
    +## Loaded PROJ runtime: Rel. 9.1.0, September 1st, 2022, [PJ_VERSION: 910]
    +## Path to PROJ shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/gdalcubes/proj
    +## PROJ CDN enabled: FALSE
    +## Linking to sp version:1.6-1
    +## To mute warnings of possible GDAL/OSR exportToProj4() degradation,
    +## use options("rgdal_show_exportToProj4_warnings"="none") before loading sp or rgdal.
    +## 
    +## Attaching package: 'gdalUtils'
    +## The following object is masked from 'package:sf':
    +## 
    +##     gdal_rasterize
    +library(gdalcubes)
    +library(colorspace)
    +library(terra)
    +## Warning: package 'terra' was built under R version 4.3.2
    +## terra 1.7.71
    +## 
    +## Attaching package: 'terra'
    +## The following object is masked from 'package:colorspace':
    +## 
    +##     RGB
    +## The following objects are masked from 'package:gdalcubes':
    +## 
    +##     animate, crop, size
    +library(tidyterra)
    +## 
    +## Attaching package: 'tidyterra'
    +## The following object is masked from 'package:stats':
    +## 
    +##     filter
    +library(basemapR)
    +library(tidytext)
    +library(ggwordcloud)
    +library(osmextract)
    +## Data (c) OpenStreetMap contributors, ODbL 1.0. https://www.openstreetmap.org/copyright.
    +## Check the package website, https://docs.ropensci.org/osmextract/, for more details.
    +library(sf)
    +library(ggplot2)
    +library(ggthemes)
    +
    +
    +
    # Function to get a list of unique cities and states from the redlining data
    +get_city_state_list_from_redlining_data <- function() {
    +  # URL to the GeoJSON data
    +  url <- "https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson"
    +  
    +  # Read the GeoJSON file into an sf object
    +  redlining_data <- tryCatch({
    +    read_sf(url)
    +  }, error = function(e) {
    +    stop("Error reading GeoJSON data: ", e$message)
    +  })
    +
    +  # Check for the existence of 'city' and 'state' columns
    +  if (!all(c("city", "state") %in% names(redlining_data))) {
    +    stop("The required columns 'city' and/or 'state' do not exist in the data.")
    +  }
    +
    +  # Extract a unique list of city and state pairs without the geometries
    +  city_state_df <- redlining_data %>%
    +    select(city, state) %>%
    +    st_set_geometry(NULL) %>%  # Drop the geometry to avoid issues with invalid shapes
    +    distinct(city, state) %>%
    +    arrange(state, city )  # Arrange the list alphabetically by state, then by city
    +
    +  # Return the dataframe of unique city-state pairs
    +  return(city_state_df)
    +}
    +
    +
    +
    #Retrieve the list of cities and states
    +city_state_list <- get_city_state_list_from_redlining_data()
    +print(city_state_list)
    +
    +
    # A tibble: 314 × 2
    +   city        state
    +   <chr>       <chr>
    + 1 Birmingham  AL   
    + 2 Mobile      AL   
    + 3 Montgomery  AL   
    + 4 Arkadelphia AR   
    + 5 Batesville  AR   
    + 6 Camden      AR   
    + 7 Conway      AR   
    + 8 El Dorado   AR   
    + 9 Fort Smith  AR   
    +10 Little Rock AR   
    +# ℹ 304 more rows
    +
    +
    +
    +
    # Function to load and filter redlining data by city
    +load_city_redlining_data <- function(city_name) {
    +  # URL to the GeoJSON data
    +  url <- "https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson"
    +  
    +  # Read the GeoJSON file into an sf object
    +  redlining_data <- read_sf(url)
    +  
    +  # Filter the data for the specified city and non-empty grades
    +  
    +  city_redline <- redlining_data %>%
    +    filter(city == city_name )
    +  
    +  # Return the filtered data
    +  return(city_redline)
    +}
    +
    +
    +
    # Load redlining data for Denver
    +denver_redlining <- load_city_redlining_data("Denver")
    +print(denver_redlining)
    +
    +
    Simple feature collection with 316 features and 15 fields
    +Geometry type: MULTIPOLYGON
    +Dimension:     XY
    +Bounding box:  xmin: -105.0622 ymin: 39.62952 xmax: -104.8763 ymax: 39.79111
    +Geodetic CRS:  WGS 84
    +# A tibble: 316 × 16
    +   area_id city   state city_survey cat   grade label res   com   ind   fill   
    + *   <int> <chr>  <chr> <lgl>       <chr> <chr> <chr> <lgl> <lgl> <lgl> <chr>  
    + 1    6525 Denver CO    TRUE        Best  A     A1    TRUE  FALSE FALSE #76a865
    + 2    6525 Denver CO    TRUE        Best  A     A1    TRUE  FALSE FALSE #76a865
    + 3    6525 Denver CO    TRUE        Best  A     A1    TRUE  FALSE FALSE #76a865
    + 4    6525 Denver CO    TRUE        Best  A     A1    TRUE  FALSE FALSE #76a865
    + 5    6529 Denver CO    TRUE        Best  A     A2    TRUE  FALSE FALSE #76a865
    + 6    6529 Denver CO    TRUE        Best  A     A2    TRUE  FALSE FALSE #76a865
    + 7    6529 Denver CO    TRUE        Best  A     A2    TRUE  FALSE FALSE #76a865
    + 8    6537 Denver CO    TRUE        Best  A     A3    TRUE  FALSE FALSE #76a865
    + 9    6537 Denver CO    TRUE        Best  A     A3    TRUE  FALSE FALSE #76a865
    +10    6537 Denver CO    TRUE        Best  A     A3    TRUE  FALSE FALSE #76a865
    +# ℹ 306 more rows
    +# ℹ 5 more variables: GEOID10 <chr>, GISJOIN <chr>, calc_area <dbl>,
    +#   pct_tract <dbl>, geometry <MULTIPOLYGON [°]>
    +
    +
    +
    +
    
    +
    +get_places <- function(polygon_layer, type = "food" ) {
    +  # Check if the input is an sf object
    +  if (!inherits(polygon_layer, "sf")) {
    +    stop("The provided object is not an sf object.")
    +  }
    +  
    +  # Create a bounding box from the input sf object
    +  bbox_here <- st_bbox(polygon_layer) |>
    +    st_as_sfc()
    +  
    +  if(type == "food"){
    +    my_layer <- "multipolygons"
    +    my_query <- "SELECT * FROM multipolygons WHERE (
    +                 shop IN ('supermarket', 'bodega', 'market', 'other_market', 'farm', 'garden_centre', 'doityourself', 'farm_supply', 'compost', 'mulch', 'fertilizer') OR
    +                 amenity IN ('social_facility', 'market', 'restaurant', 'coffee') OR
    +                 leisure = 'garden' OR
    +                 landuse IN ('farm', 'farmland', 'row_crops', 'orchard_plantation', 'dairy_grazing') OR
    +                 building IN ('brewery', 'winery', 'distillery') OR
    +                 shop = 'greengrocer' OR
    +                 amenity = 'marketplace'
    +               )"
    +    title <- "food"
    +  }
    +  
    +  if (type == "processed_food") {
    +    my_layer <- "multipolygons"
    +    my_query <- "SELECT * FROM multipolygons WHERE (
    +                   amenity IN ('fast_food', 'cafe', 'pub') OR
    +                   shop IN ('convenience', 'supermarket') OR
    +                   shop = 'kiosk'
    +                 )"
    +    title <- "Processed Food Locations"
    +}
    +  
    +  if(type == "natural_habitats"){
    +    my_layer <- "multipolygons"
    +    my_query <- "SELECT * FROM multipolygons WHERE (
    +             boundary = 'protected_area' OR
    +             natural IN ('tree', 'wood') OR
    +             landuse = 'forest' OR
    +             leisure = 'park'
    +           )"
    +    title <- "Natural habitats or City owned trees"
    +  }
    +  
    +   if(type == "roads"){
    +    my_layer <- "lines"
    +    my_query <- "SELECT * FROM lines WHERE (
    +             highway IN ('motorway', 'trunk', 'primary', 'secondary', 'tertiary') )"
    +    title <- "Major roads"
    +   }
    +  
    +  if(type == "rivers"){
    +    my_layer <- "lines"
    +    my_query <- "SELECT * FROM lines WHERE (
    +             waterway IN ('river'))"
    +    title <- "Major rivers"
    +  }
    +  
    +  if(type == "internet_access") {
    +    my_layer <- "multipolygons"
    +    my_query <- "SELECT * FROM multipolygons WHERE (
    +                 amenity IN ('library', 'cafe', 'community_centre', 'public_building') AND
    +                 internet_access = 'yes' 
    +               )"
    +    title <- "Internet Access Locations"
    +}
    +
    +  if(type == "water_bodies") {
    +    my_layer <- "multipolygons"
    +    my_query <- "SELECT * FROM multipolygons WHERE (
    +                 natural IN ('water', 'lake', 'pond') OR
    +                 water IN ('lake', 'pond') OR
    +                 landuse = 'reservoir'
    +               )"
    +    title <- "Water Bodies"
    +}
    +
    + if(type == "government_buildings") {
    +    my_layer <- "multipolygons"
    +    my_query <- "SELECT * FROM multipolygons WHERE (
    +                 amenity IN ('townhall', 'courthouse', 'embassy', 'police', 'fire_station') OR
    +                 building IN ('capitol', 'government')
    +               )"
    +    title <- "Government Buildings"
    +}
    +
    +
    +
    +  # Use the bbox to get data with oe_get(), specifying the desired layer and a custom SQL query for fresh food places
    +  tryCatch({
    +    places <- oe_get(
    +      place = bbox_here,
    +      layer = my_layer,  # Adjusted layer; change as per actual data availability
    +      query = my_query,
    +      quiet = TRUE
    +    )
    +    
    +  places <- st_make_valid(places)
    +    
    +    # Crop the data to the bounding box
    +    cropped_places <- st_crop(places, bbox_here)
    +    
    +    # Plotting the cropped fresh food places
    +    plot <- ggplot(data = cropped_places) +
    +      geom_sf(fill="cornflowerblue", color="cornflowerblue") +
    +      ggtitle(title) +
    +  theme_tufte()+
    +  theme(legend.position = "none",  # Optionally hide the legend
    +        axis.text = element_blank(),     # Remove axis text
    +        axis.title = element_blank(),    # Remove axis titles
    +        axis.ticks = element_blank(),    # Remove axis ticks
    +         plot.background = element_rect(fill = "white", color = NA),  # Set the plot background to white
    +        panel.background = element_rect(fill = "white", color = NA),  # Set the panel background to white
    +        panel.grid.major = element_blank(),  # Remove major grid lines
    +        panel.grid.minor = element_blank(),
    +        ) 
    +    
    +    # Save the plot as a PNG file
    +    png_filename <- paste0(title,"_", Sys.Date(), ".png")
    +    ggsave(png_filename, plot, width = 10, height = 8, units = "in")
    +    
    +    # Return the cropped dataset
    +    return(cropped_places)
    +  }, error = function(e) {
    +    stop("Failed to retrieve or plot data: ", e$message)
    +  })
    +}
    +
    +
    +
    
    +
    +plot_city_redlining <- function(redlining_data, filename = "redlining_plot.png") {
    +  # Fetch additional geographic data based on redlining data
    +  roads <- get_places(redlining_data, type = "roads")
    +  rivers <- get_places(redlining_data, type = "rivers")
    +  
    +  # Filter residential zones with valid grades and where city survey is TRUE
    +  residential_zones <- redlining_data %>%
    +    filter(city_survey == TRUE & grade != "") 
    +
    +  # Colors for the grades
    +  colors <- c("#76a865", "#7cb5bd", "#ffff00", "#d9838d")
    +
    +  # Plot the data using ggplot2
    +  plot <- ggplot() +
    +    geom_sf(data = roads, lwd = 0.1) +
    +    geom_sf(data = rivers, color = "blue", alpha = 0.5, lwd = 1.1) +
    +    geom_sf(data = residential_zones, aes(fill = grade), alpha = 0.5) +
    +    theme_tufte() +
    +    scale_fill_manual(values = colors) +
    +    labs(fill = 'HOLC Categories') +
    +    theme(
    +      plot.background = element_rect(fill = "white", color = NA),
    +      panel.background = element_rect(fill = "white", color = NA),
    +      panel.grid.major = element_blank(),
    +      panel.grid.minor = element_blank(),
    +      legend.position = "right"
    +    )
    +  
    +  # Save the plot as a high-resolution PNG file
    +  ggsave(filename, plot, width = 10, height = 8, units = "in", dpi = 600)
    +  
    +  # Return the plot object if needed for further manipulation or checking
    +  return(plot)
    +}
    +
    +
    +
    denver_plot <- plot_city_redlining(denver_redlining)
    +print(denver_plot)
    +
    +

    +
    +
    +
    +
    food <- get_places(denver_redlining, type="food")
    +
    +food_processed <- get_places(denver_redlining, type="processed_food")
    +
    +
    +natural_habitats <- get_places(denver_redlining, type="natural_habitats")
    +
    +roads <- get_places(denver_redlining, type="roads")
    +
    +rivers <- get_places(denver_redlining, type="rivers")
    +
    +#water_bodies <- get_places(denver_redlining, type="water_bodies")
    +
    +government_buildings <- get_places(denver_redlining, type="government_buildings")
    +
    +
    +
    split_plot <- function(sf_data, roads, rivers) {
    +  # Filter for grades A, B, C, and D
    +  sf_data_filtered <- sf_data %>% 
    +    filter(grade %in% c('A', 'B', 'C', 'D'))
    +
    +  # Define a color for each grade
    +  grade_colors <- c("A" = "#76a865", "B" = "#7cb5bd", "C" = "#ffff00", "D" = "#d9838d")
    +
    +  # Create the plot with panels for each grade
    +  plot <- ggplot(data = sf_data_filtered) +
    +    geom_sf(data = roads, alpha = 0.1, lwd = 0.1) +
    +    geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) +
    +    geom_sf(aes(fill = grade)) +
    +    facet_wrap(~ grade, nrow = 1) +  # Free scales for different zoom levels if needed
    +    scale_fill_manual(values = grade_colors) +
    +    theme_minimal() +
    +    labs(fill = 'HOLC Grade') +
    +    theme_tufte() +
    +    theme(plot.background = element_rect(fill = "white", color = NA),
    +          panel.background = element_rect(fill = "white", color = NA),
    +          legend.position = "none",  # Optionally hide the legend
    +          axis.text = element_blank(),     # Remove axis text
    +          axis.title = element_blank(),    # Remove axis titles
    +          axis.ticks = element_blank(),    # Remove axis ticks
    +          panel.grid.major = element_blank(),  # Remove major grid lines
    +          panel.grid.minor = element_blank())  
    +
    +  return(plot)
    +}
    +
    +
    +
    plot_row <- split_plot(denver_redlining, roads, rivers)
    +print(plot_row)
    +
    +

    +
    +
    +
    +
    
    +process_and_plot_sf_layers <- function(layer1, layer2, output_file = "output_plot.png") {
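+ # Note: the plotting step below also uses the global `roads` and `rivers` objects
+ # created earlier with get_places(); build those layers before calling this function.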
    + # Make geometries valid
    +layer1 <- st_make_valid(layer1)
    +layer2 <- st_make_valid(layer2)
    +
    +# Optionally, simplify geometries to remove duplicate vertices
    +layer1 <- st_simplify(layer1, preserveTopology = TRUE) |>
    +  filter(grade != "")
    +
    +# Prepare a list to store results
    +results <- list()
    +
    +# Loop through each grade and perform operations
    +for (grade in c("A", "B", "C", "D")) {
    +  # Filter layer1 for current grade
    +  layer1_grade <- layer1[layer1$grade == grade, ]
    +
    +  # Buffer the geometries of the current grade
    +  buffered_layer1_grade <- st_buffer(layer1_grade, dist = 500)
    +
    +  # Intersect with the second layer
    +  intersections <- st_intersects(layer2, buffered_layer1_grade, sparse = FALSE)
    +  selected_polygons <- layer2[rowSums(intersections) > 0, ]
    +
    +  # Add a new column to store the grade information
    +  selected_polygons$grade <- grade
    +
    +  # Store the result
    +  results[[grade]] <- selected_polygons
    +}
    +
    +# Combine all selected polygons from different grades into one sf object
    +final_selected_polygons <- do.call(rbind, results)
    +
    +  # Define colors for the grades
    +  grade_colors <- c("A" = "grey", "B" = "grey", "C" = "grey", "D" = "grey")
    +
    +  # Create the plot
    +  plot <- ggplot() +
    +    geom_sf(data = roads, alpha = 0.05, lwd = 0.1) +
    +    geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) +
    +    geom_sf(data = layer1, fill = "grey", color = "grey", size = 0.1) +
    +    facet_wrap(~ grade, nrow = 1) +
    +    geom_sf(data = final_selected_polygons,fill = "green", color = "green", size = 0.1) +
    +    facet_wrap(~ grade, nrow = 1) +
    +    #scale_fill_manual(values = grade_colors) +
    +    #scale_color_manual(values = grade_colors) +
    +    theme_minimal() +
    +    labs(fill = 'HOLC Grade') +
    +    theme_tufte() +
    +    theme(plot.background = element_rect(fill = "white", color = NA),
    +      panel.background = element_rect(fill = "white", color = NA),
    +      legend.position = "none",
    +          axis.text = element_blank(),
    +          axis.title = element_blank(),
    +          axis.ticks = element_blank(),
    +          panel.grid.major = element_blank(),
    +          panel.grid.minor = element_blank())
    +
    +  # Save the plot as a high-resolution PNG file
    +  ggsave(output_file, plot, width = 10, height = 8, units = "in", dpi = 600)
    +  
    +  # Return the plot for optional further use
    +  return(list(plot=plot, sf = final_selected_polygons))
    +}
    +
    +
    +
    create_wordclouds_by_grade <- function(sf_object, output_file = "food_word_cloud_per_grade.png",title = "Healthy food place names word cloud", max_size =25) {
    +    # Ensure the 'name' and 'grade' columns are present
    +    if (!("name" %in% names(sf_object)) || !("grade" %in% names(sf_object))) {
    +        stop("The sf object must contain 'name' and 'grade' columns.")
    +    }
    +    
    +    # Extract relevant data and prepare text data
    +    text_data <- sf_object %>%
    +        select(grade, name) %>%
    +        filter(!is.na(name)) %>%
    +        unnest_tokens(output = "word", input = name, token = "words") %>%
    +        count(grade, word, sort = TRUE) %>%
    +        ungroup() %>%
+        filter(n > 1)  # Keep only words that occur more than once
    +    
    +    # Ensure there are no NA values in the 'word' column
    +    text_data <- text_data %>% filter(!is.na(word))
    +
    +    # Handle cases where text_data might be empty
    +    if (nrow(text_data) == 0) {
    +        stop("No data available for creating word clouds.")
    +    }
    +    
    +    # Create a word cloud using ggplot2 and ggwordcloud
    +    p <- ggplot( ) +
    +        geom_text_wordcloud_area(data=text_data, aes(label = word, size = n),rm_outside = TRUE) +
    +        scale_size_area(max_size = max_size) +
    +        facet_wrap(~ grade, nrow = 1) +
    +      scale_color_gradient(low = "darkred", high = "red") +
    +        theme_minimal() +
    +        theme(plot.background = element_rect(fill = "white", color = NA),
    +          panel.background = element_rect(fill = "white", color = NA),
    +          panel.spacing = unit(0.5, "lines"),
    +              plot.title = element_text(size = 16, face = "bold"),
    +              legend.position = "none") +
    +        labs(title = title)
    +    
    +    # Attempt to save the plot and handle any errors
    +    tryCatch({
    +        ggsave(output_file, p, width = 10, height = 4, units = "in", dpi = 600)
    +    }, error = function(e) {
    +        cat("Error in saving the plot: ", e$message, "\n")
    +    })
    +    
    +    return(p)
    +}
    +
    +
    +
     layer1 <- denver_redlining
    + layer2 <- food
    + food_match <- process_and_plot_sf_layers(layer1, layer2, "final_redlining_plot.png")
    + print(food_match$plot)
    +
    +

    +
    +
    +
    +
    food_word_cloud <- create_wordclouds_by_grade(food_match$sf, output_file = "food_word_cloud_per_grade.png")
    +
    +
    Warning in wordcloud_boxes(data_points = points_valid_first, boxes = boxes, :
    +Some words could not fit on page. They have been removed.
    +
    +
    +

    +
    +
     layer1 <- denver_redlining
    + layer2 <- food_processed
    + processed_food_match <- process_and_plot_sf_layers(layer1, layer2, "final_redlining_plot.png")
    + print(processed_food_match$plot)
    +
    +

    +
    +
    +
    +
    processed_food_cloud <- create_wordclouds_by_grade(processed_food_match$sf, output_file = "processed_food_word_cloud_per_grade.png",title = "Processed food place names where larger text is more frequent", max_size =17)
    +
    +

    +
    +
     layer1 <- denver_redlining
    + layer2 <- natural_habitats
    + natural_habitats_match <- process_and_plot_sf_layers(layer1, layer2, "final_redlining_plot.png")
    + print(natural_habitats_match$plot)
    +
    +

    +
    +
    +
    +
    natural_habitats_cloud <- create_wordclouds_by_grade(natural_habitats_match$sf, output_file = "natural_habitats_word_cloud_per_grade.png",title = "Natural habitats place names where larger text is more frequent", max_size =35)
    +
    +

    +
    +
    polygon_layer <- denver_redlining
    +# Function to process satellite data based on an SF polygon's extent
    +process_satellite_data <- function(polygon_layer, start_date, end_date, assets, fps = 1, output_file = "anim.gif") {
    +  # Record start time
    +  start_time <- Sys.time()
    +  
    +  # Calculate the bbox from the polygon layer
    +  bbox <- st_bbox(polygon_layer)
    +  
    +  s = stac("https://earth-search.aws.element84.com/v0")
    +
    +  
    +  # Use stacR to search for Sentinel-2 images within the bbox and date range
    +  items = s |> stac_search(
    +    collections = "sentinel-s2-l2a-cogs",
    +    bbox = c(bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]),
    +    datetime = paste(start_date, end_date, sep = "/"),
    +    limit = 500
    +  ) %>% 
    +  post_request()
    +  
    +  # Define mask for Sentinel-2 image quality
    +  #S2.mask <- image_mask("SCL", values = c(3, 8, 9))
    +  
    +  # Create a collection of images filtering by cloud cover
    +  col <- stac_image_collection(items$features, asset_names = assets, property_filter = function(x) {x[["eo:cloud_cover"]] < 30})
    +  
    +  # Define a view for processing the data
    +  v <- cube_view(srs = "EPSG:4326", 
    +                 extent = list(t0 = start_date, t1 = end_date,
    +                               left = bbox["xmin"], right = bbox["xmax"], 
    +                               top = bbox["ymax"], bottom = bbox["ymin"]),
    +                 dx = 0.001, dy = 0.001, dt = "P1M", 
    +                 aggregation = "median", resampling = "bilinear")
    +  
    +  # Calculate NDVI and create an animation
    +  ndvi_col <- function(n) {
    +    rev(sequential_hcl(n, "Green-Yellow"))
    +  }
    +  
    +  #raster_cube(col, v, mask = S2.mask) %>%
    +  raster_cube(col, v) %>%
    +    select_bands(c("B04", "B08")) %>%
    +    apply_pixel("(B08-B04)/(B08+B04)", "NDVI") %>%
    +    gdalcubes::animate(col = ndvi_col, zlim = c(-0.2, 1), key.pos = 1, save_as = output_file, fps = fps)
    +  
    +  # Calculate processing time
    +  end_time <- Sys.time()
    +  processing_time <- difftime(end_time, start_time)
    +  
    +  # Return processing time
    +  return(processing_time)
    +}
    +
    +
    +
    processing_time <- process_satellite_data(denver_redlining, "2022-05-31", "2023-05-31", c("B04", "B08"))
    +print(processing_time)
    +
    +
    Time difference of 8.922858 mins
    +
    +
    +

    +
    +
    
    +
    +
    +yearly_average_ndvi <- function(polygon_layer, output_file = "ndvi.png", dx = 0.01, dy = 0.01) {
    +  # Record start time
    +  start_time <- Sys.time()
    +
    +  # Calculate the bbox from the polygon layer
    +  bbox <- st_bbox(polygon_layer)
    +  
    +  s = stac("https://earth-search.aws.element84.com/v0")
    +
    +  # Search for Sentinel-2 images within the bbox for June
    +  items <- s |> stac_search(
    +    collections = "sentinel-s2-l2a-cogs",
    +    bbox = c(bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]),
    +    datetime = "2023-01-01/2023-12-31",
    +    limit = 500
    +  ) %>% 
    +  post_request()
    +  
    +  # Create a collection of images filtering by cloud cover
    +  col <- stac_image_collection(items$features, asset_names = c("B04", "B08"), property_filter = function(x) {x[["eo:cloud_cover"]] < 80})
    +  
    +  # Define a view for processing the data specifically for June
    +  v <- cube_view(srs = "EPSG:4326", 
    +                 extent = list(t0 = "2023-01-01", t1 = "2023-12-31",
    +                               left = bbox["xmin"], right = bbox["xmax"], 
    +                               top = bbox["ymax"], bottom = bbox["ymin"]),
    +                 dx = dx, dy = dy, dt = "P1Y", 
    +                 aggregation = "median", resampling = "bilinear")
    +
    +  # Process NDVI
    +  ndvi_rast <- raster_cube(col, v) %>%
    +    select_bands(c("B04", "B08")) %>%
    +    apply_pixel("(B08-B04)/(B08+B04)", "NDVI") %>%
    +    write_tif() |>
    +    terra::rast()
    +  
    + 
    +  # Convert terra Raster to ggplot using tidyterra
    +ndvi_plot <-   ggplot() +
    +    geom_spatraster(data = ndvi_rast, aes(fill = NDVI)) +
    +    scale_fill_viridis_c(option = "viridis", direction = -1, name = "NDVI") +
    +    labs(title = "NDVI mean for 2023") +
    +    theme_minimal() +
    +    coord_sf() +
    +    theme(plot.background = element_rect(fill = "white", color = NA),
    +      panel.background = element_rect(fill = "white", color = NA),
    +      legend.position = "right",
    +          axis.text = element_blank(),
    +          axis.title = element_blank(),
    +          axis.ticks = element_blank(),
    +          panel.grid.major = element_blank(),
    +          panel.grid.minor = element_blank()) 
    +
    +  # Save the plot as a high-resolution PNG file
    +  ggsave(output_file, ndvi_plot, width = 10, height = 8, dpi = 600)
    +
    +  # Calculate processing time
    +  end_time <- Sys.time()
    +  processing_time <- difftime(end_time, start_time)
    +
    +  # Return the plot and processing time
    +  return(list(plot = ndvi_plot, processing_time = processing_time, raster = ndvi_rast))
    +}
    +
    +
    +
    ndvi_background <- yearly_average_ndvi(denver_redlining,dx = 0.0001, dy = 0.0001)
    +print(ndvi_background$plot)
    +
    +

    +
    +
    print(ndvi_background$processing_time)
    +
    +
    Time difference of 17.70048 mins
    +
    +
    print(ndvi_background$raster)
    +
    +
    class       : SpatRaster 
    +dimensions  : 1616, 1860, 1  (nrow, ncol, nlyr)
    +resolution  : 1e-04, 1e-04  (x, y)
    +extent      : -105.0623, -104.8763, 39.62951, 39.79112  (xmin, xmax, ymin, ymax)
    +coord. ref. : lon/lat WGS 84 (EPSG:4326) 
    +source      : cube_c1217c8746c2023-01-01.tif 
    +name        : NDVI 
    +
    +
    +
    +
    
    +
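+# Note: expand_bbox() used below is assumed to come from the basemapR package
+# loaded at the top of this worksheet; it pads the redlining bounding box
+# before the per-grade masks are built.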
    +create_mask_and_plot <- function(redlining_sf, background_raster = ndvi$raster, roads = NULL, rivers = NULL){
    +  start_time <- Sys.time()  # Start timing
    +  
    +  # Validate and prepare the redlining data
    +  redlining_sf <- redlining_sf %>%
    +    filter(grade != "") %>%
    +    st_make_valid()
    +  
    +  
    +bbox <- st_bbox(redlining_sf)  # Get original bounding box
    +
    +
    +expanded_bbox <- expand_bbox(bbox, 6000, 1000)  # 
    +
    +   
    +expanded_bbox_poly <- st_as_sfc(expanded_bbox, crs = st_crs(redlining_sf)) %>%
    +    st_make_valid()
    +  
    +  # Initialize an empty list to store masks
    +  masks <- list()
    +  
    +  # Iterate over each grade to create masks
    +  unique_grades <- unique(redlining_sf$grade)
    +  for (grade in unique_grades) {
    +    # Filter polygons by grade
    +    grade_polygons <- redlining_sf[redlining_sf$grade == grade, ]
    +    
    +    # Create an "inverted" mask by subtracting these polygons from the background
    +    mask <- st_difference(expanded_bbox_poly, st_union(grade_polygons))
    +    
    +    # Store the mask in the list with the grade as the name
    +    masks[[grade]] <- st_sf(geometry = mask, grade = grade)
    +  }
    +  
    +  # Combine all masks into a single sf object
    +  mask_sf <- do.call(rbind, masks)
    +  
    +  # Normalize the grades so that C.2 becomes C, but correctly handle other grades
    +  mask_sf$grade <- ifelse(mask_sf$grade == "C.2", "C", mask_sf$grade)
    +
    +  # Prepare the plot
    +  plot <- ggplot() +
    +    geom_spatraster(data = background_raster, aes(fill = NDVI)) +
    +  scale_fill_viridis_c(name = "NDVI", option = "viridis", direction = -1) +
    +   
    +    geom_sf(data = mask_sf, aes(color = grade), fill = "white", size = 0.1, show.legend = FALSE) +
    +    scale_color_manual(values = c("A" = "white", "B" = "white", "C" = "white", "D" = "white"), name = "Grade") +
    +    facet_wrap(~ grade, nrow = 1) +
    +     geom_sf(data = roads, alpha = 1, lwd = 0.1, color="white") +
    +    geom_sf(data = rivers, color = "white", alpha = 0.5, lwd = 1.1) +
    +    labs(title = "NDVI: Normalized Difference Vegetation Index") +
    +    theme_minimal() +
    +    coord_sf(xlim = c(bbox["xmin"], bbox["xmax"]), 
    +           ylim = c(bbox["ymin"], bbox["ymax"]), 
    +           expand = FALSE) + 
    +    theme(plot.background = element_rect(fill = "white", color = NA),
    +          panel.background = element_rect(fill = "white", color = NA),
    +          legend.position = "bottom",
    +          axis.text = element_blank(),
    +          axis.title = element_blank(),
    +          axis.ticks = element_blank(),
    +          panel.grid.major = element_blank(),
    +          panel.grid.minor = element_blank())
    +
    +  # Save the plot
    +  ggsave("redlining_mask_ndvi.png", plot, width = 10, height = 4, dpi = 600)
    +
    +  end_time <- Sys.time()  # End timing
    +  runtime <- end_time - start_time
    +
    +  # Return the plot and runtime
    +  return(list(plot = plot, runtime = runtime, mask_sf = mask_sf))
    +}
    +
    +
    +
    ndvi_background_low <- yearly_average_ndvi(denver_redlining)
    +print(ndvi_background_low$plot)
    +
    +

    +
    +
    print(ndvi_background_low$processing_time)
    +
    +
    Time difference of 1.728088 mins
    +
    +
    print(ndvi_background_low$raster)
    +
    +
    class       : SpatRaster 
    +dimensions  : 17, 19, 1  (nrow, ncol, nlyr)
    +resolution  : 0.01, 0.01  (x, y)
    +extent      : -105.0643, -104.8743, 39.62532, 39.79532  (xmin, xmax, ymin, ymax)
    +coord. ref. : lon/lat WGS 84 (EPSG:4326) 
    +source      : cube_c1221bc8bdc2023-01-01.tif 
    +name        : NDVI 
    +
    +
    +
    +
    ndvi <- create_mask_and_plot(denver_redlining, background_raster = ndvi_background_low$raster, roads = roads, rivers = rivers)
    +ndvi$mask_sf
    +
    +
    Simple feature collection with 4 features and 1 field
    +Geometry type: GEOMETRY
    +Dimension:     XY
    +Bounding box:  xmin: -105.0865 ymin: 39.62053 xmax: -104.8546 ymax: 39.8001
    +Geodetic CRS:  WGS 84
    +  grade                       geometry
    +A     A MULTIPOLYGON (((-105.0865 3...
    +B     B POLYGON ((-105.0865 39.6205...
    +C     C MULTIPOLYGON (((-105.0865 3...
    +D     D MULTIPOLYGON (((-105.0865 3...
    +
    +
    ndvi$plot
    +
    +

    +
    +
    +

    + +
    + + +
    + + + + \ No newline at end of file diff --git a/worksheets/worksheet_redlining.qmd b/worksheets/worksheet_redlining.qmd new file mode 100644 index 0000000..9f89919 --- /dev/null +++ b/worksheets/worksheet_redlining.qmd @@ -0,0 +1,1173 @@ +--- +title: "Redlining" +format: gfm + +--- + +# Exploring the Impact of Historical Redlining on Urban Greenspace: A Collaborative Examination of Maps, Justice, and Resilience + +## Introduction + +This group exploration delves into the long-term impacts of historical redlining on urban greenspace, emphasizing the powerful role of maps in shaping environmental and social landscapes. By drawing on the research by Nardone et al. (2021), you will collaboratively investigate how discriminatory practices encoded in maps have led to persistent disparities in urban settings. This exploration aims to uncover the resilience of communities in adapting to these entrenched injustices and to foster a deeper understanding of how mapping can serve both as a tool of exclusion and as a means for promoting social equity. + +![1938 Map of Atlanta uses colors as grades for neighborhoods. The red swaths identify each area with large African-American populations that were deemed “less safe.”](../assets/redlining/redlining.png)) + +## Understanding Redlining as a Systemic Disturbance + +Redlining originated in the 1930s as a discriminatory practice where the Home Owners' Loan Corporation (HOLC) systematically denied mortgages or offered unfavorable terms based on racial and ethnic compositions. This methodical exclusion, executed through maps that color-coded "risky" investment areas in red, marked minority-populated areas, denying them crucial investment and development opportunities and initiating a profound and lasting disturbance in the urban fabric. + +Maps serve as powerful tools beyond navigation; they communicate and enforce control. By defining neighborhood boundaries through redlining, HOLC maps not only mirrored societal biases but also perpetuated and embedded them into the urban landscape. This manipulation of geographic data set a trajectory that limited economic growth, dictated the allocation of services, and influenced the development or deterioration of community infrastructure. + +**Figure 1:** 1938 Map of Atlanta uses colors as grades for neighborhoods. The red swaths identify each area with large African-American populations that were deemed “less safe.” + + + +[![ArcGIS Story Map](../assets/redlining/georectified-thumbnail.png)](https://storymaps.arcgis.com/stories/0f58d49c566b486482b3e64e9e5f7ac9) + + +**Explore the Story Map:** Click on the image above to explore the interactive story map about [subject of the story map]. + + +## Resilience and Adaptation in Urban Environments + +The legacy of redlining presents both a challenge and an opportunity for resilience and adaptation. Economically and socially, redlining entrenched cycles of poverty and racial segregation, creating a resilient wealth gap that has been difficult to dismantle. Environmentally, the neighborhoods targeted by redlining continue to face significant challenges—they generally feature less greenspace, suffer from higher pollution levels, and are more vulnerable to the impacts of climate change. These factors compound the health and wellness challenges faced by residents. + +Despite these adversities, urban communities have continually demonstrated remarkable resilience. 
Adaptation strategies, such as community-led green initiatives, urban agriculture, and grassroots activism, have emerged as responses to these systemic disturbances. By enhancing green infrastructure and advocating for equitable environmental policies, these communities strive to increase their resilience against both historical inequities and environmental challenges. + + +[![Watch the video](https://img.youtube.com/vi/O5FBJyqfoLM/hqdefault.jpg)](https://youtu.be/O5FBJyqfoLM) + + +**Video Title:** Exploring the Impacts of Historical Redlining on Urban Development +**Description:** Click on the image above to watch a video that delves into the consequences of historical redlining and its ongoing impact on urban environments. This educational piece offers insights into how such discriminatory practices have shaped cities and what can be learned from them. + +The following group exercise will not only uncover the impact of redlining on urban greenspace but also highlight the adaptive strategies developed in response to this enduring disturbance. Through mapping and analysis, we aim to illustrate the powerful role that geographic data can play in understanding and fostering urban resilience and social equity. + + +### References + +- Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. (2021). Redlines and Greenspace: The Relationship between Historical Redlining and 2010 Greenspace across the United States. *Environmental Health Perspectives*, 129(1), 017006. DOI:10.1289/EHP7495. +- Hoffman, J. S., Shandas, V., & Pendleton, N. (2020). The Effects of Historical Housing Policies on Resident Exposure to Intra-Urban Heat: A Study of 108 US Urban Areas. *Climate*, 8(1), 12. DOI:10.3390/cli8010012. + + +## Goals of this group activity + +The primary objectives of this tutorial are: + 1. To practice coding in CyVerse. + 2. To analyze the relationship between HOLC grades and the presence of urban greenspace. + 3. To understand how historic policies continue to affect the spatial distribution of environmental amenities. + +## Part 1: Accessing and Visualizing Historic Redlining Data + +We will begin by accessing HOLC maps from the Mapping Inequality project and overlaying this data with modern geographic datasets to visualize the historical impact on contemporary urban landscapes. + +### Data Acquisition +- Download HOLC map shapefiles from the University of Richmond’s Mapping Inequality Project. +- Utilize satellite imagery and other geospatial data to map current greenspace using the normalized difference vegetation index (NDVI). + +### Analysis Methodology +- Replicate the approach used by Nardone et al. to calculate NDVI values for each HOLC neighborhood, assessing greenspace as a health-promoting resource. +- Employ statistical methods such as propensity score matching to control for confounding variables and estimate the true impact of HOLC grades on urban greenspace. + + + + + + + + + + +
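+
+The methodology above mentions propensity score matching. Before we start mapping, here is a minimal, hypothetical sketch of what that step could look like in R. It assumes the `MatchIt` package and an illustrative tract-level data frame `tract_df` with a binary indicator for falling in a C/D-graded area plus a few confounders; none of these objects are created elsewhere in this worksheet, so the chunk is not evaluated.
+
+```{r, eval=FALSE}
+# Hypothetical sketch of propensity score matching (not run in this worksheet).
+# tract_df is assumed to hold one row per census tract with:
+#   low_grade          - 1 if the tract fell in a HOLC C/D area, 0 otherwise
+#   pop_1940, pct_owner_occupied, median_home_value - example confounders
+#   ndvi_2010          - greenspace outcome
+library(MatchIt)
+
+m_out <- matchit(
+  low_grade ~ pop_1940 + pct_owner_occupied + median_home_value,
+  data     = tract_df,
+  method   = "nearest",   # 1:1 nearest-neighbor matching on the propensity score
+  distance = "glm"        # propensity scores from a logistic regression
+)
+
+matched <- match.data(m_out)
+
+# Compare mean greenspace between matched redlined and non-redlined tracts
+aggregate(ndvi_2010 ~ low_grade, data = matched, FUN = mean)
+```
+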
+R libraries we use in this analysis
+
+```{r, collapse=TRUE}
+if (!requireNamespace("tidytext", quietly = TRUE)) {
+  install.packages("tidytext")
+}
+# Load the packages used throughout this worksheet
+library(tidytext)
+library(sf)
+library(ggplot2)
+library(ggthemes)
+library(dplyr)
+library(rstac)
+library(gdalcubes)
+library(gdalUtils)
+library(colorspace)
+library(terra)
+library(tidyterra)
+library(basemapR)
+library(ggwordcloud)
+library(osmextract)
+library(glue)
+library(purrr)
+```
+
    + + + +
    +FUNCTION: List cities where HOLC data are available +```{r, collapse=TRUE} +# Function to get a list of unique cities and states from the redlining data +get_city_state_list_from_redlining_data <- function() { + # URL to the GeoJSON data + url <- "https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson" + + # Read the GeoJSON file into an sf object + redlining_data <- tryCatch({ + read_sf(url) + }, error = function(e) { + stop("Error reading GeoJSON data: ", e$message) + }) + + # Check for the existence of 'city' and 'state' columns + if (!all(c("city", "state") %in% names(redlining_data))) { + stop("The required columns 'city' and/or 'state' do not exist in the data.") + } + + # Extract a unique list of city and state pairs without the geometries + city_state_df <- redlining_data %>% + select(city, state) %>% + st_set_geometry(NULL) %>% # Drop the geometry to avoid issues with invalid shapes + distinct(city, state) %>% + arrange(state, city ) # Arrange the list alphabetically by state, then by city + + # Return the dataframe of unique city-state pairs + return(city_state_df) +} + +``` +
    + + + + + + + +
    +Stream list of available HOLC cities +```{r, cache=TRUE, warning=FALSE, message=FALSE} +#Retrieve the list of cities and states +city_state_list <- get_city_state_list_from_redlining_data() + +knitr::kable(city_state_list, format = "markdown") +``` +
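+
+If you want to see which of these cities fall in a particular state, a quick filter on the streamed list works. This is a small sketch, not part of the original worksheet; `city_state_list` comes from the chunk above and `"CO"` is just an example.
+
+```{r, eval=FALSE}
+# List the HOLC cities available for Colorado
+city_state_list |>
+  filter(state == "CO")
+```
+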
    + + + + + + +
    +FUNCTION: Stream HOLC data from a city +```{r, collapse=TRUE} +# Function to load and filter redlining data by city +load_city_redlining_data <- function(city_name) { + # URL to the GeoJSON data + url <- "https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson" + + # Read the GeoJSON file into an sf object + redlining_data <- read_sf(url) + + # Filter the data for the specified city and non-empty grades + + city_redline <- redlining_data %>% + filter(city == city_name ) + + # Return the filtered data + return(city_redline) +} +``` +
    + + + +
    +Stream HOLC data for Denver, CO +```{r, cache=TRUE, warning=FALSE, message=FALSE} +# Load redlining data for Denver +denver_redlining <- load_city_redlining_data("Denver") +knitr::kable(head(denver_redlining), format = "markdown") +``` +
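+
+As a quick sanity check on what was streamed (a sketch, not part of the original worksheet), you can tabulate how many polygons fall in each HOLC grade:
+
+```{r, eval=FALSE}
+# Count Denver polygons per HOLC grade (drop geometry so count() returns a plain data frame)
+denver_redlining |>
+  sf::st_drop_geometry() |>
+  count(grade, sort = TRUE)
+```
+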
    + + + +
    +FUNCTION: Get Points-of-Interest from city of interest +```{r, collapse=TRUE} + + +get_places <- function(polygon_layer, type = "food" ) { + # Check if the input is an sf object + if (!inherits(polygon_layer, "sf")) { + stop("The provided object is not an sf object.") + } + + # Create a bounding box from the input sf object + bbox_here <- st_bbox(polygon_layer) |> + st_as_sfc() + + if(type == "food"){ + my_layer <- "multipolygons" + my_query <- "SELECT * FROM multipolygons WHERE ( + shop IN ('supermarket', 'bodega', 'market', 'other_market', 'farm', 'garden_centre', 'doityourself', 'farm_supply', 'compost', 'mulch', 'fertilizer') OR + amenity IN ('social_facility', 'market', 'restaurant', 'coffee') OR + leisure = 'garden' OR + landuse IN ('farm', 'farmland', 'row_crops', 'orchard_plantation', 'dairy_grazing') OR + building IN ('brewery', 'winery', 'distillery') OR + shop = 'greengrocer' OR + amenity = 'marketplace' + )" + title <- "food" + } + + if (type == "processed_food") { + my_layer <- "multipolygons" + my_query <- "SELECT * FROM multipolygons WHERE ( + amenity IN ('fast_food', 'cafe', 'pub') OR + shop IN ('convenience', 'supermarket') OR + shop = 'kiosk' + )" + title <- "Processed Food Locations" +} + + if(type == "natural_habitats"){ + my_layer <- "multipolygons" + my_query <- "SELECT * FROM multipolygons WHERE ( + boundary = 'protected_area' OR + natural IN ('tree', 'wood') OR + landuse = 'forest' OR + leisure = 'park' + )" + title <- "Natural habitats or City owned trees" + } + + if(type == "roads"){ + my_layer <- "lines" + my_query <- "SELECT * FROM lines WHERE ( + highway IN ('motorway', 'trunk', 'primary', 'secondary', 'tertiary') )" + title <- "Major roads" + } + + if(type == "rivers"){ + my_layer <- "lines" + my_query <- "SELECT * FROM lines WHERE ( + waterway IN ('river'))" + title <- "Major rivers" + } + + if(type == "internet_access") { + my_layer <- "multipolygons" + my_query <- "SELECT * FROM multipolygons WHERE ( + amenity IN ('library', 'cafe', 'community_centre', 'public_building') AND + internet_access = 'yes' + )" + title <- "Internet Access Locations" +} + + if(type == "water_bodies") { + my_layer <- "multipolygons" + my_query <- "SELECT * FROM multipolygons WHERE ( + natural IN ('water', 'lake', 'pond') OR + water IN ('lake', 'pond') OR + landuse = 'reservoir' + )" + title <- "Water Bodies" +} + + if(type == "government_buildings") { + my_layer <- "multipolygons" + my_query <- "SELECT * FROM multipolygons WHERE ( + amenity IN ('townhall', 'courthouse', 'embassy', 'police', 'fire_station') OR + building IN ('capitol', 'government') + )" + title <- "Government Buildings" +} + + + + # Use the bbox to get data with oe_get(), specifying the desired layer and a custom SQL query for fresh food places + tryCatch({ + places <- oe_get( + place = bbox_here, + layer = my_layer, # Adjusted layer; change as per actual data availability + query = my_query, + quiet = TRUE + ) + + places <- st_make_valid(places) + + # Crop the data to the bounding box + cropped_places <- st_crop(places, bbox_here) + + # Plotting the cropped fresh food places + plot <- ggplot(data = cropped_places) + + geom_sf(fill="cornflowerblue", color="cornflowerblue") + + ggtitle(title) + + theme_tufte()+ + theme(legend.position = "none", # Optionally hide the legend + axis.text = element_blank(), # Remove axis text + axis.title = element_blank(), # Remove axis titles + axis.ticks = element_blank(), # Remove axis ticks + plot.background = element_rect(fill = "white", color = NA), # Set the plot background 
to white + panel.background = element_rect(fill = "white", color = NA), # Set the panel background to white + panel.grid.major = element_blank(), # Remove major grid lines + panel.grid.minor = element_blank(), + ) + + # Save the plot as a PNG file + png_filename <- paste0(title,"_", Sys.Date(), ".png") + ggsave(png_filename, plot, width = 10, height = 8, units = "in") + + # Return the cropped dataset + return(cropped_places) + }, error = function(e) { + stop("Failed to retrieve or plot data: ", e$message) + }) +} + + + +``` +
    + +
    +FUNCTION: Plot POI over HOLC grades +```{r, warning=FALSE, collapse=TRUE} + + +plot_city_redlining <- function(redlining_data, filename = "redlining_plot.png") { + # Fetch additional geographic data based on redlining data + roads <- get_places(redlining_data, type = "roads") + rivers <- get_places(redlining_data, type = "rivers") + + # Filter residential zones with valid grades and where city survey is TRUE + residential_zones <- redlining_data %>% + filter(city_survey == TRUE & grade != "") + + # Colors for the grades + colors <- c("#76a865", "#7cb5bd", "#ffff00", "#d9838d") + + # Plot the data using ggplot2 + plot <- ggplot() + + geom_sf(data = roads, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.5, lwd = 1.1) + + geom_sf(data = residential_zones, aes(fill = grade), alpha = 0.5) + + theme_tufte() + + scale_fill_manual(values = colors) + + labs(fill = 'HOLC Categories') + + theme( + plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank(), + legend.position = "right" + ) + + # Save the plot as a high-resolution PNG file + ggsave(filename, plot, width = 10, height = 8, units = "in", dpi = 600) + + # Return the plot object if needed for further manipulation or checking + return(plot) +} + + + + +``` +
    + + +
    +Plot Denver Redlining +```{r, cache=TRUE, warning=FALSE, message=FALSE} +denver_plot <- plot_city_redlining(denver_redlining) +``` + +
    +![](../worksheets/redlining_plot.png) + + + +
    +Stream amenities by category +```{r, cache=TRUE, warning=FALSE, message=FALSE} +food <- get_places(denver_redlining, type="food") + +food_processed <- get_places(denver_redlining, type="processed_food") + +natural_habitats <- get_places(denver_redlining, type="natural_habitats") + +roads <- get_places(denver_redlining, type="roads") + +rivers <- get_places(denver_redlining, type="rivers") + +#water_bodies <- get_places(denver_redlining, type="water_bodies") + +government_buildings <- get_places(denver_redlining, type="government_buildings") +``` +
    + + +
    +FUNCTION: Plot the HOLC grades individually +```{r, collapse=TRUE} +split_plot <- function(sf_data, roads, rivers) { + # Filter for grades A, B, C, and D + sf_data_filtered <- sf_data %>% + filter(grade %in% c('A', 'B', 'C', 'D')) + + # Define a color for each grade + grade_colors <- c("A" = "#76a865", "B" = "#7cb5bd", "C" = "#ffff00", "D" = "#d9838d") + + # Create the plot with panels for each grade + plot <- ggplot(data = sf_data_filtered) + + geom_sf(data = roads, alpha = 0.1, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) + + geom_sf(aes(fill = grade)) + + facet_wrap(~ grade, nrow = 1) + # Free scales for different zoom levels if needed + scale_fill_manual(values = grade_colors) + + theme_minimal() + + labs(fill = 'HOLC Grade') + + theme_tufte() + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "none", # Optionally hide the legend + axis.text = element_blank(), # Remove axis text + axis.title = element_blank(), # Remove axis titles + axis.ticks = element_blank(), # Remove axis ticks + panel.grid.major = element_blank(), # Remove major grid lines + panel.grid.minor = element_blank()) + + ggsave(plot, filename = "HOLC_grades_individually.png", width = 10, height = 4, units = "in", dpi = 1200) + return(plot) +} +``` +
    + + +
    +Plot 4 HOLC grades individually +```{r, cache=TRUE, warning=FALSE, message=FALSE} +plot_row <- split_plot(denver_redlining, roads, rivers) + +``` +
    +![](../worksheets/HOLC_grades_individually.png) + +
    +FUNCTION: Map an amenity over each grade individually +```{r, collapse=TRUE} + +process_and_plot_sf_layers <- function(layer1, layer2, output_file = "output_plot.png") { + # Make geometries valid +layer1 <- st_make_valid(layer1) +layer2 <- st_make_valid(layer2) + +# Optionally, simplify geometries to remove duplicate vertices +layer1 <- st_simplify(layer1, preserveTopology = TRUE) |> + filter(grade != "") + +# Prepare a list to store results +results <- list() + +# Loop through each grade and perform operations +for (grade in c("A", "B", "C", "D")) { + # Filter layer1 for current grade + layer1_grade <- layer1[layer1$grade == grade, ] + + # Buffer the geometries of the current grade + buffered_layer1_grade <- st_buffer(layer1_grade, dist = 500) + + # Intersect with the second layer + intersections <- st_intersects(layer2, buffered_layer1_grade, sparse = FALSE) + selected_polygons <- layer2[rowSums(intersections) > 0, ] + + # Add a new column to store the grade information + selected_polygons$grade <- grade + + # Store the result + results[[grade]] <- selected_polygons +} + +# Combine all selected polygons from different grades into one sf object +final_selected_polygons <- do.call(rbind, results) + + # Define colors for the grades + grade_colors <- c("A" = "grey", "B" = "grey", "C" = "grey", "D" = "grey") + + # Create the plot + plot <- ggplot() + + geom_sf(data = roads, alpha = 0.05, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) + + geom_sf(data = layer1, fill = "grey", color = "grey", size = 0.1) + + facet_wrap(~ grade, nrow = 1) + + geom_sf(data = final_selected_polygons,fill = "green", color = "green", size = 0.1) + + facet_wrap(~ grade, nrow = 1) + + #scale_fill_manual(values = grade_colors) + + #scale_color_manual(values = grade_colors) + + theme_minimal() + + labs(fill = 'HOLC Grade') + + theme_tufte() + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "none", + axis.text = element_blank(), + axis.title = element_blank(), + axis.ticks = element_blank(), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank()) + + # Save the plot as a high-resolution PNG file + ggsave(output_file, plot, width = 10, height = 4, units = "in", dpi = 1200) + + # Return the plot for optional further use + return(list(plot=plot, sf = final_selected_polygons)) +} + + + + +``` +
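+
+One detail worth noting about `process_and_plot_sf_layers()` above: the HOLC polygons are in geographic coordinates (EPSG:4326), so `st_buffer(..., dist = 500)` relies on sf's s2 engine interpreting the distance as 500 metres rather than 500 degrees. A minimal check (a sketch, assuming the `denver_redlining` object streamed earlier):
+
+```{r, eval=FALSE}
+# Confirm the layer is geographic and that the s2 engine is active,
+# so buffer distances are taken in metres.
+sf::st_is_longlat(denver_redlining)
+sf::sf_use_s2()
+```
+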
    + + + + + + + +
    +FUNCTION: Create word cloud per grade +```{r} + +create_wordclouds_by_grade <- function(sf_object, output_file = "food_word_cloud_per_grade.png",title = "Healthy food place names word cloud", max_size =25, col_select = "name") { + + + # Extract relevant data and prepare text data + text_data <- sf_object %>% + select(grade, col_select) %>% + filter(!is.na(col_select)) %>% + unnest_tokens(output = "word", input = col_select, token = "words") %>% + count(grade, word, sort = TRUE) %>% + ungroup() %>% + filter(n() > 1) # Filter to remove overly common or single-occurrence words + + # Ensure there are no NA values in the 'word' column + text_data <- text_data %>% filter(!is.na(word)) + + # Handle cases where text_data might be empty + if (nrow(text_data) == 0) { + stop("No data available for creating word clouds.") + } + + # Create a word cloud using ggplot2 and ggwordcloud + p <- ggplot( ) + + geom_text_wordcloud_area(data=text_data, aes(label = word, size = n),rm_outside = TRUE) + + scale_size_area(max_size = max_size) + + facet_wrap(~ grade, nrow = 1) + + scale_color_gradient(low = "darkred", high = "red") + + theme_minimal() + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + panel.spacing = unit(0.5, "lines"), + plot.title = element_text(size = 16, face = "bold"), + legend.position = "none") + + labs(title = title) + + # Attempt to save the plot and handle any errors + tryCatch({ + ggsave(output_file, p, width = 10, height = 4, units = "in", dpi = 600) + }, error = function(e) { + cat("Error in saving the plot: ", e$message, "\n") + }) + + return(p) +} + + + + +``` +
    + +
    +Map food over each grade individually +```{r, cache=TRUE, warning=FALSE, message=FALSE} + layer1 <- denver_redlining + layer2 <- food + food_match <- process_and_plot_sf_layers(layer1, layer2, "food_match.png") +``` +
    +![](../worksheets/food_match.png) + +
    +WORD CLOUD: Names of places with fresh food +```{r, cache=TRUE} +food_word_cloud <- create_wordclouds_by_grade(food_match$sf, output_file = "food_word_cloud_per_grade.png") + +``` + +
    + + +![](../worksheets/food_word_cloud_per_grade.png) + + +
    +Map processed food over each grade individually +```{r, cache=TRUE, warning=FALSE, message=FALSE} + layer1 <- denver_redlining + layer2 <- food_processed + processed_food_match <- process_and_plot_sf_layers(layer1, layer2, "processed_food_match.png") + +``` +
    + +![](../worksheets/processed_food_match.png) + +
    +WORD CLOUD: Names of places with processed food +```{r, cache=TRUE} +processed_food_cloud <- create_wordclouds_by_grade(processed_food_match$sf, output_file = "processed_food_word_cloud_per_grade.png",title = "Processed food place names where larger text is more frequent", max_size =17) + +``` +
+
+![](../worksheets/processed_food_word_cloud_per_grade.png)
+
+## Part 2: Integrating Environmental Data
+
+### Data Processing
+- Use satellite imagery to analyze greenspace with NDVI, an index of vegetation quantity computed from the red and near-infrared bands (Nardone et al. worked with 2010 imagery; this worksheet streams recent Sentinel-2 scenes). See the short sketch below this list.
+- Apply methods to adjust for potential confounders as described in the study, ensuring that comparisons of greenspace across HOLC grades are valid and not biased by historical or socio-demographic factors.
+
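+
+For reference, NDVI is (NIR - Red) / (NIR + Red), ranging from -1 to 1 with higher values indicating denser vegetation. A minimal sketch of the same arithmetic with terra (`red` and `nir` are hypothetical single-band SpatRasters; the worksheet's own functions compute NDVI inside gdalcubes with `apply_pixel("(B08-B04)/(B08+B04)", "NDVI")`):
+
+```{r, eval=FALSE}
+# NDVI from two single-band rasters of matching extent and resolution
+ndvi_example <- (nir - red) / (nir + red)
+```
+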
    +Map natural habitats over each grade individually +```{r, cache=TRUE, warning=FALSE, message=FALSE} + layer1 <- denver_redlining + layer2 <- natural_habitats + natural_habitats_match <- process_and_plot_sf_layers(layer1, layer2, "natural_habitats_match.png") + print(natural_habitats_match$plot) +``` +
    +![](../worksheets/natural_habitats_match.png) + + +
    +WORD CLOUD: Name of natural habitat area +```{r, cache=TRUE} +natural_habitats_cloud <- create_wordclouds_by_grade(natural_habitats_match$sf, output_file = "natural_habitats_word_cloud_per_grade.png",title = "Natural habitats place names where larger text is more frequent", max_size =35) + +``` +
    +![](../worksheets/natural_habitats_word_cloud_per_grade.png) + + +
    +FUNCTION: Stream NDVI data +```{r, collapse=TRUE} +polygon_layer <- denver_redlining +# Function to process satellite data based on an SF polygon's extent +process_satellite_data <- function(polygon_layer, start_date, end_date, assets, fps = 1, output_file = "anim.gif") { + # Record start time + start_time <- Sys.time() + + # Calculate the bbox from the polygon layer + bbox <- st_bbox(polygon_layer) + + s = stac("https://earth-search.aws.element84.com/v0") + + + # Use stacR to search for Sentinel-2 images within the bbox and date range + items = s |> stac_search( + collections = "sentinel-s2-l2a-cogs", + bbox = c(bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]), + datetime = paste(start_date, end_date, sep = "/"), + limit = 500 + ) %>% + post_request() + + # Define mask for Sentinel-2 image quality + #S2.mask <- image_mask("SCL", values = c(3, 8, 9)) + + # Create a collection of images filtering by cloud cover + col <- stac_image_collection(items$features, asset_names = assets, property_filter = function(x) {x[["eo:cloud_cover"]] < 30}) + + # Define a view for processing the data + v <- cube_view(srs = "EPSG:4326", + extent = list(t0 = start_date, t1 = end_date, + left = bbox["xmin"], right = bbox["xmax"], + top = bbox["ymax"], bottom = bbox["ymin"]), + dx = 0.001, dy = 0.001, dt = "P1M", + aggregation = "median", resampling = "bilinear") + + # Calculate NDVI and create an animation + ndvi_col <- function(n) { + rev(sequential_hcl(n, "Green-Yellow")) + } + + #raster_cube(col, v, mask = S2.mask) %>% + raster_cube(col, v) %>% + select_bands(c("B04", "B08")) %>% + apply_pixel("(B08-B04)/(B08+B04)", "NDVI") %>% + gdalcubes::animate(col = ndvi_col, zlim = c(-0.2, 1), key.pos = 1, save_as = output_file, fps = fps) + + # Calculate processing time + end_time <- Sys.time() + processing_time <- difftime(end_time, start_time) + + # Return processing time + return(processing_time) +} + + +``` +
    + +
    +Stream NDVI data: animation +```{r, cache=TRUE, warning=FALSE, message=FALSE} +processing_time <- process_satellite_data(denver_redlining, "2022-05-31", "2023-05-31", c("B04", "B08")) + +``` +
    +![](../worksheets/anim.gif) + + + +
    +FUNCTION: Stream year average NDVI +```{r, collapse=TRUE} + + + +yearly_average_ndvi <- function(polygon_layer, output_file = "ndvi.png", dx = 0.01, dy = 0.01) { + # Record start time + start_time <- Sys.time() + + # Calculate the bbox from the polygon layer + bbox <- st_bbox(polygon_layer) + + s = stac("https://earth-search.aws.element84.com/v0") + + # Search for Sentinel-2 images within the bbox for June + items <- s |> stac_search( + collections = "sentinel-s2-l2a-cogs", + bbox = c(bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]), + datetime = "2023-01-01/2023-12-31", + limit = 500 + ) %>% + post_request() + + # Create a collection of images filtering by cloud cover + col <- stac_image_collection(items$features, asset_names = c("B04", "B08"), property_filter = function(x) {x[["eo:cloud_cover"]] < 80}) + + # Define a view for processing the data specifically for June + v <- cube_view(srs = "EPSG:4326", + extent = list(t0 = "2023-01-01", t1 = "2023-12-31", + left = bbox["xmin"], right = bbox["xmax"], + top = bbox["ymax"], bottom = bbox["ymin"]), + dx = dx, dy = dy, dt = "P1Y", + aggregation = "median", resampling = "bilinear") + + # Process NDVI + ndvi_rast <- raster_cube(col, v) %>% + select_bands(c("B04", "B08")) %>% + apply_pixel("(B08-B04)/(B08+B04)", "NDVI") %>% + write_tif() |> + terra::rast() + + + # Convert terra Raster to ggplot using tidyterra +ndvi_plot <- ggplot() + + geom_spatraster(data = ndvi_rast, aes(fill = NDVI)) + + scale_fill_viridis_c(option = "viridis", direction = -1, name = "NDVI") + + labs(title = "NDVI mean for 2023") + + theme_minimal() + + coord_sf() + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "right", + axis.text = element_blank(), + axis.title = element_blank(), + axis.ticks = element_blank(), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank()) + + # Save the plot as a high-resolution PNG file + ggsave(output_file, ndvi_plot, width = 10, height = 8, dpi = 600) + + # Calculate processing time + end_time <- Sys.time() + processing_time <- difftime(end_time, start_time) + + # Return the plot and processing time + return(list(plot = ndvi_plot, processing_time = processing_time, raster = ndvi_rast)) +} + + + +``` +
    + + +
    +Stream NDVI: high resolution +```{r, warning=FALSE, message=FALSE, cache=TRUE} +ndvi_background <- yearly_average_ndvi(denver_redlining,dx = 0.0001, dy = 0.0001) + +``` +
    + +![](../worksheets/ndvi_00001.png) + + + +
    +FUNCTION: Map NDVI per HOLC grade individually +```{r, collapse=TRUE} + + +create_mask_and_plot <- function(redlining_sf, background_raster = ndvi$raster, roads = NULL, rivers = NULL){ + start_time <- Sys.time() # Start timing + + # Validate and prepare the redlining data + redlining_sf <- redlining_sf %>% + filter(grade != "") %>% + st_make_valid() + + +bbox <- st_bbox(redlining_sf) # Get original bounding box + + +expanded_bbox <- expand_bbox(bbox, 6000, 1000) # + + +expanded_bbox_poly <- st_as_sfc(expanded_bbox, crs = st_crs(redlining_sf)) %>% + st_make_valid() + + # Initialize an empty list to store masks + masks <- list() + + # Iterate over each grade to create masks + unique_grades <- unique(redlining_sf$grade) + for (grade in unique_grades) { + # Filter polygons by grade + grade_polygons <- redlining_sf[redlining_sf$grade == grade, ] + + # Create an "inverted" mask by subtracting these polygons from the background + mask <- st_difference(expanded_bbox_poly, st_union(grade_polygons)) + + # Store the mask in the list with the grade as the name + masks[[grade]] <- st_sf(geometry = mask, grade = grade) + } + + # Combine all masks into a single sf object + mask_sf <- do.call(rbind, masks) + + # Normalize the grades so that C.2 becomes C, but correctly handle other grades + mask_sf$grade <- ifelse(mask_sf$grade == "C.2", "C", mask_sf$grade) + + # Prepare the plot + plot <- ggplot() + + geom_spatraster(data = background_raster, aes(fill = NDVI)) + + scale_fill_viridis_c(name = "NDVI", option = "viridis", direction = -1) + + + geom_sf(data = mask_sf, aes(color = grade), fill = "white", size = 0.1, show.legend = FALSE) + + scale_color_manual(values = c("A" = "white", "B" = "white", "C" = "white", "D" = "white"), name = "Grade") + + facet_wrap(~ grade, nrow = 1) + + geom_sf(data = roads, alpha = 1, lwd = 0.1, color="white") + + geom_sf(data = rivers, color = "white", alpha = 0.5, lwd = 1.1) + + labs(title = "NDVI: Normalized Difference Vegetation Index") + + theme_minimal() + + coord_sf(xlim = c(bbox["xmin"], bbox["xmax"]), + ylim = c(bbox["ymin"], bbox["ymax"]), + expand = FALSE) + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "bottom", + axis.text = element_blank(), + axis.title = element_blank(), + axis.ticks = element_blank(), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank()) + + # Save the plot + ggsave("redlining_mask_ndvi.png", plot, width = 10, height = 4, dpi = 600) + + end_time <- Sys.time() # End timing + runtime <- end_time - start_time + + # Return the plot and runtime + return(list(plot = plot, runtime = runtime, mask_sf = mask_sf)) +} + +``` +
    + +
    +Stream NDVI: low resolution +```{r, cache=TRUE} +ndvi_background_low <- yearly_average_ndvi(denver_redlining) +``` +
    +![](../worksheets/ndvi.png) +
    +Map low resolution NDVI per HOLC grade +```{r, cache=TRUE} +ndvi <- create_mask_and_plot(denver_redlining, background_raster = ndvi_background_low$raster, roads = roads, rivers = rivers) +``` +
    +![](../worksheets/redlining_mask_ndvi.png) + + + + +
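+
+With a yearly NDVI raster and the residential HOLC polygons both available, a zonal summary gives a first quantitative look at how greenspace differs by grade. This is a minimal sketch rather than part of the worksheet's formal analysis; it assumes the `ndvi_background_low` object created above and uses `terra::extract()` for the polygon means (polygons falling partly outside the raster simply contribute fewer cells).
+
+```{r, eval=FALSE}
+# Mean NDVI per HOLC polygon, then averaged by grade
+denver_residential <- denver_redlining |> filter(grade != "")
+
+ndvi_by_polygon <- terra::extract(
+  ndvi_background_low$raster,
+  terra::vect(denver_residential),
+  fun = mean, na.rm = TRUE
+)
+
+denver_residential |>
+  sf::st_drop_geometry() |>
+  mutate(mean_NDVI = ndvi_by_polygon$NDVI) |>
+  group_by(grade) |>
+  summarise(mean_NDVI = mean(mean_NDVI, na.rm = TRUE), n_polygons = n())
+```
+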
    +FUNCTION: Map Denver City provided data per HOLC grade +```{r} + + +process_city_inventory_data <- function(address, inner_file, polygon_layer, output_filename,variable_label= 'Tree Density') { + # Download and read the shapefile + full_path <- glue("/vsizip/vsicurl/{address}/{inner_file}") + shape_data <- st_read(full_path, quiet = TRUE) |> st_as_sf() + + # Process the shape data with the provided polygon layer + processed_data <- process_and_plot_sf_layers(polygon_layer, shape_data, paste0(output_filename, ".png")) + + # Extract trees from the processed data + trees <- processed_data$sf + denver_redlining_residential <- polygon_layer |> filter(grade != "") + + # Generate the density plot + plot <- ggplot() + + geom_sf(data = roads, alpha = 0.05, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) + + geom_sf(data = denver_redlining_residential, fill = "grey", color = "grey", size = 0.1) + + facet_wrap(~ grade, nrow = 1) + + stat_density_2d(data = trees, + mapping = aes(x = map_dbl(geometry, ~.[1]), + y = map_dbl(geometry, ~.[2]), + fill = stat(density)), + geom = 'tile', + contour = FALSE, + alpha = 0.9) + + scale_fill_gradientn(colors = c("transparent", "white", "limegreen"), + values = scales::rescale(c(0, 0.1, 1)), # Adjust these based on your density range + guide = "colourbar") + + theme_minimal() + + labs(fill = variable_label) + + theme_tufte() + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "bottom", + axis.text = element_blank(), + axis.title = element_blank(), + axis.ticks = element_blank(), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank()) + + # Save the plot + ggsave(paste0(output_filename, "_density_plot.png"), plot, width = 10, height = 4, units = "in", dpi = 600) + + # Return the plot and the tree layer + return(list(plot = plot, layer = trees)) +} + + + +``` +
    + + + + +
    +Map tree inventory per HOLC grade +```{r, cache=TRUE} +result <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/tree_inventory/shape/tree_inventory.zip", + "tree_inventory.shp", + denver_redlining, + "Denver_tree_inventory_2023" +) +``` +
    + +![](../worksheets/Denver_tree_inventory_2023.png) +![](../worksheets/Denver_tree_inventory_2023_density_plot.png) + +
    +Map traffic accidents per HOLC grade +```{r, cache=TRUE} +result <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/traffic_accidents/shape/traffic_accidents.zip", + "traffic_accidents.shp", + denver_redlining, + "Denver_traffic_accidents", + variable_label= 'Traffic accidents density' +) +``` +
    +![](../worksheets/Denver_traffic_accidents.png) +![](../worksheets/Denver_traffic_accidents_density_plot.png) + +
    +Map stream sampling effort per HOLC grade +```{r, cache=TRUE} +instream_sampling_sites <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/instream_sampling_sites/shape/instream_sampling_sites.zip", + "instream_sampling_sites.shp", + denver_redlining, + "instream_sampling_sites", + variable_label= 'Instream sampling sites density' +) +``` +
    +![](../worksheets/instream_sampling_sites.png) +![](../worksheets/instream_sampling_sites_density_plot.png) + +
    +Map soil sampling effort per HOLC grade +```{r, cache=TRUE} +soil_samples <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/soil_samples/shape/soil_samples.zip", + "soil_samples.shp", + denver_redlining, + "Soil samples", + variable_label= 'soil samples density' +) +``` +
    +![](../worksheets/Soil samples.png) +![](../worksheets/Soil samples_density_plot.png) + +
    +Map public art density per HOLC grade +```{r, cache=TRUE} +public_art <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/public_art/shape/public_art.zip", + "public_art.shp", + denver_redlining, + "Public art ", + variable_label= 'Public art density' +) +``` +
    + +![](../worksheets/Public art .png) +![](../worksheets/Public art _density_plot.png) + +
    +Map liquor licenses density per HOLC grade +```{r, cache=TRUE} +liquor_licenses <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/liquor_licenses/shape/liquor_licenses.zip", + "liquor_licenses.shp", + denver_redlining, + "liquor licenses ", + variable_label= 'liquor licenses density' +) +``` +
    +![](../worksheets/liquor licenses .png) +![](../worksheets/liquor licenses _density_plot.png) + + +
    +Map crime density per HOLC grade +```{r, cache=TRUE} +Crime <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/crime/shape/crime.zip", + "crime.shp", + denver_redlining, + "crime", + variable_label= 'Crime density' +) +``` +
    + +![](../worksheets/crime.png) +![](../worksheets/crime_density_plot.png) +
    +WORD CLOUD: Types of crimes +```{r, cache=TRUE} +crime_cloud <- create_wordclouds_by_grade(Crime$layer, output_file = "Crime_word_cloud_per_grade.png",title = "Crime type where larger text is more frequent", max_size =25, col_select = "OFFENSE_TY") + +``` +
    + +![](../worksheets/Crime_word_cloud_per_grade.png) + + + +
    +Map police shooting density per HOLC grade +```{r, cache=TRUE} +Denver_police_shootings <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/denver_police_officer_involved_shootings/shape/denver_police_officer_involved_shootings.zip", + "denver_police_officer_involved_shootings.shp", + denver_redlining, + "Police shootings", + variable_label= 'Police shootings density' +) + +``` +
    + +![](../worksheets/Police shootings.png) + +**Not enough data for density across all 4** + +
    +WORD CLOUD: Police involved shootings +```{r, cache=TRUE} +Denver_police_shootings_cloud <- create_wordclouds_by_grade(Denver_police_shootings$layer, output_file = "police_shootings_word_cloud_per_grade.png",title = "police involved shooting per crime type where larger text is more frequent", max_size =35, col_select = "SHOOT_ACTI") + +``` + +
    + +![](../worksheets/police_shootings_word_cloud_per_grade.png) + + + + +## Part 3: Comparative Analysis and Visualization + +### Statistical Analysis +- Conduct a detailed statistical analysis to compare greenspace across different HOLC grades, using techniques like Targeted Maximum Likelihood Estimation (TMLE) to assess the association between historical redlining and current greenspace levels. +- Visualize the disparities in greenspace distribution using GIS tools, highlighting how redlining has shaped urban ecological landscapes. + +## Conclusion + +This tutorial provides tools and methodologies to explore the lingering effects of historic redlining on urban greenspace, offering insights into the intersection of urban planning, environmental justice, and public health. + +### References + +- Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. (2021). Redlines and Greenspace: The Relationship between Historical Redlining and 2010 Greenspace across the United States. *Environmental Health Perspectives*, 129(1), 017006. DOI:10.1289/EHP7495. [Available online](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7839347/pdf/ehp7495.pdf) diff --git a/worksheets/worksheet_redlining/index.html b/worksheets/worksheet_redlining/index.html new file mode 100644 index 0000000..eec3a0b --- /dev/null +++ b/worksheets/worksheet_redlining/index.html @@ -0,0 +1,3013 @@ + + + + + + + + + + + + + + + + + + + + + + Explore an example (teacher edition) - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Redlining

    +

    Exploring the Impact of Historical Redlining on Urban Greenspace: A Collaborative Examination of Maps, Justice, and Resilience

    +

    Introduction

    +

    This group exploration delves into the long-term impacts of historical +redlining on urban greenspace, emphasizing the powerful role of maps in +shaping environmental and social landscapes. By drawing on the research +by Nardone et al. (2021), you will collaboratively investigate how +discriminatory practices encoded in maps have led to persistent +disparities in urban settings. This exploration aims to uncover the +resilience of communities in adapting to these entrenched injustices and +to foster a deeper understanding of how mapping can serve both as a tool +of exclusion and as a means for promoting social equity.

    +

    1938 Map of Atlanta uses colors as grades for neighborhoods. The red
+swaths identify each area with large African-American populations that
+were deemed “less safe.”

    +

    Understanding Redlining as a Systemic Disturbance

    +

    Redlining originated in the 1930s as a discriminatory practice where the +Home Owners’ Loan Corporation (HOLC) systematically denied mortgages or +offered unfavorable terms based on racial and ethnic compositions. This +methodical exclusion, executed through maps that color-coded “risky” +investment areas in red, marked minority-populated areas, denying them +crucial investment and development opportunities and initiating a +profound and lasting disturbance in the urban fabric.

    +

    Maps serve as powerful tools beyond navigation; they communicate and +enforce control. By defining neighborhood boundaries through redlining, +HOLC maps not only mirrored societal biases but also perpetuated and +embedded them into the urban landscape. This manipulation of geographic +data set a trajectory that limited economic growth, dictated the +allocation of services, and influenced the development or deterioration +of community infrastructure.

    +

    Figure 1: 1938 Map of Atlanta uses colors as grades for +neighborhoods. The red swaths identify each area with large +African-American populations that were deemed “less safe.”

    +

    +

    ArcGIS Story Map

    +

    Explore the Story Map: Click on the image above to explore the +interactive story map about [subject of the story map].

    +

    Resilience and Adaptation in Urban Environments

    +

    The legacy of redlining presents both a challenge and an opportunity for +resilience and adaptation. Economically and socially, redlining +entrenched cycles of poverty and racial segregation, creating a +resilient wealth gap that has been difficult to dismantle. +Environmentally, the neighborhoods targeted by redlining continue to +face significant challenges—they generally feature less greenspace, +suffer from higher pollution levels, and are more vulnerable to the +impacts of climate change. These factors compound the health and +wellness challenges faced by residents.

    +

    Despite these adversities, urban communities have continually +demonstrated remarkable resilience. Adaptation strategies, such as +community-led green initiatives, urban agriculture, and grassroots +activism, have emerged as responses to these systemic disturbances. By +enhancing green infrastructure and advocating for equitable +environmental policies, these communities strive to increase their +resilience against both historical inequities and environmental +challenges.

    +

    +

    Watch the video

    +

    Video Title: Exploring the Impacts of Historical Redlining on Urban +Development
    +Description: Click on the image above to watch a video that delves +into the consequences of historical redlining and its ongoing impact on +urban environments. This educational piece offers insights into how such +discriminatory practices have shaped cities and what can be learned from +them.

    +

    The following group exercise will not only uncover the impact of +redlining on urban greenspace but also highlight the adaptive strategies +developed in response to this enduring disturbance. Through mapping and +analysis, we aim to illustrate the powerful role that geographic data +can play in understanding and fostering urban resilience and social +equity.

    +

    References

    +
      +
    • Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. + (2021). Redlines and Greenspace: The Relationship between Historical + Redlining and 2010 Greenspace across the United States. + Environmental Health Perspectives, 129(1), 017006. + DOI:10.1289/EHP7495.
    • Hoffman, J. S., Shandas, V., & Pendleton, N. (2020). The Effects of + Historical Housing Policies on Resident Exposure to Intra-Urban + Heat: A Study of 108 US Urban Areas. Climate, 8(1), 12. + DOI:10.3390/cli8010012.
    +

    Goals of this group activity

    +

    The primary objectives of this tutorial are: 1. To practice coding in +CyVerse. 2. To analyze the relationship between HOLC grades and the +presence of urban greenspace. 3. To understand how historic policies +continue to affect the spatial distribution of environmental amenities.

    +

    Part 1: Accessing and Visualizing Historic Redlining Data

    +

    We will begin by accessing HOLC maps from the Mapping Inequality project +and overlaying this data with modern geographic datasets to visualize +the historical impact on contemporary urban landscapes.

    +

    Data Acquisition

    +
      +
    • Download HOLC map shapefiles from the University of Richmond’s + Mapping Inequality Project.
    • Utilize satellite imagery and other geospatial data to map current + greenspace using the normalized difference vegetation index (NDVI).
    +

    Analysis Methodology

    +
      +
    • Replicate the approach used by Nardone et al. to calculate NDVI + values for each HOLC neighborhood, assessing greenspace as a + health-promoting resource.
    • Employ statistical methods such as propensity score matching to + control for confounding variables and estimate the true impact of + HOLC grades on urban greenspace.
    + +
    + +R libraries we use in this analysis + + +
    if (!requireNamespace("tidytext", quietly = TRUE)) {
    +  install.packages("tidytext")
    +}
    +library(tidytext)
    +## Warning: package 'tidytext' was built under R version 4.3.2
    +library(sf)
    +## Warning: package 'sf' was built under R version 4.3.2
    +## Linking to GEOS 3.11.0, GDAL 3.5.3, PROJ 9.1.0; sf_use_s2() is TRUE
    +library(ggplot2)
    +## Warning: package 'ggplot2' was built under R version 4.3.2
    +library(ggthemes)
    +## Warning: package 'ggthemes' was built under R version 4.3.2
    +library(dplyr)
    +## 
    +## Attaching package: 'dplyr'
    +## The following objects are masked from 'package:stats':
    +## 
    +##     filter, lag
    +## The following objects are masked from 'package:base':
    +## 
    +##     intersect, setdiff, setequal, union
    +library(rstac)
    +## Warning: package 'rstac' was built under R version 4.3.2
    +library(gdalcubes)
    +## Warning: package 'gdalcubes' was built under R version 4.3.2
    +library(gdalUtils)
    +## Please note that rgdal will be retired during October 2023,
    +## plan transition to sf/stars/terra functions using GDAL and PROJ
    +## at your earliest convenience.
    +## See https://r-spatial.org/r/2023/05/15/evolution4.html and https://github.com/r-spatial/evolution
    +## rgdal: version: 1.6-7, (SVN revision 1203)
    +## Geospatial Data Abstraction Library extensions to R successfully loaded
    +## Loaded GDAL runtime: GDAL 3.5.3, released 2022/10/21
    +## Path to GDAL shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/rgdal/gdal
    +##  GDAL does not use iconv for recoding strings.
    +## GDAL binary built with GEOS: TRUE 
    +## Loaded PROJ runtime: Rel. 9.1.0, September 1st, 2022, [PJ_VERSION: 910]
    +## Path to PROJ shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/gdalcubes/proj
    +## PROJ CDN enabled: FALSE
    +## Linking to sp version:1.6-1
    +## To mute warnings of possible GDAL/OSR exportToProj4() degradation,
    +## use options("rgdal_show_exportToProj4_warnings"="none") before loading sp or rgdal.
    +## 
    +## Attaching package: 'gdalUtils'
    +## The following object is masked from 'package:sf':
    +## 
    +##     gdal_rasterize
    +library(gdalcubes)
    +library(colorspace)
    +library(terra)
    +## Warning: package 'terra' was built under R version 4.3.2
    +## terra 1.7.71
    +## 
    +## Attaching package: 'terra'
    +## The following object is masked from 'package:colorspace':
    +## 
    +##     RGB
    +## The following objects are masked from 'package:gdalcubes':
    +## 
    +##     animate, crop, size
    +library(tidyterra)
    +## 
    +## Attaching package: 'tidyterra'
    +## The following object is masked from 'package:stats':
    +## 
    +##     filter
    +library(basemapR)
    +library(tidytext)
    +library(ggwordcloud)
    +library(osmextract)
    +## Data (c) OpenStreetMap contributors, ODbL 1.0. https://www.openstreetmap.org/copyright.
    +## Check the package website, https://docs.ropensci.org/osmextract/, for more details.
    +library(sf)
    +library(ggplot2)
    +library(ggthemes)
    +library(glue)
    +## 
    +## Attaching package: 'glue'
    +## The following object is masked from 'package:terra':
    +## 
    +##     trim
    +
    +library(purrr)
    +
    + +
    +
    + +FUNCTION: List cities where HOLC data are available + + +
    # Function to get a list of unique cities and states from the redlining data
    +get_city_state_list_from_redlining_data <- function() {
    +  # URL to the GeoJSON data
    +  url <- "https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson"
    +
    +  # Read the GeoJSON file into an sf object
    +  redlining_data <- tryCatch({
    +    read_sf(url)
    +  }, error = function(e) {
    +    stop("Error reading GeoJSON data: ", e$message)
    +  })
    +
    +  # Check for the existence of 'city' and 'state' columns
    +  if (!all(c("city", "state") %in% names(redlining_data))) {
    +    stop("The required columns 'city' and/or 'state' do not exist in the data.")
    +  }
    +
    +  # Extract a unique list of city and state pairs without the geometries
    +  city_state_df <- redlining_data %>%
    +    select(city, state) %>%
    +    st_set_geometry(NULL) %>%  # Drop the geometry to avoid issues with invalid shapes
    +    distinct(city, state) %>%
    +    arrange(state, city )  # Arrange the list alphabetically by state, then by city
    +
    +  # Return the dataframe of unique city-state pairs
    +  return(city_state_df)
    +}
    +
    + +
    +
    + +Stream list of available HOLC cities + + +
    #Retrieve the list of cities and states
    +city_state_list <- get_city_state_list_from_redlining_data()
    +
    +knitr::kable(city_state_list, format = "markdown")
    +
    + +| city | state | +|:---------------------------------|:------| +| Birmingham | AL | +| Mobile | AL | +| Montgomery | AL | +| Arkadelphia | AR | +| Batesville | AR | +| Camden | AR | +| Conway | AR | +| El Dorado | AR | +| Fort Smith | AR | +| Little Rock | AR | +| Russellville | AR | +| Texarkana | AR | +| Phoenix | AZ | +| Fresno | CA | +| Los Angeles | CA | +| Oakland | CA | +| Sacramento | CA | +| San Diego | CA | +| San Francisco | CA | +| San Jose | CA | +| Stockton | CA | +| Boulder | CO | +| Colorado Springs | CO | +| Denver | CO | +| Fort Collins | CO | +| Fort Morgan | CO | +| Grand Junction | CO | +| Greeley | CO | +| Longmont | CO | +| Pueblo | CO | +| Bridgeport and Fairfield | CT | +| Hartford | CT | +| New Britain | CT | +| New Haven | CT | +| Stamford, Darien, and New Canaan | CT | +| Waterbury | CT | +| Crestview | FL | +| Daytona Beach | FL | +| DeFuniak Springs | FL | +| DeLand | FL | +| Jacksonville | FL | +| Miami | FL | +| New Smyrna | FL | +| Orlando | FL | +| Pensacola | FL | +| St. Petersburg | FL | +| Tampa | FL | +| Atlanta | GA | +| Augusta | GA | +| Columbus | GA | +| Macon | GA | +| Savannah | GA | +| Boone | IA | +| Cedar Rapids | IA | +| Council Bluffs | IA | +| Davenport | IA | +| Des Moines | IA | +| Dubuque | IA | +| Sioux City | IA | +| Waterloo | IA | +| Aurora | IL | +| Chicago | IL | +| Decatur | IL | +| East St. Louis | IL | +| Joliet | IL | +| Peoria | IL | +| Rockford | IL | +| Springfield | IL | +| Evansville | IN | +| Fort Wayne | IN | +| Indianapolis | IN | +| Lake Co. Gary | IN | +| Muncie | IN | +| South Bend | IN | +| Terre Haute | IN | +| Atchison | KS | +| Junction City | KS | +| Topeka | KS | +| Wichita | KS | +| Covington | KY | +| Lexington | KY | +| Louisville | KY | +| New Orleans | LA | +| Shreveport | LA | +| Arlington | MA | +| Belmont | MA | +| Boston | MA | +| Braintree | MA | +| Brockton | MA | +| Brookline | MA | +| Cambridge | MA | +| Chelsea | MA | +| Dedham | MA | +| Everett | MA | +| Fall River | MA | +| Fitchburg | MA | +| Haverhill | MA | +| Holyoke Chicopee | MA | +| Lawrence | MA | +| Lexington | MA | +| Lowell | MA | +| Lynn | MA | +| Malden | MA | +| Medford | MA | +| Melrose | MA | +| Milton | MA | +| Needham | MA | +| New Bedford | MA | +| Newton | MA | +| Pittsfield | MA | +| Quincy | MA | +| Revere | MA | +| Salem | MA | +| Saugus | MA | +| Somerville | MA | +| Springfield | MA | +| Waltham | MA | +| Watertown | MA | +| Winchester | MA | +| Winthrop | MA | +| Worcester | MA | +| Baltimore | MD | +| Augusta | ME | +| Boothbay | ME | +| Portland | ME | +| Sanford | ME | +| Waterville | ME | +| Battle Creek | MI | +| Bay City | MI | +| Detroit | MI | +| Flint | MI | +| Grand Rapids | MI | +| Jackson | MI | +| Kalamazoo | MI | +| Lansing | MI | +| Muskegon | MI | +| Pontiac | MI | +| Saginaw | MI | +| Austin | MN | +| Duluth | MN | +| Mankato | MN | +| Minneapolis | MN | +| Rochester | MN | +| St. Cloud | MN | +| St. Paul | MN | +| Staples | MN | +| Cape Girardeau | MO | +| Carthage | MO | +| Greater Kansas City | MO | +| Joplin | MO | +| Springfield | MO | +| St. Joseph | MO | +| St. 
Louis | MO | +| Jackson | MS | +| Asheville | NC | +| Charlotte | NC | +| Durham | NC | +| Elizabeth City | NC | +| Fayetteville | NC | +| Goldsboro | NC | +| Greensboro | NC | +| Hendersonville | NC | +| High Point | NC | +| New Bern | NC | +| Rocky Mount | NC | +| Statesville | NC | +| Winston-Salem | NC | +| Fargo | ND | +| Grand Forks | ND | +| Minot | ND | +| Williston | ND | +| Lincoln | NE | +| Omaha | NE | +| Manchester | NH | +| Atlantic City | NJ | +| Bergen Co. | NJ | +| Camden | NJ | +| Essex Co. | NJ | +| Hudson Co. | NJ | +| Monmouth | NJ | +| Passaic County | NJ | +| Perth Amboy | NJ | +| Trenton | NJ | +| Union Co. | NJ | +| Albany | NY | +| Binghamton-Johnson City | NY | +| Bronx | NY | +| Brooklyn | NY | +| Buffalo | NY | +| Elmira | NY | +| Jamestown | NY | +| Lower Westchester Co. | NY | +| Manhattan | NY | +| Niagara Falls | NY | +| Poughkeepsie | NY | +| Queens | NY | +| Rochester | NY | +| Schenectady | NY | +| Staten Island | NY | +| Syracuse | NY | +| Troy | NY | +| Utica | NY | +| Akron | OH | +| Canton | OH | +| Cleveland | OH | +| Columbus | OH | +| Dayton | OH | +| Hamilton | OH | +| Lima | OH | +| Lorain | OH | +| Portsmouth | OH | +| Springfield | OH | +| Toledo | OH | +| Warren | OH | +| Youngstown | OH | +| Ada | OK | +| Alva | OK | +| Enid | OK | +| Miami Ottawa County | OK | +| Muskogee | OK | +| Norman | OK | +| Oklahoma City | OK | +| South McAlester | OK | +| Tulsa | OK | +| Portland | OR | +| Allentown | PA | +| Altoona | PA | +| Bethlehem | PA | +| Chester | PA | +| Erie | PA | +| Harrisburg | PA | +| Johnstown | PA | +| Lancaster | PA | +| McKeesport | PA | +| New Castle | PA | +| Philadelphia | PA | +| Pittsburgh | PA | +| Wilkes-Barre | PA | +| York | PA | +| Pawtucket & Central Falls | RI | +| Providence | RI | +| Woonsocket | RI | +| Aiken | SC | +| Charleston | SC | +| Columbia | SC | +| Greater Anderson | SC | +| Greater Greenville | SC | +| Orangeburg | SC | +| Rock Hill | SC | +| Spartanburg | SC | +| Sumter | SC | +| Aberdeen | SD | +| Huron | SD | +| Milbank | SD | +| Mitchell | SD | +| Rapid City | SD | +| Sioux Falls | SD | +| Vermillion | SD | +| Watertown | SD | +| Chattanooga | TN | +| Elizabethton | TN | +| Erwin | TN | +| Greenville | TN | +| Johnson City | TN | +| Knoxville | TN | +| Memphis | TN | +| Nashville | TN | +| Amarillo | TX | +| Austin | TX | +| Beaumont | TX | +| Dallas | TX | +| El Paso | TX | +| Fort Worth | TX | +| Galveston | TX | +| Houston | TX | +| Port Arthur | TX | +| San Antonio | TX | +| Waco | TX | +| Wichita Falls | TX | +| Ogden | UT | +| Salt Lake City | UT | +| Bristol | VA | +| Danville | VA | +| Harrisonburg | VA | +| Lynchburg | VA | +| Newport News | VA | +| Norfolk | VA | +| Petersburg | VA | +| Phoebus | VA | +| Richmond | VA | +| Roanoke | VA | +| Staunton | VA | +| Bennington | VT | +| Brattleboro | VT | +| Burlington | VT | +| Montpelier | VT | +| Newport City | VT | +| Poultney | VT | +| Rutland | VT | +| Springfield | VT | +| St. Albans | VT | +| St. Johnsbury | VT | +| Windsor | VT | +| Seattle | WA | +| Spokane | WA | +| Tacoma | WA | +| Kenosha | WI | +| Madison | WI | +| Milwaukee Co. | WI | +| Oshkosh | WI | +| Racine | WI | +| Charleston | WV | +| Huntington | WV | +| Wheeling | WV | + +
    +
    + +FUNCTION: Stream HOLC data from a city + + +
    # Function to load and filter redlining data by city
    +load_city_redlining_data <- function(city_name) {
    +  # URL to the GeoJSON data
    +  url <- "https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson"
    +
    +  # Read the GeoJSON file into an sf object
    +  redlining_data <- read_sf(url)
    +
+  # Filter the data for the specified city
    +
    +  city_redline <- redlining_data %>%
    +    filter(city == city_name )
    +
    +  # Return the filtered data
    +  return(city_redline)
    +}
    +
    + +
    +
    + +Stream HOLC data for Denver, CO + + +
    # Load redlining data for Denver
    +denver_redlining <- load_city_redlining_data("Denver")
    +knitr::kable(head(denver_redlining), format = "markdown")
    +
    + +| area_id | city | state | city_survey | cat | grade | label | res | com | ind | fill | GEOID10 | GISJOIN | calc_area | pct_tract | geometry | +|--------:|:-------|:------|:------------|:-----|:------|:------|:-----|:------|:------|:---------|:------------|:---------------|-------------:|----------:|:-----------------------------| +| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004104 | G0800310004104 | 1.525535e+01 | 0.00001 | MULTIPOLYGON (((-104.9125 3… | +| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004201 | G0800310004201 | 3.987458e+05 | 0.20900 | MULTIPOLYGON (((-104.9246 3… | +| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004304 | G0800310004304 | 1.554195e+05 | 0.05927 | MULTIPOLYGON (((-104.9125 3… | +| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004202 | G0800310004202 | 1.117770e+06 | 0.57245 | MULTIPOLYGON (((-104.9125 3… | +| 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \#76a865 | 08031004302 | G0800310004302 | 3.133415e+05 | 0.28381 | MULTIPOLYGON (((-104.928 39… | +| 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \#76a865 | 08031004301 | G0800310004301 | 1.221218e+05 | 0.08622 | MULTIPOLYGON (((-104.9305 3… | + +
    +
    + +FUNCTION: Get Points-of-Interest from city of interest + + +
    get_places <- function(polygon_layer, type = "food" ) {
    +  # Check if the input is an sf object
    +  if (!inherits(polygon_layer, "sf")) {
    +    stop("The provided object is not an sf object.")
    +  }
    +
    +  # Create a bounding box from the input sf object
    +  bbox_here <- st_bbox(polygon_layer) |>
    +    st_as_sfc()
    +
    +  if(type == "food"){
    +    my_layer <- "multipolygons"
    +    my_query <- "SELECT * FROM multipolygons WHERE (
    +                 shop IN ('supermarket', 'bodega', 'market', 'other_market', 'farm', 'garden_centre', 'doityourself', 'farm_supply', 'compost', 'mulch', 'fertilizer') OR
    +                 amenity IN ('social_facility', 'market', 'restaurant', 'coffee') OR
    +                 leisure = 'garden' OR
    +                 landuse IN ('farm', 'farmland', 'row_crops', 'orchard_plantation', 'dairy_grazing') OR
    +                 building IN ('brewery', 'winery', 'distillery') OR
    +                 shop = 'greengrocer' OR
    +                 amenity = 'marketplace'
    +               )"
    +    title <- "food"
    +  }
    +
    +  if (type == "processed_food") {
    +    my_layer <- "multipolygons"
    +    my_query <- "SELECT * FROM multipolygons WHERE (
    +                   amenity IN ('fast_food', 'cafe', 'pub') OR
    +                   shop IN ('convenience', 'supermarket') OR
    +                   shop = 'kiosk'
    +                 )"
    +    title <- "Processed Food Locations"
    +}
    +
    +  if(type == "natural_habitats"){
    +    my_layer <- "multipolygons"
    +    my_query <- "SELECT * FROM multipolygons WHERE (
    +             boundary = 'protected_area' OR
    +             natural IN ('tree', 'wood') OR
    +             landuse = 'forest' OR
    +             leisure = 'park'
    +           )"
    +    title <- "Natural habitats or City owned trees"
    +  }
    +
    +   if(type == "roads"){
    +    my_layer <- "lines"
    +    my_query <- "SELECT * FROM lines WHERE (
    +             highway IN ('motorway', 'trunk', 'primary', 'secondary', 'tertiary') )"
    +    title <- "Major roads"
    +   }
    +
    +  if(type == "rivers"){
    +    my_layer <- "lines"
    +    my_query <- "SELECT * FROM lines WHERE (
    +             waterway IN ('river'))"
    +    title <- "Major rivers"
    +  }
    +
    +  if(type == "internet_access") {
    +    my_layer <- "multipolygons"
    +    my_query <- "SELECT * FROM multipolygons WHERE (
    +                 amenity IN ('library', 'cafe', 'community_centre', 'public_building') AND
    +                 internet_access = 'yes' 
    +               )"
    +    title <- "Internet Access Locations"
    +}
    +
    +  if(type == "water_bodies") {
    +    my_layer <- "multipolygons"
    +    my_query <- "SELECT * FROM multipolygons WHERE (
    +                 natural IN ('water', 'lake', 'pond') OR
    +                 water IN ('lake', 'pond') OR
    +                 landuse = 'reservoir'
    +               )"
    +    title <- "Water Bodies"
    +}
    +
    + if(type == "government_buildings") {
    +    my_layer <- "multipolygons"
    +    my_query <- "SELECT * FROM multipolygons WHERE (
    +                 amenity IN ('townhall', 'courthouse', 'embassy', 'police', 'fire_station') OR
    +                 building IN ('capitol', 'government')
    +               )"
    +    title <- "Government Buildings"
    +}
    +
    +
    +
    +  # Use the bbox to get data with oe_get(), specifying the desired layer and a custom SQL query for fresh food places
    +  tryCatch({
    +    places <- oe_get(
    +      place = bbox_here,
    +      layer = my_layer,  # Adjusted layer; change as per actual data availability
    +      query = my_query,
    +      quiet = TRUE
    +    )
    +
    +  places <- st_make_valid(places)
    +
    +    # Crop the data to the bounding box
    +    cropped_places <- st_crop(places, bbox_here)
    +
    +    # Plotting the cropped fresh food places
    +    plot <- ggplot(data = cropped_places) +
    +      geom_sf(fill="cornflowerblue", color="cornflowerblue") +
    +      ggtitle(title) +
    +  theme_tufte()+
    +  theme(legend.position = "none",  # Optionally hide the legend
    +        axis.text = element_blank(),     # Remove axis text
    +        axis.title = element_blank(),    # Remove axis titles
    +        axis.ticks = element_blank(),    # Remove axis ticks
    +         plot.background = element_rect(fill = "white", color = NA),  # Set the plot background to white
    +        panel.background = element_rect(fill = "white", color = NA),  # Set the panel background to white
    +        panel.grid.major = element_blank(),  # Remove major grid lines
+        panel.grid.minor = element_blank()
+        )
    +
    +    # Save the plot as a PNG file
    +    png_filename <- paste0(title,"_", Sys.Date(), ".png")
    +    ggsave(png_filename, plot, width = 10, height = 8, units = "in")
    +
    +    # Return the cropped dataset
    +    return(cropped_places)
    +  }, error = function(e) {
    +    stop("Failed to retrieve or plot data: ", e$message)
    +  })
    +}
    +
    + +
    +
    + +FUNCTION: Plot POI over HOLC grades + + +
    plot_city_redlining <- function(redlining_data, filename = "redlining_plot.png") {
    +  # Fetch additional geographic data based on redlining data
    +  roads <- get_places(redlining_data, type = "roads")
    +  rivers <- get_places(redlining_data, type = "rivers")
    +
    +  # Filter residential zones with valid grades and where city survey is TRUE
    +  residential_zones <- redlining_data %>%
    +    filter(city_survey == TRUE & grade != "") 
    +
    +  # Colors for the grades
    +  colors <- c("#76a865", "#7cb5bd", "#ffff00", "#d9838d")
    +
    +  # Plot the data using ggplot2
    +  plot <- ggplot() +
    +    geom_sf(data = roads, lwd = 0.1) +
    +    geom_sf(data = rivers, color = "blue", alpha = 0.5, lwd = 1.1) +
    +    geom_sf(data = residential_zones, aes(fill = grade), alpha = 0.5) +
    +    theme_tufte() +
    +    scale_fill_manual(values = colors) +
    +    labs(fill = 'HOLC Categories') +
    +    theme(
    +      plot.background = element_rect(fill = "white", color = NA),
    +      panel.background = element_rect(fill = "white", color = NA),
    +      panel.grid.major = element_blank(),
    +      panel.grid.minor = element_blank(),
    +      legend.position = "right"
    +    )
    +
    +  # Save the plot as a high-resolution PNG file
    +  ggsave(filename, plot, width = 10, height = 8, units = "in", dpi = 600)
    +
    +  # Return the plot object if needed for further manipulation or checking
    +  return(plot)
    +}
    +
    + +
    +
    + +Plot Denver Redlining + + +
    denver_plot <- plot_city_redlining(denver_redlining)
    +
    + +
    + +

    +
    + +Stream amenities by category + + +
    food <- get_places(denver_redlining, type="food")
    +
    +food_processed <- get_places(denver_redlining, type="processed_food")
    +
    +natural_habitats <- get_places(denver_redlining, type="natural_habitats")
    +
    +roads <- get_places(denver_redlining, type="roads")
    +
    +rivers <- get_places(denver_redlining, type="rivers")
    +
    +#water_bodies <- get_places(denver_redlining, type="water_bodies")
    +
    +government_buildings <- get_places(denver_redlining, type="government_buildings")
    +
    + +
    +
    + +FUNCTION: Plot the HOLC grades individually + + +
    split_plot <- function(sf_data, roads, rivers) {
    +  # Filter for grades A, B, C, and D
    +  sf_data_filtered <- sf_data %>% 
    +    filter(grade %in% c('A', 'B', 'C', 'D'))
    +
    +  # Define a color for each grade
    +  grade_colors <- c("A" = "#76a865", "B" = "#7cb5bd", "C" = "#ffff00", "D" = "#d9838d")
    +
    +  # Create the plot with panels for each grade
    +  plot <- ggplot(data = sf_data_filtered) +
    +    geom_sf(data = roads, alpha = 0.1, lwd = 0.1) +
    +    geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) +
    +    geom_sf(aes(fill = grade)) +
+    facet_wrap(~ grade, nrow = 1) +  # One panel per HOLC grade; add scales = "free" if panels need independent zoom
    +    scale_fill_manual(values = grade_colors) +
    +    theme_minimal() +
    +    labs(fill = 'HOLC Grade') +
    +    theme_tufte() +
    +    theme(plot.background = element_rect(fill = "white", color = NA),
    +          panel.background = element_rect(fill = "white", color = NA),
    +          legend.position = "none",  # Optionally hide the legend
    +          axis.text = element_blank(),     # Remove axis text
    +          axis.title = element_blank(),    # Remove axis titles
    +          axis.ticks = element_blank(),    # Remove axis ticks
    +          panel.grid.major = element_blank(),  # Remove major grid lines
    +          panel.grid.minor = element_blank())  
    +
    +  ggsave(plot, filename = "HOLC_grades_individually.png", width = 10, height = 4, units = "in", dpi = 1200)
    +  return(plot)
    +}
    +
    + +
    +
    + +Plot 4 HOLC grades individually + + +
    plot_row <- split_plot(denver_redlining, roads, rivers)
    +
    + +
    + +

    +
    + +FUNCTION: Map an amenity over each grade individually + + +
    process_and_plot_sf_layers <- function(layer1, layer2, output_file = "output_plot.png") {
    + # Make geometries valid
    +layer1 <- st_make_valid(layer1)
    +layer2 <- st_make_valid(layer2)
    +
    +# Optionally, simplify geometries to remove duplicate vertices
    +layer1 <- st_simplify(layer1, preserveTopology = TRUE) |>
    +  filter(grade != "")
    +
    +# Prepare a list to store results
    +results <- list()
    +
    +# Loop through each grade and perform operations
    +for (grade in c("A", "B", "C", "D")) {
    +  # Filter layer1 for current grade
    +  layer1_grade <- layer1[layer1$grade == grade, ]
    +
    +  # Buffer the geometries of the current grade
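+  # (sf has s2 enabled here, so dist = 500 is interpreted in metres for these lon/lat data)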
    +  buffered_layer1_grade <- st_buffer(layer1_grade, dist = 500)
    +
    +  # Intersect with the second layer
    +  intersections <- st_intersects(layer2, buffered_layer1_grade, sparse = FALSE)
    +  selected_polygons <- layer2[rowSums(intersections) > 0, ]
    +
    +  # Add a new column to store the grade information
    +  selected_polygons$grade <- grade
    +
    +  # Store the result
    +  results[[grade]] <- selected_polygons
    +}
    +
    +# Combine all selected polygons from different grades into one sf object
    +final_selected_polygons <- do.call(rbind, results)
    +
    +  # Define colors for the grades
    +  grade_colors <- c("A" = "grey", "B" = "grey", "C" = "grey", "D" = "grey")
    +
    +  # Create the plot
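+  # Note: 'roads' and 'rivers' below are not function arguments; they come from the global environment (created earlier with get_places())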
    +  plot <- ggplot() +
    +    geom_sf(data = roads, alpha = 0.05, lwd = 0.1) +
    +    geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) +
+    geom_sf(data = layer1, fill = "grey", color = "grey", size = 0.1) +
+    geom_sf(data = final_selected_polygons, fill = "green", color = "green", size = 0.1) +
+    facet_wrap(~ grade, nrow = 1) +
    +    #scale_fill_manual(values = grade_colors) +
    +    #scale_color_manual(values = grade_colors) +
    +    theme_minimal() +
    +    labs(fill = 'HOLC Grade') +
    +    theme_tufte() +
    +    theme(plot.background = element_rect(fill = "white", color = NA),
    +      panel.background = element_rect(fill = "white", color = NA),
    +      legend.position = "none",
    +          axis.text = element_blank(),
    +          axis.title = element_blank(),
    +          axis.ticks = element_blank(),
    +          panel.grid.major = element_blank(),
    +          panel.grid.minor = element_blank())
    +
    +  # Save the plot as a high-resolution PNG file
    +  ggsave(output_file, plot, width = 10, height = 4, units = "in", dpi = 1200)
    +
    +  # Return the plot for optional further use
    +  return(list(plot=plot, sf = final_selected_polygons))
    +}
    +
    + +
    +
    + +FUNCTION: Create word cloud per grade + + +
    create_wordclouds_by_grade <- function(sf_object, output_file = "food_word_cloud_per_grade.png",title = "Healthy food place names word cloud", max_size =25, col_select = "name") {
    +
    +
    +    # Extract relevant data and prepare text data
    +    text_data <- sf_object %>%
    +        select(grade, col_select) %>%
+        filter(!is.na(.data[[col_select]])) %>%
    +        unnest_tokens(output = "word", input = col_select, token = "words") %>%
    +        count(grade, word, sort = TRUE) %>%
    +        ungroup() %>%
+        filter(n > 1)  # Drop single-occurrence words (n is the count column created by count())
    +
    +    # Ensure there are no NA values in the 'word' column
    +    text_data <- text_data %>% filter(!is.na(word))
    +
    +    # Handle cases where text_data might be empty
    +    if (nrow(text_data) == 0) {
    +        stop("No data available for creating word clouds.")
    +    }
    +
    +    # Create a word cloud using ggplot2 and ggwordcloud
    +    p <- ggplot( ) +
    +        geom_text_wordcloud_area(data=text_data, aes(label = word, size = n),rm_outside = TRUE) +
    +        scale_size_area(max_size = max_size) +
    +        facet_wrap(~ grade, nrow = 1) +
    +      scale_color_gradient(low = "darkred", high = "red") +
    +        theme_minimal() +
    +        theme(plot.background = element_rect(fill = "white", color = NA),
    +          panel.background = element_rect(fill = "white", color = NA),
    +          panel.spacing = unit(0.5, "lines"),
    +              plot.title = element_text(size = 16, face = "bold"),
    +              legend.position = "none") +
    +        labs(title = title)
    +
    +    # Attempt to save the plot and handle any errors
    +    tryCatch({
    +        ggsave(output_file, p, width = 10, height = 4, units = "in", dpi = 600)
    +    }, error = function(e) {
    +        cat("Error in saving the plot: ", e$message, "\n")
    +    })
    +
    +    return(p)
    +}
    +
    + +
    +
    + +Map food over each grade individually + + +
     layer1 <- denver_redlining
    + layer2 <- food
    + food_match <- process_and_plot_sf_layers(layer1, layer2, "food_match.png")
    +
    + +
    + +

    +
    + +WORD CLOUD: Names of places with fresh food + + +
    food_word_cloud <- create_wordclouds_by_grade(food_match$sf, output_file = "food_word_cloud_per_grade.png")
    +
    + + Warning: Using an external vector in selections was deprecated in tidyselect 1.1.0. + ℹ Please use `all_of()` or `any_of()` instead. + # Was: + data %>% select(col_select) + + # Now: + data %>% select(all_of(col_select)) + + See . + + Warning in wordcloud_boxes(data_points = points_valid_first, boxes = boxes, : + Some words could not fit on page. They have been removed. + +
    + +

    +
    + +Map processed food over each grade individually + + +
     layer1 <- denver_redlining
    + layer2 <- food_processed
    + processed_food_match <- process_and_plot_sf_layers(layer1, layer2, "processed_food_match.png")
    +
    + +
    + +

    +
    + +WORD CLOUD: Names of places with processed food + + +
    processed_food_cloud <- create_wordclouds_by_grade(processed_food_match$sf, output_file = "processed_food_word_cloud_per_grade.png",title = "Processed food place names where larger text is more frequent", max_size =17)
    +
    + +
    + +

    +

    Part 2: Integrating Environmental Data

    +

    Data Processing

    +
      +
• Use satellite imagery to analyze greenspace using NDVI, an index that measures the quantity of vegetation in an area (Nardone et al. used 2010 imagery; the functions below stream recent Sentinel-2 scenes). A short sketch of the NDVI calculation follows this list.
    • +
    • Apply methods to adjust for potential confounders as described in + the study, ensuring that comparisons of greenspace across HOLC + grades are valid and not biased by historical or socio-demographic + factors.
    • +
    +
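Before streaming any imagery, it can help to see what NDVI actually computes. The short block below is a sketch added for illustration (it is not part of the original worksheet, and `compute_ndvi` is a name introduced here); it mirrors the `apply_pixel("(B08-B04)/(B08+B04)", "NDVI")` expression used by the streaming functions later in this section, where Sentinel-2 band B08 is near-infrared and B04 is red.

``` r
# NDVI = (NIR - Red) / (NIR + Red), bounded between -1 and 1.
# For Sentinel-2, NIR is band B08 and red is band B04 (the two bands requested below).
compute_ndvi <- function(nir, red) {
  (nir - red) / (nir + red)
}

compute_ndvi(nir = 0.45, red = 0.10)  # dense vegetation: roughly 0.64
compute_ndvi(nir = 0.12, red = 0.10)  # pavement or bare ground: close to 0
```

Higher values indicate more, and generally greener, vegetation; Part 3 compares these values across HOLC grades.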
    + +Map natural habitats over each grade individually + + +
     layer1 <- denver_redlining
    + layer2 <- natural_habitats
    + natural_habitats_match <- process_and_plot_sf_layers(layer1, layer2, "natural_habitats_match.png")
    + print(natural_habitats_match$plot)
    +
    + +![](worksheet_redlining_files/figure-gfm/unnamed-chunk-18-1.png) + +
    + +

    +
    + +WORD CLOUD: Name of natural habitat area + + +
    natural_habitats_cloud <- create_wordclouds_by_grade(natural_habitats_match$sf, output_file = "natural_habitats_word_cloud_per_grade.png",title = "Natural habitats place names where larger text is more frequent", max_size =35)
    +
    + +
    + +

    +
    + +FUNCTION: Stream NDVI data + + +
    polygon_layer <- denver_redlining
    +# Function to process satellite data based on an SF polygon's extent
    +process_satellite_data <- function(polygon_layer, start_date, end_date, assets, fps = 1, output_file = "anim.gif") {
    +  # Record start time
    +  start_time <- Sys.time()
    +
    +  # Calculate the bbox from the polygon layer
    +  bbox <- st_bbox(polygon_layer)
    +
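+  # Connect to the Element 84 "Earth Search" STAC API (v0 endpoint) to search for Sentinel-2 scenes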
    +  s = stac("https://earth-search.aws.element84.com/v0")
    +
    +
+  # Use rstac to search for Sentinel-2 images within the bbox and date range
    +  items = s |> stac_search(
    +    collections = "sentinel-s2-l2a-cogs",
    +    bbox = c(bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]),
    +    datetime = paste(start_date, end_date, sep = "/"),
    +    limit = 500
    +  ) %>% 
    +  post_request()
    +
    +  # Define mask for Sentinel-2 image quality
    +  #S2.mask <- image_mask("SCL", values = c(3, 8, 9))
    +
    +  # Create a collection of images filtering by cloud cover
    +  col <- stac_image_collection(items$features, asset_names = assets, property_filter = function(x) {x[["eo:cloud_cover"]] < 30})
    +
    +  # Define a view for processing the data
    +  v <- cube_view(srs = "EPSG:4326", 
    +                 extent = list(t0 = start_date, t1 = end_date,
    +                               left = bbox["xmin"], right = bbox["xmax"], 
    +                               top = bbox["ymax"], bottom = bbox["ymin"]),
    +                 dx = 0.001, dy = 0.001, dt = "P1M", 
    +                 aggregation = "median", resampling = "bilinear")
    +
    +  # Calculate NDVI and create an animation
    +  ndvi_col <- function(n) {
    +    rev(sequential_hcl(n, "Green-Yellow"))
    +  }
    +
    +  #raster_cube(col, v, mask = S2.mask) %>%
    +  raster_cube(col, v) %>%
    +    select_bands(c("B04", "B08")) %>%
    +    apply_pixel("(B08-B04)/(B08+B04)", "NDVI") %>%
    +    gdalcubes::animate(col = ndvi_col, zlim = c(-0.2, 1), key.pos = 1, save_as = output_file, fps = fps)
    +
    +  # Calculate processing time
    +  end_time <- Sys.time()
    +  processing_time <- difftime(end_time, start_time)
    +
    +  # Return processing time
    +  return(processing_time)
    +}
    +
    + +
    +
    + +Stream NDVI data: animation + + +
    processing_time <- process_satellite_data(denver_redlining, "2022-05-31", "2023-05-31", c("B04", "B08"))
    +
    + +
    + +

    +
    + +FUNCTION: Stream year average NDVI + + +
    yearly_average_ndvi <- function(polygon_layer, output_file = "ndvi.png", dx = 0.01, dy = 0.01) {
    +  # Record start time
    +  start_time <- Sys.time()
    +
    +  # Calculate the bbox from the polygon layer
    +  bbox <- st_bbox(polygon_layer)
    +
    +  s = stac("https://earth-search.aws.element84.com/v0")
    +
+  # Search for Sentinel-2 images within the bbox across 2023
    +  items <- s |> stac_search(
    +    collections = "sentinel-s2-l2a-cogs",
    +    bbox = c(bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]),
    +    datetime = "2023-01-01/2023-12-31",
    +    limit = 500
    +  ) %>% 
    +  post_request()
    +
    +  # Create a collection of images filtering by cloud cover
    +  col <- stac_image_collection(items$features, asset_names = c("B04", "B08"), property_filter = function(x) {x[["eo:cloud_cover"]] < 80})
    +
+  # Define a view for processing the data over the full 2023 year
    +  v <- cube_view(srs = "EPSG:4326", 
    +                 extent = list(t0 = "2023-01-01", t1 = "2023-12-31",
    +                               left = bbox["xmin"], right = bbox["xmax"], 
    +                               top = bbox["ymax"], bottom = bbox["ymin"]),
    +                 dx = dx, dy = dy, dt = "P1Y", 
    +                 aggregation = "median", resampling = "bilinear")
    +
    +  # Process NDVI
    +  ndvi_rast <- raster_cube(col, v) %>%
    +    select_bands(c("B04", "B08")) %>%
    +    apply_pixel("(B08-B04)/(B08+B04)", "NDVI") %>%
    +    write_tif() |>
    +    terra::rast()
    +
    +
    +  # Convert terra Raster to ggplot using tidyterra
    +ndvi_plot <-   ggplot() +
    +    geom_spatraster(data = ndvi_rast, aes(fill = NDVI)) +
    +    scale_fill_viridis_c(option = "viridis", direction = -1, name = "NDVI") +
    +    labs(title = "NDVI mean for 2023") +
    +    theme_minimal() +
    +    coord_sf() +
    +    theme(plot.background = element_rect(fill = "white", color = NA),
    +      panel.background = element_rect(fill = "white", color = NA),
    +      legend.position = "right",
    +          axis.text = element_blank(),
    +          axis.title = element_blank(),
    +          axis.ticks = element_blank(),
    +          panel.grid.major = element_blank(),
    +          panel.grid.minor = element_blank()) 
    +
    +  # Save the plot as a high-resolution PNG file
    +  ggsave(output_file, ndvi_plot, width = 10, height = 8, dpi = 600)
    +
    +  # Calculate processing time
    +  end_time <- Sys.time()
    +  processing_time <- difftime(end_time, start_time)
    +
    +  # Return the plot and processing time
    +  return(list(plot = ndvi_plot, processing_time = processing_time, raster = ndvi_rast))
    +}
    +
    + +
    +
    + +Stream NDVI: high resolution + + +
    ndvi_background <- yearly_average_ndvi(denver_redlining,dx = 0.0001, dy = 0.0001)
    +
    + +
    + +

    +
    + +FUNCTION: Map NDVI per HOLC grade individually + + +
    create_mask_and_plot <- function(redlining_sf, background_raster = ndvi$raster, roads = NULL, rivers = NULL){
    +  start_time <- Sys.time()  # Start timing
    +
    +  # Validate and prepare the redlining data
    +  redlining_sf <- redlining_sf %>%
    +    filter(grade != "") %>%
    +    st_make_valid()
    +
    +
    +bbox <- st_bbox(redlining_sf)  # Get original bounding box
    +
    +
+expanded_bbox <- expand_bbox(bbox, 6000, 1000)  # expand the bbox so the inverted masks extend beyond the redlined area
    +
    +
    +expanded_bbox_poly <- st_as_sfc(expanded_bbox, crs = st_crs(redlining_sf)) %>%
    +    st_make_valid()
    +
    +  # Initialize an empty list to store masks
    +  masks <- list()
    +
    +  # Iterate over each grade to create masks
    +  unique_grades <- unique(redlining_sf$grade)
    +  for (grade in unique_grades) {
    +    # Filter polygons by grade
    +    grade_polygons <- redlining_sf[redlining_sf$grade == grade, ]
    +
    +    # Create an "inverted" mask by subtracting these polygons from the background
    +    mask <- st_difference(expanded_bbox_poly, st_union(grade_polygons))
    +
    +    # Store the mask in the list with the grade as the name
    +    masks[[grade]] <- st_sf(geometry = mask, grade = grade)
    +  }
    +
    +  # Combine all masks into a single sf object
    +  mask_sf <- do.call(rbind, masks)
    +
    +  # Normalize the grades so that C.2 becomes C, but correctly handle other grades
    +  mask_sf$grade <- ifelse(mask_sf$grade == "C.2", "C", mask_sf$grade)
    +
    +  # Prepare the plot
    +  plot <- ggplot() +
    +    geom_spatraster(data = background_raster, aes(fill = NDVI)) +
    +  scale_fill_viridis_c(name = "NDVI", option = "viridis", direction = -1) +
    +
    +    geom_sf(data = mask_sf, aes(color = grade), fill = "white", size = 0.1, show.legend = FALSE) +
    +    scale_color_manual(values = c("A" = "white", "B" = "white", "C" = "white", "D" = "white"), name = "Grade") +
    +    facet_wrap(~ grade, nrow = 1) +
    +     geom_sf(data = roads, alpha = 1, lwd = 0.1, color="white") +
    +    geom_sf(data = rivers, color = "white", alpha = 0.5, lwd = 1.1) +
    +    labs(title = "NDVI: Normalized Difference Vegetation Index") +
    +    theme_minimal() +
    +    coord_sf(xlim = c(bbox["xmin"], bbox["xmax"]), 
    +           ylim = c(bbox["ymin"], bbox["ymax"]), 
    +           expand = FALSE) + 
    +    theme(plot.background = element_rect(fill = "white", color = NA),
    +          panel.background = element_rect(fill = "white", color = NA),
    +          legend.position = "bottom",
    +          axis.text = element_blank(),
    +          axis.title = element_blank(),
    +          axis.ticks = element_blank(),
    +          panel.grid.major = element_blank(),
    +          panel.grid.minor = element_blank())
    +
    +  # Save the plot
    +  ggsave("redlining_mask_ndvi.png", plot, width = 10, height = 4, dpi = 600)
    +
    +  end_time <- Sys.time()  # End timing
    +  runtime <- end_time - start_time
    +
    +  # Return the plot and runtime
    +  return(list(plot = plot, runtime = runtime, mask_sf = mask_sf))
    +}
    +
    + +
    +
    + +Stream NDVI: low resolution + + +
    ndvi_background_low <- yearly_average_ndvi(denver_redlining)
    +
    + +
    +

    +
    + +Map low resolution NDVI per HOLC grade + + +
    ndvi <- create_mask_and_plot(denver_redlining, background_raster = ndvi_background_low$raster, roads = roads, rivers = rivers)
    +
    + +
    + +

    +
    + +FUNCTION: Map Denver City provided data per HOLC grade + + +
    process_city_inventory_data <- function(address, inner_file, polygon_layer, output_filename,variable_label= 'Tree Density') {
    +  # Download and read the shapefile
    +  full_path <- glue("/vsizip/vsicurl/{address}/{inner_file}")
    +  shape_data <- st_read(full_path, quiet = TRUE) |> st_as_sf()
    +
    +  # Process the shape data with the provided polygon layer
    +  processed_data <- process_and_plot_sf_layers(polygon_layer, shape_data, paste0(output_filename, ".png"))
    +
+  # Extract the matched features (named 'trees' after the original tree-inventory use case;
+  # the same function is reused for accidents, public art, crime, and other city layers)
+  trees <- processed_data$sf
    +  denver_redlining_residential <- polygon_layer |> filter(grade != "")
    +
    +  # Generate the density plot
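+  # Note: 'roads' and 'rivers' are read from the global environment (created earlier with get_places())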
    +  plot <- ggplot() +
    +    geom_sf(data = roads, alpha = 0.05, lwd = 0.1) +
    +    geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) +
    +    geom_sf(data = denver_redlining_residential, fill = "grey", color = "grey", size = 0.1) +
    +    facet_wrap(~ grade, nrow = 1) +
    +    stat_density_2d(data = trees, 
    +                    mapping = aes(x = map_dbl(geometry, ~.[1]),
    +                                  y = map_dbl(geometry, ~.[2]),
    +                                  fill = stat(density)),
    +                    geom = 'tile',
    +                    contour = FALSE,
    +                    alpha = 0.9) +
    +    scale_fill_gradientn(colors = c("transparent", "white", "limegreen"),
    +                         values = scales::rescale(c(0, 0.1, 1)),  # Adjust these based on your density range
    +                         guide = "colourbar") +
    +    theme_minimal() +
    +    labs(fill = variable_label) +
    +    theme_tufte() +
    +    theme(plot.background = element_rect(fill = "white", color = NA),
    +          panel.background = element_rect(fill = "white", color = NA),
    +          legend.position = "bottom",
    +          axis.text = element_blank(),
    +          axis.title = element_blank(),
    +          axis.ticks = element_blank(),
    +          panel.grid.major = element_blank(),
    +          panel.grid.minor = element_blank())
    +
    +  # Save the plot
    +  ggsave(paste0(output_filename, "_density_plot.png"), plot, width = 10, height = 4, units = "in", dpi = 600)
    +
    +  # Return the plot and the tree layer
    +  return(list(plot = plot, layer = trees))
    +}
    +
    + +
    +
    + +Map tree inventory per HOLC grade + + +
    result <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/tree_inventory/shape/tree_inventory.zip",
    +  "tree_inventory.shp",
    +  denver_redlining,
    +  "Denver_tree_inventory_2023"
    +)
    +
    + + Warning: `stat(density)` was deprecated in ggplot2 3.4.0. + ℹ Please use `after_stat(density)` instead. + +
    + +

    +

    +
    + +Map traffic accidents per HOLC grade + + +
    result <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/traffic_accidents/shape/traffic_accidents.zip",
    +  "traffic_accidents.shp",
    +  denver_redlining,
    +  "Denver_traffic_accidents",
    +  variable_label= 'Traffic accidents density'
    +)
    +
    + +
    + +

    +

    +
    + +Map stream sampling effort per HOLC grade + + +
    instream_sampling_sites <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/instream_sampling_sites/shape/instream_sampling_sites.zip",
    +  "instream_sampling_sites.shp",
    +  denver_redlining,
    +  "instream_sampling_sites",
    +  variable_label= 'Instream sampling sites density'
    +)
    +
    + +
    + +

    +

    +
    + +Map soil sampling effort per HOLC grade + + +
    soil_samples <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/soil_samples/shape/soil_samples.zip",
    +  "soil_samples.shp",
    +  denver_redlining,
    +  "Soil samples",
    +  variable_label= 'soil samples density'
    +)
    +
    + +
    + +

    +

    +
    + +Map public art density per HOLC grade + + +
    public_art <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/public_art/shape/public_art.zip",
    +  "public_art.shp",
    +  denver_redlining,
    +  "Public art ",
    +  variable_label= 'Public art density'
    +)
    +
    + +
    + +

    +

    +
    + +Map liquor licenses density per HOLC grade + + +
    liquor_licenses <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/liquor_licenses/shape/liquor_licenses.zip",
    +  "liquor_licenses.shp",
    +  denver_redlining,
    +  "liquor licenses ",
    +  variable_label= 'liquor licenses density'
    +)
    +
    + +
    + +

    +

    +
    + +Map crime density per HOLC grade + + +
    Crime <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/crime/shape/crime.zip",
    +  "crime.shp",
    +  denver_redlining,
    +  "crime",
    +  variable_label= 'Crime density'
    +)
    +
    + +
    +

    +
    + +WORD CLOUD: Types of crimes + + +
    crime_cloud <- create_wordclouds_by_grade(Crime$layer, output_file = "Crime_word_cloud_per_grade.png",title = "Crime type where larger text is more frequent", max_size =25, col_select = "OFFENSE_TY")
    +
    + + Warning: Using an external vector in selections was deprecated in tidyselect 1.1.0. + ℹ Please use `all_of()` or `any_of()` instead. + # Was: + data %>% select(col_select) + + # Now: + data %>% select(all_of(col_select)) + + See . + +
    + +

    +
    + +Map police shooting density per HOLC grade + + +
    Denver_police_shootings <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/denver_police_officer_involved_shootings/shape/denver_police_officer_involved_shootings.zip",
    +  "denver_police_officer_involved_shootings.shp",
    +  denver_redlining,
    +  "Police shootings",
    +  variable_label= 'Police shootings density'
    +)
    +
    + +
    + +

    +

Not enough data to estimate a density surface across all four HOLC grades for the police-involved shootings.

    +
    + +WORD CLOUD: Police involved shootings + + +
    Denver_police_shootings_cloud <- create_wordclouds_by_grade(Denver_police_shootings$layer, output_file = "police_shootings_word_cloud_per_grade.png",title = "police involved shooting per crime type where larger text is more frequent", max_size =35, col_select = "SHOOT_ACTI")
    +
    + +
    + +

    +

    Part 3: Comparative Analysis and Visualization

    +

    Statistical Analysis

    +
      +
• Conduct a detailed statistical analysis to compare greenspace across different HOLC grades, using techniques such as Targeted Maximum Likelihood Estimation (TMLE) to assess the association between historical redlining and current greenspace levels; a minimal, unadjusted sketch follows this list.
    • +
    • Visualize the disparities in greenspace distribution using GIS + tools, highlighting how redlining has shaped urban ecological + landscapes.
    • +
    +
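The worksheet stops short of the statistical comparison itself, so the block below is only a minimal, hedged sketch under stated assumptions rather than the covariate-adjusted TMLE analysis of Nardone et al.: it reuses `denver_redlining` and the low-resolution NDVI result `ndvi_background_low` created earlier, assumes the NDVI raster layer is named `NDVI` (as in the plots above), and fits an unadjusted linear model plus a nonparametric check.

``` r
# Minimal, unadjusted comparison of mean NDVI across HOLC grades (see assumptions above).
library(dplyr)
library(sf)
library(terra)

residential <- denver_redlining %>%
  filter(grade %in% c("A", "B", "C", "D"))

# Zonal mean NDVI for each redlined polygon / census-tract piece
zonal <- terra::extract(ndvi_background_low$raster,
                        terra::vect(residential),
                        fun = mean, na.rm = TRUE)

holc_ndvi <- residential %>%
  st_drop_geometry() %>%
  mutate(mean_ndvi = zonal$NDVI)

# Per-grade summary
holc_ndvi %>%
  group_by(grade) %>%
  summarise(mean_ndvi = mean(mean_ndvi, na.rm = TRUE), n = n())

# Unadjusted linear model (grade "A" is the reference level) and a nonparametric check
summary(lm(mean_ndvi ~ grade, data = holc_ndvi))
kruskal.test(mean_ndvi ~ grade, data = holc_ndvi)
```

A fuller replication would add neighborhood-level covariates and an estimator such as TMLE or propensity score matching, as described in the reference below.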

    Conclusion

    +

    This tutorial provides tools and methodologies to explore the lingering +effects of historic redlining on urban greenspace, offering insights +into the intersection of urban planning, environmental justice, and +public health.

    +

    References

    +
      +
    • Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. + (2021). Redlines and Greenspace: The Relationship between Historical + Redlining and 2010 Greenspace across the United States. + Environmental Health Perspectives, 129(1), 017006. + DOI:10.1289/EHP7495. Available + online
    • +
    + +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/worksheets/worksheet_redlining/worksheet_redlining.md b/worksheets/worksheet_redlining/worksheet_redlining.md new file mode 100644 index 0000000..35a00bf --- /dev/null +++ b/worksheets/worksheet_redlining/worksheet_redlining.md @@ -0,0 +1,1715 @@ +Redlining +================ + +# Exploring the Impact of Historical Redlining on Urban Greenspace: A Collaborative Examination of Maps, Justice, and Resilience + +## Introduction + +This group exploration delves into the long-term impacts of historical +redlining on urban greenspace, emphasizing the powerful role of maps in +shaping environmental and social landscapes. By drawing on the research +by Nardone et al. (2021), you will collaboratively investigate how +discriminatory practices encoded in maps have led to persistent +disparities in urban settings. This exploration aims to uncover the +resilience of communities in adapting to these entrenched injustices and +to foster a deeper understanding of how mapping can serve both as a tool +of exclusion and as a means for promoting social equity. + +![1938 Map of Atlanta uses colors as grades for neighborhoods. The red +swaths identify each area with large African-American populations that +were deemed “less safe.”](../assets/redlining/redlining.png)) + +## Understanding Redlining as a Systemic Disturbance + +Redlining originated in the 1930s as a discriminatory practice where the +Home Owners’ Loan Corporation (HOLC) systematically denied mortgages or +offered unfavorable terms based on racial and ethnic compositions. This +methodical exclusion, executed through maps that color-coded “risky” +investment areas in red, marked minority-populated areas, denying them +crucial investment and development opportunities and initiating a +profound and lasting disturbance in the urban fabric. + +Maps serve as powerful tools beyond navigation; they communicate and +enforce control. By defining neighborhood boundaries through redlining, +HOLC maps not only mirrored societal biases but also perpetuated and +embedded them into the urban landscape. This manipulation of geographic +data set a trajectory that limited economic growth, dictated the +allocation of services, and influenced the development or deterioration +of community infrastructure. + +**Figure 1:** 1938 Map of Atlanta uses colors as grades for +neighborhoods. The red swaths identify each area with large +African-American populations that were deemed “less safe.” + + + +[![](../assets/redlining/georectified-thumbnail.png)](https://storymaps.arcgis.com/stories/0f58d49c566b486482b3e64e9e5f7ac9) + +ArcGIS Story Map + + + +**Explore the Story Map:** Click on the image above to explore the +interactive story map about \[subject of the story map\]. + +## Resilience and Adaptation in Urban Environments + +The legacy of redlining presents both a challenge and an opportunity for +resilience and adaptation. Economically and socially, redlining +entrenched cycles of poverty and racial segregation, creating a +resilient wealth gap that has been difficult to dismantle. +Environmentally, the neighborhoods targeted by redlining continue to +face significant challenges—they generally feature less greenspace, +suffer from higher pollution levels, and are more vulnerable to the +impacts of climate change. These factors compound the health and +wellness challenges faced by residents. + +Despite these adversities, urban communities have continually +demonstrated remarkable resilience. 
Adaptation strategies, such as +community-led green initiatives, urban agriculture, and grassroots +activism, have emerged as responses to these systemic disturbances. By +enhancing green infrastructure and advocating for equitable +environmental policies, these communities strive to increase their +resilience against both historical inequities and environmental +challenges. + + + +[![](https://img.youtube.com/vi/O5FBJyqfoLM/hqdefault.jpg)](https://youtu.be/O5FBJyqfoLM) + +Watch the video + + + +**Video Title:** Exploring the Impacts of Historical Redlining on Urban +Development +**Description:** Click on the image above to watch a video that delves +into the consequences of historical redlining and its ongoing impact on +urban environments. This educational piece offers insights into how such +discriminatory practices have shaped cities and what can be learned from +them. + +The following group exercise will not only uncover the impact of +redlining on urban greenspace but also highlight the adaptive strategies +developed in response to this enduring disturbance. Through mapping and +analysis, we aim to illustrate the powerful role that geographic data +can play in understanding and fostering urban resilience and social +equity. + +### References + +- Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. + (2021). Redlines and Greenspace: The Relationship between Historical + Redlining and 2010 Greenspace across the United States. + *Environmental Health Perspectives*, 129(1), 017006. + DOI:10.1289/EHP7495. +- Hoffman, J. S., Shandas, V., & Pendleton, N. (2020). The Effects of + Historical Housing Policies on Resident Exposure to Intra-Urban + Heat: A Study of 108 US Urban Areas. *Climate*, 8(1), 12. + DOI:10.3390/cli8010012. + +## Goals of this group activity + +The primary objectives of this tutorial are: 1. To practice coding in +CyVerse. 2. To analyze the relationship between HOLC grades and the +presence of urban greenspace. 3. To understand how historic policies +continue to affect the spatial distribution of environmental amenities. + +## Part 1: Accessing and Visualizing Historic Redlining Data + +We will begin by accessing HOLC maps from the Mapping Inequality project +and overlaying this data with modern geographic datasets to visualize +the historical impact on contemporary urban landscapes. + +### Data Acquisition + +- Download HOLC map shapefiles from the University of Richmond’s + Mapping Inequality Project. +- Utilize satellite imagery and other geospatial data to map current + greenspace using the normalized difference vegetation index (NDVI). + +### Analysis Methodology + +- Replicate the approach used by Nardone et al. to calculate NDVI + values for each HOLC neighborhood, assessing greenspace as a + health-promoting resource. +- Employ statistical methods such as propensity score matching to + control for confounding variables and estimate the true impact of + HOLC grades on urban greenspace. + + +
    + +R libraries we use in this analysis + + +``` r +if (!requireNamespace("tidytext", quietly = TRUE)) { + install.packages("tidytext") +} +library(tidytext) +## Warning: package 'tidytext' was built under R version 4.3.2 +library(sf) +## Warning: package 'sf' was built under R version 4.3.2 +## Linking to GEOS 3.11.0, GDAL 3.5.3, PROJ 9.1.0; sf_use_s2() is TRUE +library(ggplot2) +## Warning: package 'ggplot2' was built under R version 4.3.2 +library(ggthemes) +## Warning: package 'ggthemes' was built under R version 4.3.2 +library(dplyr) +## +## Attaching package: 'dplyr' +## The following objects are masked from 'package:stats': +## +## filter, lag +## The following objects are masked from 'package:base': +## +## intersect, setdiff, setequal, union +library(rstac) +## Warning: package 'rstac' was built under R version 4.3.2 +library(gdalcubes) +## Warning: package 'gdalcubes' was built under R version 4.3.2 +library(gdalUtils) +## Please note that rgdal will be retired during October 2023, +## plan transition to sf/stars/terra functions using GDAL and PROJ +## at your earliest convenience. +## See https://r-spatial.org/r/2023/05/15/evolution4.html and https://github.com/r-spatial/evolution +## rgdal: version: 1.6-7, (SVN revision 1203) +## Geospatial Data Abstraction Library extensions to R successfully loaded +## Loaded GDAL runtime: GDAL 3.5.3, released 2022/10/21 +## Path to GDAL shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/rgdal/gdal +## GDAL does not use iconv for recoding strings. +## GDAL binary built with GEOS: TRUE +## Loaded PROJ runtime: Rel. 9.1.0, September 1st, 2022, [PJ_VERSION: 910] +## Path to PROJ shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/gdalcubes/proj +## PROJ CDN enabled: FALSE +## Linking to sp version:1.6-1 +## To mute warnings of possible GDAL/OSR exportToProj4() degradation, +## use options("rgdal_show_exportToProj4_warnings"="none") before loading sp or rgdal. +## +## Attaching package: 'gdalUtils' +## The following object is masked from 'package:sf': +## +## gdal_rasterize +library(gdalcubes) +library(colorspace) +library(terra) +## Warning: package 'terra' was built under R version 4.3.2 +## terra 1.7.71 +## +## Attaching package: 'terra' +## The following object is masked from 'package:colorspace': +## +## RGB +## The following objects are masked from 'package:gdalcubes': +## +## animate, crop, size +library(tidyterra) +## +## Attaching package: 'tidyterra' +## The following object is masked from 'package:stats': +## +## filter +library(basemapR) +library(tidytext) +library(ggwordcloud) +library(osmextract) +## Data (c) OpenStreetMap contributors, ODbL 1.0. https://www.openstreetmap.org/copyright. +## Check the package website, https://docs.ropensci.org/osmextract/, for more details. +library(sf) +library(ggplot2) +library(ggthemes) +library(glue) +## +## Attaching package: 'glue' +## The following object is masked from 'package:terra': +## +## trim + +library(purrr) +``` + +
    +
    + +FUNCTION: List cities where HOLC data are available + + +``` r +# Function to get a list of unique cities and states from the redlining data +get_city_state_list_from_redlining_data <- function() { + # URL to the GeoJSON data + url <- "https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson" + + # Read the GeoJSON file into an sf object + redlining_data <- tryCatch({ + read_sf(url) + }, error = function(e) { + stop("Error reading GeoJSON data: ", e$message) + }) + + # Check for the existence of 'city' and 'state' columns + if (!all(c("city", "state") %in% names(redlining_data))) { + stop("The required columns 'city' and/or 'state' do not exist in the data.") + } + + # Extract a unique list of city and state pairs without the geometries + city_state_df <- redlining_data %>% + select(city, state) %>% + st_set_geometry(NULL) %>% # Drop the geometry to avoid issues with invalid shapes + distinct(city, state) %>% + arrange(state, city ) # Arrange the list alphabetically by state, then by city + + # Return the dataframe of unique city-state pairs + return(city_state_df) +} +``` + +
    +
    + +Stream list of available HOLC cities + + +``` r +#Retrieve the list of cities and states +city_state_list <- get_city_state_list_from_redlining_data() + +knitr::kable(city_state_list, format = "markdown") +``` + +| city | state | +|:---------------------------------|:------| +| Birmingham | AL | +| Mobile | AL | +| Montgomery | AL | +| Arkadelphia | AR | +| Batesville | AR | +| Camden | AR | +| Conway | AR | +| El Dorado | AR | +| Fort Smith | AR | +| Little Rock | AR | +| Russellville | AR | +| Texarkana | AR | +| Phoenix | AZ | +| Fresno | CA | +| Los Angeles | CA | +| Oakland | CA | +| Sacramento | CA | +| San Diego | CA | +| San Francisco | CA | +| San Jose | CA | +| Stockton | CA | +| Boulder | CO | +| Colorado Springs | CO | +| Denver | CO | +| Fort Collins | CO | +| Fort Morgan | CO | +| Grand Junction | CO | +| Greeley | CO | +| Longmont | CO | +| Pueblo | CO | +| Bridgeport and Fairfield | CT | +| Hartford | CT | +| New Britain | CT | +| New Haven | CT | +| Stamford, Darien, and New Canaan | CT | +| Waterbury | CT | +| Crestview | FL | +| Daytona Beach | FL | +| DeFuniak Springs | FL | +| DeLand | FL | +| Jacksonville | FL | +| Miami | FL | +| New Smyrna | FL | +| Orlando | FL | +| Pensacola | FL | +| St. Petersburg | FL | +| Tampa | FL | +| Atlanta | GA | +| Augusta | GA | +| Columbus | GA | +| Macon | GA | +| Savannah | GA | +| Boone | IA | +| Cedar Rapids | IA | +| Council Bluffs | IA | +| Davenport | IA | +| Des Moines | IA | +| Dubuque | IA | +| Sioux City | IA | +| Waterloo | IA | +| Aurora | IL | +| Chicago | IL | +| Decatur | IL | +| East St. Louis | IL | +| Joliet | IL | +| Peoria | IL | +| Rockford | IL | +| Springfield | IL | +| Evansville | IN | +| Fort Wayne | IN | +| Indianapolis | IN | +| Lake Co. Gary | IN | +| Muncie | IN | +| South Bend | IN | +| Terre Haute | IN | +| Atchison | KS | +| Junction City | KS | +| Topeka | KS | +| Wichita | KS | +| Covington | KY | +| Lexington | KY | +| Louisville | KY | +| New Orleans | LA | +| Shreveport | LA | +| Arlington | MA | +| Belmont | MA | +| Boston | MA | +| Braintree | MA | +| Brockton | MA | +| Brookline | MA | +| Cambridge | MA | +| Chelsea | MA | +| Dedham | MA | +| Everett | MA | +| Fall River | MA | +| Fitchburg | MA | +| Haverhill | MA | +| Holyoke Chicopee | MA | +| Lawrence | MA | +| Lexington | MA | +| Lowell | MA | +| Lynn | MA | +| Malden | MA | +| Medford | MA | +| Melrose | MA | +| Milton | MA | +| Needham | MA | +| New Bedford | MA | +| Newton | MA | +| Pittsfield | MA | +| Quincy | MA | +| Revere | MA | +| Salem | MA | +| Saugus | MA | +| Somerville | MA | +| Springfield | MA | +| Waltham | MA | +| Watertown | MA | +| Winchester | MA | +| Winthrop | MA | +| Worcester | MA | +| Baltimore | MD | +| Augusta | ME | +| Boothbay | ME | +| Portland | ME | +| Sanford | ME | +| Waterville | ME | +| Battle Creek | MI | +| Bay City | MI | +| Detroit | MI | +| Flint | MI | +| Grand Rapids | MI | +| Jackson | MI | +| Kalamazoo | MI | +| Lansing | MI | +| Muskegon | MI | +| Pontiac | MI | +| Saginaw | MI | +| Austin | MN | +| Duluth | MN | +| Mankato | MN | +| Minneapolis | MN | +| Rochester | MN | +| St. Cloud | MN | +| St. Paul | MN | +| Staples | MN | +| Cape Girardeau | MO | +| Carthage | MO | +| Greater Kansas City | MO | +| Joplin | MO | +| Springfield | MO | +| St. Joseph | MO | +| St. 
Louis | MO | +| Jackson | MS | +| Asheville | NC | +| Charlotte | NC | +| Durham | NC | +| Elizabeth City | NC | +| Fayetteville | NC | +| Goldsboro | NC | +| Greensboro | NC | +| Hendersonville | NC | +| High Point | NC | +| New Bern | NC | +| Rocky Mount | NC | +| Statesville | NC | +| Winston-Salem | NC | +| Fargo | ND | +| Grand Forks | ND | +| Minot | ND | +| Williston | ND | +| Lincoln | NE | +| Omaha | NE | +| Manchester | NH | +| Atlantic City | NJ | +| Bergen Co. | NJ | +| Camden | NJ | +| Essex Co. | NJ | +| Hudson Co. | NJ | +| Monmouth | NJ | +| Passaic County | NJ | +| Perth Amboy | NJ | +| Trenton | NJ | +| Union Co. | NJ | +| Albany | NY | +| Binghamton-Johnson City | NY | +| Bronx | NY | +| Brooklyn | NY | +| Buffalo | NY | +| Elmira | NY | +| Jamestown | NY | +| Lower Westchester Co. | NY | +| Manhattan | NY | +| Niagara Falls | NY | +| Poughkeepsie | NY | +| Queens | NY | +| Rochester | NY | +| Schenectady | NY | +| Staten Island | NY | +| Syracuse | NY | +| Troy | NY | +| Utica | NY | +| Akron | OH | +| Canton | OH | +| Cleveland | OH | +| Columbus | OH | +| Dayton | OH | +| Hamilton | OH | +| Lima | OH | +| Lorain | OH | +| Portsmouth | OH | +| Springfield | OH | +| Toledo | OH | +| Warren | OH | +| Youngstown | OH | +| Ada | OK | +| Alva | OK | +| Enid | OK | +| Miami Ottawa County | OK | +| Muskogee | OK | +| Norman | OK | +| Oklahoma City | OK | +| South McAlester | OK | +| Tulsa | OK | +| Portland | OR | +| Allentown | PA | +| Altoona | PA | +| Bethlehem | PA | +| Chester | PA | +| Erie | PA | +| Harrisburg | PA | +| Johnstown | PA | +| Lancaster | PA | +| McKeesport | PA | +| New Castle | PA | +| Philadelphia | PA | +| Pittsburgh | PA | +| Wilkes-Barre | PA | +| York | PA | +| Pawtucket & Central Falls | RI | +| Providence | RI | +| Woonsocket | RI | +| Aiken | SC | +| Charleston | SC | +| Columbia | SC | +| Greater Anderson | SC | +| Greater Greenville | SC | +| Orangeburg | SC | +| Rock Hill | SC | +| Spartanburg | SC | +| Sumter | SC | +| Aberdeen | SD | +| Huron | SD | +| Milbank | SD | +| Mitchell | SD | +| Rapid City | SD | +| Sioux Falls | SD | +| Vermillion | SD | +| Watertown | SD | +| Chattanooga | TN | +| Elizabethton | TN | +| Erwin | TN | +| Greenville | TN | +| Johnson City | TN | +| Knoxville | TN | +| Memphis | TN | +| Nashville | TN | +| Amarillo | TX | +| Austin | TX | +| Beaumont | TX | +| Dallas | TX | +| El Paso | TX | +| Fort Worth | TX | +| Galveston | TX | +| Houston | TX | +| Port Arthur | TX | +| San Antonio | TX | +| Waco | TX | +| Wichita Falls | TX | +| Ogden | UT | +| Salt Lake City | UT | +| Bristol | VA | +| Danville | VA | +| Harrisonburg | VA | +| Lynchburg | VA | +| Newport News | VA | +| Norfolk | VA | +| Petersburg | VA | +| Phoebus | VA | +| Richmond | VA | +| Roanoke | VA | +| Staunton | VA | +| Bennington | VT | +| Brattleboro | VT | +| Burlington | VT | +| Montpelier | VT | +| Newport City | VT | +| Poultney | VT | +| Rutland | VT | +| Springfield | VT | +| St. Albans | VT | +| St. Johnsbury | VT | +| Windsor | VT | +| Seattle | WA | +| Spokane | WA | +| Tacoma | WA | +| Kenosha | WI | +| Madison | WI | +| Milwaukee Co. | WI | +| Oshkosh | WI | +| Racine | WI | +| Charleston | WV | +| Huntington | WV | +| Wheeling | WV | + +
    +
    + +FUNCTION: Stream HOLC data from a city + + +``` r +# Function to load and filter redlining data by city +load_city_redlining_data <- function(city_name) { + # URL to the GeoJSON data + url <- "https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson" + + # Read the GeoJSON file into an sf object + redlining_data <- read_sf(url) + + # Filter the data for the specified city and non-empty grades + + city_redline <- redlining_data %>% + filter(city == city_name ) + + # Return the filtered data + return(city_redline) +} +``` + +
    +
    + +Stream HOLC data for Denver, CO + + +``` r +# Load redlining data for Denver +denver_redlining <- load_city_redlining_data("Denver") +knitr::kable(head(denver_redlining), format = "markdown") +``` + +| area_id | city | state | city_survey | cat | grade | label | res | com | ind | fill | GEOID10 | GISJOIN | calc_area | pct_tract | geometry | +|--------:|:-------|:------|:------------|:-----|:------|:------|:-----|:------|:------|:---------|:------------|:---------------|-------------:|----------:|:-----------------------------| +| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004104 | G0800310004104 | 1.525535e+01 | 0.00001 | MULTIPOLYGON (((-104.9125 3… | +| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004201 | G0800310004201 | 3.987458e+05 | 0.20900 | MULTIPOLYGON (((-104.9246 3… | +| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004304 | G0800310004304 | 1.554195e+05 | 0.05927 | MULTIPOLYGON (((-104.9125 3… | +| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004202 | G0800310004202 | 1.117770e+06 | 0.57245 | MULTIPOLYGON (((-104.9125 3… | +| 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \#76a865 | 08031004302 | G0800310004302 | 3.133415e+05 | 0.28381 | MULTIPOLYGON (((-104.928 39… | +| 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \#76a865 | 08031004301 | G0800310004301 | 1.221218e+05 | 0.08622 | MULTIPOLYGON (((-104.9305 3… | + +
    +
    + +FUNCTION: Get Points-of-Interest from city of interest + + +``` r + + +get_places <- function(polygon_layer, type = "food" ) { + # Check if the input is an sf object + if (!inherits(polygon_layer, "sf")) { + stop("The provided object is not an sf object.") + } + + # Create a bounding box from the input sf object + bbox_here <- st_bbox(polygon_layer) |> + st_as_sfc() + + if(type == "food"){ + my_layer <- "multipolygons" + my_query <- "SELECT * FROM multipolygons WHERE ( + shop IN ('supermarket', 'bodega', 'market', 'other_market', 'farm', 'garden_centre', 'doityourself', 'farm_supply', 'compost', 'mulch', 'fertilizer') OR + amenity IN ('social_facility', 'market', 'restaurant', 'coffee') OR + leisure = 'garden' OR + landuse IN ('farm', 'farmland', 'row_crops', 'orchard_plantation', 'dairy_grazing') OR + building IN ('brewery', 'winery', 'distillery') OR + shop = 'greengrocer' OR + amenity = 'marketplace' + )" + title <- "food" + } + + if (type == "processed_food") { + my_layer <- "multipolygons" + my_query <- "SELECT * FROM multipolygons WHERE ( + amenity IN ('fast_food', 'cafe', 'pub') OR + shop IN ('convenience', 'supermarket') OR + shop = 'kiosk' + )" + title <- "Processed Food Locations" +} + + if(type == "natural_habitats"){ + my_layer <- "multipolygons" + my_query <- "SELECT * FROM multipolygons WHERE ( + boundary = 'protected_area' OR + natural IN ('tree', 'wood') OR + landuse = 'forest' OR + leisure = 'park' + )" + title <- "Natural habitats or City owned trees" + } + + if(type == "roads"){ + my_layer <- "lines" + my_query <- "SELECT * FROM lines WHERE ( + highway IN ('motorway', 'trunk', 'primary', 'secondary', 'tertiary') )" + title <- "Major roads" + } + + if(type == "rivers"){ + my_layer <- "lines" + my_query <- "SELECT * FROM lines WHERE ( + waterway IN ('river'))" + title <- "Major rivers" + } + + if(type == "internet_access") { + my_layer <- "multipolygons" + my_query <- "SELECT * FROM multipolygons WHERE ( + amenity IN ('library', 'cafe', 'community_centre', 'public_building') AND + internet_access = 'yes' + )" + title <- "Internet Access Locations" +} + + if(type == "water_bodies") { + my_layer <- "multipolygons" + my_query <- "SELECT * FROM multipolygons WHERE ( + natural IN ('water', 'lake', 'pond') OR + water IN ('lake', 'pond') OR + landuse = 'reservoir' + )" + title <- "Water Bodies" +} + + if(type == "government_buildings") { + my_layer <- "multipolygons" + my_query <- "SELECT * FROM multipolygons WHERE ( + amenity IN ('townhall', 'courthouse', 'embassy', 'police', 'fire_station') OR + building IN ('capitol', 'government') + )" + title <- "Government Buildings" +} + + + + # Use the bbox to get data with oe_get(), specifying the desired layer and a custom SQL query for fresh food places + tryCatch({ + places <- oe_get( + place = bbox_here, + layer = my_layer, # Adjusted layer; change as per actual data availability + query = my_query, + quiet = TRUE + ) + + places <- st_make_valid(places) + + # Crop the data to the bounding box + cropped_places <- st_crop(places, bbox_here) + + # Plotting the cropped fresh food places + plot <- ggplot(data = cropped_places) + + geom_sf(fill="cornflowerblue", color="cornflowerblue") + + ggtitle(title) + + theme_tufte()+ + theme(legend.position = "none", # Optionally hide the legend + axis.text = element_blank(), # Remove axis text + axis.title = element_blank(), # Remove axis titles + axis.ticks = element_blank(), # Remove axis ticks + plot.background = element_rect(fill = "white", color = NA), # Set the plot background to white 
+ panel.background = element_rect(fill = "white", color = NA), # Set the panel background to white + panel.grid.major = element_blank(), # Remove major grid lines + panel.grid.minor = element_blank(), + ) + + # Save the plot as a PNG file + png_filename <- paste0(title,"_", Sys.Date(), ".png") + ggsave(png_filename, plot, width = 10, height = 8, units = "in") + + # Return the cropped dataset + return(cropped_places) + }, error = function(e) { + stop("Failed to retrieve or plot data: ", e$message) + }) +} +``` + +
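The same pattern extends to any other OpenStreetMap theme: pick the osmextract layer (`multipolygons`, `lines`, or `points`) and write a SQL filter over the tags of interest. As a sketch only, a hypothetical `schools` branch (the `type` name and the tag list are assumptions, not part of the original function) would slot in next to the other `if (type == ...)` blocks:

``` r
# Hypothetical extra branch for get_places(); the tag choices are illustrative.
type <- "schools"   # set here only so the sketch runs on its own

if (type == "schools") {
  my_layer <- "multipolygons"
  my_query <- "SELECT * FROM multipolygons WHERE (
                 amenity IN ('school', 'kindergarten', 'college', 'university')
               )"
  title <- "Education sites"
}
```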
    +
    + +FUNCTION: Plot POI over HOLC grades + + +``` r + + +plot_city_redlining <- function(redlining_data, filename = "redlining_plot.png") { + # Fetch additional geographic data based on redlining data + roads <- get_places(redlining_data, type = "roads") + rivers <- get_places(redlining_data, type = "rivers") + + # Filter residential zones with valid grades and where city survey is TRUE + residential_zones <- redlining_data %>% + filter(city_survey == TRUE & grade != "") + + # Colors for the grades + colors <- c("#76a865", "#7cb5bd", "#ffff00", "#d9838d") + + # Plot the data using ggplot2 + plot <- ggplot() + + geom_sf(data = roads, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.5, lwd = 1.1) + + geom_sf(data = residential_zones, aes(fill = grade), alpha = 0.5) + + theme_tufte() + + scale_fill_manual(values = colors) + + labs(fill = 'HOLC Categories') + + theme( + plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank(), + legend.position = "right" + ) + + # Save the plot as a high-resolution PNG file + ggsave(filename, plot, width = 10, height = 8, units = "in", dpi = 600) + + # Return the plot object if needed for further manipulation or checking + return(plot) +} +``` + +
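The HOLC palette used here (`#76a865`, `#7cb5bd`, `#ffff00`, `#d9838d`) reappears in later plotting helpers. Optionally, it can be defined once as a named vector and passed to `scale_fill_manual()` wherever grades are mapped to fill; a small sketch:

``` r
# One named palette for HOLC grades, reusable across the worksheet's plots
holc_colors <- c("A" = "#76a865", "B" = "#7cb5bd",
                 "C" = "#ffff00", "D" = "#d9838d")

# e.g. inside plot_city_redlining() or split_plot():
# scale_fill_manual(values = holc_colors)
```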
    +
    + +Plot Denver Redlining + + +``` r +denver_plot <- plot_city_redlining(denver_redlining) +``` + +
    + +![](../worksheets/redlining_plot.png) + +
    + +Stream amenities by category + + +``` r +food <- get_places(denver_redlining, type="food") + +food_processed <- get_places(denver_redlining, type="processed_food") + +natural_habitats <- get_places(denver_redlining, type="natural_habitats") + +roads <- get_places(denver_redlining, type="roads") + +rivers <- get_places(denver_redlining, type="rivers") + +#water_bodies <- get_places(denver_redlining, type="water_bodies") + +government_buildings <- get_places(denver_redlining, type="government_buildings") +``` + +
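Each call above returns an `sf` data frame of OpenStreetMap features cropped to the Denver bounding box. Before mapping, a quick feature count per layer is a useful sanity check that each query actually returned something (this assumes the objects created above are in the session):

``` r
# Number of features returned by each OpenStreetMap query
sapply(list(food                 = food,
            processed_food       = food_processed,
            natural_habitats     = natural_habitats,
            roads                = roads,
            rivers               = rivers,
            government_buildings = government_buildings),
       nrow)
```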
    +
    + +FUNCTION: Plot the HOLC grades individually + + +``` r +split_plot <- function(sf_data, roads, rivers) { + # Filter for grades A, B, C, and D + sf_data_filtered <- sf_data %>% + filter(grade %in% c('A', 'B', 'C', 'D')) + + # Define a color for each grade + grade_colors <- c("A" = "#76a865", "B" = "#7cb5bd", "C" = "#ffff00", "D" = "#d9838d") + + # Create the plot with panels for each grade + plot <- ggplot(data = sf_data_filtered) + + geom_sf(data = roads, alpha = 0.1, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) + + geom_sf(aes(fill = grade)) + + facet_wrap(~ grade, nrow = 1) + # Free scales for different zoom levels if needed + scale_fill_manual(values = grade_colors) + + theme_minimal() + + labs(fill = 'HOLC Grade') + + theme_tufte() + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "none", # Optionally hide the legend + axis.text = element_blank(), # Remove axis text + axis.title = element_blank(), # Remove axis titles + axis.ticks = element_blank(), # Remove axis ticks + panel.grid.major = element_blank(), # Remove major grid lines + panel.grid.minor = element_blank()) + + ggsave(plot, filename = "HOLC_grades_individually.png", width = 10, height = 4, units = "in", dpi = 1200) + return(plot) +} +``` + +
    +
    + +Plot 4 HOLC grades individually + + +``` r +plot_row <- split_plot(denver_redlining, roads, rivers) +``` + +
    + +![](../worksheets/HOLC_grades_individually.png) + +
    + +FUNCTION: Map an amenity over each grade individually + + +``` r + +process_and_plot_sf_layers <- function(layer1, layer2, output_file = "output_plot.png") { + # Make geometries valid +layer1 <- st_make_valid(layer1) +layer2 <- st_make_valid(layer2) + +# Optionally, simplify geometries to remove duplicate vertices +layer1 <- st_simplify(layer1, preserveTopology = TRUE) |> + filter(grade != "") + +# Prepare a list to store results +results <- list() + +# Loop through each grade and perform operations +for (grade in c("A", "B", "C", "D")) { + # Filter layer1 for current grade + layer1_grade <- layer1[layer1$grade == grade, ] + + # Buffer the geometries of the current grade + buffered_layer1_grade <- st_buffer(layer1_grade, dist = 500) + + # Intersect with the second layer + intersections <- st_intersects(layer2, buffered_layer1_grade, sparse = FALSE) + selected_polygons <- layer2[rowSums(intersections) > 0, ] + + # Add a new column to store the grade information + selected_polygons$grade <- grade + + # Store the result + results[[grade]] <- selected_polygons +} + +# Combine all selected polygons from different grades into one sf object +final_selected_polygons <- do.call(rbind, results) + + # Define colors for the grades + grade_colors <- c("A" = "grey", "B" = "grey", "C" = "grey", "D" = "grey") + + # Create the plot + plot <- ggplot() + + geom_sf(data = roads, alpha = 0.05, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) + + geom_sf(data = layer1, fill = "grey", color = "grey", size = 0.1) + + facet_wrap(~ grade, nrow = 1) + + geom_sf(data = final_selected_polygons,fill = "green", color = "green", size = 0.1) + + facet_wrap(~ grade, nrow = 1) + + #scale_fill_manual(values = grade_colors) + + #scale_color_manual(values = grade_colors) + + theme_minimal() + + labs(fill = 'HOLC Grade') + + theme_tufte() + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "none", + axis.text = element_blank(), + axis.title = element_blank(), + axis.ticks = element_blank(), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank()) + + # Save the plot as a high-resolution PNG file + ggsave(output_file, plot, width = 10, height = 4, units = "in", dpi = 1200) + + # Return the plot for optional further use + return(list(plot=plot, sf = final_selected_polygons)) +} +``` + +
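Besides the faceted map, the `$sf` element of the returned list holds every amenity polygon that intersected the buffer around a HOLC zone (distance 500, roughly 500 m when sf's s2 engine handles these geographic coordinates), tagged with that zone's grade. A quick tally per grade turns the map into numbers you can compare; the sketch assumes a result such as `food_match` from the "Map food over each grade individually" chunk below:

``` r
# Tabulate matched amenity polygons by HOLC grade
food_match$sf |>
  sf::st_drop_geometry() |>
  dplyr::count(grade, name = "n_amenities")
```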
    +
FUNCTION: Create word cloud per grade


``` r
create_wordclouds_by_grade <- function(sf_object, output_file = "food_word_cloud_per_grade.png",
                                       title = "Healthy food place names word cloud",
                                       max_size = 25, col_select = "name") {

  # Extract the relevant columns and prepare the text data
  text_data <- sf_object %>%
    select(grade, col_select) %>%
    filter(!is.na(.data[[col_select]])) %>%   # drop features with no value in the selected column
    unnest_tokens(output = "word", input = col_select, token = "words") %>%
    count(grade, word, sort = TRUE) %>%
    ungroup() %>%
    filter(n > 1) # keep only words that occur more than once

  # Ensure there are no NA values in the 'word' column
  text_data <- text_data %>% filter(!is.na(word))

  # Handle cases where text_data might be empty
  if (nrow(text_data) == 0) {
    stop("No data available for creating word clouds.")
  }

  # Create a word cloud using ggplot2 and ggwordcloud
  p <- ggplot() +
    geom_text_wordcloud_area(data = text_data, aes(label = word, size = n), rm_outside = TRUE) +
    scale_size_area(max_size = max_size) +
    facet_wrap(~ grade, nrow = 1) +
    scale_color_gradient(low = "darkred", high = "red") +
    theme_minimal() +
    theme(plot.background = element_rect(fill = "white", color = NA),
          panel.background = element_rect(fill = "white", color = NA),
          panel.spacing = unit(0.5, "lines"),
          plot.title = element_text(size = 16, face = "bold"),
          legend.position = "none") +
    labs(title = title)

  # Attempt to save the plot and handle any errors
  tryCatch({
    ggsave(output_file, p, width = 10, height = 4, units = "in", dpi = 600)
  }, error = function(e) {
    cat("Error in saving the plot: ", e$message, "\n")
  })

  return(p)
}
```

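This helper leans on two packages that it does not load itself: `tidytext` provides `unnest_tokens()` and `ggwordcloud` provides `geom_text_wordcloud_area()`. A minimal setup chunk, in case the earlier package setup of the worksheet was skipped:

``` r
# Packages assumed by create_wordclouds_by_grade()
library(tidytext)     # unnest_tokens()
library(ggwordcloud)  # geom_text_wordcloud_area()
library(dplyr)        # select(), count(), filter()
library(ggplot2)      # ggplot(), facet_wrap(), ggsave()
```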
    +
    + +Map food over each grade individually + + +``` r + layer1 <- denver_redlining + layer2 <- food + food_match <- process_and_plot_sf_layers(layer1, layer2, "food_match.png") +``` + +
    + +![](../worksheets/food_match.png) + +
    + +WORD CLOUD: Names of places with fresh food + + +``` r +food_word_cloud <- create_wordclouds_by_grade(food_match$sf, output_file = "food_word_cloud_per_grade.png") +``` + + Warning: Using an external vector in selections was deprecated in tidyselect 1.1.0. + ℹ Please use `all_of()` or `any_of()` instead. + # Was: + data %>% select(col_select) + + # Now: + data %>% select(all_of(col_select)) + + See . + + Warning in wordcloud_boxes(data_points = points_valid_first, boxes = boxes, : + Some words could not fit on page. They have been removed. + +
    + +![](../worksheets/food_word_cloud_per_grade.png) + +
    + +Map processed food over each grade individually + + +``` r + layer1 <- denver_redlining + layer2 <- food_processed + processed_food_match <- process_and_plot_sf_layers(layer1, layer2, "processed_food_match.png") +``` + +
    + +![](../worksheets/processed_food_match.png) + +
    + +WORD CLOUD: Names of places with processed food + + +``` r +processed_food_cloud <- create_wordclouds_by_grade(processed_food_match$sf, output_file = "processed_food_word_cloud_per_grade.png",title = "Processed food place names where larger text is more frequent", max_size =17) +``` + +
![](../worksheets/processed_food_word_cloud_per_grade.png)

## Part 2: Integrating Environmental Data

### Data Processing

- Quantify greenspace with NDVI, an index of vegetation density
  derived from red and near-infrared reflectance (see the short
  sketch below). The reference study used 2010 imagery; this
  worksheet streams recent Sentinel-2 scenes for the same purpose.
- Apply the adjustment methods described in the study to control for
  potential confounders, so that comparisons of greenspace across
  HOLC grades are not biased by historical or socio-demographic
  factors.

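NDVI is the normalized difference of near-infrared and red reflectance, NDVI = (NIR - Red) / (NIR + Red), the same `(B08-B04)/(B08+B04)` expression applied per pixel in the gdalcubes chunks below. A toy calculation with made-up reflectance values (assumptions, not measurements) shows how the index behaves:

``` r
# NDVI = (NIR - Red) / (NIR + Red): values near 1 suggest dense vegetation,
# values near 0 suggest bare ground, and negative values usually mean water.
ndvi_index <- function(nir, red) (nir - red) / (nir + red)

ndvi_index(nir = 0.45, red = 0.05)  # dense vegetation: 0.8
ndvi_index(nir = 0.22, red = 0.18)  # sparse or dry cover: 0.1
ndvi_index(nir = 0.02, red = 0.05)  # open water: about -0.43
```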
    + +Map natural habitats over each grade individually + + +``` r + layer1 <- denver_redlining + layer2 <- natural_habitats + natural_habitats_match <- process_and_plot_sf_layers(layer1, layer2, "natural_habitats_match.png") + print(natural_habitats_match$plot) +``` + +![](worksheet_redlining_files/figure-gfm/unnamed-chunk-18-1.png) + +
    + +![](../worksheets/natural_habitats_match.png) + +
WORD CLOUD: Names of natural habitat areas


``` r
natural_habitats_cloud <- create_wordclouds_by_grade(
  natural_habitats_match$sf,
  output_file = "natural_habitats_word_cloud_per_grade.png",
  title = "Natural habitats place names where larger text is more frequent",
  max_size = 35
)
```

    + +![](../worksheets/natural_habitats_word_cloud_per_grade.png) + +
FUNCTION: Stream NDVI data


``` r
polygon_layer <- denver_redlining
# Function to process satellite data based on an SF polygon's extent
process_satellite_data <- function(polygon_layer, start_date, end_date, assets, fps = 1, output_file = "anim.gif") {
  # Record start time
  start_time <- Sys.time()

  # Calculate the bbox from the polygon layer
  bbox <- st_bbox(polygon_layer)

  s <- stac("https://earth-search.aws.element84.com/v0")

  # Use rstac to search for Sentinel-2 images within the bbox and date range
  items <- s |> stac_search(
    collections = "sentinel-s2-l2a-cogs",
    bbox = c(bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]),
    datetime = paste(start_date, end_date, sep = "/"),
    limit = 500
  ) %>%
    post_request()

  # Define mask for Sentinel-2 image quality
  #S2.mask <- image_mask("SCL", values = c(3, 8, 9))

  # Create a collection of images filtering by cloud cover
  col <- stac_image_collection(items$features, asset_names = assets, property_filter = function(x) {x[["eo:cloud_cover"]] < 30})

  # Define a view for processing the data (monthly median composites)
  v <- cube_view(srs = "EPSG:4326",
                 extent = list(t0 = start_date, t1 = end_date,
                               left = bbox["xmin"], right = bbox["xmax"],
                               top = bbox["ymax"], bottom = bbox["ymin"]),
                 dx = 0.001, dy = 0.001, dt = "P1M",
                 aggregation = "median", resampling = "bilinear")

  # Calculate NDVI and create an animation
  ndvi_col <- function(n) {
    rev(sequential_hcl(n, "Green-Yellow"))
  }

  #raster_cube(col, v, mask = S2.mask) %>%
  raster_cube(col, v) %>%
    select_bands(c("B04", "B08")) %>%
    apply_pixel("(B08-B04)/(B08+B04)", "NDVI") %>%
    gdalcubes::animate(col = ndvi_col, zlim = c(-0.2, 1), key.pos = 1, save_as = output_file, fps = fps)

  # Calculate processing time
  end_time <- Sys.time()
  processing_time <- difftime(end_time, start_time)

  # Return processing time
  return(processing_time)
}
```

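The scene-classification (SCL) mask in the function is commented out. If clouds or cloud shadows bleed into the animation, it can be switched back on; the sketch below mirrors the commented lines above, with the caveat that `"SCL"` must then also be included in the assets passed to `stac_image_collection()` (here, added to the `assets` argument when calling the function):

``` r
library(gdalcubes)

# Drop pixels classified as cloud shadow (3), cloud medium probability (8),
# or cloud high probability (9) before computing NDVI
S2.mask <- image_mask("SCL", values = c(3, 8, 9))

# ...then build the cube with the mask applied, as in the commented line above:
# raster_cube(col, v, mask = S2.mask) %>% select_bands(c("B04", "B08")) %>% ...
```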
    +
    + +Stream NDVI data: animation + + +``` r +processing_time <- process_satellite_data(denver_redlining, "2022-05-31", "2023-05-31", c("B04", "B08")) +``` + +
    + +![](../worksheets/anim.gif) + +
FUNCTION: Stream year average NDVI


``` r
yearly_average_ndvi <- function(polygon_layer, output_file = "ndvi.png", dx = 0.01, dy = 0.01) {
  # Record start time
  start_time <- Sys.time()

  # Calculate the bbox from the polygon layer
  bbox <- st_bbox(polygon_layer)

  s <- stac("https://earth-search.aws.element84.com/v0")

  # Search for Sentinel-2 images within the bbox for calendar year 2023
  items <- s |> stac_search(
    collections = "sentinel-s2-l2a-cogs",
    bbox = c(bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]),
    datetime = "2023-01-01/2023-12-31",
    limit = 500
  ) %>%
    post_request()

  # Create a collection of images filtering by cloud cover
  col <- stac_image_collection(items$features, asset_names = c("B04", "B08"), property_filter = function(x) {x[["eo:cloud_cover"]] < 80})

  # Define a view that aggregates the whole year into a single time step (dt = "P1Y")
  v <- cube_view(srs = "EPSG:4326",
                 extent = list(t0 = "2023-01-01", t1 = "2023-12-31",
                               left = bbox["xmin"], right = bbox["xmax"],
                               top = bbox["ymax"], bottom = bbox["ymin"]),
                 dx = dx, dy = dy, dt = "P1Y",
                 aggregation = "median", resampling = "bilinear")

  # Process NDVI
  ndvi_rast <- raster_cube(col, v) %>%
    select_bands(c("B04", "B08")) %>%
    apply_pixel("(B08-B04)/(B08+B04)", "NDVI") %>%
    write_tif() |>
    terra::rast()

  # Convert the terra raster to a ggplot using tidyterra
  ndvi_plot <- ggplot() +
    geom_spatraster(data = ndvi_rast, aes(fill = NDVI)) +
    scale_fill_viridis_c(option = "viridis", direction = -1, name = "NDVI") +
    labs(title = "NDVI mean for 2023") +
    theme_minimal() +
    coord_sf() +
    theme(plot.background = element_rect(fill = "white", color = NA),
          panel.background = element_rect(fill = "white", color = NA),
          legend.position = "right",
          axis.text = element_blank(),
          axis.title = element_blank(),
          axis.ticks = element_blank(),
          panel.grid.major = element_blank(),
          panel.grid.minor = element_blank())

  # Save the plot as a high-resolution PNG file
  ggsave(output_file, ndvi_plot, width = 10, height = 8, dpi = 600)

  # Calculate processing time
  end_time <- Sys.time()
  processing_time <- difftime(end_time, start_time)

  # Return the plot, processing time, and the NDVI raster
  return(list(plot = ndvi_plot, processing_time = processing_time, raster = ndvi_rast))
}
```

    +
    + +Stream NDVI: high resolution + + +``` r +ndvi_background <- yearly_average_ndvi(denver_redlining,dx = 0.0001, dy = 0.0001) +``` + +
    + +![](../worksheets/ndvi_00001.png) + +
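The `dx`/`dy` arguments are in degrees because the cube view is defined in EPSG:4326, so it helps to translate them into ground distance. A rough conversion (one degree of latitude is about 111 km, and east-west distance shrinks with the cosine of latitude, about 0.77 at Denver) shows why `0.0001` above is the high-resolution setting and the default `0.01` is coarse:

``` r
# Approximate ground size of one cube cell at Denver's latitude (about 39.7 N)
m_per_deg_lat <- 111320                      # metres per degree of latitude
lat_rad <- 39.7 * pi / 180

c(high_res_ns_m = 0.0001 * m_per_deg_lat,                  # ~11 m north-south
  high_res_ew_m = 0.0001 * m_per_deg_lat * cos(lat_rad),   # ~8.6 m east-west
  default_ns_m  = 0.01   * m_per_deg_lat)                  # ~1.1 km north-south
```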
    + +FUNCTION: Map NDVI per HOLC grade individually + + +``` r + + +create_mask_and_plot <- function(redlining_sf, background_raster = ndvi$raster, roads = NULL, rivers = NULL){ + start_time <- Sys.time() # Start timing + + # Validate and prepare the redlining data + redlining_sf <- redlining_sf %>% + filter(grade != "") %>% + st_make_valid() + + +bbox <- st_bbox(redlining_sf) # Get original bounding box + + +expanded_bbox <- expand_bbox(bbox, 6000, 1000) # + + +expanded_bbox_poly <- st_as_sfc(expanded_bbox, crs = st_crs(redlining_sf)) %>% + st_make_valid() + + # Initialize an empty list to store masks + masks <- list() + + # Iterate over each grade to create masks + unique_grades <- unique(redlining_sf$grade) + for (grade in unique_grades) { + # Filter polygons by grade + grade_polygons <- redlining_sf[redlining_sf$grade == grade, ] + + # Create an "inverted" mask by subtracting these polygons from the background + mask <- st_difference(expanded_bbox_poly, st_union(grade_polygons)) + + # Store the mask in the list with the grade as the name + masks[[grade]] <- st_sf(geometry = mask, grade = grade) + } + + # Combine all masks into a single sf object + mask_sf <- do.call(rbind, masks) + + # Normalize the grades so that C.2 becomes C, but correctly handle other grades + mask_sf$grade <- ifelse(mask_sf$grade == "C.2", "C", mask_sf$grade) + + # Prepare the plot + plot <- ggplot() + + geom_spatraster(data = background_raster, aes(fill = NDVI)) + + scale_fill_viridis_c(name = "NDVI", option = "viridis", direction = -1) + + + geom_sf(data = mask_sf, aes(color = grade), fill = "white", size = 0.1, show.legend = FALSE) + + scale_color_manual(values = c("A" = "white", "B" = "white", "C" = "white", "D" = "white"), name = "Grade") + + facet_wrap(~ grade, nrow = 1) + + geom_sf(data = roads, alpha = 1, lwd = 0.1, color="white") + + geom_sf(data = rivers, color = "white", alpha = 0.5, lwd = 1.1) + + labs(title = "NDVI: Normalized Difference Vegetation Index") + + theme_minimal() + + coord_sf(xlim = c(bbox["xmin"], bbox["xmax"]), + ylim = c(bbox["ymin"], bbox["ymax"]), + expand = FALSE) + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "bottom", + axis.text = element_blank(), + axis.title = element_blank(), + axis.ticks = element_blank(), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank()) + + # Save the plot + ggsave("redlining_mask_ndvi.png", plot, width = 10, height = 4, dpi = 600) + + end_time <- Sys.time() # End timing + runtime <- end_time - start_time + + # Return the plot and runtime + return(list(plot = plot, runtime = runtime, mask_sf = mask_sf)) +} +``` + +
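`create_mask_and_plot()` calls an `expand_bbox()` helper that is not defined in this part of the worksheet. If the function is lifted into a standalone script, something equivalent is needed; the sketch below is only an assumption about its behaviour (padding the bounding box by a distance in metres along x and y), not the original definition.

``` r
# Hypothetical stand-in for expand_bbox(): pad an st_bbox by dx_m / dy_m metres,
# converting to degrees on the assumption that the data are in EPSG:4326.
expand_bbox <- function(bbox, dx_m, dy_m) {
  lat_mid <- (bbox["ymin"] + bbox["ymax"]) / 2
  dx_deg  <- dx_m / (111320 * cos(lat_mid * pi / 180))  # metres -> deg longitude
  dy_deg  <- dy_m / 111320                              # metres -> deg latitude
  bbox["xmin"] <- bbox["xmin"] - dx_deg
  bbox["xmax"] <- bbox["xmax"] + dx_deg
  bbox["ymin"] <- bbox["ymin"] - dy_deg
  bbox["ymax"] <- bbox["ymax"] + dy_deg
  bbox
}
```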
    +
    + +Stream NDVI: low resolution + + +``` r +ndvi_background_low <- yearly_average_ndvi(denver_redlining) +``` + +
    +![](../worksheets/ndvi.png) +
    + +Map low resolution NDVI per HOLC grade + + +``` r +ndvi <- create_mask_and_plot(denver_redlining, background_raster = ndvi_background_low$raster, roads = roads, rivers = rivers) +``` + +
    + +![](../worksheets/redlining_mask_ndvi.png) + +
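The masked facets make the pattern visible; attaching numbers to it (for example, to feed the statistical comparison in Part 3) only takes a zonal summary of the streamed raster. A sketch using `terra::extract()`, assuming `ndvi_background_low` and `denver_redlining` are in the session as above; the extracted column is named after the raster layer, `NDVI` here, so check `names()` if yours differs:

``` r
library(terra)
library(dplyr)

# Mean NDVI inside each residential HOLC polygon
holc <- denver_redlining |> filter(grade != "")
ndvi_by_polygon <- terra::extract(ndvi_background_low$raster,
                                  terra::vect(holc),
                                  fun = mean, na.rm = TRUE)

# Average those polygon means by grade
holc |>
  sf::st_drop_geometry() |>
  mutate(mean_ndvi = ndvi_by_polygon$NDVI) |>
  group_by(grade) |>
  summarise(mean_ndvi = mean(mean_ndvi, na.rm = TRUE),
            n_polygons = n())
```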
    + +FUNCTION: Map Denver City provided data per HOLC grade + + +``` r +process_city_inventory_data <- function(address, inner_file, polygon_layer, output_filename,variable_label= 'Tree Density') { + # Download and read the shapefile + full_path <- glue("/vsizip/vsicurl/{address}/{inner_file}") + shape_data <- st_read(full_path, quiet = TRUE) |> st_as_sf() + + # Process the shape data with the provided polygon layer + processed_data <- process_and_plot_sf_layers(polygon_layer, shape_data, paste0(output_filename, ".png")) + + # Extract trees from the processed data + trees <- processed_data$sf + denver_redlining_residential <- polygon_layer |> filter(grade != "") + + # Generate the density plot + plot <- ggplot() + + geom_sf(data = roads, alpha = 0.05, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) + + geom_sf(data = denver_redlining_residential, fill = "grey", color = "grey", size = 0.1) + + facet_wrap(~ grade, nrow = 1) + + stat_density_2d(data = trees, + mapping = aes(x = map_dbl(geometry, ~.[1]), + y = map_dbl(geometry, ~.[2]), + fill = stat(density)), + geom = 'tile', + contour = FALSE, + alpha = 0.9) + + scale_fill_gradientn(colors = c("transparent", "white", "limegreen"), + values = scales::rescale(c(0, 0.1, 1)), # Adjust these based on your density range + guide = "colourbar") + + theme_minimal() + + labs(fill = variable_label) + + theme_tufte() + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "bottom", + axis.text = element_blank(), + axis.title = element_blank(), + axis.ticks = element_blank(), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank()) + + # Save the plot + ggsave(paste0(output_filename, "_density_plot.png"), plot, width = 10, height = 4, units = "in", dpi = 600) + + # Return the plot and the tree layer + return(list(plot = plot, layer = trees)) +} +``` + +
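The `/vsizip/vsicurl/` prefix lets GDAL read a shapefile straight out of a remote zip archive without downloading it first. Before wiring a new city dataset into `process_city_inventory_data()`, it can be worth checking which layers the archive actually contains; a small sketch, reusing the tree-inventory URL from the next chunk:

``` r
library(glue)
library(sf)

address <- "https://www.denvergov.org/media/gis/DataCatalog/tree_inventory/shape/tree_inventory.zip"

# List the layers GDAL can see inside the remote zip
sf::st_layers(glue("/vsizip/vsicurl/{address}"))
```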
    +
    + +Map tree inventory per HOLC grade + + +``` r +result <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/tree_inventory/shape/tree_inventory.zip", + "tree_inventory.shp", + denver_redlining, + "Denver_tree_inventory_2023" +) +``` + + Warning: `stat(density)` was deprecated in ggplot2 3.4.0. + ℹ Please use `after_stat(density)` instead. + +
    + +![](../worksheets/Denver_tree_inventory_2023.png) +![](../worksheets/Denver_tree_inventory_2023_density_plot.png) + +
    + +Map traffic accidents per HOLC grade + + +``` r +result <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/traffic_accidents/shape/traffic_accidents.zip", + "traffic_accidents.shp", + denver_redlining, + "Denver_traffic_accidents", + variable_label= 'Traffic accidents density' +) +``` + +
    + +![](../worksheets/Denver_traffic_accidents.png) +![](../worksheets/Denver_traffic_accidents_density_plot.png) + +
    + +Map stream sampling effort per HOLC grade + + +``` r +instream_sampling_sites <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/instream_sampling_sites/shape/instream_sampling_sites.zip", + "instream_sampling_sites.shp", + denver_redlining, + "instream_sampling_sites", + variable_label= 'Instream sampling sites density' +) +``` + +
    + +![](../worksheets/instream_sampling_sites.png) +![](../worksheets/instream_sampling_sites_density_plot.png) + +
    + +Map soil sampling effort per HOLC grade + + +``` r +soil_samples <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/soil_samples/shape/soil_samples.zip", + "soil_samples.shp", + denver_redlining, + "Soil samples", + variable_label= 'soil samples density' +) +``` + +
    + +![](../worksheets/Soil%20samples.png) +![](../worksheets/Soil%20samples_density_plot.png) + +
    + +Map public art density per HOLC grade + + +``` r +public_art <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/public_art/shape/public_art.zip", + "public_art.shp", + denver_redlining, + "Public art ", + variable_label= 'Public art density' +) +``` + +
    + +![](../worksheets/Public%20art%20.png) +![](../worksheets/Public%20art%20_density_plot.png) + +
    + +Map liquor licenses density per HOLC grade + + +``` r +liquor_licenses <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/liquor_licenses/shape/liquor_licenses.zip", + "liquor_licenses.shp", + denver_redlining, + "liquor licenses ", + variable_label= 'liquor licenses density' +) +``` + +
    + +![](../worksheets/liquor%20licenses%20.png) +![](../worksheets/liquor%20licenses%20_density_plot.png) + +
    + +Map crime density per HOLC grade + + +``` r +Crime <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/crime/shape/crime.zip", + "crime.shp", + denver_redlining, + "crime", + variable_label= 'Crime density' +) +``` + +
    +![](../worksheets/crime.png) ![](../worksheets/crime_density_plot.png) +
    + +WORD CLOUD: Types of crimes + + +``` r +crime_cloud <- create_wordclouds_by_grade(Crime$layer, output_file = "Crime_word_cloud_per_grade.png",title = "Crime type where larger text is more frequent", max_size =25, col_select = "OFFENSE_TY") +``` + + Warning: Using an external vector in selections was deprecated in tidyselect 1.1.0. + ℹ Please use `all_of()` or `any_of()` instead. + # Was: + data %>% select(col_select) + + # Now: + data %>% select(all_of(col_select)) + + See . + +
    + +![](../worksheets/Crime_word_cloud_per_grade.png) + +
    + +Map police shooting density per HOLC grade + + +``` r +Denver_police_shootings <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/denver_police_officer_involved_shootings/shape/denver_police_officer_involved_shootings.zip", + "denver_police_officer_involved_shootings.shp", + denver_redlining, + "Police shootings", + variable_label= 'Police shootings density' +) +``` + +
![](../worksheets/Police%20shootings.png)

**Not enough observations to estimate a density surface across all four HOLC grades.**

    + +WORD CLOUD: Police involved shootings + + +``` r +Denver_police_shootings_cloud <- create_wordclouds_by_grade(Denver_police_shootings$layer, output_file = "police_shootings_word_cloud_per_grade.png",title = "police involved shooting per crime type where larger text is more frequent", max_size =35, col_select = "SHOOT_ACTI") +``` + +
    + +![](../worksheets/police_shootings_word_cloud_per_grade.png) + +## Part 3: Comparative Analysis and Visualization + +### Statistical Analysis + +- Conduct a detailed statistical analysis to compare greenspace across + different HOLC grades, using techniques like Targeted Maximum + Likelihood Estimation (TMLE) to assess the association between + historical redlining and current greenspace levels. +- Visualize the disparities in greenspace distribution using GIS + tools, highlighting how redlining has shaped urban ecological + landscapes. + +## Conclusion + +This tutorial provides tools and methodologies to explore the lingering +effects of historic redlining on urban greenspace, offering insights +into the intersection of urban planning, environmental justice, and +public health. + +### References + +- Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. + (2021). Redlines and Greenspace: The Relationship between Historical + Redlining and 2010 Greenspace across the United States. + *Environmental Health Perspectives*, 129(1), 017006. + DOI:10.1289/EHP7495. [Available + online](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7839347/pdf/ehp7495.pdf) diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-10-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-10-1.png new file mode 100644 index 0000000..286debd Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-10-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-11-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-11-1.png new file mode 100644 index 0000000..286debd Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-11-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-12-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-12-1.png new file mode 100644 index 0000000..e6ba149 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-12-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-13-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-13-1.png new file mode 100644 index 0000000..6f4afb0 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-13-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-14-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-14-1.png new file mode 100644 index 0000000..90121e8 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-14-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-15-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-15-1.png new file mode 100644 index 0000000..51b4908 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-15-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-16-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-16-1.png new file mode 100644 index 0000000..a838949 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-16-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-17-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-17-1.png new file mode 100644 index 0000000..a838949 Binary files 
/dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-17-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-18-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-18-1.png new file mode 100644 index 0000000..40baf21 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-18-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-19-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-19-1.png new file mode 100644 index 0000000..40baf21 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-19-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-2-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-2-1.png new file mode 100644 index 0000000..672aff3 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-2-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-20-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-20-1.png new file mode 100644 index 0000000..a8effe1 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-20-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-23-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-23-1.png new file mode 100644 index 0000000..69a7404 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-23-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-24-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-24-1.png new file mode 100644 index 0000000..69a7404 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-24-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-25-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-25-1.png new file mode 100644 index 0000000..6e82530 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-25-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-26-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-26-1.png new file mode 100644 index 0000000..6e82530 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-26-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-28-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-28-1.png new file mode 100644 index 0000000..76769b8 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-28-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-29-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-29-1.png new file mode 100644 index 0000000..76769b8 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-29-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-3-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-3-1.png new file mode 100644 index 0000000..e902733 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-3-1.png differ diff --git 
a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-30-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-30-1.png new file mode 100644 index 0000000..8f6c651 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-30-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-31-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-31-1.png new file mode 100644 index 0000000..7dbabc5 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-31-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-32-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-32-1.png new file mode 100644 index 0000000..63d784f Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-32-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-33-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-33-1.png new file mode 100644 index 0000000..6f2f24d Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-33-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-34-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-34-1.png new file mode 100644 index 0000000..ab3934f Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-34-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-35-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-35-1.png new file mode 100644 index 0000000..ddf6b4f Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-35-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-36-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-36-1.png new file mode 100644 index 0000000..666e42a Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-36-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-37-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-37-1.png new file mode 100644 index 0000000..ddf6b4f Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-37-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-38-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-38-1.png new file mode 100644 index 0000000..e1b9f1a Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-38-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-4-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-4-1.png new file mode 100644 index 0000000..d3c4c14 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-4-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-5-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-5-1.png new file mode 100644 index 0000000..c6a0d2f Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-5-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-6-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-6-1.png new file mode 100644 index 
0000000..4b03f4c Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-6-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-7-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-7-1.png new file mode 100644 index 0000000..733d68a Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-7-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-8-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-8-1.png new file mode 100644 index 0000000..a640d56 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-8-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-9-1.png b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-9-1.png new file mode 100644 index 0000000..a640d56 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-gfm/unnamed-chunk-9-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-11-1.png b/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-11-1.png new file mode 100644 index 0000000..70548a4 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-11-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-14-1.png b/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-14-1.png new file mode 100644 index 0000000..8aed996 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-14-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-16-1.png b/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-16-1.png new file mode 100644 index 0000000..015777d Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-16-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-18-1.png b/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-18-1.png new file mode 100644 index 0000000..deaebbf Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-18-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-23-1.png b/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-23-1.png new file mode 100644 index 0000000..5317a09 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-23-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-25-1.png b/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-25-1.png new file mode 100644 index 0000000..f620404 Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-25-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-26-1.png b/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-26-1.png new file mode 100644 index 0000000..bebc81a Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-26-1.png differ diff --git a/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-8-1.png b/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-8-1.png new file mode 100644 index 0000000..e7cb6ce Binary files /dev/null and b/worksheets/worksheet_redlining_files/figure-html/unnamed-chunk-8-1.png differ diff --git 
a/worksheets/worksheet_redlining_files/libs/bootstrap/bootstrap-icons.css b/worksheets/worksheet_redlining_files/libs/bootstrap/bootstrap-icons.css new file mode 100644 index 0000000..f51d04b --- /dev/null +++ b/worksheets/worksheet_redlining_files/libs/bootstrap/bootstrap-icons.css @@ -0,0 +1,1704 @@ +@font-face { + font-family: "bootstrap-icons"; + src: +url("./bootstrap-icons.woff?524846017b983fc8ded9325d94ed40f3") format("woff"); +} + +.bi::before, +[class^="bi-"]::before, +[class*=" bi-"]::before { + display: inline-block; + font-family: bootstrap-icons !important; + font-style: normal; + font-weight: normal !important; + font-variant: normal; + text-transform: none; + line-height: 1; + vertical-align: -.125em; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +.bi-123::before { content: "\f67f"; } +.bi-alarm-fill::before { content: "\f101"; } +.bi-alarm::before { content: "\f102"; } +.bi-align-bottom::before { content: "\f103"; } +.bi-align-center::before { content: "\f104"; } +.bi-align-end::before { content: "\f105"; } +.bi-align-middle::before { content: "\f106"; } +.bi-align-start::before { content: "\f107"; } +.bi-align-top::before { content: "\f108"; } +.bi-alt::before { content: "\f109"; } +.bi-app-indicator::before { content: "\f10a"; } +.bi-app::before { content: "\f10b"; } +.bi-archive-fill::before { content: "\f10c"; } +.bi-archive::before { content: "\f10d"; } +.bi-arrow-90deg-down::before { content: "\f10e"; } +.bi-arrow-90deg-left::before { content: "\f10f"; } +.bi-arrow-90deg-right::before { content: "\f110"; } +.bi-arrow-90deg-up::before { content: "\f111"; } +.bi-arrow-bar-down::before { content: "\f112"; } +.bi-arrow-bar-left::before { content: "\f113"; } +.bi-arrow-bar-right::before { content: "\f114"; } +.bi-arrow-bar-up::before { content: "\f115"; } +.bi-arrow-clockwise::before { content: "\f116"; } +.bi-arrow-counterclockwise::before { content: "\f117"; } +.bi-arrow-down-circle-fill::before { content: "\f118"; } +.bi-arrow-down-circle::before { content: "\f119"; } +.bi-arrow-down-left-circle-fill::before { content: "\f11a"; } +.bi-arrow-down-left-circle::before { content: "\f11b"; } +.bi-arrow-down-left-square-fill::before { content: "\f11c"; } +.bi-arrow-down-left-square::before { content: "\f11d"; } +.bi-arrow-down-left::before { content: "\f11e"; } +.bi-arrow-down-right-circle-fill::before { content: "\f11f"; } +.bi-arrow-down-right-circle::before { content: "\f120"; } +.bi-arrow-down-right-square-fill::before { content: "\f121"; } +.bi-arrow-down-right-square::before { content: "\f122"; } +.bi-arrow-down-right::before { content: "\f123"; } +.bi-arrow-down-short::before { content: "\f124"; } +.bi-arrow-down-square-fill::before { content: "\f125"; } +.bi-arrow-down-square::before { content: "\f126"; } +.bi-arrow-down-up::before { content: "\f127"; } +.bi-arrow-down::before { content: "\f128"; } +.bi-arrow-left-circle-fill::before { content: "\f129"; } +.bi-arrow-left-circle::before { content: "\f12a"; } +.bi-arrow-left-right::before { content: "\f12b"; } +.bi-arrow-left-short::before { content: "\f12c"; } +.bi-arrow-left-square-fill::before { content: "\f12d"; } +.bi-arrow-left-square::before { content: "\f12e"; } +.bi-arrow-left::before { content: "\f12f"; } +.bi-arrow-repeat::before { content: "\f130"; } +.bi-arrow-return-left::before { content: "\f131"; } +.bi-arrow-return-right::before { content: "\f132"; } +.bi-arrow-right-circle-fill::before { content: "\f133"; } +.bi-arrow-right-circle::before { content: "\f134"; } 
+.bi-arrow-right-short::before { content: "\f135"; } +.bi-arrow-right-square-fill::before { content: "\f136"; } +.bi-arrow-right-square::before { content: "\f137"; } +.bi-arrow-right::before { content: "\f138"; } +.bi-arrow-up-circle-fill::before { content: "\f139"; } +.bi-arrow-up-circle::before { content: "\f13a"; } +.bi-arrow-up-left-circle-fill::before { content: "\f13b"; } +.bi-arrow-up-left-circle::before { content: "\f13c"; } +.bi-arrow-up-left-square-fill::before { content: "\f13d"; } +.bi-arrow-up-left-square::before { content: "\f13e"; } +.bi-arrow-up-left::before { content: "\f13f"; } +.bi-arrow-up-right-circle-fill::before { content: "\f140"; } +.bi-arrow-up-right-circle::before { content: "\f141"; } +.bi-arrow-up-right-square-fill::before { content: "\f142"; } +.bi-arrow-up-right-square::before { content: "\f143"; } +.bi-arrow-up-right::before { content: "\f144"; } +.bi-arrow-up-short::before { content: "\f145"; } +.bi-arrow-up-square-fill::before { content: "\f146"; } +.bi-arrow-up-square::before { content: "\f147"; } +.bi-arrow-up::before { content: "\f148"; } +.bi-arrows-angle-contract::before { content: "\f149"; } +.bi-arrows-angle-expand::before { content: "\f14a"; } +.bi-arrows-collapse::before { content: "\f14b"; } +.bi-arrows-expand::before { content: "\f14c"; } +.bi-arrows-fullscreen::before { content: "\f14d"; } +.bi-arrows-move::before { content: "\f14e"; } +.bi-aspect-ratio-fill::before { content: "\f14f"; } +.bi-aspect-ratio::before { content: "\f150"; } +.bi-asterisk::before { content: "\f151"; } +.bi-at::before { content: "\f152"; } +.bi-award-fill::before { content: "\f153"; } +.bi-award::before { content: "\f154"; } +.bi-back::before { content: "\f155"; } +.bi-backspace-fill::before { content: "\f156"; } +.bi-backspace-reverse-fill::before { content: "\f157"; } +.bi-backspace-reverse::before { content: "\f158"; } +.bi-backspace::before { content: "\f159"; } +.bi-badge-3d-fill::before { content: "\f15a"; } +.bi-badge-3d::before { content: "\f15b"; } +.bi-badge-4k-fill::before { content: "\f15c"; } +.bi-badge-4k::before { content: "\f15d"; } +.bi-badge-8k-fill::before { content: "\f15e"; } +.bi-badge-8k::before { content: "\f15f"; } +.bi-badge-ad-fill::before { content: "\f160"; } +.bi-badge-ad::before { content: "\f161"; } +.bi-badge-ar-fill::before { content: "\f162"; } +.bi-badge-ar::before { content: "\f163"; } +.bi-badge-cc-fill::before { content: "\f164"; } +.bi-badge-cc::before { content: "\f165"; } +.bi-badge-hd-fill::before { content: "\f166"; } +.bi-badge-hd::before { content: "\f167"; } +.bi-badge-tm-fill::before { content: "\f168"; } +.bi-badge-tm::before { content: "\f169"; } +.bi-badge-vo-fill::before { content: "\f16a"; } +.bi-badge-vo::before { content: "\f16b"; } +.bi-badge-vr-fill::before { content: "\f16c"; } +.bi-badge-vr::before { content: "\f16d"; } +.bi-badge-wc-fill::before { content: "\f16e"; } +.bi-badge-wc::before { content: "\f16f"; } +.bi-bag-check-fill::before { content: "\f170"; } +.bi-bag-check::before { content: "\f171"; } +.bi-bag-dash-fill::before { content: "\f172"; } +.bi-bag-dash::before { content: "\f173"; } +.bi-bag-fill::before { content: "\f174"; } +.bi-bag-plus-fill::before { content: "\f175"; } +.bi-bag-plus::before { content: "\f176"; } +.bi-bag-x-fill::before { content: "\f177"; } +.bi-bag-x::before { content: "\f178"; } +.bi-bag::before { content: "\f179"; } +.bi-bar-chart-fill::before { content: "\f17a"; } +.bi-bar-chart-line-fill::before { content: "\f17b"; } +.bi-bar-chart-line::before { content: "\f17c"; } 
+.bi-bar-chart-steps::before { content: "\f17d"; } +.bi-bar-chart::before { content: "\f17e"; } +.bi-basket-fill::before { content: "\f17f"; } +.bi-basket::before { content: "\f180"; } +.bi-basket2-fill::before { content: "\f181"; } +.bi-basket2::before { content: "\f182"; } +.bi-basket3-fill::before { content: "\f183"; } +.bi-basket3::before { content: "\f184"; } +.bi-battery-charging::before { content: "\f185"; } +.bi-battery-full::before { content: "\f186"; } +.bi-battery-half::before { content: "\f187"; } +.bi-battery::before { content: "\f188"; } +.bi-bell-fill::before { content: "\f189"; } +.bi-bell::before { content: "\f18a"; } +.bi-bezier::before { content: "\f18b"; } +.bi-bezier2::before { content: "\f18c"; } +.bi-bicycle::before { content: "\f18d"; } +.bi-binoculars-fill::before { content: "\f18e"; } +.bi-binoculars::before { content: "\f18f"; } +.bi-blockquote-left::before { content: "\f190"; } +.bi-blockquote-right::before { content: "\f191"; } +.bi-book-fill::before { content: "\f192"; } +.bi-book-half::before { content: "\f193"; } +.bi-book::before { content: "\f194"; } +.bi-bookmark-check-fill::before { content: "\f195"; } +.bi-bookmark-check::before { content: "\f196"; } +.bi-bookmark-dash-fill::before { content: "\f197"; } +.bi-bookmark-dash::before { content: "\f198"; } +.bi-bookmark-fill::before { content: "\f199"; } +.bi-bookmark-heart-fill::before { content: "\f19a"; } +.bi-bookmark-heart::before { content: "\f19b"; } +.bi-bookmark-plus-fill::before { content: "\f19c"; } +.bi-bookmark-plus::before { content: "\f19d"; } +.bi-bookmark-star-fill::before { content: "\f19e"; } +.bi-bookmark-star::before { content: "\f19f"; } +.bi-bookmark-x-fill::before { content: "\f1a0"; } +.bi-bookmark-x::before { content: "\f1a1"; } +.bi-bookmark::before { content: "\f1a2"; } +.bi-bookmarks-fill::before { content: "\f1a3"; } +.bi-bookmarks::before { content: "\f1a4"; } +.bi-bookshelf::before { content: "\f1a5"; } +.bi-bootstrap-fill::before { content: "\f1a6"; } +.bi-bootstrap-reboot::before { content: "\f1a7"; } +.bi-bootstrap::before { content: "\f1a8"; } +.bi-border-all::before { content: "\f1a9"; } +.bi-border-bottom::before { content: "\f1aa"; } +.bi-border-center::before { content: "\f1ab"; } +.bi-border-inner::before { content: "\f1ac"; } +.bi-border-left::before { content: "\f1ad"; } +.bi-border-middle::before { content: "\f1ae"; } +.bi-border-outer::before { content: "\f1af"; } +.bi-border-right::before { content: "\f1b0"; } +.bi-border-style::before { content: "\f1b1"; } +.bi-border-top::before { content: "\f1b2"; } +.bi-border-width::before { content: "\f1b3"; } +.bi-border::before { content: "\f1b4"; } +.bi-bounding-box-circles::before { content: "\f1b5"; } +.bi-bounding-box::before { content: "\f1b6"; } +.bi-box-arrow-down-left::before { content: "\f1b7"; } +.bi-box-arrow-down-right::before { content: "\f1b8"; } +.bi-box-arrow-down::before { content: "\f1b9"; } +.bi-box-arrow-in-down-left::before { content: "\f1ba"; } +.bi-box-arrow-in-down-right::before { content: "\f1bb"; } +.bi-box-arrow-in-down::before { content: "\f1bc"; } +.bi-box-arrow-in-left::before { content: "\f1bd"; } +.bi-box-arrow-in-right::before { content: "\f1be"; } +.bi-box-arrow-in-up-left::before { content: "\f1bf"; } +.bi-box-arrow-in-up-right::before { content: "\f1c0"; } +.bi-box-arrow-in-up::before { content: "\f1c1"; } +.bi-box-arrow-left::before { content: "\f1c2"; } +.bi-box-arrow-right::before { content: "\f1c3"; } +.bi-box-arrow-up-left::before { content: "\f1c4"; } 
+/* bootstrap-icons glyph rules: .bi-*::before { content: "\fXXX"; } mappings, from .bi-box-arrow-up-right ("\f1c5") through .bi-postage ("\f781") … */
+.bi-postcard-fill::before 
{ content: "\f782"; } +.bi-postcard-heart-fill::before { content: "\f783"; } +.bi-postcard-heart::before { content: "\f784"; } +.bi-postcard::before { content: "\f785"; } +.bi-search-heart-fill::before { content: "\f786"; } +.bi-search-heart::before { content: "\f787"; } +.bi-sliders2-vertical::before { content: "\f788"; } +.bi-sliders2::before { content: "\f789"; } +.bi-trash3-fill::before { content: "\f78a"; } +.bi-trash3::before { content: "\f78b"; } +.bi-valentine::before { content: "\f78c"; } +.bi-valentine2::before { content: "\f78d"; } +.bi-wrench-adjustable-circle-fill::before { content: "\f78e"; } +.bi-wrench-adjustable-circle::before { content: "\f78f"; } +.bi-wrench-adjustable::before { content: "\f790"; } +.bi-filetype-json::before { content: "\f791"; } +.bi-filetype-pptx::before { content: "\f792"; } +.bi-filetype-xlsx::before { content: "\f793"; } diff --git a/worksheets/worksheet_redlining_files/libs/bootstrap/bootstrap-icons.woff b/worksheets/worksheet_redlining_files/libs/bootstrap/bootstrap-icons.woff new file mode 100644 index 0000000..b26ccd1 Binary files /dev/null and b/worksheets/worksheet_redlining_files/libs/bootstrap/bootstrap-icons.woff differ diff --git a/worksheets/worksheet_redlining_files/libs/bootstrap/bootstrap.min.css b/worksheets/worksheet_redlining_files/libs/bootstrap/bootstrap.min.css new file mode 100644 index 0000000..13556c3 --- /dev/null +++ b/worksheets/worksheet_redlining_files/libs/bootstrap/bootstrap.min.css @@ -0,0 +1,10 @@ +/*! + * Bootstrap v5.1.3 (https://getbootstrap.com/) + * Copyright 2011-2021 The Bootstrap Authors + * Copyright 2011-2021 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE) + */@import"https://fonts.googleapis.com/css2?family=Source+Sans+Pro:wght@300;400;700&display=swap";:root{--bs-blue: #2780e3;--bs-indigo: #6610f2;--bs-purple: #613d7c;--bs-pink: #e83e8c;--bs-red: #ff0039;--bs-orange: #f0ad4e;--bs-yellow: #ff7518;--bs-green: #3fb618;--bs-teal: #20c997;--bs-cyan: #9954bb;--bs-white: #fff;--bs-gray: #868e96;--bs-gray-dark: #373a3c;--bs-gray-100: #f8f9fa;--bs-gray-200: #e9ecef;--bs-gray-300: #dee2e6;--bs-gray-400: #ced4da;--bs-gray-500: #adb5bd;--bs-gray-600: #868e96;--bs-gray-700: #495057;--bs-gray-800: #373a3c;--bs-gray-900: #212529;--bs-default: #373a3c;--bs-primary: #2780e3;--bs-secondary: #373a3c;--bs-success: #3fb618;--bs-info: #9954bb;--bs-warning: #ff7518;--bs-danger: #ff0039;--bs-light: #f8f9fa;--bs-dark: #373a3c;--bs-default-rgb: 55, 58, 60;--bs-primary-rgb: 39, 128, 227;--bs-secondary-rgb: 55, 58, 60;--bs-success-rgb: 63, 182, 24;--bs-info-rgb: 153, 84, 187;--bs-warning-rgb: 255, 117, 24;--bs-danger-rgb: 255, 0, 57;--bs-light-rgb: 248, 249, 250;--bs-dark-rgb: 55, 58, 60;--bs-white-rgb: 255, 255, 255;--bs-black-rgb: 0, 0, 0;--bs-body-color-rgb: 55, 58, 60;--bs-body-bg-rgb: 255, 255, 255;--bs-font-sans-serif: "Source Sans Pro", -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";--bs-font-monospace: SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;--bs-gradient: linear-gradient(180deg, rgba(255, 255, 255, 0.15), rgba(255, 255, 255, 0));--bs-root-font-size: 18px;--bs-body-font-family: var(--bs-font-sans-serif);--bs-body-font-size: 1rem;--bs-body-font-weight: 400;--bs-body-line-height: 1.5;--bs-body-color: #373a3c;--bs-body-bg: 
#fff}*,*::before,*::after{box-sizing:border-box}:root{font-size:var(--bs-root-font-size)}body{margin:0;font-family:var(--bs-body-font-family);font-size:var(--bs-body-font-size);font-weight:var(--bs-body-font-weight);line-height:var(--bs-body-line-height);color:var(--bs-body-color);text-align:var(--bs-body-text-align);background-color:var(--bs-body-bg);-webkit-text-size-adjust:100%;-webkit-tap-highlight-color:rgba(0,0,0,0)}hr{margin:1rem 0;color:inherit;background-color:currentColor;border:0;opacity:.25}hr:not([size]){height:1px}h6,.h6,h5,.h5,h4,.h4,h3,.h3,h2,.h2,h1,.h1{margin-top:0;margin-bottom:.5rem;font-weight:400;line-height:1.2}h1,.h1{font-size:calc(1.345rem + 1.14vw)}@media(min-width: 1200px){h1,.h1{font-size:2.2rem}}h2,.h2{font-size:calc(1.3rem + 0.6vw)}@media(min-width: 1200px){h2,.h2{font-size:1.75rem}}h3,.h3{font-size:calc(1.275rem + 0.3vw)}@media(min-width: 1200px){h3,.h3{font-size:1.5rem}}h4,.h4{font-size:1.25rem}h5,.h5{font-size:1.1rem}h6,.h6{font-size:1rem}p{margin-top:0;margin-bottom:1rem}abbr[title],abbr[data-bs-original-title]{text-decoration:underline dotted;-webkit-text-decoration:underline dotted;-moz-text-decoration:underline dotted;-ms-text-decoration:underline dotted;-o-text-decoration:underline dotted;cursor:help;text-decoration-skip-ink:none}address{margin-bottom:1rem;font-style:normal;line-height:inherit}ol,ul{padding-left:2rem}ol,ul,dl{margin-top:0;margin-bottom:1rem}ol ol,ul ul,ol ul,ul ol{margin-bottom:0}dt{font-weight:700}dd{margin-bottom:.5rem;margin-left:0}blockquote{margin:0 0 1rem;padding:.625rem 1.25rem;border-left:.25rem solid #e9ecef}blockquote p:last-child,blockquote ul:last-child,blockquote ol:last-child{margin-bottom:0}b,strong{font-weight:bolder}small,.small{font-size:0.875em}mark,.mark{padding:.2em;background-color:#fcf8e3}sub,sup{position:relative;font-size:0.75em;line-height:0;vertical-align:baseline}sub{bottom:-0.25em}sup{top:-0.5em}a{color:#2780e3;text-decoration:underline;-webkit-text-decoration:underline;-moz-text-decoration:underline;-ms-text-decoration:underline;-o-text-decoration:underline}a:hover{color:#1f66b6}a:not([href]):not([class]),a:not([href]):not([class]):hover{color:inherit;text-decoration:none}pre,code,kbd,samp{font-family:var(--bs-font-monospace);font-size:1em;direction:ltr /* rtl:ignore */;unicode-bidi:bidi-override}pre{display:block;margin-top:0;margin-bottom:1rem;overflow:auto;font-size:0.875em;color:#000;background-color:#f7f7f7;padding:.5rem;border:1px solid #dee2e6}pre code{background-color:transparent;font-size:inherit;color:inherit;word-break:normal}code{font-size:0.875em;color:#9954bb;background-color:#f7f7f7;padding:.125rem .25rem;word-wrap:break-word}a>code{color:inherit}kbd{padding:.4rem .4rem;font-size:0.875em;color:#fff;background-color:#212529}kbd kbd{padding:0;font-size:1em;font-weight:700}figure{margin:0 0 
1rem}img,svg{vertical-align:middle}table{caption-side:bottom;border-collapse:collapse}caption{padding-top:.5rem;padding-bottom:.5rem;color:#868e96;text-align:left}th{text-align:inherit;text-align:-webkit-match-parent}thead,tbody,tfoot,tr,td,th{border-color:inherit;border-style:solid;border-width:0}label{display:inline-block}button{border-radius:0}button:focus:not(:focus-visible){outline:0}input,button,select,optgroup,textarea{margin:0;font-family:inherit;font-size:inherit;line-height:inherit}button,select{text-transform:none}[role=button]{cursor:pointer}select{word-wrap:normal}select:disabled{opacity:1}[list]::-webkit-calendar-picker-indicator{display:none}button,[type=button],[type=reset],[type=submit]{-webkit-appearance:button}button:not(:disabled),[type=button]:not(:disabled),[type=reset]:not(:disabled),[type=submit]:not(:disabled){cursor:pointer}::-moz-focus-inner{padding:0;border-style:none}textarea{resize:vertical}fieldset{min-width:0;padding:0;margin:0;border:0}legend{float:left;width:100%;padding:0;margin-bottom:.5rem;font-size:calc(1.275rem + 0.3vw);line-height:inherit}@media(min-width: 1200px){legend{font-size:1.5rem}}legend+*{clear:left}::-webkit-datetime-edit-fields-wrapper,::-webkit-datetime-edit-text,::-webkit-datetime-edit-minute,::-webkit-datetime-edit-hour-field,::-webkit-datetime-edit-day-field,::-webkit-datetime-edit-month-field,::-webkit-datetime-edit-year-field{padding:0}::-webkit-inner-spin-button{height:auto}[type=search]{outline-offset:-2px;-webkit-appearance:textfield}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-color-swatch-wrapper{padding:0}::file-selector-button{font:inherit}::-webkit-file-upload-button{font:inherit;-webkit-appearance:button}output{display:inline-block}iframe{border:0}summary{display:list-item;cursor:pointer}progress{vertical-align:baseline}[hidden]{display:none !important}.lead{font-size:1.25rem;font-weight:300}.display-1{font-size:calc(1.625rem + 4.5vw);font-weight:300;line-height:1.2}@media(min-width: 1200px){.display-1{font-size:5rem}}.display-2{font-size:calc(1.575rem + 3.9vw);font-weight:300;line-height:1.2}@media(min-width: 1200px){.display-2{font-size:4.5rem}}.display-3{font-size:calc(1.525rem + 3.3vw);font-weight:300;line-height:1.2}@media(min-width: 1200px){.display-3{font-size:4rem}}.display-4{font-size:calc(1.475rem + 2.7vw);font-weight:300;line-height:1.2}@media(min-width: 1200px){.display-4{font-size:3.5rem}}.display-5{font-size:calc(1.425rem + 2.1vw);font-weight:300;line-height:1.2}@media(min-width: 1200px){.display-5{font-size:3rem}}.display-6{font-size:calc(1.375rem + 1.5vw);font-weight:300;line-height:1.2}@media(min-width: 1200px){.display-6{font-size:2.5rem}}.list-unstyled{padding-left:0;list-style:none}.list-inline{padding-left:0;list-style:none}.list-inline-item{display:inline-block}.list-inline-item:not(:last-child){margin-right:.5rem}.initialism{font-size:0.875em;text-transform:uppercase}.blockquote{margin-bottom:1rem;font-size:1.25rem}.blockquote>:last-child{margin-bottom:0}.blockquote-footer{margin-top:-1rem;margin-bottom:1rem;font-size:0.875em;color:#868e96}.blockquote-footer::before{content:"— "}.img-fluid{max-width:100%;height:auto}.img-thumbnail{padding:.25rem;background-color:#fff;border:1px solid #dee2e6;max-width:100%;height:auto}.figure{display:inline-block}.figure-img{margin-bottom:.5rem;line-height:1}.figure-caption{font-size:0.875em;color:#868e96}.grid{display:grid;grid-template-rows:repeat(var(--bs-rows, 1), 1fr);grid-template-columns:repeat(var(--bs-columns, 12), 1fr);gap:var(--bs-gap, 
1.5rem)}.grid .g-col-1{grid-column:auto/span 1}.grid .g-col-2{grid-column:auto/span 2}.grid .g-col-3{grid-column:auto/span 3}.grid .g-col-4{grid-column:auto/span 4}.grid .g-col-5{grid-column:auto/span 5}.grid .g-col-6{grid-column:auto/span 6}.grid .g-col-7{grid-column:auto/span 7}.grid .g-col-8{grid-column:auto/span 8}.grid .g-col-9{grid-column:auto/span 9}.grid .g-col-10{grid-column:auto/span 10}.grid .g-col-11{grid-column:auto/span 11}.grid .g-col-12{grid-column:auto/span 12}.grid .g-start-1{grid-column-start:1}.grid .g-start-2{grid-column-start:2}.grid .g-start-3{grid-column-start:3}.grid .g-start-4{grid-column-start:4}.grid .g-start-5{grid-column-start:5}.grid .g-start-6{grid-column-start:6}.grid .g-start-7{grid-column-start:7}.grid .g-start-8{grid-column-start:8}.grid .g-start-9{grid-column-start:9}.grid .g-start-10{grid-column-start:10}.grid .g-start-11{grid-column-start:11}@media(min-width: 576px){.grid .g-col-sm-1{grid-column:auto/span 1}.grid .g-col-sm-2{grid-column:auto/span 2}.grid .g-col-sm-3{grid-column:auto/span 3}.grid .g-col-sm-4{grid-column:auto/span 4}.grid .g-col-sm-5{grid-column:auto/span 5}.grid .g-col-sm-6{grid-column:auto/span 6}.grid .g-col-sm-7{grid-column:auto/span 7}.grid .g-col-sm-8{grid-column:auto/span 8}.grid .g-col-sm-9{grid-column:auto/span 9}.grid .g-col-sm-10{grid-column:auto/span 10}.grid .g-col-sm-11{grid-column:auto/span 11}.grid .g-col-sm-12{grid-column:auto/span 12}.grid .g-start-sm-1{grid-column-start:1}.grid .g-start-sm-2{grid-column-start:2}.grid .g-start-sm-3{grid-column-start:3}.grid .g-start-sm-4{grid-column-start:4}.grid .g-start-sm-5{grid-column-start:5}.grid .g-start-sm-6{grid-column-start:6}.grid .g-start-sm-7{grid-column-start:7}.grid .g-start-sm-8{grid-column-start:8}.grid .g-start-sm-9{grid-column-start:9}.grid .g-start-sm-10{grid-column-start:10}.grid .g-start-sm-11{grid-column-start:11}}@media(min-width: 768px){.grid .g-col-md-1{grid-column:auto/span 1}.grid .g-col-md-2{grid-column:auto/span 2}.grid .g-col-md-3{grid-column:auto/span 3}.grid .g-col-md-4{grid-column:auto/span 4}.grid .g-col-md-5{grid-column:auto/span 5}.grid .g-col-md-6{grid-column:auto/span 6}.grid .g-col-md-7{grid-column:auto/span 7}.grid .g-col-md-8{grid-column:auto/span 8}.grid .g-col-md-9{grid-column:auto/span 9}.grid .g-col-md-10{grid-column:auto/span 10}.grid .g-col-md-11{grid-column:auto/span 11}.grid .g-col-md-12{grid-column:auto/span 12}.grid .g-start-md-1{grid-column-start:1}.grid .g-start-md-2{grid-column-start:2}.grid .g-start-md-3{grid-column-start:3}.grid .g-start-md-4{grid-column-start:4}.grid .g-start-md-5{grid-column-start:5}.grid .g-start-md-6{grid-column-start:6}.grid .g-start-md-7{grid-column-start:7}.grid .g-start-md-8{grid-column-start:8}.grid .g-start-md-9{grid-column-start:9}.grid .g-start-md-10{grid-column-start:10}.grid .g-start-md-11{grid-column-start:11}}@media(min-width: 992px){.grid .g-col-lg-1{grid-column:auto/span 1}.grid .g-col-lg-2{grid-column:auto/span 2}.grid .g-col-lg-3{grid-column:auto/span 3}.grid .g-col-lg-4{grid-column:auto/span 4}.grid .g-col-lg-5{grid-column:auto/span 5}.grid .g-col-lg-6{grid-column:auto/span 6}.grid .g-col-lg-7{grid-column:auto/span 7}.grid .g-col-lg-8{grid-column:auto/span 8}.grid .g-col-lg-9{grid-column:auto/span 9}.grid .g-col-lg-10{grid-column:auto/span 10}.grid .g-col-lg-11{grid-column:auto/span 11}.grid .g-col-lg-12{grid-column:auto/span 12}.grid .g-start-lg-1{grid-column-start:1}.grid .g-start-lg-2{grid-column-start:2}.grid .g-start-lg-3{grid-column-start:3}.grid .g-start-lg-4{grid-column-start:4}.grid 
.g-start-lg-5{grid-column-start:5}.grid .g-start-lg-6{grid-column-start:6}.grid .g-start-lg-7{grid-column-start:7}.grid .g-start-lg-8{grid-column-start:8}.grid .g-start-lg-9{grid-column-start:9}.grid .g-start-lg-10{grid-column-start:10}.grid .g-start-lg-11{grid-column-start:11}}@media(min-width: 1200px){.grid .g-col-xl-1{grid-column:auto/span 1}.grid .g-col-xl-2{grid-column:auto/span 2}.grid .g-col-xl-3{grid-column:auto/span 3}.grid .g-col-xl-4{grid-column:auto/span 4}.grid .g-col-xl-5{grid-column:auto/span 5}.grid .g-col-xl-6{grid-column:auto/span 6}.grid .g-col-xl-7{grid-column:auto/span 7}.grid .g-col-xl-8{grid-column:auto/span 8}.grid .g-col-xl-9{grid-column:auto/span 9}.grid .g-col-xl-10{grid-column:auto/span 10}.grid .g-col-xl-11{grid-column:auto/span 11}.grid .g-col-xl-12{grid-column:auto/span 12}.grid .g-start-xl-1{grid-column-start:1}.grid .g-start-xl-2{grid-column-start:2}.grid .g-start-xl-3{grid-column-start:3}.grid .g-start-xl-4{grid-column-start:4}.grid .g-start-xl-5{grid-column-start:5}.grid .g-start-xl-6{grid-column-start:6}.grid .g-start-xl-7{grid-column-start:7}.grid .g-start-xl-8{grid-column-start:8}.grid .g-start-xl-9{grid-column-start:9}.grid .g-start-xl-10{grid-column-start:10}.grid .g-start-xl-11{grid-column-start:11}}@media(min-width: 1400px){.grid .g-col-xxl-1{grid-column:auto/span 1}.grid .g-col-xxl-2{grid-column:auto/span 2}.grid .g-col-xxl-3{grid-column:auto/span 3}.grid .g-col-xxl-4{grid-column:auto/span 4}.grid .g-col-xxl-5{grid-column:auto/span 5}.grid .g-col-xxl-6{grid-column:auto/span 6}.grid .g-col-xxl-7{grid-column:auto/span 7}.grid .g-col-xxl-8{grid-column:auto/span 8}.grid .g-col-xxl-9{grid-column:auto/span 9}.grid .g-col-xxl-10{grid-column:auto/span 10}.grid .g-col-xxl-11{grid-column:auto/span 11}.grid .g-col-xxl-12{grid-column:auto/span 12}.grid .g-start-xxl-1{grid-column-start:1}.grid .g-start-xxl-2{grid-column-start:2}.grid .g-start-xxl-3{grid-column-start:3}.grid .g-start-xxl-4{grid-column-start:4}.grid .g-start-xxl-5{grid-column-start:5}.grid .g-start-xxl-6{grid-column-start:6}.grid .g-start-xxl-7{grid-column-start:7}.grid .g-start-xxl-8{grid-column-start:8}.grid .g-start-xxl-9{grid-column-start:9}.grid .g-start-xxl-10{grid-column-start:10}.grid .g-start-xxl-11{grid-column-start:11}}.table{--bs-table-bg: transparent;--bs-table-accent-bg: transparent;--bs-table-striped-color: #373a3c;--bs-table-striped-bg: rgba(0, 0, 0, 0.05);--bs-table-active-color: #373a3c;--bs-table-active-bg: rgba(0, 0, 0, 0.1);--bs-table-hover-color: #373a3c;--bs-table-hover-bg: rgba(0, 0, 0, 0.075);width:100%;margin-bottom:1rem;color:#373a3c;vertical-align:top;border-color:#dee2e6}.table>:not(caption)>*>*{padding:.5rem .5rem;background-color:var(--bs-table-bg);border-bottom-width:1px;box-shadow:inset 0 0 0 9999px var(--bs-table-accent-bg)}.table>tbody{vertical-align:inherit}.table>thead{vertical-align:bottom}.table>:not(:first-child){border-top:2px solid currentColor}.caption-top{caption-side:top}.table-sm>:not(caption)>*>*{padding:.25rem .25rem}.table-bordered>:not(caption)>*{border-width:1px 0}.table-bordered>:not(caption)>*>*{border-width:0 1px}.table-borderless>:not(caption)>*>*{border-bottom-width:0}.table-borderless>:not(:first-child){border-top-width:0}.table-striped>tbody>tr:nth-of-type(odd)>*{--bs-table-accent-bg: var(--bs-table-striped-bg);color:var(--bs-table-striped-color)}.table-active{--bs-table-accent-bg: var(--bs-table-active-bg);color:var(--bs-table-active-color)}.table-hover>tbody>tr:hover>*{--bs-table-accent-bg: 
var(--bs-table-hover-bg);color:var(--bs-table-hover-color)}.table-primary{--bs-table-bg: #d4e6f9;--bs-table-striped-bg: #c9dbed;--bs-table-striped-color: #000;--bs-table-active-bg: #bfcfe0;--bs-table-active-color: #000;--bs-table-hover-bg: #c4d5e6;--bs-table-hover-color: #000;color:#000;border-color:#bfcfe0}.table-secondary{--bs-table-bg: #d7d8d8;--bs-table-striped-bg: #cccdcd;--bs-table-striped-color: #000;--bs-table-active-bg: #c2c2c2;--bs-table-active-color: #000;--bs-table-hover-bg: #c7c8c8;--bs-table-hover-color: #000;color:#000;border-color:#c2c2c2}.table-success{--bs-table-bg: #d9f0d1;--bs-table-striped-bg: #cee4c7;--bs-table-striped-color: #000;--bs-table-active-bg: #c3d8bc;--bs-table-active-color: #000;--bs-table-hover-bg: #c9dec1;--bs-table-hover-color: #000;color:#000;border-color:#c3d8bc}.table-info{--bs-table-bg: #ebddf1;--bs-table-striped-bg: #dfd2e5;--bs-table-striped-color: #000;--bs-table-active-bg: #d4c7d9;--bs-table-active-color: #000;--bs-table-hover-bg: #d9ccdf;--bs-table-hover-color: #000;color:#000;border-color:#d4c7d9}.table-warning{--bs-table-bg: #ffe3d1;--bs-table-striped-bg: #f2d8c7;--bs-table-striped-color: #000;--bs-table-active-bg: #e6ccbc;--bs-table-active-color: #000;--bs-table-hover-bg: #ecd2c1;--bs-table-hover-color: #000;color:#000;border-color:#e6ccbc}.table-danger{--bs-table-bg: #ffccd7;--bs-table-striped-bg: #f2c2cc;--bs-table-striped-color: #000;--bs-table-active-bg: #e6b8c2;--bs-table-active-color: #000;--bs-table-hover-bg: #ecbdc7;--bs-table-hover-color: #000;color:#000;border-color:#e6b8c2}.table-light{--bs-table-bg: #f8f9fa;--bs-table-striped-bg: #ecedee;--bs-table-striped-color: #000;--bs-table-active-bg: #dfe0e1;--bs-table-active-color: #000;--bs-table-hover-bg: #e5e6e7;--bs-table-hover-color: #000;color:#000;border-color:#dfe0e1}.table-dark{--bs-table-bg: #373a3c;--bs-table-striped-bg: #414446;--bs-table-striped-color: #fff;--bs-table-active-bg: #4b4e50;--bs-table-active-color: #fff;--bs-table-hover-bg: #46494b;--bs-table-hover-color: #fff;color:#fff;border-color:#4b4e50}.table-responsive{overflow-x:auto;-webkit-overflow-scrolling:touch}@media(max-width: 575.98px){.table-responsive-sm{overflow-x:auto;-webkit-overflow-scrolling:touch}}@media(max-width: 767.98px){.table-responsive-md{overflow-x:auto;-webkit-overflow-scrolling:touch}}@media(max-width: 991.98px){.table-responsive-lg{overflow-x:auto;-webkit-overflow-scrolling:touch}}@media(max-width: 1199.98px){.table-responsive-xl{overflow-x:auto;-webkit-overflow-scrolling:touch}}@media(max-width: 1399.98px){.table-responsive-xxl{overflow-x:auto;-webkit-overflow-scrolling:touch}}.form-label,.shiny-input-container .control-label{margin-bottom:.5rem}.col-form-label{padding-top:calc(0.375rem + 1px);padding-bottom:calc(0.375rem + 1px);margin-bottom:0;font-size:inherit;line-height:1.5}.col-form-label-lg{padding-top:calc(0.5rem + 1px);padding-bottom:calc(0.5rem + 1px);font-size:1.25rem}.col-form-label-sm{padding-top:calc(0.25rem + 1px);padding-bottom:calc(0.25rem + 1px);font-size:0.875rem}.form-text{margin-top:.25rem;font-size:0.875em;color:#868e96}.form-control{display:block;width:100%;padding:.375rem .75rem;font-size:1rem;font-weight:400;line-height:1.5;color:#373a3c;background-color:#fff;background-clip:padding-box;border:1px solid #ced4da;appearance:none;-webkit-appearance:none;-moz-appearance:none;-ms-appearance:none;-o-appearance:none;border-radius:0;transition:border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media(prefers-reduced-motion: 
reduce){.form-control{transition:none}}.form-control[type=file]{overflow:hidden}.form-control[type=file]:not(:disabled):not([readonly]){cursor:pointer}.form-control:focus{color:#373a3c;background-color:#fff;border-color:#93c0f1;outline:0;box-shadow:0 0 0 .25rem rgba(39,128,227,.25)}.form-control::-webkit-date-and-time-value{height:1.5em}.form-control::placeholder{color:#868e96;opacity:1}.form-control:disabled,.form-control[readonly]{background-color:#e9ecef;opacity:1}.form-control::file-selector-button{padding:.375rem .75rem;margin:-0.375rem -0.75rem;margin-inline-end:.75rem;color:#373a3c;background-color:#e9ecef;pointer-events:none;border-color:inherit;border-style:solid;border-width:0;border-inline-end-width:1px;border-radius:0;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media(prefers-reduced-motion: reduce){.form-control::file-selector-button{transition:none}}.form-control:hover:not(:disabled):not([readonly])::file-selector-button{background-color:#dde0e3}.form-control::-webkit-file-upload-button{padding:.375rem .75rem;margin:-0.375rem -0.75rem;margin-inline-end:.75rem;color:#373a3c;background-color:#e9ecef;pointer-events:none;border-color:inherit;border-style:solid;border-width:0;border-inline-end-width:1px;border-radius:0;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media(prefers-reduced-motion: reduce){.form-control::-webkit-file-upload-button{transition:none}}.form-control:hover:not(:disabled):not([readonly])::-webkit-file-upload-button{background-color:#dde0e3}.form-control-plaintext{display:block;width:100%;padding:.375rem 0;margin-bottom:0;line-height:1.5;color:#373a3c;background-color:transparent;border:solid transparent;border-width:1px 0}.form-control-plaintext.form-control-sm,.form-control-plaintext.form-control-lg{padding-right:0;padding-left:0}.form-control-sm{min-height:calc(1.5em + 0.5rem + 2px);padding:.25rem .5rem;font-size:0.875rem}.form-control-sm::file-selector-button{padding:.25rem .5rem;margin:-0.25rem -0.5rem;margin-inline-end:.5rem}.form-control-sm::-webkit-file-upload-button{padding:.25rem .5rem;margin:-0.25rem -0.5rem;margin-inline-end:.5rem}.form-control-lg{min-height:calc(1.5em + 1rem + 2px);padding:.5rem 1rem;font-size:1.25rem}.form-control-lg::file-selector-button{padding:.5rem 1rem;margin:-0.5rem -1rem;margin-inline-end:1rem}.form-control-lg::-webkit-file-upload-button{padding:.5rem 1rem;margin:-0.5rem -1rem;margin-inline-end:1rem}textarea.form-control{min-height:calc(1.5em + 0.75rem + 2px)}textarea.form-control-sm{min-height:calc(1.5em + 0.5rem + 2px)}textarea.form-control-lg{min-height:calc(1.5em + 1rem + 2px)}.form-control-color{width:3rem;height:auto;padding:.375rem}.form-control-color:not(:disabled):not([readonly]){cursor:pointer}.form-control-color::-moz-color-swatch{height:1.5em}.form-control-color::-webkit-color-swatch{height:1.5em}.form-select{display:block;width:100%;padding:.375rem 2.25rem .375rem .75rem;-moz-padding-start:calc(0.75rem - 3px);font-size:1rem;font-weight:400;line-height:1.5;color:#373a3c;background-color:#fff;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16'%3e%3cpath fill='none' stroke='%23373a3c' stroke-linecap='round' stroke-linejoin='round' stroke-width='2' d='M2 5l6 6 6-6'/%3e%3c/svg%3e");background-repeat:no-repeat;background-position:right .75rem center;background-size:16px 12px;border:1px solid 
#ced4da;border-radius:0;transition:border-color .15s ease-in-out,box-shadow .15s ease-in-out;appearance:none;-webkit-appearance:none;-moz-appearance:none;-ms-appearance:none;-o-appearance:none}@media(prefers-reduced-motion: reduce){.form-select{transition:none}}.form-select:focus{border-color:#93c0f1;outline:0;box-shadow:0 0 0 .25rem rgba(39,128,227,.25)}.form-select[multiple],.form-select[size]:not([size="1"]){padding-right:.75rem;background-image:none}.form-select:disabled{background-color:#e9ecef}.form-select:-moz-focusring{color:transparent;text-shadow:0 0 0 #373a3c}.form-select-sm{padding-top:.25rem;padding-bottom:.25rem;padding-left:.5rem;font-size:0.875rem}.form-select-lg{padding-top:.5rem;padding-bottom:.5rem;padding-left:1rem;font-size:1.25rem}.form-check,.shiny-input-container .checkbox,.shiny-input-container .radio{display:block;min-height:1.5rem;padding-left:0;margin-bottom:.125rem}.form-check .form-check-input,.form-check .shiny-input-container .checkbox input,.form-check .shiny-input-container .radio input,.shiny-input-container .checkbox .form-check-input,.shiny-input-container .checkbox .shiny-input-container .checkbox input,.shiny-input-container .checkbox .shiny-input-container .radio input,.shiny-input-container .radio .form-check-input,.shiny-input-container .radio .shiny-input-container .checkbox input,.shiny-input-container .radio .shiny-input-container .radio input{float:left;margin-left:0}.form-check-input,.shiny-input-container .checkbox input,.shiny-input-container .checkbox-inline input,.shiny-input-container .radio input,.shiny-input-container .radio-inline input{width:1em;height:1em;margin-top:.25em;vertical-align:top;background-color:#fff;background-repeat:no-repeat;background-position:center;background-size:contain;border:1px solid rgba(0,0,0,.25);appearance:none;-webkit-appearance:none;-moz-appearance:none;-ms-appearance:none;-o-appearance:none;color-adjust:exact;-webkit-print-color-adjust:exact}.form-check-input[type=radio],.shiny-input-container .checkbox input[type=radio],.shiny-input-container .checkbox-inline input[type=radio],.shiny-input-container .radio input[type=radio],.shiny-input-container .radio-inline input[type=radio]{border-radius:50%}.form-check-input:active,.shiny-input-container .checkbox input:active,.shiny-input-container .checkbox-inline input:active,.shiny-input-container .radio input:active,.shiny-input-container .radio-inline input:active{filter:brightness(90%)}.form-check-input:focus,.shiny-input-container .checkbox input:focus,.shiny-input-container .checkbox-inline input:focus,.shiny-input-container .radio input:focus,.shiny-input-container .radio-inline input:focus{border-color:#93c0f1;outline:0;box-shadow:0 0 0 .25rem rgba(39,128,227,.25)}.form-check-input:checked,.shiny-input-container .checkbox input:checked,.shiny-input-container .checkbox-inline input:checked,.shiny-input-container .radio input:checked,.shiny-input-container .radio-inline input:checked{background-color:#2780e3;border-color:#2780e3}.form-check-input:checked[type=checkbox],.shiny-input-container .checkbox input:checked[type=checkbox],.shiny-input-container .checkbox-inline input:checked[type=checkbox],.shiny-input-container .radio input:checked[type=checkbox],.shiny-input-container .radio-inline input:checked[type=checkbox]{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 20 20'%3e%3cpath fill='none' stroke='%23fff' stroke-linecap='round' stroke-linejoin='round' stroke-width='3' d='M6 10l3 
3l6-6'/%3e%3c/svg%3e")}.form-check-input:checked[type=radio],.shiny-input-container .checkbox input:checked[type=radio],.shiny-input-container .checkbox-inline input:checked[type=radio],.shiny-input-container .radio input:checked[type=radio],.shiny-input-container .radio-inline input:checked[type=radio]{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='2' fill='%23fff'/%3e%3c/svg%3e")}.form-check-input[type=checkbox]:indeterminate,.shiny-input-container .checkbox input[type=checkbox]:indeterminate,.shiny-input-container .checkbox-inline input[type=checkbox]:indeterminate,.shiny-input-container .radio input[type=checkbox]:indeterminate,.shiny-input-container .radio-inline input[type=checkbox]:indeterminate{background-color:#2780e3;border-color:#2780e3;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 20 20'%3e%3cpath fill='none' stroke='%23fff' stroke-linecap='round' stroke-linejoin='round' stroke-width='3' d='M6 10h8'/%3e%3c/svg%3e")}.form-check-input:disabled,.shiny-input-container .checkbox input:disabled,.shiny-input-container .checkbox-inline input:disabled,.shiny-input-container .radio input:disabled,.shiny-input-container .radio-inline input:disabled{pointer-events:none;filter:none;opacity:.5}.form-check-input[disabled]~.form-check-label,.form-check-input[disabled]~span,.form-check-input:disabled~.form-check-label,.form-check-input:disabled~span,.shiny-input-container .checkbox input[disabled]~.form-check-label,.shiny-input-container .checkbox input[disabled]~span,.shiny-input-container .checkbox input:disabled~.form-check-label,.shiny-input-container .checkbox input:disabled~span,.shiny-input-container .checkbox-inline input[disabled]~.form-check-label,.shiny-input-container .checkbox-inline input[disabled]~span,.shiny-input-container .checkbox-inline input:disabled~.form-check-label,.shiny-input-container .checkbox-inline input:disabled~span,.shiny-input-container .radio input[disabled]~.form-check-label,.shiny-input-container .radio input[disabled]~span,.shiny-input-container .radio input:disabled~.form-check-label,.shiny-input-container .radio input:disabled~span,.shiny-input-container .radio-inline input[disabled]~.form-check-label,.shiny-input-container .radio-inline input[disabled]~span,.shiny-input-container .radio-inline input:disabled~.form-check-label,.shiny-input-container .radio-inline input:disabled~span{opacity:.5}.form-check-label,.shiny-input-container .checkbox label,.shiny-input-container .checkbox-inline label,.shiny-input-container .radio label,.shiny-input-container .radio-inline label{cursor:pointer}.form-switch{padding-left:2.5em}.form-switch .form-check-input{width:2em;margin-left:-2.5em;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' fill='rgba%280, 0, 0, 0.25%29'/%3e%3c/svg%3e");background-position:left center;transition:background-position .15s ease-in-out}@media(prefers-reduced-motion: reduce){.form-switch .form-check-input{transition:none}}.form-switch .form-check-input:focus{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' fill='%2393c0f1'/%3e%3c/svg%3e")}.form-switch .form-check-input:checked{background-position:right center;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' 
fill='%23fff'/%3e%3c/svg%3e")}.form-check-inline,.shiny-input-container .checkbox-inline,.shiny-input-container .radio-inline{display:inline-block;margin-right:1rem}.btn-check{position:absolute;clip:rect(0, 0, 0, 0);pointer-events:none}.btn-check[disabled]+.btn,.btn-check:disabled+.btn{pointer-events:none;filter:none;opacity:.65}.form-range{width:100%;height:1.5rem;padding:0;background-color:transparent;appearance:none;-webkit-appearance:none;-moz-appearance:none;-ms-appearance:none;-o-appearance:none}.form-range:focus{outline:0}.form-range:focus::-webkit-slider-thumb{box-shadow:0 0 0 1px #fff,0 0 0 .25rem rgba(39,128,227,.25)}.form-range:focus::-moz-range-thumb{box-shadow:0 0 0 1px #fff,0 0 0 .25rem rgba(39,128,227,.25)}.form-range::-moz-focus-outer{border:0}.form-range::-webkit-slider-thumb{width:1rem;height:1rem;margin-top:-0.25rem;background-color:#2780e3;border:0;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;appearance:none;-webkit-appearance:none;-moz-appearance:none;-ms-appearance:none;-o-appearance:none}@media(prefers-reduced-motion: reduce){.form-range::-webkit-slider-thumb{transition:none}}.form-range::-webkit-slider-thumb:active{background-color:#bed9f7}.form-range::-webkit-slider-runnable-track{width:100%;height:.5rem;color:transparent;cursor:pointer;background-color:#dee2e6;border-color:transparent}.form-range::-moz-range-thumb{width:1rem;height:1rem;background-color:#2780e3;border:0;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;appearance:none;-webkit-appearance:none;-moz-appearance:none;-ms-appearance:none;-o-appearance:none}@media(prefers-reduced-motion: reduce){.form-range::-moz-range-thumb{transition:none}}.form-range::-moz-range-thumb:active{background-color:#bed9f7}.form-range::-moz-range-track{width:100%;height:.5rem;color:transparent;cursor:pointer;background-color:#dee2e6;border-color:transparent}.form-range:disabled{pointer-events:none}.form-range:disabled::-webkit-slider-thumb{background-color:#adb5bd}.form-range:disabled::-moz-range-thumb{background-color:#adb5bd}.form-floating{position:relative}.form-floating>.form-control,.form-floating>.form-select{height:calc(3.5rem + 2px);line-height:1.25}.form-floating>label{position:absolute;top:0;left:0;height:100%;padding:1rem .75rem;pointer-events:none;border:1px solid transparent;transform-origin:0 0;transition:opacity .1s ease-in-out,transform .1s ease-in-out}@media(prefers-reduced-motion: reduce){.form-floating>label{transition:none}}.form-floating>.form-control{padding:1rem .75rem}.form-floating>.form-control::placeholder{color:transparent}.form-floating>.form-control:focus,.form-floating>.form-control:not(:placeholder-shown){padding-top:1.625rem;padding-bottom:.625rem}.form-floating>.form-control:-webkit-autofill{padding-top:1.625rem;padding-bottom:.625rem}.form-floating>.form-select{padding-top:1.625rem;padding-bottom:.625rem}.form-floating>.form-control:focus~label,.form-floating>.form-control:not(:placeholder-shown)~label,.form-floating>.form-select~label{opacity:.65;transform:scale(0.85) translateY(-0.5rem) translateX(0.15rem)}.form-floating>.form-control:-webkit-autofill~label{opacity:.65;transform:scale(0.85) translateY(-0.5rem) translateX(0.15rem)}.input-group{position:relative;display:flex;display:-webkit-flex;flex-wrap:wrap;-webkit-flex-wrap:wrap;align-items:stretch;-webkit-align-items:stretch;width:100%}.input-group>.form-control,.input-group>.form-select{position:relative;flex:1 1 
auto;-webkit-flex:1 1 auto;width:1%;min-width:0}.input-group>.form-control:focus,.input-group>.form-select:focus{z-index:3}.input-group .btn{position:relative;z-index:2}.input-group .btn:focus{z-index:3}.input-group-text{display:flex;display:-webkit-flex;align-items:center;-webkit-align-items:center;padding:.375rem .75rem;font-size:1rem;font-weight:400;line-height:1.5;color:#373a3c;text-align:center;white-space:nowrap;background-color:#e9ecef;border:1px solid #ced4da}.input-group-lg>.form-control,.input-group-lg>.form-select,.input-group-lg>.input-group-text,.input-group-lg>.btn{padding:.5rem 1rem;font-size:1.25rem}.input-group-sm>.form-control,.input-group-sm>.form-select,.input-group-sm>.input-group-text,.input-group-sm>.btn{padding:.25rem .5rem;font-size:0.875rem}.input-group-lg>.form-select,.input-group-sm>.form-select{padding-right:3rem}.input-group>:not(:first-child):not(.dropdown-menu):not(.valid-tooltip):not(.valid-feedback):not(.invalid-tooltip):not(.invalid-feedback){margin-left:-1px}.valid-feedback{display:none;width:100%;margin-top:.25rem;font-size:0.875em;color:#3fb618}.valid-tooltip{position:absolute;top:100%;z-index:5;display:none;max-width:100%;padding:.25rem .5rem;margin-top:.1rem;font-size:0.875rem;color:#fff;background-color:rgba(63,182,24,.9)}.was-validated :valid~.valid-feedback,.was-validated :valid~.valid-tooltip,.is-valid~.valid-feedback,.is-valid~.valid-tooltip{display:block}.was-validated .form-control:valid,.form-control.is-valid{border-color:#3fb618;padding-right:calc(1.5em + 0.75rem);background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 8 8'%3e%3cpath fill='%233fb618' d='M2.3 6.73L.6 4.53c-.4-1.04.46-1.4 1.1-.8l1.1 1.4 3.4-3.8c.6-.63 1.6-.27 1.2.7l-4 4.6c-.43.5-.8.4-1.1.1z'/%3e%3c/svg%3e");background-repeat:no-repeat;background-position:right calc(0.375em + 0.1875rem) center;background-size:calc(0.75em + 0.375rem) calc(0.75em + 0.375rem)}.was-validated .form-control:valid:focus,.form-control.is-valid:focus{border-color:#3fb618;box-shadow:0 0 0 .25rem rgba(63,182,24,.25)}.was-validated textarea.form-control:valid,textarea.form-control.is-valid{padding-right:calc(1.5em + 0.75rem);background-position:top calc(0.375em + 0.1875rem) right calc(0.375em + 0.1875rem)}.was-validated .form-select:valid,.form-select.is-valid{border-color:#3fb618}.was-validated .form-select:valid:not([multiple]):not([size]),.was-validated .form-select:valid:not([multiple])[size="1"],.form-select.is-valid:not([multiple]):not([size]),.form-select.is-valid:not([multiple])[size="1"]{padding-right:4.125rem;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16'%3e%3cpath fill='none' stroke='%23373a3c' stroke-linecap='round' stroke-linejoin='round' stroke-width='2' d='M2 5l6 6 6-6'/%3e%3c/svg%3e"),url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 8 8'%3e%3cpath fill='%233fb618' d='M2.3 6.73L.6 4.53c-.4-1.04.46-1.4 1.1-.8l1.1 1.4 3.4-3.8c.6-.63 1.6-.27 1.2.7l-4 4.6c-.43.5-.8.4-1.1.1z'/%3e%3c/svg%3e");background-position:right .75rem center,center right 2.25rem;background-size:16px 12px,calc(0.75em + 0.375rem) calc(0.75em + 0.375rem)}.was-validated .form-select:valid:focus,.form-select.is-valid:focus{border-color:#3fb618;box-shadow:0 0 0 .25rem rgba(63,182,24,.25)}.was-validated .form-check-input:valid,.form-check-input.is-valid{border-color:#3fb618}.was-validated .form-check-input:valid:checked,.form-check-input.is-valid:checked{background-color:#3fb618}.was-validated 
.form-check-input:valid:focus,.form-check-input.is-valid:focus{box-shadow:0 0 0 .25rem rgba(63,182,24,.25)}.was-validated .form-check-input:valid~.form-check-label,.form-check-input.is-valid~.form-check-label{color:#3fb618}.form-check-inline .form-check-input~.valid-feedback{margin-left:.5em}.was-validated .input-group .form-control:valid,.input-group .form-control.is-valid,.was-validated .input-group .form-select:valid,.input-group .form-select.is-valid{z-index:1}.was-validated .input-group .form-control:valid:focus,.input-group .form-control.is-valid:focus,.was-validated .input-group .form-select:valid:focus,.input-group .form-select.is-valid:focus{z-index:3}.invalid-feedback{display:none;width:100%;margin-top:.25rem;font-size:0.875em;color:#ff0039}.invalid-tooltip{position:absolute;top:100%;z-index:5;display:none;max-width:100%;padding:.25rem .5rem;margin-top:.1rem;font-size:0.875rem;color:#fff;background-color:rgba(255,0,57,.9)}.was-validated :invalid~.invalid-feedback,.was-validated :invalid~.invalid-tooltip,.is-invalid~.invalid-feedback,.is-invalid~.invalid-tooltip{display:block}.was-validated .form-control:invalid,.form-control.is-invalid{border-color:#ff0039;padding-right:calc(1.5em + 0.75rem);background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 12 12' width='12' height='12' fill='none' stroke='%23ff0039'%3e%3ccircle cx='6' cy='6' r='4.5'/%3e%3cpath stroke-linejoin='round' d='M5.8 3.6h.4L6 6.5z'/%3e%3ccircle cx='6' cy='8.2' r='.6' fill='%23ff0039' stroke='none'/%3e%3c/svg%3e");background-repeat:no-repeat;background-position:right calc(0.375em + 0.1875rem) center;background-size:calc(0.75em + 0.375rem) calc(0.75em + 0.375rem)}.was-validated .form-control:invalid:focus,.form-control.is-invalid:focus{border-color:#ff0039;box-shadow:0 0 0 .25rem rgba(255,0,57,.25)}.was-validated textarea.form-control:invalid,textarea.form-control.is-invalid{padding-right:calc(1.5em + 0.75rem);background-position:top calc(0.375em + 0.1875rem) right calc(0.375em + 0.1875rem)}.was-validated .form-select:invalid,.form-select.is-invalid{border-color:#ff0039}.was-validated .form-select:invalid:not([multiple]):not([size]),.was-validated .form-select:invalid:not([multiple])[size="1"],.form-select.is-invalid:not([multiple]):not([size]),.form-select.is-invalid:not([multiple])[size="1"]{padding-right:4.125rem;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16'%3e%3cpath fill='none' stroke='%23373a3c' stroke-linecap='round' stroke-linejoin='round' stroke-width='2' d='M2 5l6 6 6-6'/%3e%3c/svg%3e"),url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 12 12' width='12' height='12' fill='none' stroke='%23ff0039'%3e%3ccircle cx='6' cy='6' r='4.5'/%3e%3cpath stroke-linejoin='round' d='M5.8 3.6h.4L6 6.5z'/%3e%3ccircle cx='6' cy='8.2' r='.6' fill='%23ff0039' stroke='none'/%3e%3c/svg%3e");background-position:right .75rem center,center right 2.25rem;background-size:16px 12px,calc(0.75em + 0.375rem) calc(0.75em + 0.375rem)}.was-validated .form-select:invalid:focus,.form-select.is-invalid:focus{border-color:#ff0039;box-shadow:0 0 0 .25rem rgba(255,0,57,.25)}.was-validated .form-check-input:invalid,.form-check-input.is-invalid{border-color:#ff0039}.was-validated .form-check-input:invalid:checked,.form-check-input.is-invalid:checked{background-color:#ff0039}.was-validated .form-check-input:invalid:focus,.form-check-input.is-invalid:focus{box-shadow:0 0 0 .25rem rgba(255,0,57,.25)}.was-validated 
.form-check-input:invalid~.form-check-label,.form-check-input.is-invalid~.form-check-label{color:#ff0039}.form-check-inline .form-check-input~.invalid-feedback{margin-left:.5em}.was-validated .input-group .form-control:invalid,.input-group .form-control.is-invalid,.was-validated .input-group .form-select:invalid,.input-group .form-select.is-invalid{z-index:2}.was-validated .input-group .form-control:invalid:focus,.input-group .form-control.is-invalid:focus,.was-validated .input-group .form-select:invalid:focus,.input-group .form-select.is-invalid:focus{z-index:3}.btn{display:inline-block;font-weight:400;line-height:1.5;color:#373a3c;text-align:center;text-decoration:none;-webkit-text-decoration:none;-moz-text-decoration:none;-ms-text-decoration:none;-o-text-decoration:none;vertical-align:middle;cursor:pointer;user-select:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;-o-user-select:none;background-color:transparent;border:1px solid transparent;padding:.375rem .75rem;font-size:1rem;border-radius:0;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media(prefers-reduced-motion: reduce){.btn{transition:none}}.btn:hover{color:#373a3c}.btn-check:focus+.btn,.btn:focus{outline:0;box-shadow:0 0 0 .25rem rgba(39,128,227,.25)}.btn:disabled,.btn.disabled,fieldset:disabled .btn{pointer-events:none;opacity:.65}.btn-default{color:#fff;background-color:#373a3c;border-color:#373a3c}.btn-default:hover{color:#fff;background-color:#2f3133;border-color:#2c2e30}.btn-check:focus+.btn-default,.btn-default:focus{color:#fff;background-color:#2f3133;border-color:#2c2e30;box-shadow:0 0 0 .25rem rgba(85,88,89,.5)}.btn-check:checked+.btn-default,.btn-check:active+.btn-default,.btn-default:active,.btn-default.active,.show>.btn-default.dropdown-toggle{color:#fff;background-color:#2c2e30;border-color:#292c2d}.btn-check:checked+.btn-default:focus,.btn-check:active+.btn-default:focus,.btn-default:active:focus,.btn-default.active:focus,.show>.btn-default.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(85,88,89,.5)}.btn-default:disabled,.btn-default.disabled{color:#fff;background-color:#373a3c;border-color:#373a3c}.btn-primary{color:#fff;background-color:#2780e3;border-color:#2780e3}.btn-primary:hover{color:#fff;background-color:#216dc1;border-color:#1f66b6}.btn-check:focus+.btn-primary,.btn-primary:focus{color:#fff;background-color:#216dc1;border-color:#1f66b6;box-shadow:0 0 0 .25rem rgba(71,147,231,.5)}.btn-check:checked+.btn-primary,.btn-check:active+.btn-primary,.btn-primary:active,.btn-primary.active,.show>.btn-primary.dropdown-toggle{color:#fff;background-color:#1f66b6;border-color:#1d60aa}.btn-check:checked+.btn-primary:focus,.btn-check:active+.btn-primary:focus,.btn-primary:active:focus,.btn-primary.active:focus,.show>.btn-primary.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(71,147,231,.5)}.btn-primary:disabled,.btn-primary.disabled{color:#fff;background-color:#2780e3;border-color:#2780e3}.btn-secondary{color:#fff;background-color:#373a3c;border-color:#373a3c}.btn-secondary:hover{color:#fff;background-color:#2f3133;border-color:#2c2e30}.btn-check:focus+.btn-secondary,.btn-secondary:focus{color:#fff;background-color:#2f3133;border-color:#2c2e30;box-shadow:0 0 0 .25rem 
rgba(85,88,89,.5)}.btn-check:checked+.btn-secondary,.btn-check:active+.btn-secondary,.btn-secondary:active,.btn-secondary.active,.show>.btn-secondary.dropdown-toggle{color:#fff;background-color:#2c2e30;border-color:#292c2d}.btn-check:checked+.btn-secondary:focus,.btn-check:active+.btn-secondary:focus,.btn-secondary:active:focus,.btn-secondary.active:focus,.show>.btn-secondary.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(85,88,89,.5)}.btn-secondary:disabled,.btn-secondary.disabled{color:#fff;background-color:#373a3c;border-color:#373a3c}.btn-success{color:#fff;background-color:#3fb618;border-color:#3fb618}.btn-success:hover{color:#fff;background-color:#369b14;border-color:#329213}.btn-check:focus+.btn-success,.btn-success:focus{color:#fff;background-color:#369b14;border-color:#329213;box-shadow:0 0 0 .25rem rgba(92,193,59,.5)}.btn-check:checked+.btn-success,.btn-check:active+.btn-success,.btn-success:active,.btn-success.active,.show>.btn-success.dropdown-toggle{color:#fff;background-color:#329213;border-color:#2f8912}.btn-check:checked+.btn-success:focus,.btn-check:active+.btn-success:focus,.btn-success:active:focus,.btn-success.active:focus,.show>.btn-success.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(92,193,59,.5)}.btn-success:disabled,.btn-success.disabled{color:#fff;background-color:#3fb618;border-color:#3fb618}.btn-info{color:#fff;background-color:#9954bb;border-color:#9954bb}.btn-info:hover{color:#fff;background-color:#82479f;border-color:#7a4396}.btn-check:focus+.btn-info,.btn-info:focus{color:#fff;background-color:#82479f;border-color:#7a4396;box-shadow:0 0 0 .25rem rgba(168,110,197,.5)}.btn-check:checked+.btn-info,.btn-check:active+.btn-info,.btn-info:active,.btn-info.active,.show>.btn-info.dropdown-toggle{color:#fff;background-color:#7a4396;border-color:#733f8c}.btn-check:checked+.btn-info:focus,.btn-check:active+.btn-info:focus,.btn-info:active:focus,.btn-info.active:focus,.show>.btn-info.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(168,110,197,.5)}.btn-info:disabled,.btn-info.disabled{color:#fff;background-color:#9954bb;border-color:#9954bb}.btn-warning{color:#fff;background-color:#ff7518;border-color:#ff7518}.btn-warning:hover{color:#fff;background-color:#d96314;border-color:#cc5e13}.btn-check:focus+.btn-warning,.btn-warning:focus{color:#fff;background-color:#d96314;border-color:#cc5e13;box-shadow:0 0 0 .25rem rgba(255,138,59,.5)}.btn-check:checked+.btn-warning,.btn-check:active+.btn-warning,.btn-warning:active,.btn-warning.active,.show>.btn-warning.dropdown-toggle{color:#fff;background-color:#cc5e13;border-color:#bf5812}.btn-check:checked+.btn-warning:focus,.btn-check:active+.btn-warning:focus,.btn-warning:active:focus,.btn-warning.active:focus,.show>.btn-warning.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(255,138,59,.5)}.btn-warning:disabled,.btn-warning.disabled{color:#fff;background-color:#ff7518;border-color:#ff7518}.btn-danger{color:#fff;background-color:#ff0039;border-color:#ff0039}.btn-danger:hover{color:#fff;background-color:#d90030;border-color:#cc002e}.btn-check:focus+.btn-danger,.btn-danger:focus{color:#fff;background-color:#d90030;border-color:#cc002e;box-shadow:0 0 0 .25rem 
rgba(255,38,87,.5)}.btn-check:checked+.btn-danger,.btn-check:active+.btn-danger,.btn-danger:active,.btn-danger.active,.show>.btn-danger.dropdown-toggle{color:#fff;background-color:#cc002e;border-color:#bf002b}.btn-check:checked+.btn-danger:focus,.btn-check:active+.btn-danger:focus,.btn-danger:active:focus,.btn-danger.active:focus,.show>.btn-danger.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(255,38,87,.5)}.btn-danger:disabled,.btn-danger.disabled{color:#fff;background-color:#ff0039;border-color:#ff0039}.btn-light{color:#000;background-color:#f8f9fa;border-color:#f8f9fa}.btn-light:hover{color:#000;background-color:#f9fafb;border-color:#f9fafb}.btn-check:focus+.btn-light,.btn-light:focus{color:#000;background-color:#f9fafb;border-color:#f9fafb;box-shadow:0 0 0 .25rem rgba(211,212,213,.5)}.btn-check:checked+.btn-light,.btn-check:active+.btn-light,.btn-light:active,.btn-light.active,.show>.btn-light.dropdown-toggle{color:#000;background-color:#f9fafb;border-color:#f9fafb}.btn-check:checked+.btn-light:focus,.btn-check:active+.btn-light:focus,.btn-light:active:focus,.btn-light.active:focus,.show>.btn-light.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(211,212,213,.5)}.btn-light:disabled,.btn-light.disabled{color:#000;background-color:#f8f9fa;border-color:#f8f9fa}.btn-dark{color:#fff;background-color:#373a3c;border-color:#373a3c}.btn-dark:hover{color:#fff;background-color:#2f3133;border-color:#2c2e30}.btn-check:focus+.btn-dark,.btn-dark:focus{color:#fff;background-color:#2f3133;border-color:#2c2e30;box-shadow:0 0 0 .25rem rgba(85,88,89,.5)}.btn-check:checked+.btn-dark,.btn-check:active+.btn-dark,.btn-dark:active,.btn-dark.active,.show>.btn-dark.dropdown-toggle{color:#fff;background-color:#2c2e30;border-color:#292c2d}.btn-check:checked+.btn-dark:focus,.btn-check:active+.btn-dark:focus,.btn-dark:active:focus,.btn-dark.active:focus,.show>.btn-dark.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(85,88,89,.5)}.btn-dark:disabled,.btn-dark.disabled{color:#fff;background-color:#373a3c;border-color:#373a3c}.btn-outline-default{color:#373a3c;border-color:#373a3c;background-color:transparent}.btn-outline-default:hover{color:#fff;background-color:#373a3c;border-color:#373a3c}.btn-check:focus+.btn-outline-default,.btn-outline-default:focus{box-shadow:0 0 0 .25rem rgba(55,58,60,.5)}.btn-check:checked+.btn-outline-default,.btn-check:active+.btn-outline-default,.btn-outline-default:active,.btn-outline-default.active,.btn-outline-default.dropdown-toggle.show{color:#fff;background-color:#373a3c;border-color:#373a3c}.btn-check:checked+.btn-outline-default:focus,.btn-check:active+.btn-outline-default:focus,.btn-outline-default:active:focus,.btn-outline-default.active:focus,.btn-outline-default.dropdown-toggle.show:focus{box-shadow:0 0 0 .25rem rgba(55,58,60,.5)}.btn-outline-default:disabled,.btn-outline-default.disabled{color:#373a3c;background-color:transparent}.btn-outline-primary{color:#2780e3;border-color:#2780e3;background-color:transparent}.btn-outline-primary:hover{color:#fff;background-color:#2780e3;border-color:#2780e3}.btn-check:focus+.btn-outline-primary,.btn-outline-primary:focus{box-shadow:0 0 0 .25rem 
rgba(39,128,227,.5)}.btn-check:checked+.btn-outline-primary,.btn-check:active+.btn-outline-primary,.btn-outline-primary:active,.btn-outline-primary.active,.btn-outline-primary.dropdown-toggle.show{color:#fff;background-color:#2780e3;border-color:#2780e3}.btn-check:checked+.btn-outline-primary:focus,.btn-check:active+.btn-outline-primary:focus,.btn-outline-primary:active:focus,.btn-outline-primary.active:focus,.btn-outline-primary.dropdown-toggle.show:focus{box-shadow:0 0 0 .25rem rgba(39,128,227,.5)}.btn-outline-primary:disabled,.btn-outline-primary.disabled{color:#2780e3;background-color:transparent}.btn-outline-secondary{color:#373a3c;border-color:#373a3c;background-color:transparent}.btn-outline-secondary:hover{color:#fff;background-color:#373a3c;border-color:#373a3c}.btn-check:focus+.btn-outline-secondary,.btn-outline-secondary:focus{box-shadow:0 0 0 .25rem rgba(55,58,60,.5)}.btn-check:checked+.btn-outline-secondary,.btn-check:active+.btn-outline-secondary,.btn-outline-secondary:active,.btn-outline-secondary.active,.btn-outline-secondary.dropdown-toggle.show{color:#fff;background-color:#373a3c;border-color:#373a3c}.btn-check:checked+.btn-outline-secondary:focus,.btn-check:active+.btn-outline-secondary:focus,.btn-outline-secondary:active:focus,.btn-outline-secondary.active:focus,.btn-outline-secondary.dropdown-toggle.show:focus{box-shadow:0 0 0 .25rem rgba(55,58,60,.5)}.btn-outline-secondary:disabled,.btn-outline-secondary.disabled{color:#373a3c;background-color:transparent}.btn-outline-success{color:#3fb618;border-color:#3fb618;background-color:transparent}.btn-outline-success:hover{color:#fff;background-color:#3fb618;border-color:#3fb618}.btn-check:focus+.btn-outline-success,.btn-outline-success:focus{box-shadow:0 0 0 .25rem rgba(63,182,24,.5)}.btn-check:checked+.btn-outline-success,.btn-check:active+.btn-outline-success,.btn-outline-success:active,.btn-outline-success.active,.btn-outline-success.dropdown-toggle.show{color:#fff;background-color:#3fb618;border-color:#3fb618}.btn-check:checked+.btn-outline-success:focus,.btn-check:active+.btn-outline-success:focus,.btn-outline-success:active:focus,.btn-outline-success.active:focus,.btn-outline-success.dropdown-toggle.show:focus{box-shadow:0 0 0 .25rem rgba(63,182,24,.5)}.btn-outline-success:disabled,.btn-outline-success.disabled{color:#3fb618;background-color:transparent}.btn-outline-info{color:#9954bb;border-color:#9954bb;background-color:transparent}.btn-outline-info:hover{color:#fff;background-color:#9954bb;border-color:#9954bb}.btn-check:focus+.btn-outline-info,.btn-outline-info:focus{box-shadow:0 0 0 .25rem rgba(153,84,187,.5)}.btn-check:checked+.btn-outline-info,.btn-check:active+.btn-outline-info,.btn-outline-info:active,.btn-outline-info.active,.btn-outline-info.dropdown-toggle.show{color:#fff;background-color:#9954bb;border-color:#9954bb}.btn-check:checked+.btn-outline-info:focus,.btn-check:active+.btn-outline-info:focus,.btn-outline-info:active:focus,.btn-outline-info.active:focus,.btn-outline-info.dropdown-toggle.show:focus{box-shadow:0 0 0 .25rem rgba(153,84,187,.5)}.btn-outline-info:disabled,.btn-outline-info.disabled{color:#9954bb;background-color:transparent}.btn-outline-warning{color:#ff7518;border-color:#ff7518;background-color:transparent}.btn-outline-warning:hover{color:#fff;background-color:#ff7518;border-color:#ff7518}.btn-check:focus+.btn-outline-warning,.btn-outline-warning:focus{box-shadow:0 0 0 .25rem 
rgba(255,117,24,.5)}.btn-check:checked+.btn-outline-warning,.btn-check:active+.btn-outline-warning,.btn-outline-warning:active,.btn-outline-warning.active,.btn-outline-warning.dropdown-toggle.show{color:#fff;background-color:#ff7518;border-color:#ff7518}.btn-check:checked+.btn-outline-warning:focus,.btn-check:active+.btn-outline-warning:focus,.btn-outline-warning:active:focus,.btn-outline-warning.active:focus,.btn-outline-warning.dropdown-toggle.show:focus{box-shadow:0 0 0 .25rem rgba(255,117,24,.5)}.btn-outline-warning:disabled,.btn-outline-warning.disabled{color:#ff7518;background-color:transparent}.btn-outline-danger{color:#ff0039;border-color:#ff0039;background-color:transparent}.btn-outline-danger:hover{color:#fff;background-color:#ff0039;border-color:#ff0039}.btn-check:focus+.btn-outline-danger,.btn-outline-danger:focus{box-shadow:0 0 0 .25rem rgba(255,0,57,.5)}.btn-check:checked+.btn-outline-danger,.btn-check:active+.btn-outline-danger,.btn-outline-danger:active,.btn-outline-danger.active,.btn-outline-danger.dropdown-toggle.show{color:#fff;background-color:#ff0039;border-color:#ff0039}.btn-check:checked+.btn-outline-danger:focus,.btn-check:active+.btn-outline-danger:focus,.btn-outline-danger:active:focus,.btn-outline-danger.active:focus,.btn-outline-danger.dropdown-toggle.show:focus{box-shadow:0 0 0 .25rem rgba(255,0,57,.5)}.btn-outline-danger:disabled,.btn-outline-danger.disabled{color:#ff0039;background-color:transparent}.btn-outline-light{color:#f8f9fa;border-color:#f8f9fa;background-color:transparent}.btn-outline-light:hover{color:#000;background-color:#f8f9fa;border-color:#f8f9fa}.btn-check:focus+.btn-outline-light,.btn-outline-light:focus{box-shadow:0 0 0 .25rem rgba(248,249,250,.5)}.btn-check:checked+.btn-outline-light,.btn-check:active+.btn-outline-light,.btn-outline-light:active,.btn-outline-light.active,.btn-outline-light.dropdown-toggle.show{color:#000;background-color:#f8f9fa;border-color:#f8f9fa}.btn-check:checked+.btn-outline-light:focus,.btn-check:active+.btn-outline-light:focus,.btn-outline-light:active:focus,.btn-outline-light.active:focus,.btn-outline-light.dropdown-toggle.show:focus{box-shadow:0 0 0 .25rem rgba(248,249,250,.5)}.btn-outline-light:disabled,.btn-outline-light.disabled{color:#f8f9fa;background-color:transparent}.btn-outline-dark{color:#373a3c;border-color:#373a3c;background-color:transparent}.btn-outline-dark:hover{color:#fff;background-color:#373a3c;border-color:#373a3c}.btn-check:focus+.btn-outline-dark,.btn-outline-dark:focus{box-shadow:0 0 0 .25rem rgba(55,58,60,.5)}.btn-check:checked+.btn-outline-dark,.btn-check:active+.btn-outline-dark,.btn-outline-dark:active,.btn-outline-dark.active,.btn-outline-dark.dropdown-toggle.show{color:#fff;background-color:#373a3c;border-color:#373a3c}.btn-check:checked+.btn-outline-dark:focus,.btn-check:active+.btn-outline-dark:focus,.btn-outline-dark:active:focus,.btn-outline-dark.active:focus,.btn-outline-dark.dropdown-toggle.show:focus{box-shadow:0 0 0 .25rem rgba(55,58,60,.5)}.btn-outline-dark:disabled,.btn-outline-dark.disabled{color:#373a3c;background-color:transparent}.btn-link{font-weight:400;color:#2780e3;text-decoration:underline;-webkit-text-decoration:underline;-moz-text-decoration:underline;-ms-text-decoration:underline;-o-text-decoration:underline}.btn-link:hover{color:#1f66b6}.btn-link:disabled,.btn-link.disabled{color:#868e96}.btn-lg,.btn-group-lg>.btn{padding:.5rem 1rem;font-size:1.25rem;border-radius:0}.btn-sm,.btn-group-sm>.btn{padding:.25rem 
.5rem;font-size:0.875rem;border-radius:0}.fade{transition:opacity .15s linear}@media(prefers-reduced-motion: reduce){.fade{transition:none}}.fade:not(.show){opacity:0}.collapse:not(.show){display:none}.collapsing{height:0;overflow:hidden;transition:height .2s ease}@media(prefers-reduced-motion: reduce){.collapsing{transition:none}}.collapsing.collapse-horizontal{width:0;height:auto;transition:width .35s ease}@media(prefers-reduced-motion: reduce){.collapsing.collapse-horizontal{transition:none}}.dropup,.dropend,.dropdown,.dropstart{position:relative}.dropdown-toggle{white-space:nowrap}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.dropdown-toggle:empty::after{margin-left:0}.dropdown-menu{position:absolute;z-index:1000;display:none;min-width:10rem;padding:.5rem 0;margin:0;font-size:1rem;color:#373a3c;text-align:left;list-style:none;background-color:#fff;background-clip:padding-box;border:1px solid rgba(0,0,0,.15)}.dropdown-menu[data-bs-popper]{top:100%;left:0;margin-top:.125rem}.dropdown-menu-start{--bs-position: start}.dropdown-menu-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-end{--bs-position: end}.dropdown-menu-end[data-bs-popper]{right:0;left:auto}@media(min-width: 576px){.dropdown-menu-sm-start{--bs-position: start}.dropdown-menu-sm-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-sm-end{--bs-position: end}.dropdown-menu-sm-end[data-bs-popper]{right:0;left:auto}}@media(min-width: 768px){.dropdown-menu-md-start{--bs-position: start}.dropdown-menu-md-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-md-end{--bs-position: end}.dropdown-menu-md-end[data-bs-popper]{right:0;left:auto}}@media(min-width: 992px){.dropdown-menu-lg-start{--bs-position: start}.dropdown-menu-lg-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-lg-end{--bs-position: end}.dropdown-menu-lg-end[data-bs-popper]{right:0;left:auto}}@media(min-width: 1200px){.dropdown-menu-xl-start{--bs-position: start}.dropdown-menu-xl-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-xl-end{--bs-position: end}.dropdown-menu-xl-end[data-bs-popper]{right:0;left:auto}}@media(min-width: 1400px){.dropdown-menu-xxl-start{--bs-position: start}.dropdown-menu-xxl-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-xxl-end{--bs-position: end}.dropdown-menu-xxl-end[data-bs-popper]{right:0;left:auto}}.dropup .dropdown-menu[data-bs-popper]{top:auto;bottom:100%;margin-top:0;margin-bottom:.125rem}.dropup .dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:0;border-right:.3em solid transparent;border-bottom:.3em solid;border-left:.3em solid transparent}.dropup .dropdown-toggle:empty::after{margin-left:0}.dropend .dropdown-menu[data-bs-popper]{top:0;right:auto;left:100%;margin-top:0;margin-left:.125rem}.dropend .dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid transparent;border-right:0;border-bottom:.3em solid transparent;border-left:.3em solid}.dropend .dropdown-toggle:empty::after{margin-left:0}.dropend .dropdown-toggle::after{vertical-align:0}.dropstart .dropdown-menu[data-bs-popper]{top:0;right:100%;left:auto;margin-top:0;margin-right:.125rem}.dropstart .dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:""}.dropstart .dropdown-toggle::after{display:none}.dropstart 
.dropdown-toggle::before{display:inline-block;margin-right:.255em;vertical-align:.255em;content:"";border-top:.3em solid transparent;border-right:.3em solid;border-bottom:.3em solid transparent}.dropstart .dropdown-toggle:empty::after{margin-left:0}.dropstart .dropdown-toggle::before{vertical-align:0}.dropdown-divider{height:0;margin:.5rem 0;overflow:hidden;border-top:1px solid rgba(0,0,0,.15)}.dropdown-item{display:block;width:100%;padding:.25rem 1rem;clear:both;font-weight:400;color:#212529;text-align:inherit;text-decoration:none;-webkit-text-decoration:none;-moz-text-decoration:none;-ms-text-decoration:none;-o-text-decoration:none;white-space:nowrap;background-color:transparent;border:0}.dropdown-item:hover,.dropdown-item:focus{color:#1e2125;background-color:#e9ecef}.dropdown-item.active,.dropdown-item:active{color:#fff;text-decoration:none;background-color:#2780e3}.dropdown-item.disabled,.dropdown-item:disabled{color:#adb5bd;pointer-events:none;background-color:transparent}.dropdown-menu.show{display:block}.dropdown-header{display:block;padding:.5rem 1rem;margin-bottom:0;font-size:0.875rem;color:#868e96;white-space:nowrap}.dropdown-item-text{display:block;padding:.25rem 1rem;color:#212529}.dropdown-menu-dark{color:#dee2e6;background-color:#373a3c;border-color:rgba(0,0,0,.15)}.dropdown-menu-dark .dropdown-item{color:#dee2e6}.dropdown-menu-dark .dropdown-item:hover,.dropdown-menu-dark .dropdown-item:focus{color:#fff;background-color:rgba(255,255,255,.15)}.dropdown-menu-dark .dropdown-item.active,.dropdown-menu-dark .dropdown-item:active{color:#fff;background-color:#2780e3}.dropdown-menu-dark .dropdown-item.disabled,.dropdown-menu-dark .dropdown-item:disabled{color:#adb5bd}.dropdown-menu-dark .dropdown-divider{border-color:rgba(0,0,0,.15)}.dropdown-menu-dark .dropdown-item-text{color:#dee2e6}.dropdown-menu-dark .dropdown-header{color:#adb5bd}.btn-group,.btn-group-vertical{position:relative;display:inline-flex;vertical-align:middle}.btn-group>.btn,.btn-group-vertical>.btn{position:relative;flex:1 1 auto;-webkit-flex:1 1 auto}.btn-group>.btn-check:checked+.btn,.btn-group>.btn-check:focus+.btn,.btn-group>.btn:hover,.btn-group>.btn:focus,.btn-group>.btn:active,.btn-group>.btn.active,.btn-group-vertical>.btn-check:checked+.btn,.btn-group-vertical>.btn-check:focus+.btn,.btn-group-vertical>.btn:hover,.btn-group-vertical>.btn:focus,.btn-group-vertical>.btn:active,.btn-group-vertical>.btn.active{z-index:1}.btn-toolbar{display:flex;display:-webkit-flex;flex-wrap:wrap;-webkit-flex-wrap:wrap;justify-content:flex-start;-webkit-justify-content:flex-start}.btn-toolbar .input-group{width:auto}.btn-group>.btn:not(:first-child),.btn-group>.btn-group:not(:first-child){margin-left:-1px}.dropdown-toggle-split{padding-right:.5625rem;padding-left:.5625rem}.dropdown-toggle-split::after,.dropup .dropdown-toggle-split::after,.dropend .dropdown-toggle-split::after{margin-left:0}.dropstart 
.dropdown-toggle-split::before{margin-right:0}.btn-sm+.dropdown-toggle-split,.btn-group-sm>.btn+.dropdown-toggle-split{padding-right:.375rem;padding-left:.375rem}.btn-lg+.dropdown-toggle-split,.btn-group-lg>.btn+.dropdown-toggle-split{padding-right:.75rem;padding-left:.75rem}.btn-group-vertical{flex-direction:column;-webkit-flex-direction:column;align-items:flex-start;-webkit-align-items:flex-start;justify-content:center;-webkit-justify-content:center}.btn-group-vertical>.btn,.btn-group-vertical>.btn-group{width:100%}.btn-group-vertical>.btn:not(:first-child),.btn-group-vertical>.btn-group:not(:first-child){margin-top:-1px}.nav{display:flex;display:-webkit-flex;flex-wrap:wrap;-webkit-flex-wrap:wrap;padding-left:0;margin-bottom:0;list-style:none}.nav-link{display:block;padding:.5rem 1rem;color:#2780e3;text-decoration:none;-webkit-text-decoration:none;-moz-text-decoration:none;-ms-text-decoration:none;-o-text-decoration:none;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out}@media(prefers-reduced-motion: reduce){.nav-link{transition:none}}.nav-link:hover,.nav-link:focus{color:#1f66b6}.nav-link.disabled{color:#868e96;pointer-events:none;cursor:default}.nav-tabs{border-bottom:1px solid #dee2e6}.nav-tabs .nav-link{margin-bottom:-1px;background:none;border:1px solid transparent}.nav-tabs .nav-link:hover,.nav-tabs .nav-link:focus{border-color:#e9ecef #e9ecef #dee2e6;isolation:isolate}.nav-tabs .nav-link.disabled{color:#868e96;background-color:transparent;border-color:transparent}.nav-tabs .nav-link.active,.nav-tabs .nav-item.show .nav-link{color:#495057;background-color:#fff;border-color:#dee2e6 #dee2e6 #fff}.nav-tabs .dropdown-menu{margin-top:-1px}.nav-pills .nav-link{background:none;border:0}.nav-pills .nav-link.active,.nav-pills .show>.nav-link{color:#fff;background-color:#2780e3}.nav-fill>.nav-link,.nav-fill .nav-item{flex:1 1 auto;-webkit-flex:1 1 auto;text-align:center}.nav-justified>.nav-link,.nav-justified .nav-item{flex-basis:0;-webkit-flex-basis:0;flex-grow:1;-webkit-flex-grow:1;text-align:center}.nav-fill .nav-item .nav-link,.nav-justified .nav-item .nav-link{width:100%}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.navbar{position:relative;display:flex;display:-webkit-flex;flex-wrap:wrap;-webkit-flex-wrap:wrap;align-items:center;-webkit-align-items:center;justify-content:space-between;-webkit-justify-content:space-between;padding-top:.5rem;padding-bottom:.5rem}.navbar>.container-xxl,.navbar>.container-xl,.navbar>.container-lg,.navbar>.container-md,.navbar>.container-sm,.navbar>.container,.navbar>.container-fluid{display:flex;display:-webkit-flex;flex-wrap:inherit;-webkit-flex-wrap:inherit;align-items:center;-webkit-align-items:center;justify-content:space-between;-webkit-justify-content:space-between}.navbar-brand{padding-top:.3125rem;padding-bottom:.3125rem;margin-right:1rem;font-size:1.25rem;text-decoration:none;-webkit-text-decoration:none;-moz-text-decoration:none;-ms-text-decoration:none;-o-text-decoration:none;white-space:nowrap}.navbar-nav{display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;padding-left:0;margin-bottom:0;list-style:none}.navbar-nav .nav-link{padding-right:0;padding-left:0}.navbar-nav .dropdown-menu{position:static}.navbar-text{padding-top:.5rem;padding-bottom:.5rem}.navbar-collapse{flex-basis:100%;-webkit-flex-basis:100%;flex-grow:1;-webkit-flex-grow:1;align-items:center;-webkit-align-items:center}.navbar-toggler{padding:.25rem 
.75rem;font-size:1.25rem;line-height:1;background-color:transparent;border:1px solid transparent;transition:box-shadow .15s ease-in-out}@media(prefers-reduced-motion: reduce){.navbar-toggler{transition:none}}.navbar-toggler:hover{text-decoration:none}.navbar-toggler:focus{text-decoration:none;outline:0;box-shadow:0 0 0 .25rem}.navbar-toggler-icon{display:inline-block;width:1.5em;height:1.5em;vertical-align:middle;background-repeat:no-repeat;background-position:center;background-size:100%}.navbar-nav-scroll{max-height:var(--bs-scroll-height, 75vh);overflow-y:auto}@media(min-width: 576px){.navbar-expand-sm{flex-wrap:nowrap;-webkit-flex-wrap:nowrap;justify-content:flex-start;-webkit-justify-content:flex-start}.navbar-expand-sm .navbar-nav{flex-direction:row;-webkit-flex-direction:row}.navbar-expand-sm .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-sm .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-sm .navbar-nav-scroll{overflow:visible}.navbar-expand-sm .navbar-collapse{display:flex !important;display:-webkit-flex !important;flex-basis:auto;-webkit-flex-basis:auto}.navbar-expand-sm .navbar-toggler{display:none}.navbar-expand-sm .offcanvas-header{display:none}.navbar-expand-sm .offcanvas{position:inherit;bottom:0;z-index:1000;flex-grow:1;-webkit-flex-grow:1;visibility:visible !important;background-color:transparent;border-right:0;border-left:0;transition:none;transform:none}.navbar-expand-sm .offcanvas-top,.navbar-expand-sm .offcanvas-bottom{height:auto;border-top:0;border-bottom:0}.navbar-expand-sm .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible}}@media(min-width: 768px){.navbar-expand-md{flex-wrap:nowrap;-webkit-flex-wrap:nowrap;justify-content:flex-start;-webkit-justify-content:flex-start}.navbar-expand-md .navbar-nav{flex-direction:row;-webkit-flex-direction:row}.navbar-expand-md .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-md .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-md .navbar-nav-scroll{overflow:visible}.navbar-expand-md .navbar-collapse{display:flex !important;display:-webkit-flex !important;flex-basis:auto;-webkit-flex-basis:auto}.navbar-expand-md .navbar-toggler{display:none}.navbar-expand-md .offcanvas-header{display:none}.navbar-expand-md .offcanvas{position:inherit;bottom:0;z-index:1000;flex-grow:1;-webkit-flex-grow:1;visibility:visible !important;background-color:transparent;border-right:0;border-left:0;transition:none;transform:none}.navbar-expand-md .offcanvas-top,.navbar-expand-md .offcanvas-bottom{height:auto;border-top:0;border-bottom:0}.navbar-expand-md .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible}}@media(min-width: 992px){.navbar-expand-lg{flex-wrap:nowrap;-webkit-flex-wrap:nowrap;justify-content:flex-start;-webkit-justify-content:flex-start}.navbar-expand-lg .navbar-nav{flex-direction:row;-webkit-flex-direction:row}.navbar-expand-lg .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-lg .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-lg .navbar-nav-scroll{overflow:visible}.navbar-expand-lg .navbar-collapse{display:flex !important;display:-webkit-flex !important;flex-basis:auto;-webkit-flex-basis:auto}.navbar-expand-lg .navbar-toggler{display:none}.navbar-expand-lg .offcanvas-header{display:none}.navbar-expand-lg .offcanvas{position:inherit;bottom:0;z-index:1000;flex-grow:1;-webkit-flex-grow:1;visibility:visible 
!important;background-color:transparent;border-right:0;border-left:0;transition:none;transform:none}.navbar-expand-lg .offcanvas-top,.navbar-expand-lg .offcanvas-bottom{height:auto;border-top:0;border-bottom:0}.navbar-expand-lg .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible}}@media(min-width: 1200px){.navbar-expand-xl{flex-wrap:nowrap;-webkit-flex-wrap:nowrap;justify-content:flex-start;-webkit-justify-content:flex-start}.navbar-expand-xl .navbar-nav{flex-direction:row;-webkit-flex-direction:row}.navbar-expand-xl .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-xl .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-xl .navbar-nav-scroll{overflow:visible}.navbar-expand-xl .navbar-collapse{display:flex !important;display:-webkit-flex !important;flex-basis:auto;-webkit-flex-basis:auto}.navbar-expand-xl .navbar-toggler{display:none}.navbar-expand-xl .offcanvas-header{display:none}.navbar-expand-xl .offcanvas{position:inherit;bottom:0;z-index:1000;flex-grow:1;-webkit-flex-grow:1;visibility:visible !important;background-color:transparent;border-right:0;border-left:0;transition:none;transform:none}.navbar-expand-xl .offcanvas-top,.navbar-expand-xl .offcanvas-bottom{height:auto;border-top:0;border-bottom:0}.navbar-expand-xl .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible}}@media(min-width: 1400px){.navbar-expand-xxl{flex-wrap:nowrap;-webkit-flex-wrap:nowrap;justify-content:flex-start;-webkit-justify-content:flex-start}.navbar-expand-xxl .navbar-nav{flex-direction:row;-webkit-flex-direction:row}.navbar-expand-xxl .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-xxl .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-xxl .navbar-nav-scroll{overflow:visible}.navbar-expand-xxl .navbar-collapse{display:flex !important;display:-webkit-flex !important;flex-basis:auto;-webkit-flex-basis:auto}.navbar-expand-xxl .navbar-toggler{display:none}.navbar-expand-xxl .offcanvas-header{display:none}.navbar-expand-xxl .offcanvas{position:inherit;bottom:0;z-index:1000;flex-grow:1;-webkit-flex-grow:1;visibility:visible !important;background-color:transparent;border-right:0;border-left:0;transition:none;transform:none}.navbar-expand-xxl .offcanvas-top,.navbar-expand-xxl .offcanvas-bottom{height:auto;border-top:0;border-bottom:0}.navbar-expand-xxl .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible}}.navbar-expand{flex-wrap:nowrap;-webkit-flex-wrap:nowrap;justify-content:flex-start;-webkit-justify-content:flex-start}.navbar-expand .navbar-nav{flex-direction:row;-webkit-flex-direction:row}.navbar-expand .navbar-nav .dropdown-menu{position:absolute}.navbar-expand .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand .navbar-nav-scroll{overflow:visible}.navbar-expand .navbar-collapse{display:flex !important;display:-webkit-flex !important;flex-basis:auto;-webkit-flex-basis:auto}.navbar-expand .navbar-toggler{display:none}.navbar-expand .offcanvas-header{display:none}.navbar-expand .offcanvas{position:inherit;bottom:0;z-index:1000;flex-grow:1;-webkit-flex-grow:1;visibility:visible !important;background-color:transparent;border-right:0;border-left:0;transition:none;transform:none}.navbar-expand .offcanvas-top,.navbar-expand .offcanvas-bottom{height:auto;border-top:0;border-bottom:0}.navbar-expand 
.offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible}.navbar-light{background-color:#2780e3}.navbar-light .navbar-brand{color:#fdfeff}.navbar-light .navbar-brand:hover,.navbar-light .navbar-brand:focus{color:#fdfeff}.navbar-light .navbar-nav .nav-link{color:#fdfeff}.navbar-light .navbar-nav .nav-link:hover,.navbar-light .navbar-nav .nav-link:focus{color:rgba(253,254,255,.8)}.navbar-light .navbar-nav .nav-link.disabled{color:rgba(253,254,255,.75)}.navbar-light .navbar-nav .show>.nav-link,.navbar-light .navbar-nav .nav-link.active{color:#fdfeff}.navbar-light .navbar-toggler{color:#fdfeff;border-color:rgba(253,254,255,.4)}.navbar-light .navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 30 30'%3e%3cpath stroke='%23fdfeff' stroke-linecap='round' stroke-miterlimit='10' stroke-width='2' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-light .navbar-text{color:#fdfeff}.navbar-light .navbar-text a,.navbar-light .navbar-text a:hover,.navbar-light .navbar-text a:focus{color:#fdfeff}.navbar-dark{background-color:#2780e3}.navbar-dark .navbar-brand{color:#fdfeff}.navbar-dark .navbar-brand:hover,.navbar-dark .navbar-brand:focus{color:#fdfeff}.navbar-dark .navbar-nav .nav-link{color:#fdfeff}.navbar-dark .navbar-nav .nav-link:hover,.navbar-dark .navbar-nav .nav-link:focus{color:rgba(253,254,255,.8)}.navbar-dark .navbar-nav .nav-link.disabled{color:rgba(253,254,255,.75)}.navbar-dark .navbar-nav .show>.nav-link,.navbar-dark .navbar-nav .active>.nav-link,.navbar-dark .navbar-nav .nav-link.active{color:#fdfeff}.navbar-dark .navbar-toggler{color:#fdfeff;border-color:rgba(253,254,255,.4)}.navbar-dark .navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 30 30'%3e%3cpath stroke='%23fdfeff' stroke-linecap='round' stroke-miterlimit='10' stroke-width='2' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-dark .navbar-text{color:#fdfeff}.navbar-dark .navbar-text a,.navbar-dark .navbar-text a:hover,.navbar-dark .navbar-text a:focus{color:#fdfeff}.card{position:relative;display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;min-width:0;word-wrap:break-word;background-color:#fff;background-clip:border-box;border:1px solid rgba(0,0,0,.125)}.card>hr{margin-right:0;margin-left:0}.card>.list-group{border-top:inherit;border-bottom:inherit}.card>.list-group:first-child{border-top-width:0}.card>.list-group:last-child{border-bottom-width:0}.card>.card-header+.list-group,.card>.list-group+.card-footer{border-top:0}.card-body{flex:1 1 auto;-webkit-flex:1 1 auto;padding:1rem 1rem}.card-title{margin-bottom:.5rem}.card-subtitle{margin-top:-0.25rem;margin-bottom:0}.card-text:last-child{margin-bottom:0}.card-link+.card-link{margin-left:1rem}.card-header{padding:.5rem 1rem;margin-bottom:0;background-color:rgba(0,0,0,.03);border-bottom:1px solid rgba(0,0,0,.125)}.card-footer{padding:.5rem 1rem;background-color:rgba(0,0,0,.03);border-top:1px solid rgba(0,0,0,.125)}.card-header-tabs{margin-right:-0.5rem;margin-bottom:-0.5rem;margin-left:-0.5rem;border-bottom:0}.card-header-pills{margin-right:-0.5rem;margin-left:-0.5rem}.card-img-overlay{position:absolute;top:0;right:0;bottom:0;left:0;padding:1rem}.card-img,.card-img-top,.card-img-bottom{width:100%}.card-group>.card{margin-bottom:.75rem}@media(min-width: 576px){.card-group{display:flex;display:-webkit-flex;flex-flow:row wrap;-webkit-flex-flow:row 
wrap}.card-group>.card{flex:1 0 0%;-webkit-flex:1 0 0%;margin-bottom:0}.card-group>.card+.card{margin-left:0;border-left:0}}.accordion-button{position:relative;display:flex;display:-webkit-flex;align-items:center;-webkit-align-items:center;width:100%;padding:1rem 1.25rem;font-size:1rem;color:#373a3c;text-align:left;background-color:#fff;border:0;overflow-anchor:none;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out,border-radius .15s ease}@media(prefers-reduced-motion: reduce){.accordion-button{transition:none}}.accordion-button:not(.collapsed){color:#2373cc;background-color:#e9f2fc;box-shadow:inset 0 -1px 0 rgba(0,0,0,.125)}.accordion-button:not(.collapsed)::after{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%232373cc'%3e%3cpath fill-rule='evenodd' d='M1.646 4.646a.5.5 0 0 1 .708 0L8 10.293l5.646-5.647a.5.5 0 0 1 .708.708l-6 6a.5.5 0 0 1-.708 0l-6-6a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e");transform:rotate(-180deg)}.accordion-button::after{flex-shrink:0;-webkit-flex-shrink:0;width:1.25rem;height:1.25rem;margin-left:auto;content:"";background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23373a3c'%3e%3cpath fill-rule='evenodd' d='M1.646 4.646a.5.5 0 0 1 .708 0L8 10.293l5.646-5.647a.5.5 0 0 1 .708.708l-6 6a.5.5 0 0 1-.708 0l-6-6a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e");background-repeat:no-repeat;background-size:1.25rem;transition:transform .2s ease-in-out}@media(prefers-reduced-motion: reduce){.accordion-button::after{transition:none}}.accordion-button:hover{z-index:2}.accordion-button:focus{z-index:3;border-color:#93c0f1;outline:0;box-shadow:0 0 0 .25rem rgba(39,128,227,.25)}.accordion-header{margin-bottom:0}.accordion-item{background-color:#fff;border:1px solid rgba(0,0,0,.125)}.accordion-item:not(:first-of-type){border-top:0}.accordion-body{padding:1rem 1.25rem}.accordion-flush .accordion-collapse{border-width:0}.accordion-flush .accordion-item{border-right:0;border-left:0}.accordion-flush .accordion-item:first-child{border-top:0}.accordion-flush .accordion-item:last-child{border-bottom:0}.breadcrumb{display:flex;display:-webkit-flex;flex-wrap:wrap;-webkit-flex-wrap:wrap;padding:0 0;margin-bottom:1rem;list-style:none}.breadcrumb-item+.breadcrumb-item{padding-left:.5rem}.breadcrumb-item+.breadcrumb-item::before{float:left;padding-right:.5rem;color:#868e96;content:var(--bs-breadcrumb-divider, "/") /* rtl: var(--bs-breadcrumb-divider, "/") */}.breadcrumb-item.active{color:#868e96}.pagination{display:flex;display:-webkit-flex;padding-left:0;list-style:none}.page-link{position:relative;display:block;color:#2780e3;text-decoration:none;-webkit-text-decoration:none;-moz-text-decoration:none;-ms-text-decoration:none;-o-text-decoration:none;background-color:#fff;border:1px solid #dee2e6;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media(prefers-reduced-motion: reduce){.page-link{transition:none}}.page-link:hover{z-index:2;color:#1f66b6;background-color:#e9ecef;border-color:#dee2e6}.page-link:focus{z-index:3;color:#1f66b6;background-color:#e9ecef;outline:0;box-shadow:0 0 0 .25rem rgba(39,128,227,.25)}.page-item:not(:first-child) .page-link{margin-left:-1px}.page-item.active .page-link{z-index:3;color:#fff;background-color:#2780e3;border-color:#2780e3}.page-item.disabled 
.page-link{color:#868e96;pointer-events:none;background-color:#fff;border-color:#dee2e6}.page-link{padding:.375rem .75rem}.pagination-lg .page-link{padding:.75rem 1.5rem;font-size:1.25rem}.pagination-sm .page-link{padding:.25rem .5rem;font-size:0.875rem}.badge{display:inline-block;padding:.35em .65em;font-size:0.75em;font-weight:700;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:baseline}.badge:empty{display:none}.btn .badge{position:relative;top:-1px}.alert{position:relative;padding:1rem 1rem;margin-bottom:1rem;border:0 solid transparent}.alert-heading{color:inherit}.alert-link{font-weight:700}.alert-dismissible{padding-right:3rem}.alert-dismissible .btn-close{position:absolute;top:0;right:0;z-index:2;padding:1.25rem 1rem}.alert-default{color:#212324;background-color:#d7d8d8;border-color:#c3c4c5}.alert-default .alert-link{color:#1a1c1d}.alert-primary{color:#174d88;background-color:#d4e6f9;border-color:#bed9f7}.alert-primary .alert-link{color:#123e6d}.alert-secondary{color:#212324;background-color:#d7d8d8;border-color:#c3c4c5}.alert-secondary .alert-link{color:#1a1c1d}.alert-success{color:#266d0e;background-color:#d9f0d1;border-color:#c5e9ba}.alert-success .alert-link{color:#1e570b}.alert-info{color:#5c3270;background-color:#ebddf1;border-color:#e0cceb}.alert-info .alert-link{color:#4a285a}.alert-warning{color:#99460e;background-color:#ffe3d1;border-color:#ffd6ba}.alert-warning .alert-link{color:#7a380b}.alert-danger{color:#902;background-color:#ffccd7;border-color:#ffb3c4}.alert-danger .alert-link{color:#7a001b}.alert-light{color:#959596;background-color:#fefefe;border-color:#fdfdfe}.alert-light .alert-link{color:#777778}.alert-dark{color:#212324;background-color:#d7d8d8;border-color:#c3c4c5}.alert-dark .alert-link{color:#1a1c1d}@keyframes progress-bar-stripes{0%{background-position-x:.5rem}}.progress{display:flex;display:-webkit-flex;height:.5rem;overflow:hidden;font-size:0.75rem;background-color:#e9ecef}.progress-bar{display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;justify-content:center;-webkit-justify-content:center;overflow:hidden;color:#fff;text-align:center;white-space:nowrap;background-color:#2780e3;transition:width .6s ease}@media(prefers-reduced-motion: reduce){.progress-bar{transition:none}}.progress-bar-striped{background-image:linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-size:.5rem .5rem}.progress-bar-animated{animation:1s linear infinite progress-bar-stripes}@media(prefers-reduced-motion: reduce){.progress-bar-animated{animation:none}}.list-group{display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;padding-left:0;margin-bottom:0}.list-group-numbered{list-style-type:none;counter-reset:section}.list-group-numbered>li::before{content:counters(section, ".") ". 
";counter-increment:section}.list-group-item-action{width:100%;color:#495057;text-align:inherit}.list-group-item-action:hover,.list-group-item-action:focus{z-index:1;color:#495057;text-decoration:none;background-color:#f8f9fa}.list-group-item-action:active{color:#373a3c;background-color:#e9ecef}.list-group-item{position:relative;display:block;padding:.5rem 1rem;color:#212529;text-decoration:none;-webkit-text-decoration:none;-moz-text-decoration:none;-ms-text-decoration:none;-o-text-decoration:none;background-color:#fff;border:1px solid rgba(0,0,0,.125)}.list-group-item.disabled,.list-group-item:disabled{color:#868e96;pointer-events:none;background-color:#fff}.list-group-item.active{z-index:2;color:#fff;background-color:#2780e3;border-color:#2780e3}.list-group-item+.list-group-item{border-top-width:0}.list-group-item+.list-group-item.active{margin-top:-1px;border-top-width:1px}.list-group-horizontal{flex-direction:row;-webkit-flex-direction:row}.list-group-horizontal>.list-group-item.active{margin-top:0}.list-group-horizontal>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}@media(min-width: 576px){.list-group-horizontal-sm{flex-direction:row;-webkit-flex-direction:row}.list-group-horizontal-sm>.list-group-item.active{margin-top:0}.list-group-horizontal-sm>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-sm>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}@media(min-width: 768px){.list-group-horizontal-md{flex-direction:row;-webkit-flex-direction:row}.list-group-horizontal-md>.list-group-item.active{margin-top:0}.list-group-horizontal-md>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-md>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}@media(min-width: 992px){.list-group-horizontal-lg{flex-direction:row;-webkit-flex-direction:row}.list-group-horizontal-lg>.list-group-item.active{margin-top:0}.list-group-horizontal-lg>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-lg>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}@media(min-width: 1200px){.list-group-horizontal-xl{flex-direction:row;-webkit-flex-direction:row}.list-group-horizontal-xl>.list-group-item.active{margin-top:0}.list-group-horizontal-xl>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-xl>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}@media(min-width: 1400px){.list-group-horizontal-xxl{flex-direction:row;-webkit-flex-direction:row}.list-group-horizontal-xxl>.list-group-item.active{margin-top:0}.list-group-horizontal-xxl>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-xxl>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}.list-group-flush>.list-group-item{border-width:0 0 
1px}.list-group-flush>.list-group-item:last-child{border-bottom-width:0}.list-group-item-default{color:#212324;background-color:#d7d8d8}.list-group-item-default.list-group-item-action:hover,.list-group-item-default.list-group-item-action:focus{color:#212324;background-color:#c2c2c2}.list-group-item-default.list-group-item-action.active{color:#fff;background-color:#212324;border-color:#212324}.list-group-item-primary{color:#174d88;background-color:#d4e6f9}.list-group-item-primary.list-group-item-action:hover,.list-group-item-primary.list-group-item-action:focus{color:#174d88;background-color:#bfcfe0}.list-group-item-primary.list-group-item-action.active{color:#fff;background-color:#174d88;border-color:#174d88}.list-group-item-secondary{color:#212324;background-color:#d7d8d8}.list-group-item-secondary.list-group-item-action:hover,.list-group-item-secondary.list-group-item-action:focus{color:#212324;background-color:#c2c2c2}.list-group-item-secondary.list-group-item-action.active{color:#fff;background-color:#212324;border-color:#212324}.list-group-item-success{color:#266d0e;background-color:#d9f0d1}.list-group-item-success.list-group-item-action:hover,.list-group-item-success.list-group-item-action:focus{color:#266d0e;background-color:#c3d8bc}.list-group-item-success.list-group-item-action.active{color:#fff;background-color:#266d0e;border-color:#266d0e}.list-group-item-info{color:#5c3270;background-color:#ebddf1}.list-group-item-info.list-group-item-action:hover,.list-group-item-info.list-group-item-action:focus{color:#5c3270;background-color:#d4c7d9}.list-group-item-info.list-group-item-action.active{color:#fff;background-color:#5c3270;border-color:#5c3270}.list-group-item-warning{color:#99460e;background-color:#ffe3d1}.list-group-item-warning.list-group-item-action:hover,.list-group-item-warning.list-group-item-action:focus{color:#99460e;background-color:#e6ccbc}.list-group-item-warning.list-group-item-action.active{color:#fff;background-color:#99460e;border-color:#99460e}.list-group-item-danger{color:#902;background-color:#ffccd7}.list-group-item-danger.list-group-item-action:hover,.list-group-item-danger.list-group-item-action:focus{color:#902;background-color:#e6b8c2}.list-group-item-danger.list-group-item-action.active{color:#fff;background-color:#902;border-color:#902}.list-group-item-light{color:#959596;background-color:#fefefe}.list-group-item-light.list-group-item-action:hover,.list-group-item-light.list-group-item-action:focus{color:#959596;background-color:#e5e5e5}.list-group-item-light.list-group-item-action.active{color:#fff;background-color:#959596;border-color:#959596}.list-group-item-dark{color:#212324;background-color:#d7d8d8}.list-group-item-dark.list-group-item-action:hover,.list-group-item-dark.list-group-item-action:focus{color:#212324;background-color:#c2c2c2}.list-group-item-dark.list-group-item-action.active{color:#fff;background-color:#212324;border-color:#212324}.btn-close{box-sizing:content-box;width:1em;height:1em;padding:.25em .25em;color:#000;background:transparent url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23000'%3e%3cpath d='M.293.293a1 1 0 011.414 0L8 6.586 14.293.293a1 1 0 111.414 1.414L9.414 8l6.293 6.293a1 1 0 01-1.414 1.414L8 9.414l-6.293 6.293a1 1 0 01-1.414-1.414L6.586 8 .293 1.707a1 1 0 010-1.414z'/%3e%3c/svg%3e") center/1em auto no-repeat;border:0;opacity:.5}.btn-close:hover{color:#000;text-decoration:none;opacity:.75}.btn-close:focus{outline:0;box-shadow:0 0 0 .25rem 
rgba(39,128,227,.25);opacity:1}.btn-close:disabled,.btn-close.disabled{pointer-events:none;user-select:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;-o-user-select:none;opacity:.25}.btn-close-white{filter:invert(1) grayscale(100%) brightness(200%)}.toast{width:350px;max-width:100%;font-size:0.875rem;pointer-events:auto;background-color:rgba(255,255,255,.85);background-clip:padding-box;border:1px solid rgba(0,0,0,.1);box-shadow:0 .5rem 1rem rgba(0,0,0,.15)}.toast.showing{opacity:0}.toast:not(.show){display:none}.toast-container{width:max-content;width:-webkit-max-content;width:-moz-max-content;width:-ms-max-content;width:-o-max-content;max-width:100%;pointer-events:none}.toast-container>:not(:last-child){margin-bottom:.75rem}.toast-header{display:flex;display:-webkit-flex;align-items:center;-webkit-align-items:center;padding:.5rem .75rem;color:#868e96;background-color:rgba(255,255,255,.85);background-clip:padding-box;border-bottom:1px solid rgba(0,0,0,.05)}.toast-header .btn-close{margin-right:-0.375rem;margin-left:.75rem}.toast-body{padding:.75rem;word-wrap:break-word}.modal{position:fixed;top:0;left:0;z-index:1055;display:none;width:100%;height:100%;overflow-x:hidden;overflow-y:auto;outline:0}.modal-dialog{position:relative;width:auto;margin:.5rem;pointer-events:none}.modal.fade .modal-dialog{transition:transform .3s ease-out;transform:translate(0, -50px)}@media(prefers-reduced-motion: reduce){.modal.fade .modal-dialog{transition:none}}.modal.show .modal-dialog{transform:none}.modal.modal-static .modal-dialog{transform:scale(1.02)}.modal-dialog-scrollable{height:calc(100% - 1rem)}.modal-dialog-scrollable .modal-content{max-height:100%;overflow:hidden}.modal-dialog-scrollable .modal-body{overflow-y:auto}.modal-dialog-centered{display:flex;display:-webkit-flex;align-items:center;-webkit-align-items:center;min-height:calc(100% - 1rem)}.modal-content{position:relative;display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;width:100%;pointer-events:auto;background-color:#fff;background-clip:padding-box;border:1px solid rgba(0,0,0,.2);outline:0}.modal-backdrop{position:fixed;top:0;left:0;z-index:1050;width:100vw;height:100vh;background-color:#000}.modal-backdrop.fade{opacity:0}.modal-backdrop.show{opacity:.5}.modal-header{display:flex;display:-webkit-flex;flex-shrink:0;-webkit-flex-shrink:0;align-items:center;-webkit-align-items:center;justify-content:space-between;-webkit-justify-content:space-between;padding:1rem 1rem;border-bottom:1px solid #dee2e6}.modal-header .btn-close{padding:.5rem .5rem;margin:-0.5rem -0.5rem -0.5rem auto}.modal-title{margin-bottom:0;line-height:1.5}.modal-body{position:relative;flex:1 1 auto;-webkit-flex:1 1 auto;padding:1rem}.modal-footer{display:flex;display:-webkit-flex;flex-wrap:wrap;-webkit-flex-wrap:wrap;flex-shrink:0;-webkit-flex-shrink:0;align-items:center;-webkit-align-items:center;justify-content:flex-end;-webkit-justify-content:flex-end;padding:.75rem;border-top:1px solid #dee2e6}.modal-footer>*{margin:.25rem}@media(min-width: 576px){.modal-dialog{max-width:500px;margin:1.75rem auto}.modal-dialog-scrollable{height:calc(100% - 3.5rem)}.modal-dialog-centered{min-height:calc(100% - 3.5rem)}.modal-sm{max-width:300px}}@media(min-width: 992px){.modal-lg,.modal-xl{max-width:800px}}@media(min-width: 1200px){.modal-xl{max-width:1140px}}.modal-fullscreen{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen .modal-content{height:100%;border:0}.modal-fullscreen 
.modal-body{overflow-y:auto}@media(max-width: 575.98px){.modal-fullscreen-sm-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-sm-down .modal-content{height:100%;border:0}.modal-fullscreen-sm-down .modal-body{overflow-y:auto}}@media(max-width: 767.98px){.modal-fullscreen-md-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-md-down .modal-content{height:100%;border:0}.modal-fullscreen-md-down .modal-body{overflow-y:auto}}@media(max-width: 991.98px){.modal-fullscreen-lg-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-lg-down .modal-content{height:100%;border:0}.modal-fullscreen-lg-down .modal-body{overflow-y:auto}}@media(max-width: 1199.98px){.modal-fullscreen-xl-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-xl-down .modal-content{height:100%;border:0}.modal-fullscreen-xl-down .modal-body{overflow-y:auto}}@media(max-width: 1399.98px){.modal-fullscreen-xxl-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-xxl-down .modal-content{height:100%;border:0}.modal-fullscreen-xxl-down .modal-body{overflow-y:auto}}.tooltip{position:absolute;z-index:1080;display:block;margin:0;font-family:var(--bs-font-sans-serif);font-style:normal;font-weight:400;line-height:1.5;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;white-space:normal;line-break:auto;font-size:0.875rem;word-wrap:break-word;opacity:0}.tooltip.show{opacity:.9}.tooltip .tooltip-arrow{position:absolute;display:block;width:.8rem;height:.4rem}.tooltip .tooltip-arrow::before{position:absolute;content:"";border-color:transparent;border-style:solid}.bs-tooltip-top,.bs-tooltip-auto[data-popper-placement^=top]{padding:.4rem 0}.bs-tooltip-top .tooltip-arrow,.bs-tooltip-auto[data-popper-placement^=top] .tooltip-arrow{bottom:0}.bs-tooltip-top .tooltip-arrow::before,.bs-tooltip-auto[data-popper-placement^=top] .tooltip-arrow::before{top:-1px;border-width:.4rem .4rem 0;border-top-color:#000}.bs-tooltip-end,.bs-tooltip-auto[data-popper-placement^=right]{padding:0 .4rem}.bs-tooltip-end .tooltip-arrow,.bs-tooltip-auto[data-popper-placement^=right] .tooltip-arrow{left:0;width:.4rem;height:.8rem}.bs-tooltip-end .tooltip-arrow::before,.bs-tooltip-auto[data-popper-placement^=right] .tooltip-arrow::before{right:-1px;border-width:.4rem .4rem .4rem 0;border-right-color:#000}.bs-tooltip-bottom,.bs-tooltip-auto[data-popper-placement^=bottom]{padding:.4rem 0}.bs-tooltip-bottom .tooltip-arrow,.bs-tooltip-auto[data-popper-placement^=bottom] .tooltip-arrow{top:0}.bs-tooltip-bottom .tooltip-arrow::before,.bs-tooltip-auto[data-popper-placement^=bottom] .tooltip-arrow::before{bottom:-1px;border-width:0 .4rem .4rem;border-bottom-color:#000}.bs-tooltip-start,.bs-tooltip-auto[data-popper-placement^=left]{padding:0 .4rem}.bs-tooltip-start .tooltip-arrow,.bs-tooltip-auto[data-popper-placement^=left] .tooltip-arrow{right:0;width:.4rem;height:.8rem}.bs-tooltip-start .tooltip-arrow::before,.bs-tooltip-auto[data-popper-placement^=left] .tooltip-arrow::before{left:-1px;border-width:.4rem 0 .4rem .4rem;border-left-color:#000}.tooltip-inner{max-width:200px;padding:.25rem .5rem;color:#fff;text-align:center;background-color:#000}.popover{position:absolute;top:0;left:0 /* rtl:ignore 
*/;z-index:1070;display:block;max-width:276px;font-family:var(--bs-font-sans-serif);font-style:normal;font-weight:400;line-height:1.5;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;white-space:normal;line-break:auto;font-size:0.875rem;word-wrap:break-word;background-color:#fff;background-clip:padding-box;border:1px solid rgba(0,0,0,.2)}.popover .popover-arrow{position:absolute;display:block;width:1rem;height:.5rem}.popover .popover-arrow::before,.popover .popover-arrow::after{position:absolute;display:block;content:"";border-color:transparent;border-style:solid}.bs-popover-top>.popover-arrow,.bs-popover-auto[data-popper-placement^=top]>.popover-arrow{bottom:calc(-0.5rem - 1px)}.bs-popover-top>.popover-arrow::before,.bs-popover-auto[data-popper-placement^=top]>.popover-arrow::before{bottom:0;border-width:.5rem .5rem 0;border-top-color:rgba(0,0,0,.25)}.bs-popover-top>.popover-arrow::after,.bs-popover-auto[data-popper-placement^=top]>.popover-arrow::after{bottom:1px;border-width:.5rem .5rem 0;border-top-color:#fff}.bs-popover-end>.popover-arrow,.bs-popover-auto[data-popper-placement^=right]>.popover-arrow{left:calc(-0.5rem - 1px);width:.5rem;height:1rem}.bs-popover-end>.popover-arrow::before,.bs-popover-auto[data-popper-placement^=right]>.popover-arrow::before{left:0;border-width:.5rem .5rem .5rem 0;border-right-color:rgba(0,0,0,.25)}.bs-popover-end>.popover-arrow::after,.bs-popover-auto[data-popper-placement^=right]>.popover-arrow::after{left:1px;border-width:.5rem .5rem .5rem 0;border-right-color:#fff}.bs-popover-bottom>.popover-arrow,.bs-popover-auto[data-popper-placement^=bottom]>.popover-arrow{top:calc(-0.5rem - 1px)}.bs-popover-bottom>.popover-arrow::before,.bs-popover-auto[data-popper-placement^=bottom]>.popover-arrow::before{top:0;border-width:0 .5rem .5rem .5rem;border-bottom-color:rgba(0,0,0,.25)}.bs-popover-bottom>.popover-arrow::after,.bs-popover-auto[data-popper-placement^=bottom]>.popover-arrow::after{top:1px;border-width:0 .5rem .5rem .5rem;border-bottom-color:#fff}.bs-popover-bottom .popover-header::before,.bs-popover-auto[data-popper-placement^=bottom] .popover-header::before{position:absolute;top:0;left:50%;display:block;width:1rem;margin-left:-0.5rem;content:"";border-bottom:1px solid #f0f0f0}.bs-popover-start>.popover-arrow,.bs-popover-auto[data-popper-placement^=left]>.popover-arrow{right:calc(-0.5rem - 1px);width:.5rem;height:1rem}.bs-popover-start>.popover-arrow::before,.bs-popover-auto[data-popper-placement^=left]>.popover-arrow::before{right:0;border-width:.5rem 0 .5rem .5rem;border-left-color:rgba(0,0,0,.25)}.bs-popover-start>.popover-arrow::after,.bs-popover-auto[data-popper-placement^=left]>.popover-arrow::after{right:1px;border-width:.5rem 0 .5rem .5rem;border-left-color:#fff}.popover-header{padding:.5rem 1rem;margin-bottom:0;font-size:1rem;background-color:#f0f0f0;border-bottom:1px solid rgba(0,0,0,.2)}.popover-header:empty{display:none}.popover-body{padding:1rem 
1rem;color:#373a3c}.carousel{position:relative}.carousel.pointer-event{touch-action:pan-y;-webkit-touch-action:pan-y;-moz-touch-action:pan-y;-ms-touch-action:pan-y;-o-touch-action:pan-y}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel-inner::after{display:block;clear:both;content:""}.carousel-item{position:relative;display:none;float:left;width:100%;margin-right:-100%;backface-visibility:hidden;-webkit-backface-visibility:hidden;-moz-backface-visibility:hidden;-ms-backface-visibility:hidden;-o-backface-visibility:hidden;transition:transform .6s ease-in-out}@media(prefers-reduced-motion: reduce){.carousel-item{transition:none}}.carousel-item.active,.carousel-item-next,.carousel-item-prev{display:block}.carousel-item-next:not(.carousel-item-start),.active.carousel-item-end{transform:translateX(100%)}.carousel-item-prev:not(.carousel-item-end),.active.carousel-item-start{transform:translateX(-100%)}.carousel-fade .carousel-item{opacity:0;transition-property:opacity;transform:none}.carousel-fade .carousel-item.active,.carousel-fade .carousel-item-next.carousel-item-start,.carousel-fade .carousel-item-prev.carousel-item-end{z-index:1;opacity:1}.carousel-fade .active.carousel-item-start,.carousel-fade .active.carousel-item-end{z-index:0;opacity:0;transition:opacity 0s .6s}@media(prefers-reduced-motion: reduce){.carousel-fade .active.carousel-item-start,.carousel-fade .active.carousel-item-end{transition:none}}.carousel-control-prev,.carousel-control-next{position:absolute;top:0;bottom:0;z-index:1;display:flex;display:-webkit-flex;align-items:center;-webkit-align-items:center;justify-content:center;-webkit-justify-content:center;width:15%;padding:0;color:#fff;text-align:center;background:none;border:0;opacity:.5;transition:opacity .15s ease}@media(prefers-reduced-motion: reduce){.carousel-control-prev,.carousel-control-next{transition:none}}.carousel-control-prev:hover,.carousel-control-prev:focus,.carousel-control-next:hover,.carousel-control-next:focus{color:#fff;text-decoration:none;outline:0;opacity:.9}.carousel-control-prev{left:0}.carousel-control-next{right:0}.carousel-control-prev-icon,.carousel-control-next-icon{display:inline-block;width:2rem;height:2rem;background-repeat:no-repeat;background-position:50%;background-size:100% 100%}.carousel-control-prev-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23fff'%3e%3cpath d='M11.354 1.646a.5.5 0 0 1 0 .708L5.707 8l5.647 5.646a.5.5 0 0 1-.708.708l-6-6a.5.5 0 0 1 0-.708l6-6a.5.5 0 0 1 .708 0z'/%3e%3c/svg%3e")}.carousel-control-next-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23fff'%3e%3cpath d='M4.646 1.646a.5.5 0 0 1 .708 0l6 6a.5.5 0 0 1 0 .708l-6 6a.5.5 0 0 1-.708-.708L10.293 8 4.646 2.354a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e")}.carousel-indicators{position:absolute;right:0;bottom:0;left:0;z-index:2;display:flex;display:-webkit-flex;justify-content:center;-webkit-justify-content:center;padding:0;margin-right:15%;margin-bottom:1rem;margin-left:15%;list-style:none}.carousel-indicators [data-bs-target]{box-sizing:content-box;flex:0 1 auto;-webkit-flex:0 1 auto;width:30px;height:3px;padding:0;margin-right:3px;margin-left:3px;text-indent:-999px;cursor:pointer;background-color:#fff;background-clip:padding-box;border:0;border-top:10px solid transparent;border-bottom:10px solid transparent;opacity:.5;transition:opacity .6s ease}@media(prefers-reduced-motion: reduce){.carousel-indicators 
[data-bs-target]{transition:none}}.carousel-indicators .active{opacity:1}.carousel-caption{position:absolute;right:15%;bottom:1.25rem;left:15%;padding-top:1.25rem;padding-bottom:1.25rem;color:#fff;text-align:center}.carousel-dark .carousel-control-prev-icon,.carousel-dark .carousel-control-next-icon{filter:invert(1) grayscale(100)}.carousel-dark .carousel-indicators [data-bs-target]{background-color:#000}.carousel-dark .carousel-caption{color:#000}@keyframes spinner-border{to{transform:rotate(360deg) /* rtl:ignore */}}.spinner-border{display:inline-block;width:2rem;height:2rem;vertical-align:-0.125em;border:.25em solid currentColor;border-right-color:transparent;border-radius:50%;animation:.75s linear infinite spinner-border}.spinner-border-sm{width:1rem;height:1rem;border-width:.2em}@keyframes spinner-grow{0%{transform:scale(0)}50%{opacity:1;transform:none}}.spinner-grow{display:inline-block;width:2rem;height:2rem;vertical-align:-0.125em;background-color:currentColor;border-radius:50%;opacity:0;animation:.75s linear infinite spinner-grow}.spinner-grow-sm{width:1rem;height:1rem}@media(prefers-reduced-motion: reduce){.spinner-border,.spinner-grow{animation-duration:1.5s;-webkit-animation-duration:1.5s;-moz-animation-duration:1.5s;-ms-animation-duration:1.5s;-o-animation-duration:1.5s}}.offcanvas{position:fixed;bottom:0;z-index:1045;display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;max-width:100%;visibility:hidden;background-color:#fff;background-clip:padding-box;outline:0;transition:transform .3s ease-in-out}@media(prefers-reduced-motion: reduce){.offcanvas{transition:none}}.offcanvas-backdrop{position:fixed;top:0;left:0;z-index:1040;width:100vw;height:100vh;background-color:#000}.offcanvas-backdrop.fade{opacity:0}.offcanvas-backdrop.show{opacity:.5}.offcanvas-header{display:flex;display:-webkit-flex;align-items:center;-webkit-align-items:center;justify-content:space-between;-webkit-justify-content:space-between;padding:1rem 1rem}.offcanvas-header .btn-close{padding:.5rem .5rem;margin-top:-0.5rem;margin-right:-0.5rem;margin-bottom:-0.5rem}.offcanvas-title{margin-bottom:0;line-height:1.5}.offcanvas-body{flex-grow:1;-webkit-flex-grow:1;padding:1rem 1rem;overflow-y:auto}.offcanvas-start{top:0;left:0;width:400px;border-right:1px solid rgba(0,0,0,.2);transform:translateX(-100%)}.offcanvas-end{top:0;right:0;width:400px;border-left:1px solid rgba(0,0,0,.2);transform:translateX(100%)}.offcanvas-top{top:0;right:0;left:0;height:30vh;max-height:100%;border-bottom:1px solid rgba(0,0,0,.2);transform:translateY(-100%)}.offcanvas-bottom{right:0;left:0;height:30vh;max-height:100%;border-top:1px solid rgba(0,0,0,.2);transform:translateY(100%)}.offcanvas.show{transform:none}.placeholder{display:inline-block;min-height:1em;vertical-align:middle;cursor:wait;background-color:currentColor;opacity:.5}.placeholder.btn::before{display:inline-block;content:""}.placeholder-xs{min-height:.6em}.placeholder-sm{min-height:.8em}.placeholder-lg{min-height:1.2em}.placeholder-glow .placeholder{animation:placeholder-glow 2s ease-in-out infinite}@keyframes placeholder-glow{50%{opacity:.2}}.placeholder-wave{mask-image:linear-gradient(130deg, #000 55%, rgba(0, 0, 0, 0.8) 75%, #000 95%);-webkit-mask-image:linear-gradient(130deg, #000 55%, rgba(0, 0, 0, 0.8) 75%, #000 95%);mask-size:200% 100%;-webkit-mask-size:200% 100%;animation:placeholder-wave 2s linear infinite}@keyframes placeholder-wave{100%{mask-position:-200% 0%;-webkit-mask-position:-200% 
0%}}.clearfix::after{display:block;clear:both;content:""}.link-default{color:#373a3c}.link-default:hover,.link-default:focus{color:#2c2e30}.link-primary{color:#2780e3}.link-primary:hover,.link-primary:focus{color:#1f66b6}.link-secondary{color:#373a3c}.link-secondary:hover,.link-secondary:focus{color:#2c2e30}.link-success{color:#3fb618}.link-success:hover,.link-success:focus{color:#329213}.link-info{color:#9954bb}.link-info:hover,.link-info:focus{color:#7a4396}.link-warning{color:#ff7518}.link-warning:hover,.link-warning:focus{color:#cc5e13}.link-danger{color:#ff0039}.link-danger:hover,.link-danger:focus{color:#cc002e}.link-light{color:#f8f9fa}.link-light:hover,.link-light:focus{color:#f9fafb}.link-dark{color:#373a3c}.link-dark:hover,.link-dark:focus{color:#2c2e30}.ratio{position:relative;width:100%}.ratio::before{display:block;padding-top:var(--bs-aspect-ratio);content:""}.ratio>*{position:absolute;top:0;left:0;width:100%;height:100%}.ratio-1x1{--bs-aspect-ratio: 100%}.ratio-4x3{--bs-aspect-ratio: calc(3 / 4 * 100%)}.ratio-16x9{--bs-aspect-ratio: calc(9 / 16 * 100%)}.ratio-21x9{--bs-aspect-ratio: calc(9 / 21 * 100%)}.fixed-top{position:fixed;top:0;right:0;left:0;z-index:1030}.fixed-bottom{position:fixed;right:0;bottom:0;left:0;z-index:1030}.sticky-top{position:sticky;top:0;z-index:1020}@media(min-width: 576px){.sticky-sm-top{position:sticky;top:0;z-index:1020}}@media(min-width: 768px){.sticky-md-top{position:sticky;top:0;z-index:1020}}@media(min-width: 992px){.sticky-lg-top{position:sticky;top:0;z-index:1020}}@media(min-width: 1200px){.sticky-xl-top{position:sticky;top:0;z-index:1020}}@media(min-width: 1400px){.sticky-xxl-top{position:sticky;top:0;z-index:1020}}.hstack{display:flex;display:-webkit-flex;flex-direction:row;-webkit-flex-direction:row;align-items:center;-webkit-align-items:center;align-self:stretch;-webkit-align-self:stretch}.vstack{display:flex;display:-webkit-flex;flex:1 1 auto;-webkit-flex:1 1 auto;flex-direction:column;-webkit-flex-direction:column;align-self:stretch;-webkit-align-self:stretch}.visually-hidden,.visually-hidden-focusable:not(:focus):not(:focus-within){position:absolute !important;width:1px !important;height:1px !important;padding:0 !important;margin:-1px !important;overflow:hidden !important;clip:rect(0, 0, 0, 0) !important;white-space:nowrap !important;border:0 !important}.stretched-link::after{position:absolute;top:0;right:0;bottom:0;left:0;z-index:1;content:""}.text-truncate{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.vr{display:inline-block;align-self:stretch;-webkit-align-self:stretch;width:1px;min-height:1em;background-color:currentColor;opacity:.25}.align-baseline{vertical-align:baseline !important}.align-top{vertical-align:top !important}.align-middle{vertical-align:middle !important}.align-bottom{vertical-align:bottom !important}.align-text-bottom{vertical-align:text-bottom !important}.align-text-top{vertical-align:text-top !important}.float-start{float:left !important}.float-end{float:right !important}.float-none{float:none !important}.opacity-0{opacity:0 !important}.opacity-25{opacity:.25 !important}.opacity-50{opacity:.5 !important}.opacity-75{opacity:.75 !important}.opacity-100{opacity:1 !important}.overflow-auto{overflow:auto !important}.overflow-hidden{overflow:hidden !important}.overflow-visible{overflow:visible !important}.overflow-scroll{overflow:scroll !important}.d-inline{display:inline !important}.d-inline-block{display:inline-block !important}.d-block{display:block !important}.d-grid{display:grid 
!important}.d-table{display:table !important}.d-table-row{display:table-row !important}.d-table-cell{display:table-cell !important}.d-flex{display:flex !important}.d-inline-flex{display:inline-flex !important}.d-none{display:none !important}.shadow{box-shadow:0 .5rem 1rem rgba(0,0,0,.15) !important}.shadow-sm{box-shadow:0 .125rem .25rem rgba(0,0,0,.075) !important}.shadow-lg{box-shadow:0 1rem 3rem rgba(0,0,0,.175) !important}.shadow-none{box-shadow:none !important}.position-static{position:static !important}.position-relative{position:relative !important}.position-absolute{position:absolute !important}.position-fixed{position:fixed !important}.position-sticky{position:sticky !important}.top-0{top:0 !important}.top-50{top:50% !important}.top-100{top:100% !important}.bottom-0{bottom:0 !important}.bottom-50{bottom:50% !important}.bottom-100{bottom:100% !important}.start-0{left:0 !important}.start-50{left:50% !important}.start-100{left:100% !important}.end-0{right:0 !important}.end-50{right:50% !important}.end-100{right:100% !important}.translate-middle{transform:translate(-50%, -50%) !important}.translate-middle-x{transform:translateX(-50%) !important}.translate-middle-y{transform:translateY(-50%) !important}.border{border:1px solid #dee2e6 !important}.border-0{border:0 !important}.border-top{border-top:1px solid #dee2e6 !important}.border-top-0{border-top:0 !important}.border-end{border-right:1px solid #dee2e6 !important}.border-end-0{border-right:0 !important}.border-bottom{border-bottom:1px solid #dee2e6 !important}.border-bottom-0{border-bottom:0 !important}.border-start{border-left:1px solid #dee2e6 !important}.border-start-0{border-left:0 !important}.border-default{border-color:#373a3c !important}.border-primary{border-color:#2780e3 !important}.border-secondary{border-color:#373a3c !important}.border-success{border-color:#3fb618 !important}.border-info{border-color:#9954bb !important}.border-warning{border-color:#ff7518 !important}.border-danger{border-color:#ff0039 !important}.border-light{border-color:#f8f9fa !important}.border-dark{border-color:#373a3c !important}.border-white{border-color:#fff !important}.border-1{border-width:1px !important}.border-2{border-width:2px !important}.border-3{border-width:3px !important}.border-4{border-width:4px !important}.border-5{border-width:5px !important}.w-25{width:25% !important}.w-50{width:50% !important}.w-75{width:75% !important}.w-100{width:100% !important}.w-auto{width:auto !important}.mw-100{max-width:100% !important}.vw-100{width:100vw !important}.min-vw-100{min-width:100vw !important}.h-25{height:25% !important}.h-50{height:50% !important}.h-75{height:75% !important}.h-100{height:100% !important}.h-auto{height:auto !important}.mh-100{max-height:100% !important}.vh-100{height:100vh !important}.min-vh-100{min-height:100vh !important}.flex-fill{flex:1 1 auto !important}.flex-row{flex-direction:row !important}.flex-column{flex-direction:column !important}.flex-row-reverse{flex-direction:row-reverse !important}.flex-column-reverse{flex-direction:column-reverse !important}.flex-grow-0{flex-grow:0 !important}.flex-grow-1{flex-grow:1 !important}.flex-shrink-0{flex-shrink:0 !important}.flex-shrink-1{flex-shrink:1 !important}.flex-wrap{flex-wrap:wrap !important}.flex-nowrap{flex-wrap:nowrap !important}.flex-wrap-reverse{flex-wrap:wrap-reverse !important}.gap-0{gap:0 !important}.gap-1{gap:.25rem !important}.gap-2{gap:.5rem !important}.gap-3{gap:1rem !important}.gap-4{gap:1.5rem !important}.gap-5{gap:3rem 
!important}.justify-content-start{justify-content:flex-start !important}.justify-content-end{justify-content:flex-end !important}.justify-content-center{justify-content:center !important}.justify-content-between{justify-content:space-between !important}.justify-content-around{justify-content:space-around !important}.justify-content-evenly{justify-content:space-evenly !important}.align-items-start{align-items:flex-start !important}.align-items-end{align-items:flex-end !important}.align-items-center{align-items:center !important}.align-items-baseline{align-items:baseline !important}.align-items-stretch{align-items:stretch !important}.align-content-start{align-content:flex-start !important}.align-content-end{align-content:flex-end !important}.align-content-center{align-content:center !important}.align-content-between{align-content:space-between !important}.align-content-around{align-content:space-around !important}.align-content-stretch{align-content:stretch !important}.align-self-auto{align-self:auto !important}.align-self-start{align-self:flex-start !important}.align-self-end{align-self:flex-end !important}.align-self-center{align-self:center !important}.align-self-baseline{align-self:baseline !important}.align-self-stretch{align-self:stretch !important}.order-first{order:-1 !important}.order-0{order:0 !important}.order-1{order:1 !important}.order-2{order:2 !important}.order-3{order:3 !important}.order-4{order:4 !important}.order-5{order:5 !important}.order-last{order:6 !important}.m-0{margin:0 !important}.m-1{margin:.25rem !important}.m-2{margin:.5rem !important}.m-3{margin:1rem !important}.m-4{margin:1.5rem !important}.m-5{margin:3rem !important}.m-auto{margin:auto !important}.mx-0{margin-right:0 !important;margin-left:0 !important}.mx-1{margin-right:.25rem !important;margin-left:.25rem !important}.mx-2{margin-right:.5rem !important;margin-left:.5rem !important}.mx-3{margin-right:1rem !important;margin-left:1rem !important}.mx-4{margin-right:1.5rem !important;margin-left:1.5rem !important}.mx-5{margin-right:3rem !important;margin-left:3rem !important}.mx-auto{margin-right:auto !important;margin-left:auto !important}.my-0{margin-top:0 !important;margin-bottom:0 !important}.my-1{margin-top:.25rem !important;margin-bottom:.25rem !important}.my-2{margin-top:.5rem !important;margin-bottom:.5rem !important}.my-3{margin-top:1rem !important;margin-bottom:1rem !important}.my-4{margin-top:1.5rem !important;margin-bottom:1.5rem !important}.my-5{margin-top:3rem !important;margin-bottom:3rem !important}.my-auto{margin-top:auto !important;margin-bottom:auto !important}.mt-0{margin-top:0 !important}.mt-1{margin-top:.25rem !important}.mt-2{margin-top:.5rem !important}.mt-3{margin-top:1rem !important}.mt-4{margin-top:1.5rem !important}.mt-5{margin-top:3rem !important}.mt-auto{margin-top:auto !important}.me-0{margin-right:0 !important}.me-1{margin-right:.25rem !important}.me-2{margin-right:.5rem !important}.me-3{margin-right:1rem !important}.me-4{margin-right:1.5rem !important}.me-5{margin-right:3rem !important}.me-auto{margin-right:auto !important}.mb-0{margin-bottom:0 !important}.mb-1{margin-bottom:.25rem !important}.mb-2{margin-bottom:.5rem !important}.mb-3{margin-bottom:1rem !important}.mb-4{margin-bottom:1.5rem !important}.mb-5{margin-bottom:3rem !important}.mb-auto{margin-bottom:auto !important}.ms-0{margin-left:0 !important}.ms-1{margin-left:.25rem !important}.ms-2{margin-left:.5rem !important}.ms-3{margin-left:1rem !important}.ms-4{margin-left:1.5rem !important}.ms-5{margin-left:3rem 
!important}.ms-auto{margin-left:auto !important}.p-0{padding:0 !important}.p-1{padding:.25rem !important}.p-2{padding:.5rem !important}.p-3{padding:1rem !important}.p-4{padding:1.5rem !important}.p-5{padding:3rem !important}.px-0{padding-right:0 !important;padding-left:0 !important}.px-1{padding-right:.25rem !important;padding-left:.25rem !important}.px-2{padding-right:.5rem !important;padding-left:.5rem !important}.px-3{padding-right:1rem !important;padding-left:1rem !important}.px-4{padding-right:1.5rem !important;padding-left:1.5rem !important}.px-5{padding-right:3rem !important;padding-left:3rem !important}.py-0{padding-top:0 !important;padding-bottom:0 !important}.py-1{padding-top:.25rem !important;padding-bottom:.25rem !important}.py-2{padding-top:.5rem !important;padding-bottom:.5rem !important}.py-3{padding-top:1rem !important;padding-bottom:1rem !important}.py-4{padding-top:1.5rem !important;padding-bottom:1.5rem !important}.py-5{padding-top:3rem !important;padding-bottom:3rem !important}.pt-0{padding-top:0 !important}.pt-1{padding-top:.25rem !important}.pt-2{padding-top:.5rem !important}.pt-3{padding-top:1rem !important}.pt-4{padding-top:1.5rem !important}.pt-5{padding-top:3rem !important}.pe-0{padding-right:0 !important}.pe-1{padding-right:.25rem !important}.pe-2{padding-right:.5rem !important}.pe-3{padding-right:1rem !important}.pe-4{padding-right:1.5rem !important}.pe-5{padding-right:3rem !important}.pb-0{padding-bottom:0 !important}.pb-1{padding-bottom:.25rem !important}.pb-2{padding-bottom:.5rem !important}.pb-3{padding-bottom:1rem !important}.pb-4{padding-bottom:1.5rem !important}.pb-5{padding-bottom:3rem !important}.ps-0{padding-left:0 !important}.ps-1{padding-left:.25rem !important}.ps-2{padding-left:.5rem !important}.ps-3{padding-left:1rem !important}.ps-4{padding-left:1.5rem !important}.ps-5{padding-left:3rem !important}.font-monospace{font-family:var(--bs-font-monospace) !important}.fs-1{font-size:calc(1.345rem + 1.14vw) !important}.fs-2{font-size:calc(1.3rem + 0.6vw) !important}.fs-3{font-size:calc(1.275rem + 0.3vw) !important}.fs-4{font-size:1.25rem !important}.fs-5{font-size:1.1rem !important}.fs-6{font-size:1rem !important}.fst-italic{font-style:italic !important}.fst-normal{font-style:normal !important}.fw-light{font-weight:300 !important}.fw-lighter{font-weight:lighter !important}.fw-normal{font-weight:400 !important}.fw-bold{font-weight:700 !important}.fw-bolder{font-weight:bolder !important}.lh-1{line-height:1 !important}.lh-sm{line-height:1.25 !important}.lh-base{line-height:1.5 !important}.lh-lg{line-height:2 !important}.text-start{text-align:left !important}.text-end{text-align:right !important}.text-center{text-align:center !important}.text-decoration-none{text-decoration:none !important}.text-decoration-underline{text-decoration:underline !important}.text-decoration-line-through{text-decoration:line-through !important}.text-lowercase{text-transform:lowercase !important}.text-uppercase{text-transform:uppercase !important}.text-capitalize{text-transform:capitalize !important}.text-wrap{white-space:normal !important}.text-nowrap{white-space:nowrap !important}.text-break{word-wrap:break-word !important;word-break:break-word !important}.text-default{--bs-text-opacity: 1;color:rgba(var(--bs-default-rgb), var(--bs-text-opacity)) !important}.text-primary{--bs-text-opacity: 1;color:rgba(var(--bs-primary-rgb), var(--bs-text-opacity)) !important}.text-secondary{--bs-text-opacity: 1;color:rgba(var(--bs-secondary-rgb), var(--bs-text-opacity)) 
!important}.text-success{--bs-text-opacity: 1;color:rgba(var(--bs-success-rgb), var(--bs-text-opacity)) !important}.text-info{--bs-text-opacity: 1;color:rgba(var(--bs-info-rgb), var(--bs-text-opacity)) !important}.text-warning{--bs-text-opacity: 1;color:rgba(var(--bs-warning-rgb), var(--bs-text-opacity)) !important}.text-danger{--bs-text-opacity: 1;color:rgba(var(--bs-danger-rgb), var(--bs-text-opacity)) !important}.text-light{--bs-text-opacity: 1;color:rgba(var(--bs-light-rgb), var(--bs-text-opacity)) !important}.text-dark{--bs-text-opacity: 1;color:rgba(var(--bs-dark-rgb), var(--bs-text-opacity)) !important}.text-black{--bs-text-opacity: 1;color:rgba(var(--bs-black-rgb), var(--bs-text-opacity)) !important}.text-white{--bs-text-opacity: 1;color:rgba(var(--bs-white-rgb), var(--bs-text-opacity)) !important}.text-body{--bs-text-opacity: 1;color:rgba(var(--bs-body-color-rgb), var(--bs-text-opacity)) !important}.text-muted{--bs-text-opacity: 1;color:#868e96 !important}.text-black-50{--bs-text-opacity: 1;color:rgba(0,0,0,.5) !important}.text-white-50{--bs-text-opacity: 1;color:rgba(255,255,255,.5) !important}.text-reset{--bs-text-opacity: 1;color:inherit !important}.text-opacity-25{--bs-text-opacity: 0.25}.text-opacity-50{--bs-text-opacity: 0.5}.text-opacity-75{--bs-text-opacity: 0.75}.text-opacity-100{--bs-text-opacity: 1}.bg-default{--bs-bg-opacity: 1;background-color:rgba(var(--bs-default-rgb), var(--bs-bg-opacity)) !important}.bg-primary{--bs-bg-opacity: 1;background-color:rgba(var(--bs-primary-rgb), var(--bs-bg-opacity)) !important}.bg-secondary{--bs-bg-opacity: 1;background-color:rgba(var(--bs-secondary-rgb), var(--bs-bg-opacity)) !important}.bg-success{--bs-bg-opacity: 1;background-color:rgba(var(--bs-success-rgb), var(--bs-bg-opacity)) !important}.bg-info{--bs-bg-opacity: 1;background-color:rgba(var(--bs-info-rgb), var(--bs-bg-opacity)) !important}.bg-warning{--bs-bg-opacity: 1;background-color:rgba(var(--bs-warning-rgb), var(--bs-bg-opacity)) !important}.bg-danger{--bs-bg-opacity: 1;background-color:rgba(var(--bs-danger-rgb), var(--bs-bg-opacity)) !important}.bg-light{--bs-bg-opacity: 1;background-color:rgba(var(--bs-light-rgb), var(--bs-bg-opacity)) !important}.bg-dark{--bs-bg-opacity: 1;background-color:rgba(var(--bs-dark-rgb), var(--bs-bg-opacity)) !important}.bg-black{--bs-bg-opacity: 1;background-color:rgba(var(--bs-black-rgb), var(--bs-bg-opacity)) !important}.bg-white{--bs-bg-opacity: 1;background-color:rgba(var(--bs-white-rgb), var(--bs-bg-opacity)) !important}.bg-body{--bs-bg-opacity: 1;background-color:rgba(var(--bs-body-bg-rgb), var(--bs-bg-opacity)) !important}.bg-transparent{--bs-bg-opacity: 1;background-color:transparent !important}.bg-opacity-10{--bs-bg-opacity: 0.1}.bg-opacity-25{--bs-bg-opacity: 0.25}.bg-opacity-50{--bs-bg-opacity: 0.5}.bg-opacity-75{--bs-bg-opacity: 0.75}.bg-opacity-100{--bs-bg-opacity: 1}.bg-gradient{background-image:var(--bs-gradient) !important}.user-select-all{user-select:all !important}.user-select-auto{user-select:auto !important}.user-select-none{user-select:none !important}.pe-none{pointer-events:none !important}.pe-auto{pointer-events:auto !important}.rounded{border-radius:.25rem !important}.rounded-0{border-radius:0 !important}.rounded-1{border-radius:.2rem !important}.rounded-2{border-radius:.25rem !important}.rounded-3{border-radius:.3rem !important}.rounded-circle{border-radius:50% !important}.rounded-pill{border-radius:50rem !important}.rounded-top{border-top-left-radius:.25rem !important;border-top-right-radius:.25rem 
!important}.rounded-end{border-top-right-radius:.25rem !important;border-bottom-right-radius:.25rem !important}.rounded-bottom{border-bottom-right-radius:.25rem !important;border-bottom-left-radius:.25rem !important}.rounded-start{border-bottom-left-radius:.25rem !important;border-top-left-radius:.25rem !important}.visible{visibility:visible !important}.invisible{visibility:hidden !important}@media(min-width: 576px){.float-sm-start{float:left !important}.float-sm-end{float:right !important}.float-sm-none{float:none !important}.d-sm-inline{display:inline !important}.d-sm-inline-block{display:inline-block !important}.d-sm-block{display:block !important}.d-sm-grid{display:grid !important}.d-sm-table{display:table !important}.d-sm-table-row{display:table-row !important}.d-sm-table-cell{display:table-cell !important}.d-sm-flex{display:flex !important}.d-sm-inline-flex{display:inline-flex !important}.d-sm-none{display:none !important}.flex-sm-fill{flex:1 1 auto !important}.flex-sm-row{flex-direction:row !important}.flex-sm-column{flex-direction:column !important}.flex-sm-row-reverse{flex-direction:row-reverse !important}.flex-sm-column-reverse{flex-direction:column-reverse !important}.flex-sm-grow-0{flex-grow:0 !important}.flex-sm-grow-1{flex-grow:1 !important}.flex-sm-shrink-0{flex-shrink:0 !important}.flex-sm-shrink-1{flex-shrink:1 !important}.flex-sm-wrap{flex-wrap:wrap !important}.flex-sm-nowrap{flex-wrap:nowrap !important}.flex-sm-wrap-reverse{flex-wrap:wrap-reverse !important}.gap-sm-0{gap:0 !important}.gap-sm-1{gap:.25rem !important}.gap-sm-2{gap:.5rem !important}.gap-sm-3{gap:1rem !important}.gap-sm-4{gap:1.5rem !important}.gap-sm-5{gap:3rem !important}.justify-content-sm-start{justify-content:flex-start !important}.justify-content-sm-end{justify-content:flex-end !important}.justify-content-sm-center{justify-content:center !important}.justify-content-sm-between{justify-content:space-between !important}.justify-content-sm-around{justify-content:space-around !important}.justify-content-sm-evenly{justify-content:space-evenly !important}.align-items-sm-start{align-items:flex-start !important}.align-items-sm-end{align-items:flex-end !important}.align-items-sm-center{align-items:center !important}.align-items-sm-baseline{align-items:baseline !important}.align-items-sm-stretch{align-items:stretch !important}.align-content-sm-start{align-content:flex-start !important}.align-content-sm-end{align-content:flex-end !important}.align-content-sm-center{align-content:center !important}.align-content-sm-between{align-content:space-between !important}.align-content-sm-around{align-content:space-around !important}.align-content-sm-stretch{align-content:stretch !important}.align-self-sm-auto{align-self:auto !important}.align-self-sm-start{align-self:flex-start !important}.align-self-sm-end{align-self:flex-end !important}.align-self-sm-center{align-self:center !important}.align-self-sm-baseline{align-self:baseline !important}.align-self-sm-stretch{align-self:stretch !important}.order-sm-first{order:-1 !important}.order-sm-0{order:0 !important}.order-sm-1{order:1 !important}.order-sm-2{order:2 !important}.order-sm-3{order:3 !important}.order-sm-4{order:4 !important}.order-sm-5{order:5 !important}.order-sm-last{order:6 !important}.m-sm-0{margin:0 !important}.m-sm-1{margin:.25rem !important}.m-sm-2{margin:.5rem !important}.m-sm-3{margin:1rem !important}.m-sm-4{margin:1.5rem !important}.m-sm-5{margin:3rem !important}.m-sm-auto{margin:auto !important}.mx-sm-0{margin-right:0 !important;margin-left:0 
!important}.mx-sm-1{margin-right:.25rem !important;margin-left:.25rem !important}.mx-sm-2{margin-right:.5rem !important;margin-left:.5rem !important}.mx-sm-3{margin-right:1rem !important;margin-left:1rem !important}.mx-sm-4{margin-right:1.5rem !important;margin-left:1.5rem !important}.mx-sm-5{margin-right:3rem !important;margin-left:3rem !important}.mx-sm-auto{margin-right:auto !important;margin-left:auto !important}.my-sm-0{margin-top:0 !important;margin-bottom:0 !important}.my-sm-1{margin-top:.25rem !important;margin-bottom:.25rem !important}.my-sm-2{margin-top:.5rem !important;margin-bottom:.5rem !important}.my-sm-3{margin-top:1rem !important;margin-bottom:1rem !important}.my-sm-4{margin-top:1.5rem !important;margin-bottom:1.5rem !important}.my-sm-5{margin-top:3rem !important;margin-bottom:3rem !important}.my-sm-auto{margin-top:auto !important;margin-bottom:auto !important}.mt-sm-0{margin-top:0 !important}.mt-sm-1{margin-top:.25rem !important}.mt-sm-2{margin-top:.5rem !important}.mt-sm-3{margin-top:1rem !important}.mt-sm-4{margin-top:1.5rem !important}.mt-sm-5{margin-top:3rem !important}.mt-sm-auto{margin-top:auto !important}.me-sm-0{margin-right:0 !important}.me-sm-1{margin-right:.25rem !important}.me-sm-2{margin-right:.5rem !important}.me-sm-3{margin-right:1rem !important}.me-sm-4{margin-right:1.5rem !important}.me-sm-5{margin-right:3rem !important}.me-sm-auto{margin-right:auto !important}.mb-sm-0{margin-bottom:0 !important}.mb-sm-1{margin-bottom:.25rem !important}.mb-sm-2{margin-bottom:.5rem !important}.mb-sm-3{margin-bottom:1rem !important}.mb-sm-4{margin-bottom:1.5rem !important}.mb-sm-5{margin-bottom:3rem !important}.mb-sm-auto{margin-bottom:auto !important}.ms-sm-0{margin-left:0 !important}.ms-sm-1{margin-left:.25rem !important}.ms-sm-2{margin-left:.5rem !important}.ms-sm-3{margin-left:1rem !important}.ms-sm-4{margin-left:1.5rem !important}.ms-sm-5{margin-left:3rem !important}.ms-sm-auto{margin-left:auto !important}.p-sm-0{padding:0 !important}.p-sm-1{padding:.25rem !important}.p-sm-2{padding:.5rem !important}.p-sm-3{padding:1rem !important}.p-sm-4{padding:1.5rem !important}.p-sm-5{padding:3rem !important}.px-sm-0{padding-right:0 !important;padding-left:0 !important}.px-sm-1{padding-right:.25rem !important;padding-left:.25rem !important}.px-sm-2{padding-right:.5rem !important;padding-left:.5rem !important}.px-sm-3{padding-right:1rem !important;padding-left:1rem !important}.px-sm-4{padding-right:1.5rem !important;padding-left:1.5rem !important}.px-sm-5{padding-right:3rem !important;padding-left:3rem !important}.py-sm-0{padding-top:0 !important;padding-bottom:0 !important}.py-sm-1{padding-top:.25rem !important;padding-bottom:.25rem !important}.py-sm-2{padding-top:.5rem !important;padding-bottom:.5rem !important}.py-sm-3{padding-top:1rem !important;padding-bottom:1rem !important}.py-sm-4{padding-top:1.5rem !important;padding-bottom:1.5rem !important}.py-sm-5{padding-top:3rem !important;padding-bottom:3rem !important}.pt-sm-0{padding-top:0 !important}.pt-sm-1{padding-top:.25rem !important}.pt-sm-2{padding-top:.5rem !important}.pt-sm-3{padding-top:1rem !important}.pt-sm-4{padding-top:1.5rem !important}.pt-sm-5{padding-top:3rem !important}.pe-sm-0{padding-right:0 !important}.pe-sm-1{padding-right:.25rem !important}.pe-sm-2{padding-right:.5rem !important}.pe-sm-3{padding-right:1rem !important}.pe-sm-4{padding-right:1.5rem !important}.pe-sm-5{padding-right:3rem !important}.pb-sm-0{padding-bottom:0 !important}.pb-sm-1{padding-bottom:.25rem !important}.pb-sm-2{padding-bottom:.5rem 
!important}.pb-sm-3{padding-bottom:1rem !important}.pb-sm-4{padding-bottom:1.5rem !important}.pb-sm-5{padding-bottom:3rem !important}.ps-sm-0{padding-left:0 !important}.ps-sm-1{padding-left:.25rem !important}.ps-sm-2{padding-left:.5rem !important}.ps-sm-3{padding-left:1rem !important}.ps-sm-4{padding-left:1.5rem !important}.ps-sm-5{padding-left:3rem !important}.text-sm-start{text-align:left !important}.text-sm-end{text-align:right !important}.text-sm-center{text-align:center !important}}@media(min-width: 768px){.float-md-start{float:left !important}.float-md-end{float:right !important}.float-md-none{float:none !important}.d-md-inline{display:inline !important}.d-md-inline-block{display:inline-block !important}.d-md-block{display:block !important}.d-md-grid{display:grid !important}.d-md-table{display:table !important}.d-md-table-row{display:table-row !important}.d-md-table-cell{display:table-cell !important}.d-md-flex{display:flex !important}.d-md-inline-flex{display:inline-flex !important}.d-md-none{display:none !important}.flex-md-fill{flex:1 1 auto !important}.flex-md-row{flex-direction:row !important}.flex-md-column{flex-direction:column !important}.flex-md-row-reverse{flex-direction:row-reverse !important}.flex-md-column-reverse{flex-direction:column-reverse !important}.flex-md-grow-0{flex-grow:0 !important}.flex-md-grow-1{flex-grow:1 !important}.flex-md-shrink-0{flex-shrink:0 !important}.flex-md-shrink-1{flex-shrink:1 !important}.flex-md-wrap{flex-wrap:wrap !important}.flex-md-nowrap{flex-wrap:nowrap !important}.flex-md-wrap-reverse{flex-wrap:wrap-reverse !important}.gap-md-0{gap:0 !important}.gap-md-1{gap:.25rem !important}.gap-md-2{gap:.5rem !important}.gap-md-3{gap:1rem !important}.gap-md-4{gap:1.5rem !important}.gap-md-5{gap:3rem !important}.justify-content-md-start{justify-content:flex-start !important}.justify-content-md-end{justify-content:flex-end !important}.justify-content-md-center{justify-content:center !important}.justify-content-md-between{justify-content:space-between !important}.justify-content-md-around{justify-content:space-around !important}.justify-content-md-evenly{justify-content:space-evenly !important}.align-items-md-start{align-items:flex-start !important}.align-items-md-end{align-items:flex-end !important}.align-items-md-center{align-items:center !important}.align-items-md-baseline{align-items:baseline !important}.align-items-md-stretch{align-items:stretch !important}.align-content-md-start{align-content:flex-start !important}.align-content-md-end{align-content:flex-end !important}.align-content-md-center{align-content:center !important}.align-content-md-between{align-content:space-between !important}.align-content-md-around{align-content:space-around !important}.align-content-md-stretch{align-content:stretch !important}.align-self-md-auto{align-self:auto !important}.align-self-md-start{align-self:flex-start !important}.align-self-md-end{align-self:flex-end !important}.align-self-md-center{align-self:center !important}.align-self-md-baseline{align-self:baseline !important}.align-self-md-stretch{align-self:stretch !important}.order-md-first{order:-1 !important}.order-md-0{order:0 !important}.order-md-1{order:1 !important}.order-md-2{order:2 !important}.order-md-3{order:3 !important}.order-md-4{order:4 !important}.order-md-5{order:5 !important}.order-md-last{order:6 !important}.m-md-0{margin:0 !important}.m-md-1{margin:.25rem !important}.m-md-2{margin:.5rem !important}.m-md-3{margin:1rem !important}.m-md-4{margin:1.5rem !important}.m-md-5{margin:3rem 
!important}.m-md-auto{margin:auto !important}.mx-md-0{margin-right:0 !important;margin-left:0 !important}.mx-md-1{margin-right:.25rem !important;margin-left:.25rem !important}.mx-md-2{margin-right:.5rem !important;margin-left:.5rem !important}.mx-md-3{margin-right:1rem !important;margin-left:1rem !important}.mx-md-4{margin-right:1.5rem !important;margin-left:1.5rem !important}.mx-md-5{margin-right:3rem !important;margin-left:3rem !important}.mx-md-auto{margin-right:auto !important;margin-left:auto !important}.my-md-0{margin-top:0 !important;margin-bottom:0 !important}.my-md-1{margin-top:.25rem !important;margin-bottom:.25rem !important}.my-md-2{margin-top:.5rem !important;margin-bottom:.5rem !important}.my-md-3{margin-top:1rem !important;margin-bottom:1rem !important}.my-md-4{margin-top:1.5rem !important;margin-bottom:1.5rem !important}.my-md-5{margin-top:3rem !important;margin-bottom:3rem !important}.my-md-auto{margin-top:auto !important;margin-bottom:auto !important}.mt-md-0{margin-top:0 !important}.mt-md-1{margin-top:.25rem !important}.mt-md-2{margin-top:.5rem !important}.mt-md-3{margin-top:1rem !important}.mt-md-4{margin-top:1.5rem !important}.mt-md-5{margin-top:3rem !important}.mt-md-auto{margin-top:auto !important}.me-md-0{margin-right:0 !important}.me-md-1{margin-right:.25rem !important}.me-md-2{margin-right:.5rem !important}.me-md-3{margin-right:1rem !important}.me-md-4{margin-right:1.5rem !important}.me-md-5{margin-right:3rem !important}.me-md-auto{margin-right:auto !important}.mb-md-0{margin-bottom:0 !important}.mb-md-1{margin-bottom:.25rem !important}.mb-md-2{margin-bottom:.5rem !important}.mb-md-3{margin-bottom:1rem !important}.mb-md-4{margin-bottom:1.5rem !important}.mb-md-5{margin-bottom:3rem !important}.mb-md-auto{margin-bottom:auto !important}.ms-md-0{margin-left:0 !important}.ms-md-1{margin-left:.25rem !important}.ms-md-2{margin-left:.5rem !important}.ms-md-3{margin-left:1rem !important}.ms-md-4{margin-left:1.5rem !important}.ms-md-5{margin-left:3rem !important}.ms-md-auto{margin-left:auto !important}.p-md-0{padding:0 !important}.p-md-1{padding:.25rem !important}.p-md-2{padding:.5rem !important}.p-md-3{padding:1rem !important}.p-md-4{padding:1.5rem !important}.p-md-5{padding:3rem !important}.px-md-0{padding-right:0 !important;padding-left:0 !important}.px-md-1{padding-right:.25rem !important;padding-left:.25rem !important}.px-md-2{padding-right:.5rem !important;padding-left:.5rem !important}.px-md-3{padding-right:1rem !important;padding-left:1rem !important}.px-md-4{padding-right:1.5rem !important;padding-left:1.5rem !important}.px-md-5{padding-right:3rem !important;padding-left:3rem !important}.py-md-0{padding-top:0 !important;padding-bottom:0 !important}.py-md-1{padding-top:.25rem !important;padding-bottom:.25rem !important}.py-md-2{padding-top:.5rem !important;padding-bottom:.5rem !important}.py-md-3{padding-top:1rem !important;padding-bottom:1rem !important}.py-md-4{padding-top:1.5rem !important;padding-bottom:1.5rem !important}.py-md-5{padding-top:3rem !important;padding-bottom:3rem !important}.pt-md-0{padding-top:0 !important}.pt-md-1{padding-top:.25rem !important}.pt-md-2{padding-top:.5rem !important}.pt-md-3{padding-top:1rem !important}.pt-md-4{padding-top:1.5rem !important}.pt-md-5{padding-top:3rem !important}.pe-md-0{padding-right:0 !important}.pe-md-1{padding-right:.25rem !important}.pe-md-2{padding-right:.5rem !important}.pe-md-3{padding-right:1rem !important}.pe-md-4{padding-right:1.5rem !important}.pe-md-5{padding-right:3rem 
!important}.pb-md-0{padding-bottom:0 !important}.pb-md-1{padding-bottom:.25rem !important}.pb-md-2{padding-bottom:.5rem !important}.pb-md-3{padding-bottom:1rem !important}.pb-md-4{padding-bottom:1.5rem !important}.pb-md-5{padding-bottom:3rem !important}.ps-md-0{padding-left:0 !important}.ps-md-1{padding-left:.25rem !important}.ps-md-2{padding-left:.5rem !important}.ps-md-3{padding-left:1rem !important}.ps-md-4{padding-left:1.5rem !important}.ps-md-5{padding-left:3rem !important}.text-md-start{text-align:left !important}.text-md-end{text-align:right !important}.text-md-center{text-align:center !important}}@media(min-width: 992px){.float-lg-start{float:left !important}.float-lg-end{float:right !important}.float-lg-none{float:none !important}.d-lg-inline{display:inline !important}.d-lg-inline-block{display:inline-block !important}.d-lg-block{display:block !important}.d-lg-grid{display:grid !important}.d-lg-table{display:table !important}.d-lg-table-row{display:table-row !important}.d-lg-table-cell{display:table-cell !important}.d-lg-flex{display:flex !important}.d-lg-inline-flex{display:inline-flex !important}.d-lg-none{display:none !important}.flex-lg-fill{flex:1 1 auto !important}.flex-lg-row{flex-direction:row !important}.flex-lg-column{flex-direction:column !important}.flex-lg-row-reverse{flex-direction:row-reverse !important}.flex-lg-column-reverse{flex-direction:column-reverse !important}.flex-lg-grow-0{flex-grow:0 !important}.flex-lg-grow-1{flex-grow:1 !important}.flex-lg-shrink-0{flex-shrink:0 !important}.flex-lg-shrink-1{flex-shrink:1 !important}.flex-lg-wrap{flex-wrap:wrap !important}.flex-lg-nowrap{flex-wrap:nowrap !important}.flex-lg-wrap-reverse{flex-wrap:wrap-reverse !important}.gap-lg-0{gap:0 !important}.gap-lg-1{gap:.25rem !important}.gap-lg-2{gap:.5rem !important}.gap-lg-3{gap:1rem !important}.gap-lg-4{gap:1.5rem !important}.gap-lg-5{gap:3rem !important}.justify-content-lg-start{justify-content:flex-start !important}.justify-content-lg-end{justify-content:flex-end !important}.justify-content-lg-center{justify-content:center !important}.justify-content-lg-between{justify-content:space-between !important}.justify-content-lg-around{justify-content:space-around !important}.justify-content-lg-evenly{justify-content:space-evenly !important}.align-items-lg-start{align-items:flex-start !important}.align-items-lg-end{align-items:flex-end !important}.align-items-lg-center{align-items:center !important}.align-items-lg-baseline{align-items:baseline !important}.align-items-lg-stretch{align-items:stretch !important}.align-content-lg-start{align-content:flex-start !important}.align-content-lg-end{align-content:flex-end !important}.align-content-lg-center{align-content:center !important}.align-content-lg-between{align-content:space-between !important}.align-content-lg-around{align-content:space-around !important}.align-content-lg-stretch{align-content:stretch !important}.align-self-lg-auto{align-self:auto !important}.align-self-lg-start{align-self:flex-start !important}.align-self-lg-end{align-self:flex-end !important}.align-self-lg-center{align-self:center !important}.align-self-lg-baseline{align-self:baseline !important}.align-self-lg-stretch{align-self:stretch !important}.order-lg-first{order:-1 !important}.order-lg-0{order:0 !important}.order-lg-1{order:1 !important}.order-lg-2{order:2 !important}.order-lg-3{order:3 !important}.order-lg-4{order:4 !important}.order-lg-5{order:5 !important}.order-lg-last{order:6 !important}.m-lg-0{margin:0 !important}.m-lg-1{margin:.25rem 
!important}.m-lg-2{margin:.5rem !important}.m-lg-3{margin:1rem !important}.m-lg-4{margin:1.5rem !important}.m-lg-5{margin:3rem !important}.m-lg-auto{margin:auto !important}.mx-lg-0{margin-right:0 !important;margin-left:0 !important}.mx-lg-1{margin-right:.25rem !important;margin-left:.25rem !important}.mx-lg-2{margin-right:.5rem !important;margin-left:.5rem !important}.mx-lg-3{margin-right:1rem !important;margin-left:1rem !important}.mx-lg-4{margin-right:1.5rem !important;margin-left:1.5rem !important}.mx-lg-5{margin-right:3rem !important;margin-left:3rem !important}.mx-lg-auto{margin-right:auto !important;margin-left:auto !important}.my-lg-0{margin-top:0 !important;margin-bottom:0 !important}.my-lg-1{margin-top:.25rem !important;margin-bottom:.25rem !important}.my-lg-2{margin-top:.5rem !important;margin-bottom:.5rem !important}.my-lg-3{margin-top:1rem !important;margin-bottom:1rem !important}.my-lg-4{margin-top:1.5rem !important;margin-bottom:1.5rem !important}.my-lg-5{margin-top:3rem !important;margin-bottom:3rem !important}.my-lg-auto{margin-top:auto !important;margin-bottom:auto !important}.mt-lg-0{margin-top:0 !important}.mt-lg-1{margin-top:.25rem !important}.mt-lg-2{margin-top:.5rem !important}.mt-lg-3{margin-top:1rem !important}.mt-lg-4{margin-top:1.5rem !important}.mt-lg-5{margin-top:3rem !important}.mt-lg-auto{margin-top:auto !important}.me-lg-0{margin-right:0 !important}.me-lg-1{margin-right:.25rem !important}.me-lg-2{margin-right:.5rem !important}.me-lg-3{margin-right:1rem !important}.me-lg-4{margin-right:1.5rem !important}.me-lg-5{margin-right:3rem !important}.me-lg-auto{margin-right:auto !important}.mb-lg-0{margin-bottom:0 !important}.mb-lg-1{margin-bottom:.25rem !important}.mb-lg-2{margin-bottom:.5rem !important}.mb-lg-3{margin-bottom:1rem !important}.mb-lg-4{margin-bottom:1.5rem !important}.mb-lg-5{margin-bottom:3rem !important}.mb-lg-auto{margin-bottom:auto !important}.ms-lg-0{margin-left:0 !important}.ms-lg-1{margin-left:.25rem !important}.ms-lg-2{margin-left:.5rem !important}.ms-lg-3{margin-left:1rem !important}.ms-lg-4{margin-left:1.5rem !important}.ms-lg-5{margin-left:3rem !important}.ms-lg-auto{margin-left:auto !important}.p-lg-0{padding:0 !important}.p-lg-1{padding:.25rem !important}.p-lg-2{padding:.5rem !important}.p-lg-3{padding:1rem !important}.p-lg-4{padding:1.5rem !important}.p-lg-5{padding:3rem !important}.px-lg-0{padding-right:0 !important;padding-left:0 !important}.px-lg-1{padding-right:.25rem !important;padding-left:.25rem !important}.px-lg-2{padding-right:.5rem !important;padding-left:.5rem !important}.px-lg-3{padding-right:1rem !important;padding-left:1rem !important}.px-lg-4{padding-right:1.5rem !important;padding-left:1.5rem !important}.px-lg-5{padding-right:3rem !important;padding-left:3rem !important}.py-lg-0{padding-top:0 !important;padding-bottom:0 !important}.py-lg-1{padding-top:.25rem !important;padding-bottom:.25rem !important}.py-lg-2{padding-top:.5rem !important;padding-bottom:.5rem !important}.py-lg-3{padding-top:1rem !important;padding-bottom:1rem !important}.py-lg-4{padding-top:1.5rem !important;padding-bottom:1.5rem !important}.py-lg-5{padding-top:3rem !important;padding-bottom:3rem !important}.pt-lg-0{padding-top:0 !important}.pt-lg-1{padding-top:.25rem !important}.pt-lg-2{padding-top:.5rem !important}.pt-lg-3{padding-top:1rem !important}.pt-lg-4{padding-top:1.5rem !important}.pt-lg-5{padding-top:3rem !important}.pe-lg-0{padding-right:0 !important}.pe-lg-1{padding-right:.25rem !important}.pe-lg-2{padding-right:.5rem 
!important}.pe-lg-3{padding-right:1rem !important}.pe-lg-4{padding-right:1.5rem !important}.pe-lg-5{padding-right:3rem !important}.pb-lg-0{padding-bottom:0 !important}.pb-lg-1{padding-bottom:.25rem !important}.pb-lg-2{padding-bottom:.5rem !important}.pb-lg-3{padding-bottom:1rem !important}.pb-lg-4{padding-bottom:1.5rem !important}.pb-lg-5{padding-bottom:3rem !important}.ps-lg-0{padding-left:0 !important}.ps-lg-1{padding-left:.25rem !important}.ps-lg-2{padding-left:.5rem !important}.ps-lg-3{padding-left:1rem !important}.ps-lg-4{padding-left:1.5rem !important}.ps-lg-5{padding-left:3rem !important}.text-lg-start{text-align:left !important}.text-lg-end{text-align:right !important}.text-lg-center{text-align:center !important}}@media(min-width: 1200px){.float-xl-start{float:left !important}.float-xl-end{float:right !important}.float-xl-none{float:none !important}.d-xl-inline{display:inline !important}.d-xl-inline-block{display:inline-block !important}.d-xl-block{display:block !important}.d-xl-grid{display:grid !important}.d-xl-table{display:table !important}.d-xl-table-row{display:table-row !important}.d-xl-table-cell{display:table-cell !important}.d-xl-flex{display:flex !important}.d-xl-inline-flex{display:inline-flex !important}.d-xl-none{display:none !important}.flex-xl-fill{flex:1 1 auto !important}.flex-xl-row{flex-direction:row !important}.flex-xl-column{flex-direction:column !important}.flex-xl-row-reverse{flex-direction:row-reverse !important}.flex-xl-column-reverse{flex-direction:column-reverse !important}.flex-xl-grow-0{flex-grow:0 !important}.flex-xl-grow-1{flex-grow:1 !important}.flex-xl-shrink-0{flex-shrink:0 !important}.flex-xl-shrink-1{flex-shrink:1 !important}.flex-xl-wrap{flex-wrap:wrap !important}.flex-xl-nowrap{flex-wrap:nowrap !important}.flex-xl-wrap-reverse{flex-wrap:wrap-reverse !important}.gap-xl-0{gap:0 !important}.gap-xl-1{gap:.25rem !important}.gap-xl-2{gap:.5rem !important}.gap-xl-3{gap:1rem !important}.gap-xl-4{gap:1.5rem !important}.gap-xl-5{gap:3rem !important}.justify-content-xl-start{justify-content:flex-start !important}.justify-content-xl-end{justify-content:flex-end !important}.justify-content-xl-center{justify-content:center !important}.justify-content-xl-between{justify-content:space-between !important}.justify-content-xl-around{justify-content:space-around !important}.justify-content-xl-evenly{justify-content:space-evenly !important}.align-items-xl-start{align-items:flex-start !important}.align-items-xl-end{align-items:flex-end !important}.align-items-xl-center{align-items:center !important}.align-items-xl-baseline{align-items:baseline !important}.align-items-xl-stretch{align-items:stretch !important}.align-content-xl-start{align-content:flex-start !important}.align-content-xl-end{align-content:flex-end !important}.align-content-xl-center{align-content:center !important}.align-content-xl-between{align-content:space-between !important}.align-content-xl-around{align-content:space-around !important}.align-content-xl-stretch{align-content:stretch !important}.align-self-xl-auto{align-self:auto !important}.align-self-xl-start{align-self:flex-start !important}.align-self-xl-end{align-self:flex-end !important}.align-self-xl-center{align-self:center !important}.align-self-xl-baseline{align-self:baseline !important}.align-self-xl-stretch{align-self:stretch !important}.order-xl-first{order:-1 !important}.order-xl-0{order:0 !important}.order-xl-1{order:1 !important}.order-xl-2{order:2 !important}.order-xl-3{order:3 !important}.order-xl-4{order:4 
!important}.order-xl-5{order:5 !important}.order-xl-last{order:6 !important}.m-xl-0{margin:0 !important}.m-xl-1{margin:.25rem !important}.m-xl-2{margin:.5rem !important}.m-xl-3{margin:1rem !important}.m-xl-4{margin:1.5rem !important}.m-xl-5{margin:3rem !important}.m-xl-auto{margin:auto !important}.mx-xl-0{margin-right:0 !important;margin-left:0 !important}.mx-xl-1{margin-right:.25rem !important;margin-left:.25rem !important}.mx-xl-2{margin-right:.5rem !important;margin-left:.5rem !important}.mx-xl-3{margin-right:1rem !important;margin-left:1rem !important}.mx-xl-4{margin-right:1.5rem !important;margin-left:1.5rem !important}.mx-xl-5{margin-right:3rem !important;margin-left:3rem !important}.mx-xl-auto{margin-right:auto !important;margin-left:auto !important}.my-xl-0{margin-top:0 !important;margin-bottom:0 !important}.my-xl-1{margin-top:.25rem !important;margin-bottom:.25rem !important}.my-xl-2{margin-top:.5rem !important;margin-bottom:.5rem !important}.my-xl-3{margin-top:1rem !important;margin-bottom:1rem !important}.my-xl-4{margin-top:1.5rem !important;margin-bottom:1.5rem !important}.my-xl-5{margin-top:3rem !important;margin-bottom:3rem !important}.my-xl-auto{margin-top:auto !important;margin-bottom:auto !important}.mt-xl-0{margin-top:0 !important}.mt-xl-1{margin-top:.25rem !important}.mt-xl-2{margin-top:.5rem !important}.mt-xl-3{margin-top:1rem !important}.mt-xl-4{margin-top:1.5rem !important}.mt-xl-5{margin-top:3rem !important}.mt-xl-auto{margin-top:auto !important}.me-xl-0{margin-right:0 !important}.me-xl-1{margin-right:.25rem !important}.me-xl-2{margin-right:.5rem !important}.me-xl-3{margin-right:1rem !important}.me-xl-4{margin-right:1.5rem !important}.me-xl-5{margin-right:3rem !important}.me-xl-auto{margin-right:auto !important}.mb-xl-0{margin-bottom:0 !important}.mb-xl-1{margin-bottom:.25rem !important}.mb-xl-2{margin-bottom:.5rem !important}.mb-xl-3{margin-bottom:1rem !important}.mb-xl-4{margin-bottom:1.5rem !important}.mb-xl-5{margin-bottom:3rem !important}.mb-xl-auto{margin-bottom:auto !important}.ms-xl-0{margin-left:0 !important}.ms-xl-1{margin-left:.25rem !important}.ms-xl-2{margin-left:.5rem !important}.ms-xl-3{margin-left:1rem !important}.ms-xl-4{margin-left:1.5rem !important}.ms-xl-5{margin-left:3rem !important}.ms-xl-auto{margin-left:auto !important}.p-xl-0{padding:0 !important}.p-xl-1{padding:.25rem !important}.p-xl-2{padding:.5rem !important}.p-xl-3{padding:1rem !important}.p-xl-4{padding:1.5rem !important}.p-xl-5{padding:3rem !important}.px-xl-0{padding-right:0 !important;padding-left:0 !important}.px-xl-1{padding-right:.25rem !important;padding-left:.25rem !important}.px-xl-2{padding-right:.5rem !important;padding-left:.5rem !important}.px-xl-3{padding-right:1rem !important;padding-left:1rem !important}.px-xl-4{padding-right:1.5rem !important;padding-left:1.5rem !important}.px-xl-5{padding-right:3rem !important;padding-left:3rem !important}.py-xl-0{padding-top:0 !important;padding-bottom:0 !important}.py-xl-1{padding-top:.25rem !important;padding-bottom:.25rem !important}.py-xl-2{padding-top:.5rem !important;padding-bottom:.5rem !important}.py-xl-3{padding-top:1rem !important;padding-bottom:1rem !important}.py-xl-4{padding-top:1.5rem !important;padding-bottom:1.5rem !important}.py-xl-5{padding-top:3rem !important;padding-bottom:3rem !important}.pt-xl-0{padding-top:0 !important}.pt-xl-1{padding-top:.25rem !important}.pt-xl-2{padding-top:.5rem !important}.pt-xl-3{padding-top:1rem !important}.pt-xl-4{padding-top:1.5rem !important}.pt-xl-5{padding-top:3rem 
!important}.pe-xl-0{padding-right:0 !important}.pe-xl-1{padding-right:.25rem !important}.pe-xl-2{padding-right:.5rem !important}.pe-xl-3{padding-right:1rem !important}.pe-xl-4{padding-right:1.5rem !important}.pe-xl-5{padding-right:3rem !important}.pb-xl-0{padding-bottom:0 !important}.pb-xl-1{padding-bottom:.25rem !important}.pb-xl-2{padding-bottom:.5rem !important}.pb-xl-3{padding-bottom:1rem !important}.pb-xl-4{padding-bottom:1.5rem !important}.pb-xl-5{padding-bottom:3rem !important}.ps-xl-0{padding-left:0 !important}.ps-xl-1{padding-left:.25rem !important}.ps-xl-2{padding-left:.5rem !important}.ps-xl-3{padding-left:1rem !important}.ps-xl-4{padding-left:1.5rem !important}.ps-xl-5{padding-left:3rem !important}.text-xl-start{text-align:left !important}.text-xl-end{text-align:right !important}.text-xl-center{text-align:center !important}}@media(min-width: 1400px){.float-xxl-start{float:left !important}.float-xxl-end{float:right !important}.float-xxl-none{float:none !important}.d-xxl-inline{display:inline !important}.d-xxl-inline-block{display:inline-block !important}.d-xxl-block{display:block !important}.d-xxl-grid{display:grid !important}.d-xxl-table{display:table !important}.d-xxl-table-row{display:table-row !important}.d-xxl-table-cell{display:table-cell !important}.d-xxl-flex{display:flex !important}.d-xxl-inline-flex{display:inline-flex !important}.d-xxl-none{display:none !important}.flex-xxl-fill{flex:1 1 auto !important}.flex-xxl-row{flex-direction:row !important}.flex-xxl-column{flex-direction:column !important}.flex-xxl-row-reverse{flex-direction:row-reverse !important}.flex-xxl-column-reverse{flex-direction:column-reverse !important}.flex-xxl-grow-0{flex-grow:0 !important}.flex-xxl-grow-1{flex-grow:1 !important}.flex-xxl-shrink-0{flex-shrink:0 !important}.flex-xxl-shrink-1{flex-shrink:1 !important}.flex-xxl-wrap{flex-wrap:wrap !important}.flex-xxl-nowrap{flex-wrap:nowrap !important}.flex-xxl-wrap-reverse{flex-wrap:wrap-reverse !important}.gap-xxl-0{gap:0 !important}.gap-xxl-1{gap:.25rem !important}.gap-xxl-2{gap:.5rem !important}.gap-xxl-3{gap:1rem !important}.gap-xxl-4{gap:1.5rem !important}.gap-xxl-5{gap:3rem !important}.justify-content-xxl-start{justify-content:flex-start !important}.justify-content-xxl-end{justify-content:flex-end !important}.justify-content-xxl-center{justify-content:center !important}.justify-content-xxl-between{justify-content:space-between !important}.justify-content-xxl-around{justify-content:space-around !important}.justify-content-xxl-evenly{justify-content:space-evenly !important}.align-items-xxl-start{align-items:flex-start !important}.align-items-xxl-end{align-items:flex-end !important}.align-items-xxl-center{align-items:center !important}.align-items-xxl-baseline{align-items:baseline !important}.align-items-xxl-stretch{align-items:stretch !important}.align-content-xxl-start{align-content:flex-start !important}.align-content-xxl-end{align-content:flex-end !important}.align-content-xxl-center{align-content:center !important}.align-content-xxl-between{align-content:space-between !important}.align-content-xxl-around{align-content:space-around !important}.align-content-xxl-stretch{align-content:stretch !important}.align-self-xxl-auto{align-self:auto !important}.align-self-xxl-start{align-self:flex-start !important}.align-self-xxl-end{align-self:flex-end !important}.align-self-xxl-center{align-self:center !important}.align-self-xxl-baseline{align-self:baseline !important}.align-self-xxl-stretch{align-self:stretch !important}.order-xxl-first{order:-1 
!important}.order-xxl-0{order:0 !important}.order-xxl-1{order:1 !important}.order-xxl-2{order:2 !important}.order-xxl-3{order:3 !important}.order-xxl-4{order:4 !important}.order-xxl-5{order:5 !important}.order-xxl-last{order:6 !important}.m-xxl-0{margin:0 !important}.m-xxl-1{margin:.25rem !important}.m-xxl-2{margin:.5rem !important}.m-xxl-3{margin:1rem !important}.m-xxl-4{margin:1.5rem !important}.m-xxl-5{margin:3rem !important}.m-xxl-auto{margin:auto !important}.mx-xxl-0{margin-right:0 !important;margin-left:0 !important}.mx-xxl-1{margin-right:.25rem !important;margin-left:.25rem !important}.mx-xxl-2{margin-right:.5rem !important;margin-left:.5rem !important}.mx-xxl-3{margin-right:1rem !important;margin-left:1rem !important}.mx-xxl-4{margin-right:1.5rem !important;margin-left:1.5rem !important}.mx-xxl-5{margin-right:3rem !important;margin-left:3rem !important}.mx-xxl-auto{margin-right:auto !important;margin-left:auto !important}.my-xxl-0{margin-top:0 !important;margin-bottom:0 !important}.my-xxl-1{margin-top:.25rem !important;margin-bottom:.25rem !important}.my-xxl-2{margin-top:.5rem !important;margin-bottom:.5rem !important}.my-xxl-3{margin-top:1rem !important;margin-bottom:1rem !important}.my-xxl-4{margin-top:1.5rem !important;margin-bottom:1.5rem !important}.my-xxl-5{margin-top:3rem !important;margin-bottom:3rem !important}.my-xxl-auto{margin-top:auto !important;margin-bottom:auto !important}.mt-xxl-0{margin-top:0 !important}.mt-xxl-1{margin-top:.25rem !important}.mt-xxl-2{margin-top:.5rem !important}.mt-xxl-3{margin-top:1rem !important}.mt-xxl-4{margin-top:1.5rem !important}.mt-xxl-5{margin-top:3rem !important}.mt-xxl-auto{margin-top:auto !important}.me-xxl-0{margin-right:0 !important}.me-xxl-1{margin-right:.25rem !important}.me-xxl-2{margin-right:.5rem !important}.me-xxl-3{margin-right:1rem !important}.me-xxl-4{margin-right:1.5rem !important}.me-xxl-5{margin-right:3rem !important}.me-xxl-auto{margin-right:auto !important}.mb-xxl-0{margin-bottom:0 !important}.mb-xxl-1{margin-bottom:.25rem !important}.mb-xxl-2{margin-bottom:.5rem !important}.mb-xxl-3{margin-bottom:1rem !important}.mb-xxl-4{margin-bottom:1.5rem !important}.mb-xxl-5{margin-bottom:3rem !important}.mb-xxl-auto{margin-bottom:auto !important}.ms-xxl-0{margin-left:0 !important}.ms-xxl-1{margin-left:.25rem !important}.ms-xxl-2{margin-left:.5rem !important}.ms-xxl-3{margin-left:1rem !important}.ms-xxl-4{margin-left:1.5rem !important}.ms-xxl-5{margin-left:3rem !important}.ms-xxl-auto{margin-left:auto !important}.p-xxl-0{padding:0 !important}.p-xxl-1{padding:.25rem !important}.p-xxl-2{padding:.5rem !important}.p-xxl-3{padding:1rem !important}.p-xxl-4{padding:1.5rem !important}.p-xxl-5{padding:3rem !important}.px-xxl-0{padding-right:0 !important;padding-left:0 !important}.px-xxl-1{padding-right:.25rem !important;padding-left:.25rem !important}.px-xxl-2{padding-right:.5rem !important;padding-left:.5rem !important}.px-xxl-3{padding-right:1rem !important;padding-left:1rem !important}.px-xxl-4{padding-right:1.5rem !important;padding-left:1.5rem !important}.px-xxl-5{padding-right:3rem !important;padding-left:3rem !important}.py-xxl-0{padding-top:0 !important;padding-bottom:0 !important}.py-xxl-1{padding-top:.25rem !important;padding-bottom:.25rem !important}.py-xxl-2{padding-top:.5rem !important;padding-bottom:.5rem !important}.py-xxl-3{padding-top:1rem !important;padding-bottom:1rem !important}.py-xxl-4{padding-top:1.5rem !important;padding-bottom:1.5rem !important}.py-xxl-5{padding-top:3rem !important;padding-bottom:3rem 
!important}.pt-xxl-0{padding-top:0 !important}.pt-xxl-1{padding-top:.25rem !important}.pt-xxl-2{padding-top:.5rem !important}.pt-xxl-3{padding-top:1rem !important}.pt-xxl-4{padding-top:1.5rem !important}.pt-xxl-5{padding-top:3rem !important}.pe-xxl-0{padding-right:0 !important}.pe-xxl-1{padding-right:.25rem !important}.pe-xxl-2{padding-right:.5rem !important}.pe-xxl-3{padding-right:1rem !important}.pe-xxl-4{padding-right:1.5rem !important}.pe-xxl-5{padding-right:3rem !important}.pb-xxl-0{padding-bottom:0 !important}.pb-xxl-1{padding-bottom:.25rem !important}.pb-xxl-2{padding-bottom:.5rem !important}.pb-xxl-3{padding-bottom:1rem !important}.pb-xxl-4{padding-bottom:1.5rem !important}.pb-xxl-5{padding-bottom:3rem !important}.ps-xxl-0{padding-left:0 !important}.ps-xxl-1{padding-left:.25rem !important}.ps-xxl-2{padding-left:.5rem !important}.ps-xxl-3{padding-left:1rem !important}.ps-xxl-4{padding-left:1.5rem !important}.ps-xxl-5{padding-left:3rem !important}.text-xxl-start{text-align:left !important}.text-xxl-end{text-align:right !important}.text-xxl-center{text-align:center !important}}.bg-default{color:#fff}.bg-primary{color:#fff}.bg-secondary{color:#fff}.bg-success{color:#fff}.bg-info{color:#fff}.bg-warning{color:#fff}.bg-danger{color:#fff}.bg-light{color:#000}.bg-dark{color:#fff}@media(min-width: 1200px){.fs-1{font-size:2.2rem !important}.fs-2{font-size:1.75rem !important}.fs-3{font-size:1.5rem !important}}@media print{.d-print-inline{display:inline !important}.d-print-inline-block{display:inline-block !important}.d-print-block{display:block !important}.d-print-grid{display:grid !important}.d-print-table{display:table !important}.d-print-table-row{display:table-row !important}.d-print-table-cell{display:table-cell !important}.d-print-flex{display:flex !important}.d-print-inline-flex{display:inline-flex !important}.d-print-none{display:none !important}}.tippy-box[data-theme~=quarto]{background-color:#fff;color:#373a3c;border-radius:.25rem;border:solid 1px #dee2e6;font-size:.875rem}.tippy-box[data-theme~=quarto] .tippy-arrow{color:#dee2e6}.tippy-box[data-placement^=bottom]>.tippy-arrow{top:-1px}.tippy-box[data-placement^=bottom]>.tippy-content{padding:.75em 1em;z-index:1}.top-right{position:absolute;top:1em;right:1em}.hidden{display:none !important}.quarto-layout-panel{margin-bottom:1em}.quarto-layout-panel>figure{width:100%}.quarto-layout-panel>figure>figcaption,.quarto-layout-panel>.panel-caption{margin-top:10pt}.quarto-layout-panel>.table-caption{margin-top:0px}.table-caption p{margin-bottom:.5em}.quarto-layout-row{display:flex;flex-direction:row;align-items:flex-start}.quarto-layout-valign-top{align-items:flex-start}.quarto-layout-valign-bottom{align-items:flex-end}.quarto-layout-valign-center{align-items:center}.quarto-layout-cell{position:relative;margin-right:20px}.quarto-layout-cell:last-child{margin-right:0}.quarto-layout-cell figure,.quarto-layout-cell>p{margin:.2em}.quarto-layout-cell img{max-width:100%}.quarto-layout-cell .html-widget{width:100% !important}.quarto-layout-cell div figure p{margin:0}.quarto-layout-cell figure{display:inline-block;margin-inline-start:0;margin-inline-end:0}.quarto-layout-cell table{display:inline-table}.quarto-layout-cell-subref figcaption,figure .quarto-layout-row figure 
figcaption{text-align:center;font-style:italic}.quarto-figure{position:relative;margin-bottom:1em}.quarto-figure>figure{width:100%;margin-bottom:0}.quarto-figure-left>figure>p{text-align:left}.quarto-figure-center>figure>p{text-align:center}.quarto-figure-right>figure>p{text-align:right}figure>p:empty{display:none}figure>p:first-child{margin-top:0;margin-bottom:0}figure>figcaption{margin-top:.5em}div[id^=tbl-]{position:relative}.quarto-figure>.anchorjs-link,div[id^=tbl-]>.anchorjs-link{position:absolute;top:0;right:0}.quarto-figure:hover>.anchorjs-link,div[id^=tbl-]:hover>.anchorjs-link,h2:hover>.anchorjs-link,.h2:hover>.anchorjs-link,h3:hover>.anchorjs-link,.h3:hover>.anchorjs-link,h4:hover>.anchorjs-link,.h4:hover>.anchorjs-link,h5:hover>.anchorjs-link,.h5:hover>.anchorjs-link,h6:hover>.anchorjs-link,.h6:hover>.anchorjs-link,.reveal-anchorjs-link>.anchorjs-link{opacity:1}#title-block-header{margin-block-end:1rem;position:relative;margin-top:-1px}#title-block-header .abstract{margin-block-start:1rem}#title-block-header .abstract .abstract-title{font-weight:600}#title-block-header a{text-decoration:none}#title-block-header .author,#title-block-header .date,#title-block-header .doi{margin-block-end:.2rem}#title-block-header .quarto-title-block>div{display:flex}#title-block-header .quarto-title-block>div>h1,#title-block-header .quarto-title-block>div>.h1{flex-grow:1}#title-block-header .quarto-title-block>div>button{flex-shrink:0;height:2.25rem;margin-top:0}@media(min-width: 992px){#title-block-header .quarto-title-block>div>button{margin-top:5px}}tr.header>th>p:last-of-type{margin-bottom:0px}table,.table{caption-side:top;margin-bottom:1.5rem}caption,.table-caption{text-align:center}.utterances{max-width:none;margin-left:-8px}iframe{margin-bottom:1em}details{margin-bottom:1em}details[show]{margin-bottom:0}details>summary{color:#868e96}details>summary>p:only-child{display:inline}pre.sourceCode,code.sourceCode{position:relative}code{white-space:pre}@media print{code{white-space:pre-wrap}}pre>code{display:block}pre>code.sourceCode{white-space:pre}pre>code.sourceCode>span>a:first-child::before{text-decoration:none}pre.code-overflow-wrap>code.sourceCode{white-space:pre-wrap}pre.code-overflow-scroll>code.sourceCode{white-space:pre}code a:any-link{color:inherit;text-decoration:none}code a:hover{color:inherit;text-decoration:underline}ul.task-list{padding-left:1em}[data-tippy-root]{display:inline-block}.tippy-content .footnote-back{display:none}.quarto-embedded-source-code{display:none}.quarto-unresolved-ref{font-weight:600}.quarto-cover-image{max-width:35%;float:right;margin-left:30px}.cell-output-display .widget-subarea{margin-bottom:1em}.cell-output-display:not(.no-overflow-x){overflow-x:auto}.panel-input{margin-bottom:1em}.panel-input>div,.panel-input>div>div{display:inline-block;vertical-align:top;padding-right:12px}.panel-input>p:last-child{margin-bottom:0}.layout-sidebar{margin-bottom:1em}.layout-sidebar .tab-content{border:none}.tab-content>.page-columns.active{display:grid}div.sourceCode>iframe{width:100%;height:300px;margin-bottom:-0.5em}div.ansi-escaped-output{font-family:monospace;display:block}/*! 
+* +* ansi colors from IPython notebook's +* +*/.ansi-black-fg{color:#3e424d}.ansi-black-bg{background-color:#3e424d}.ansi-black-intense-fg{color:#282c36}.ansi-black-intense-bg{background-color:#282c36}.ansi-red-fg{color:#e75c58}.ansi-red-bg{background-color:#e75c58}.ansi-red-intense-fg{color:#b22b31}.ansi-red-intense-bg{background-color:#b22b31}.ansi-green-fg{color:#00a250}.ansi-green-bg{background-color:#00a250}.ansi-green-intense-fg{color:#007427}.ansi-green-intense-bg{background-color:#007427}.ansi-yellow-fg{color:#ddb62b}.ansi-yellow-bg{background-color:#ddb62b}.ansi-yellow-intense-fg{color:#b27d12}.ansi-yellow-intense-bg{background-color:#b27d12}.ansi-blue-fg{color:#208ffb}.ansi-blue-bg{background-color:#208ffb}.ansi-blue-intense-fg{color:#0065ca}.ansi-blue-intense-bg{background-color:#0065ca}.ansi-magenta-fg{color:#d160c4}.ansi-magenta-bg{background-color:#d160c4}.ansi-magenta-intense-fg{color:#a03196}.ansi-magenta-intense-bg{background-color:#a03196}.ansi-cyan-fg{color:#60c6c8}.ansi-cyan-bg{background-color:#60c6c8}.ansi-cyan-intense-fg{color:#258f8f}.ansi-cyan-intense-bg{background-color:#258f8f}.ansi-white-fg{color:#c5c1b4}.ansi-white-bg{background-color:#c5c1b4}.ansi-white-intense-fg{color:#a1a6b2}.ansi-white-intense-bg{background-color:#a1a6b2}.ansi-default-inverse-fg{color:#fff}.ansi-default-inverse-bg{background-color:#000}.ansi-bold{font-weight:bold}.ansi-underline{text-decoration:underline}:root{--quarto-body-bg: #fff;--quarto-body-color: #373a3c;--quarto-text-muted: #868e96;--quarto-border-color: #dee2e6;--quarto-border-width: 1px;--quarto-border-radius: 0.25rem}.code-copy-button{position:absolute;top:0;right:0;border:0;margin-top:5px;margin-right:5px;background-color:transparent}.code-copy-button:focus{outline:none}pre.sourceCode:hover>.code-copy-button>.bi::before{display:inline-block;height:1rem;width:1rem;content:"";vertical-align:-0.125em;background-image:url('data:image/svg+xml,');background-repeat:no-repeat;background-size:1rem 1rem}pre.sourceCode:hover>.code-copy-button-checked>.bi::before{background-image:url('data:image/svg+xml,')}pre.sourceCode:hover>.code-copy-button:hover>.bi::before{background-image:url('data:image/svg+xml,')}pre.sourceCode:hover>.code-copy-button-checked:hover>.bi::before{background-image:url('data:image/svg+xml,')}main ol ol,main ul ul,main ol ul,main ul ol{margin-bottom:1em}body{margin:0}main.page-columns>header>h1.title,main.page-columns>header>.title.h1{margin-bottom:0}@media(min-width: 992px){body .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset] 35px [body-start-outset] 35px [body-start] 1.5em [body-content-start] minmax(500px, calc(850px - 3em)) [body-content-end] 1.5em [body-end] 35px [body-end-outset] minmax(75px, 145px) [page-end-inset] 35px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.fullcontent:not(.floating):not(.docked) .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset] 35px [body-start-outset] 35px [body-start] 1.5em [body-content-start] minmax(500px, calc(850px - 3em)) [body-content-end] 1.5em [body-end] 35px [body-end-outset] 35px [page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.slimcontent:not(.floating):not(.docked) .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset] 35px [body-start-outset] 35px [body-start] 1.5em [body-content-start] minmax(500px, 
calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(0px, 200px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.listing:not(.floating):not(.docked) .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start] minmax(50px, 100px) [page-start-inset] 50px [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(500px, calc(1200px - 3em)) [body-content-end] 3em [body-end] 50px [body-end-outset] minmax(0px, 250px) [page-end-inset] 50px [page-end] 1fr [screen-end-inset] 1.5em [screen-end]}body:not(.floating):not(.docked) .page-columns.toc-left{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] 35px [page-start-inset] minmax(0px, 175px) [body-start-outset] 35px [body-start] 1.5em [body-content-start] minmax(450px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(0px, 200px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body:not(.floating):not(.docked) .page-columns.toc-left .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] 35px [page-start-inset] minmax(0px, 175px) [body-start-outset] 35px [body-start] 1.5em [body-content-start] minmax(450px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(0px, 200px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] minmax(25px, 50px) [page-start-inset] minmax(50px, 150px) [body-start-outset] minmax(25px, 50px) [body-start] 1.5em [body-content-start] minmax(500px, calc(800px - 3em)) [body-content-end] 1.5em [body-end] minmax(25px, 50px) [body-end-outset] minmax(50px, 150px) [page-end-inset] minmax(25px, 50px) [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start] minmax(50px, 100px) [page-start-inset] 50px [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(500px, calc( 1000px - 3em )) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(0px, 100px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked.fullcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start] minmax(50px, 100px) [page-start-inset] 50px [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(500px, calc( 1000px - 3em )) [body-content-end] 1.5em [body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating.fullcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] 50px [page-start-inset] minmax(50px, 150px) [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(500px, calc(800px - 3em)) [body-content-end] 1.5em [body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked.slimcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start] minmax(50px, 100px) [page-start-inset] 50px [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(450px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] 
minmax(0px, 200px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked.listing .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start] minmax(50px, 100px) [page-start-inset] 50px [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(500px, calc( 1000px - 3em )) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(0px, 100px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating.slimcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] 50px [page-start-inset] minmax(50px, 150px) [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(450px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(50px, 150px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating.listing .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] minmax(25px, 50px) [page-start-inset] minmax(50px, 150px) [body-start-outset] minmax(25px, 50px) [body-start] 1.5em [body-content-start] minmax(500px, calc(800px - 3em)) [body-content-end] 1.5em [body-end] minmax(25px, 50px) [body-end-outset] minmax(50px, 150px) [page-end-inset] minmax(25px, 50px) [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}}@media(max-width: 991.98px){body .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset] 5fr [body-start] 1.5em [body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 35px [body-end-outset] minmax(75px, 145px) [page-end-inset] 35px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.fullcontent:not(.floating):not(.docked) .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset] 5fr [body-start] 1.5em [body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.slimcontent:not(.floating):not(.docked) .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset] 5fr [body-start] 1.5em [body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 35px [body-end-outset] minmax(75px, 145px) [page-end-inset] 35px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.listing:not(.floating):not(.docked) .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset] 5fr [body-start] 1.5em [body-content-start] minmax(500px, calc(1200px - 3em)) [body-content-end body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body:not(.floating):not(.docked) .page-columns.toc-left{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] 35px [page-start-inset] minmax(0px, 145px) [body-start-outset] 35px [body-start] 1.5em [body-content-start] minmax(450px, calc(750px - 3em)) [body-content-end] 1.5em [body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body:not(.floating):not(.docked) .page-columns.toc-left 
.page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] 35px [page-start-inset] minmax(0px, 145px) [body-start-outset] 35px [body-start] 1.5em [body-content-start] minmax(450px, calc(750px - 3em)) [body-content-end] 1.5em [body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset body-start-outset body-start] 1em [body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(75px, 150px) [page-end-inset] 25px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset body-start body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(25px, 50px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked.fullcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset body-start body-content-start] minmax(500px, calc( 1000px - 3em )) [body-content-end] 1.5em [body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating.fullcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset body-start-outset body-start] 1em [body-content-start] minmax(500px, calc(800px - 3em)) [body-content-end] 1.5em [body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked.slimcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset body-start body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 35px [body-end-outset] minmax(75px, 145px) [page-end-inset] 35px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked.listing .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset body-start body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(25px, 50px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating.slimcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset body-start-outset body-start] 1em [body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 35px [body-end-outset] minmax(75px, 145px) [page-end-inset] 35px [page-end] 4fr [screen-end-inset] 1.5em [screen-end]}body.floating.listing .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset body-start-outset body-start] 1em [body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(75px, 150px) [page-end-inset] 25px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}}@media(max-width: 767.98px){body .page-columns,body.fullcontent:not(.floating):not(.docked) .page-columns,body.slimcontent:not(.floating):not(.docked) 
.page-columns,body.docked .page-columns,body.docked.slimcontent .page-columns,body.docked.fullcontent .page-columns,body.floating .page-columns,body.floating.slimcontent .page-columns,body.floating.fullcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset body-start body-content-start] minmax(0px, 1fr) [body-content-end body-end body-end-outset page-end-inset page-end screen-end-inset] 1.5em [screen-end]}body:not(.floating):not(.docked) .page-columns.toc-left{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset body-start body-content-start] minmax(0px, 1fr) [body-content-end body-end body-end-outset page-end-inset page-end screen-end-inset] 1.5em [screen-end]}body:not(.floating):not(.docked) .page-columns.toc-left .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset body-start body-content-start] minmax(0px, 1fr) [body-content-end body-end body-end-outset page-end-inset page-end screen-end-inset] 1.5em [screen-end]}nav[role=doc-toc]{display:none}}body,.page-row-navigation{grid-template-rows:[page-top] max-content [contents-top] max-content [contents-bottom] max-content [page-bottom]}.page-rows-contents{grid-template-rows:[content-top] minmax(max-content, 1fr) [content-bottom] minmax(60px, max-content) [page-bottom]}.page-full{grid-column:screen-start/screen-end !important}.page-columns>*{grid-column:body-content-start/body-content-end}.page-columns.column-page>*{grid-column:page-start/page-end}.page-columns.column-page-left>*{grid-column:page-start/body-content-end}.page-columns.column-page-right>*{grid-column:body-content-start/page-end}.page-rows{grid-auto-rows:auto}.header{grid-column:screen-start/screen-end;grid-row:page-top/contents-top}#quarto-content{padding:0;grid-column:screen-start/screen-end;grid-row:contents-top/contents-bottom}body.floating .sidebar.sidebar-navigation{grid-column:page-start/body-start;grid-row:content-top/page-bottom}body.docked .sidebar.sidebar-navigation{grid-column:screen-start/body-start;grid-row:content-top/page-bottom}.sidebar.toc-left{grid-column:page-start/body-start;grid-row:content-top/page-bottom}.sidebar.margin-sidebar{grid-column:body-end/page-end;grid-row:content-top/page-bottom}.page-columns .content{grid-column:body-content-start/body-content-end;grid-row:content-top/content-bottom;align-content:flex-start}.page-columns .page-navigation{grid-column:body-content-start/body-content-end;grid-row:content-bottom/page-bottom}.page-columns .footer{grid-column:screen-start/screen-end;grid-row:contents-bottom/page-bottom}.page-columns .column-body{grid-column:body-content-start/body-content-end}.page-columns .column-body-fullbleed{grid-column:body-start/body-end}.page-columns .column-body-outset{grid-column:body-start-outset/body-end-outset;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-body-outset table{background:#fff}.page-columns .column-body-outset-left{grid-column:body-start-outset/body-content-end;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-body-outset-left table{background:#fff}.page-columns .column-body-outset-right{grid-column:body-content-start/body-end-outset;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-body-outset-right table{background:#fff}.page-columns 
.column-page{grid-column:page-start/page-end;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-page table{background:#fff}.page-columns .column-page-inset{grid-column:page-start-inset/page-end-inset;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-page-inset table{background:#fff}.page-columns .column-page-inset-left{grid-column:page-start-inset/body-content-end;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-page-inset-left table{background:#fff}.page-columns .column-page-inset-right{grid-column:body-content-start/page-end-inset;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-page-inset-right figcaption table{background:#fff}.page-columns .column-page-left{grid-column:page-start/body-content-end;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-page-left table{background:#fff}.page-columns .column-page-right{grid-column:body-content-start/page-end;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-page-right figcaption table{background:#fff}#quarto-content.page-columns #quarto-margin-sidebar,#quarto-content.page-columns #quarto-sidebar{z-index:1}@media(max-width: 991.98px){#quarto-content.page-columns #quarto-margin-sidebar.collapse,#quarto-content.page-columns #quarto-sidebar.collapse{z-index:1055}}#quarto-content.page-columns main.column-page,#quarto-content.page-columns main.column-page-right,#quarto-content.page-columns main.column-page-left{z-index:0}.page-columns .column-screen-inset{grid-column:screen-start-inset/screen-end-inset;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-screen-inset table{background:#fff}.page-columns .column-screen-inset-left{grid-column:screen-start-inset/body-content-end;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-screen-inset-left table{background:#fff}.page-columns .column-screen-inset-right{grid-column:body-content-start/screen-end-inset;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-screen-inset-right table{background:#fff}.page-columns .column-screen{grid-column:screen-start/screen-end;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-screen table{background:#fff}.page-columns .column-screen-left{grid-column:screen-start/body-content-end;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-screen-left table{background:#fff}.page-columns .column-screen-right{grid-column:body-content-start/screen-end;z-index:998;transform:translate3d(0, 0, 0)}.page-columns .column-screen-right table{background:#fff}.page-columns .column-screen-inset-shaded{grid-column:screen-start/screen-end;padding:1em;background:#f8f9fa;z-index:998;transform:translate3d(0, 0, 0);margin-bottom:1em}.zindex-content{z-index:998;transform:translate3d(0, 0, 0)}.zindex-modal{z-index:1055;transform:translate3d(0, 0, 0)}.zindex-over-content{z-index:999;transform:translate3d(0, 0, 0)}img.img-fluid.column-screen,img.img-fluid.column-screen-inset-shaded,img.img-fluid.column-screen-inset,img.img-fluid.column-screen-inset-left,img.img-fluid.column-screen-inset-right,img.img-fluid.column-screen-left,img.img-fluid.column-screen-right{width:100%}@media(min-width: 992px){.margin-caption,div.aside,aside,.column-margin{grid-column:body-end/page-end !important;z-index:998}.column-sidebar{grid-column:page-start/body-start !important;z-index:998}.column-leftmargin{grid-column:screen-start-inset/body-start !important;z-index:998}.no-row-height{height:1em;overflow:visible}}@media(max-width: 
991.98px){.margin-caption,div.aside,aside,.column-margin{grid-column:body-end/page-end !important;z-index:998}.no-row-height{height:1em;overflow:visible}.page-columns.page-full{overflow:visible}.page-columns.toc-left .margin-caption,.page-columns.toc-left div.aside,.page-columns.toc-left aside,.page-columns.toc-left .column-margin{grid-column:body-content-start/body-content-end !important;z-index:998;transform:translate3d(0, 0, 0)}.page-columns.toc-left .no-row-height{height:initial;overflow:initial}}@media(max-width: 767.98px){.margin-caption,div.aside,aside,.column-margin{grid-column:body-content-start/body-content-end !important;z-index:998;transform:translate3d(0, 0, 0)}.no-row-height{height:initial;overflow:initial}#quarto-margin-sidebar{display:none}.hidden-sm{display:none}}.panel-grid{display:grid;grid-template-rows:repeat(1, 1fr);grid-template-columns:repeat(24, 1fr);gap:1em}.panel-grid .g-col-1{grid-column:auto/span 1}.panel-grid .g-col-2{grid-column:auto/span 2}.panel-grid .g-col-3{grid-column:auto/span 3}.panel-grid .g-col-4{grid-column:auto/span 4}.panel-grid .g-col-5{grid-column:auto/span 5}.panel-grid .g-col-6{grid-column:auto/span 6}.panel-grid .g-col-7{grid-column:auto/span 7}.panel-grid .g-col-8{grid-column:auto/span 8}.panel-grid .g-col-9{grid-column:auto/span 9}.panel-grid .g-col-10{grid-column:auto/span 10}.panel-grid .g-col-11{grid-column:auto/span 11}.panel-grid .g-col-12{grid-column:auto/span 12}.panel-grid .g-col-13{grid-column:auto/span 13}.panel-grid .g-col-14{grid-column:auto/span 14}.panel-grid .g-col-15{grid-column:auto/span 15}.panel-grid .g-col-16{grid-column:auto/span 16}.panel-grid .g-col-17{grid-column:auto/span 17}.panel-grid .g-col-18{grid-column:auto/span 18}.panel-grid .g-col-19{grid-column:auto/span 19}.panel-grid .g-col-20{grid-column:auto/span 20}.panel-grid .g-col-21{grid-column:auto/span 21}.panel-grid .g-col-22{grid-column:auto/span 22}.panel-grid .g-col-23{grid-column:auto/span 23}.panel-grid .g-col-24{grid-column:auto/span 24}.panel-grid .g-start-1{grid-column-start:1}.panel-grid .g-start-2{grid-column-start:2}.panel-grid .g-start-3{grid-column-start:3}.panel-grid .g-start-4{grid-column-start:4}.panel-grid .g-start-5{grid-column-start:5}.panel-grid .g-start-6{grid-column-start:6}.panel-grid .g-start-7{grid-column-start:7}.panel-grid .g-start-8{grid-column-start:8}.panel-grid .g-start-9{grid-column-start:9}.panel-grid .g-start-10{grid-column-start:10}.panel-grid .g-start-11{grid-column-start:11}.panel-grid .g-start-12{grid-column-start:12}.panel-grid .g-start-13{grid-column-start:13}.panel-grid .g-start-14{grid-column-start:14}.panel-grid .g-start-15{grid-column-start:15}.panel-grid .g-start-16{grid-column-start:16}.panel-grid .g-start-17{grid-column-start:17}.panel-grid .g-start-18{grid-column-start:18}.panel-grid .g-start-19{grid-column-start:19}.panel-grid .g-start-20{grid-column-start:20}.panel-grid .g-start-21{grid-column-start:21}.panel-grid .g-start-22{grid-column-start:22}.panel-grid .g-start-23{grid-column-start:23}@media(min-width: 576px){.panel-grid .g-col-sm-1{grid-column:auto/span 1}.panel-grid .g-col-sm-2{grid-column:auto/span 2}.panel-grid .g-col-sm-3{grid-column:auto/span 3}.panel-grid .g-col-sm-4{grid-column:auto/span 4}.panel-grid .g-col-sm-5{grid-column:auto/span 5}.panel-grid .g-col-sm-6{grid-column:auto/span 6}.panel-grid .g-col-sm-7{grid-column:auto/span 7}.panel-grid .g-col-sm-8{grid-column:auto/span 8}.panel-grid .g-col-sm-9{grid-column:auto/span 9}.panel-grid .g-col-sm-10{grid-column:auto/span 10}.panel-grid 
.g-col-sm-11{grid-column:auto/span 11}.panel-grid .g-col-sm-12{grid-column:auto/span 12}.panel-grid .g-col-sm-13{grid-column:auto/span 13}.panel-grid .g-col-sm-14{grid-column:auto/span 14}.panel-grid .g-col-sm-15{grid-column:auto/span 15}.panel-grid .g-col-sm-16{grid-column:auto/span 16}.panel-grid .g-col-sm-17{grid-column:auto/span 17}.panel-grid .g-col-sm-18{grid-column:auto/span 18}.panel-grid .g-col-sm-19{grid-column:auto/span 19}.panel-grid .g-col-sm-20{grid-column:auto/span 20}.panel-grid .g-col-sm-21{grid-column:auto/span 21}.panel-grid .g-col-sm-22{grid-column:auto/span 22}.panel-grid .g-col-sm-23{grid-column:auto/span 23}.panel-grid .g-col-sm-24{grid-column:auto/span 24}.panel-grid .g-start-sm-1{grid-column-start:1}.panel-grid .g-start-sm-2{grid-column-start:2}.panel-grid .g-start-sm-3{grid-column-start:3}.panel-grid .g-start-sm-4{grid-column-start:4}.panel-grid .g-start-sm-5{grid-column-start:5}.panel-grid .g-start-sm-6{grid-column-start:6}.panel-grid .g-start-sm-7{grid-column-start:7}.panel-grid .g-start-sm-8{grid-column-start:8}.panel-grid .g-start-sm-9{grid-column-start:9}.panel-grid .g-start-sm-10{grid-column-start:10}.panel-grid .g-start-sm-11{grid-column-start:11}.panel-grid .g-start-sm-12{grid-column-start:12}.panel-grid .g-start-sm-13{grid-column-start:13}.panel-grid .g-start-sm-14{grid-column-start:14}.panel-grid .g-start-sm-15{grid-column-start:15}.panel-grid .g-start-sm-16{grid-column-start:16}.panel-grid .g-start-sm-17{grid-column-start:17}.panel-grid .g-start-sm-18{grid-column-start:18}.panel-grid .g-start-sm-19{grid-column-start:19}.panel-grid .g-start-sm-20{grid-column-start:20}.panel-grid .g-start-sm-21{grid-column-start:21}.panel-grid .g-start-sm-22{grid-column-start:22}.panel-grid .g-start-sm-23{grid-column-start:23}}@media(min-width: 768px){.panel-grid .g-col-md-1{grid-column:auto/span 1}.panel-grid .g-col-md-2{grid-column:auto/span 2}.panel-grid .g-col-md-3{grid-column:auto/span 3}.panel-grid .g-col-md-4{grid-column:auto/span 4}.panel-grid .g-col-md-5{grid-column:auto/span 5}.panel-grid .g-col-md-6{grid-column:auto/span 6}.panel-grid .g-col-md-7{grid-column:auto/span 7}.panel-grid .g-col-md-8{grid-column:auto/span 8}.panel-grid .g-col-md-9{grid-column:auto/span 9}.panel-grid .g-col-md-10{grid-column:auto/span 10}.panel-grid .g-col-md-11{grid-column:auto/span 11}.panel-grid .g-col-md-12{grid-column:auto/span 12}.panel-grid .g-col-md-13{grid-column:auto/span 13}.panel-grid .g-col-md-14{grid-column:auto/span 14}.panel-grid .g-col-md-15{grid-column:auto/span 15}.panel-grid .g-col-md-16{grid-column:auto/span 16}.panel-grid .g-col-md-17{grid-column:auto/span 17}.panel-grid .g-col-md-18{grid-column:auto/span 18}.panel-grid .g-col-md-19{grid-column:auto/span 19}.panel-grid .g-col-md-20{grid-column:auto/span 20}.panel-grid .g-col-md-21{grid-column:auto/span 21}.panel-grid .g-col-md-22{grid-column:auto/span 22}.panel-grid .g-col-md-23{grid-column:auto/span 23}.panel-grid .g-col-md-24{grid-column:auto/span 24}.panel-grid .g-start-md-1{grid-column-start:1}.panel-grid .g-start-md-2{grid-column-start:2}.panel-grid .g-start-md-3{grid-column-start:3}.panel-grid .g-start-md-4{grid-column-start:4}.panel-grid .g-start-md-5{grid-column-start:5}.panel-grid .g-start-md-6{grid-column-start:6}.panel-grid .g-start-md-7{grid-column-start:7}.panel-grid .g-start-md-8{grid-column-start:8}.panel-grid .g-start-md-9{grid-column-start:9}.panel-grid .g-start-md-10{grid-column-start:10}.panel-grid .g-start-md-11{grid-column-start:11}.panel-grid .g-start-md-12{grid-column-start:12}.panel-grid 
.g-start-md-13{grid-column-start:13}.panel-grid .g-start-md-14{grid-column-start:14}.panel-grid .g-start-md-15{grid-column-start:15}.panel-grid .g-start-md-16{grid-column-start:16}.panel-grid .g-start-md-17{grid-column-start:17}.panel-grid .g-start-md-18{grid-column-start:18}.panel-grid .g-start-md-19{grid-column-start:19}.panel-grid .g-start-md-20{grid-column-start:20}.panel-grid .g-start-md-21{grid-column-start:21}.panel-grid .g-start-md-22{grid-column-start:22}.panel-grid .g-start-md-23{grid-column-start:23}}@media(min-width: 992px){.panel-grid .g-col-lg-1{grid-column:auto/span 1}.panel-grid .g-col-lg-2{grid-column:auto/span 2}.panel-grid .g-col-lg-3{grid-column:auto/span 3}.panel-grid .g-col-lg-4{grid-column:auto/span 4}.panel-grid .g-col-lg-5{grid-column:auto/span 5}.panel-grid .g-col-lg-6{grid-column:auto/span 6}.panel-grid .g-col-lg-7{grid-column:auto/span 7}.panel-grid .g-col-lg-8{grid-column:auto/span 8}.panel-grid .g-col-lg-9{grid-column:auto/span 9}.panel-grid .g-col-lg-10{grid-column:auto/span 10}.panel-grid .g-col-lg-11{grid-column:auto/span 11}.panel-grid .g-col-lg-12{grid-column:auto/span 12}.panel-grid .g-col-lg-13{grid-column:auto/span 13}.panel-grid .g-col-lg-14{grid-column:auto/span 14}.panel-grid .g-col-lg-15{grid-column:auto/span 15}.panel-grid .g-col-lg-16{grid-column:auto/span 16}.panel-grid .g-col-lg-17{grid-column:auto/span 17}.panel-grid .g-col-lg-18{grid-column:auto/span 18}.panel-grid .g-col-lg-19{grid-column:auto/span 19}.panel-grid .g-col-lg-20{grid-column:auto/span 20}.panel-grid .g-col-lg-21{grid-column:auto/span 21}.panel-grid .g-col-lg-22{grid-column:auto/span 22}.panel-grid .g-col-lg-23{grid-column:auto/span 23}.panel-grid .g-col-lg-24{grid-column:auto/span 24}.panel-grid .g-start-lg-1{grid-column-start:1}.panel-grid .g-start-lg-2{grid-column-start:2}.panel-grid .g-start-lg-3{grid-column-start:3}.panel-grid .g-start-lg-4{grid-column-start:4}.panel-grid .g-start-lg-5{grid-column-start:5}.panel-grid .g-start-lg-6{grid-column-start:6}.panel-grid .g-start-lg-7{grid-column-start:7}.panel-grid .g-start-lg-8{grid-column-start:8}.panel-grid .g-start-lg-9{grid-column-start:9}.panel-grid .g-start-lg-10{grid-column-start:10}.panel-grid .g-start-lg-11{grid-column-start:11}.panel-grid .g-start-lg-12{grid-column-start:12}.panel-grid .g-start-lg-13{grid-column-start:13}.panel-grid .g-start-lg-14{grid-column-start:14}.panel-grid .g-start-lg-15{grid-column-start:15}.panel-grid .g-start-lg-16{grid-column-start:16}.panel-grid .g-start-lg-17{grid-column-start:17}.panel-grid .g-start-lg-18{grid-column-start:18}.panel-grid .g-start-lg-19{grid-column-start:19}.panel-grid .g-start-lg-20{grid-column-start:20}.panel-grid .g-start-lg-21{grid-column-start:21}.panel-grid .g-start-lg-22{grid-column-start:22}.panel-grid .g-start-lg-23{grid-column-start:23}}@media(min-width: 1200px){.panel-grid .g-col-xl-1{grid-column:auto/span 1}.panel-grid .g-col-xl-2{grid-column:auto/span 2}.panel-grid .g-col-xl-3{grid-column:auto/span 3}.panel-grid .g-col-xl-4{grid-column:auto/span 4}.panel-grid .g-col-xl-5{grid-column:auto/span 5}.panel-grid .g-col-xl-6{grid-column:auto/span 6}.panel-grid .g-col-xl-7{grid-column:auto/span 7}.panel-grid .g-col-xl-8{grid-column:auto/span 8}.panel-grid .g-col-xl-9{grid-column:auto/span 9}.panel-grid .g-col-xl-10{grid-column:auto/span 10}.panel-grid .g-col-xl-11{grid-column:auto/span 11}.panel-grid .g-col-xl-12{grid-column:auto/span 12}.panel-grid .g-col-xl-13{grid-column:auto/span 13}.panel-grid .g-col-xl-14{grid-column:auto/span 14}.panel-grid 
.g-col-xl-15{grid-column:auto/span 15}.panel-grid .g-col-xl-16{grid-column:auto/span 16}.panel-grid .g-col-xl-17{grid-column:auto/span 17}.panel-grid .g-col-xl-18{grid-column:auto/span 18}.panel-grid .g-col-xl-19{grid-column:auto/span 19}.panel-grid .g-col-xl-20{grid-column:auto/span 20}.panel-grid .g-col-xl-21{grid-column:auto/span 21}.panel-grid .g-col-xl-22{grid-column:auto/span 22}.panel-grid .g-col-xl-23{grid-column:auto/span 23}.panel-grid .g-col-xl-24{grid-column:auto/span 24}.panel-grid .g-start-xl-1{grid-column-start:1}.panel-grid .g-start-xl-2{grid-column-start:2}.panel-grid .g-start-xl-3{grid-column-start:3}.panel-grid .g-start-xl-4{grid-column-start:4}.panel-grid .g-start-xl-5{grid-column-start:5}.panel-grid .g-start-xl-6{grid-column-start:6}.panel-grid .g-start-xl-7{grid-column-start:7}.panel-grid .g-start-xl-8{grid-column-start:8}.panel-grid .g-start-xl-9{grid-column-start:9}.panel-grid .g-start-xl-10{grid-column-start:10}.panel-grid .g-start-xl-11{grid-column-start:11}.panel-grid .g-start-xl-12{grid-column-start:12}.panel-grid .g-start-xl-13{grid-column-start:13}.panel-grid .g-start-xl-14{grid-column-start:14}.panel-grid .g-start-xl-15{grid-column-start:15}.panel-grid .g-start-xl-16{grid-column-start:16}.panel-grid .g-start-xl-17{grid-column-start:17}.panel-grid .g-start-xl-18{grid-column-start:18}.panel-grid .g-start-xl-19{grid-column-start:19}.panel-grid .g-start-xl-20{grid-column-start:20}.panel-grid .g-start-xl-21{grid-column-start:21}.panel-grid .g-start-xl-22{grid-column-start:22}.panel-grid .g-start-xl-23{grid-column-start:23}}@media(min-width: 1400px){.panel-grid .g-col-xxl-1{grid-column:auto/span 1}.panel-grid .g-col-xxl-2{grid-column:auto/span 2}.panel-grid .g-col-xxl-3{grid-column:auto/span 3}.panel-grid .g-col-xxl-4{grid-column:auto/span 4}.panel-grid .g-col-xxl-5{grid-column:auto/span 5}.panel-grid .g-col-xxl-6{grid-column:auto/span 6}.panel-grid .g-col-xxl-7{grid-column:auto/span 7}.panel-grid .g-col-xxl-8{grid-column:auto/span 8}.panel-grid .g-col-xxl-9{grid-column:auto/span 9}.panel-grid .g-col-xxl-10{grid-column:auto/span 10}.panel-grid .g-col-xxl-11{grid-column:auto/span 11}.panel-grid .g-col-xxl-12{grid-column:auto/span 12}.panel-grid .g-col-xxl-13{grid-column:auto/span 13}.panel-grid .g-col-xxl-14{grid-column:auto/span 14}.panel-grid .g-col-xxl-15{grid-column:auto/span 15}.panel-grid .g-col-xxl-16{grid-column:auto/span 16}.panel-grid .g-col-xxl-17{grid-column:auto/span 17}.panel-grid .g-col-xxl-18{grid-column:auto/span 18}.panel-grid .g-col-xxl-19{grid-column:auto/span 19}.panel-grid .g-col-xxl-20{grid-column:auto/span 20}.panel-grid .g-col-xxl-21{grid-column:auto/span 21}.panel-grid .g-col-xxl-22{grid-column:auto/span 22}.panel-grid .g-col-xxl-23{grid-column:auto/span 23}.panel-grid .g-col-xxl-24{grid-column:auto/span 24}.panel-grid .g-start-xxl-1{grid-column-start:1}.panel-grid .g-start-xxl-2{grid-column-start:2}.panel-grid .g-start-xxl-3{grid-column-start:3}.panel-grid .g-start-xxl-4{grid-column-start:4}.panel-grid .g-start-xxl-5{grid-column-start:5}.panel-grid .g-start-xxl-6{grid-column-start:6}.panel-grid .g-start-xxl-7{grid-column-start:7}.panel-grid .g-start-xxl-8{grid-column-start:8}.panel-grid .g-start-xxl-9{grid-column-start:9}.panel-grid .g-start-xxl-10{grid-column-start:10}.panel-grid .g-start-xxl-11{grid-column-start:11}.panel-grid .g-start-xxl-12{grid-column-start:12}.panel-grid .g-start-xxl-13{grid-column-start:13}.panel-grid .g-start-xxl-14{grid-column-start:14}.panel-grid .g-start-xxl-15{grid-column-start:15}.panel-grid 
.g-start-xxl-16{grid-column-start:16}.panel-grid .g-start-xxl-17{grid-column-start:17}.panel-grid .g-start-xxl-18{grid-column-start:18}.panel-grid .g-start-xxl-19{grid-column-start:19}.panel-grid .g-start-xxl-20{grid-column-start:20}.panel-grid .g-start-xxl-21{grid-column-start:21}.panel-grid .g-start-xxl-22{grid-column-start:22}.panel-grid .g-start-xxl-23{grid-column-start:23}}main{margin-top:1em;margin-bottom:1em}h1,.h1,h2,.h2{margin-top:2rem;margin-bottom:1rem}h1.title,.title.h1{margin-top:0}h2,.h2{border-bottom:1px solid #dee2e6;padding-bottom:.5rem}h3,.h3,h4,.h4{margin-top:1.5rem}.header-section-number{color:#747a7f}.nav-link.active .header-section-number{color:inherit}mark,.mark{padding:0em}.panel-caption,caption,.figure-caption{font-size:1rem}.panel-caption,.figure-caption,figcaption{color:#747a7f}.table-caption,caption{color:#373a3c}.quarto-layout-cell[data-ref-parent] caption{color:#747a7f}.column-margin figcaption,.margin-caption,div.aside,aside,.column-margin{color:#747a7f;font-size:.825rem}.panel-caption.margin-caption{text-align:inherit}.column-margin.column-container p{margin-bottom:0}.column-margin.column-container>*:not(.collapse){padding-top:.5em;padding-bottom:.5em;display:block}.column-margin.column-container>*.collapse:not(.show){display:none}@media(min-width: 768px){.column-margin.column-container .callout-margin-content:first-child{margin-top:4.5em}.column-margin.column-container .callout-margin-content-simple:first-child{margin-top:3.5em}}.margin-caption>*{padding-top:.5em;padding-bottom:.5em}@media(max-width: 767.98px){.quarto-layout-row{flex-direction:column}}.tab-content{margin-top:0px;border-left:#dee2e6 1px solid;border-right:#dee2e6 1px solid;border-bottom:#dee2e6 1px solid;margin-left:0;padding:1em;margin-bottom:1em}@media(max-width: 767.98px){.layout-sidebar{margin-left:0;margin-right:0}}.panel-sidebar,.panel-sidebar .form-control,.panel-input,.panel-input .form-control,.selectize-dropdown{font-size:.9rem}.panel-sidebar .form-control,.panel-input .form-control{padding-top:.1rem}.tab-pane div.sourceCode{margin-top:0px}.tab-pane>p{padding-top:1em}.tab-content>.tab-pane:not(.active){display:none !important}div.sourceCode{background-color:rgba(233,236,239,.65);border:1px solid rgba(233,236,239,.65);border-radius:.25rem}pre.sourceCode{background-color:transparent}pre.sourceCode{border:none;font-size:.875em;overflow:visible !important;padding:.4em}.callout pre.sourceCode{padding-left:0}div.sourceCode{overflow-y:hidden}.callout div.sourceCode{margin-left:initial}.blockquote{font-size:inherit;padding-left:1rem;padding-right:1.5rem;color:#747a7f}.blockquote h1:first-child,.blockquote .h1:first-child,.blockquote h2:first-child,.blockquote .h2:first-child,.blockquote h3:first-child,.blockquote .h3:first-child,.blockquote h4:first-child,.blockquote .h4:first-child,.blockquote h5:first-child,.blockquote .h5:first-child{margin-top:0}pre{background-color:initial;padding:initial;border:initial}p code:not(.sourceCode),li code:not(.sourceCode){background-color:#f7f7f7;padding:.2em}nav p code:not(.sourceCode),nav li code:not(.sourceCode){background-color:transparent;padding:0}#quarto-embedded-source-code-modal>.modal-dialog{max-width:1000px;padding-left:1.75rem;padding-right:1.75rem}#quarto-embedded-source-code-modal>.modal-dialog>.modal-content>.modal-body{padding:0}#quarto-embedded-source-code-modal>.modal-dialog>.modal-content>.modal-body div.sourceCode{margin:0;padding:.2rem 
.2rem;border-radius:0px;border:none}#quarto-embedded-source-code-modal>.modal-dialog>.modal-content>.modal-header{padding:.7rem}.code-tools-button{font-size:1rem;padding:.15rem .15rem;margin-left:5px;color:#868e96;background-color:transparent;transition:initial;cursor:pointer}.code-tools-button>.bi::before{display:inline-block;height:1rem;width:1rem;content:"";vertical-align:-0.125em;background-image:url('data:image/svg+xml,');background-repeat:no-repeat;background-size:1rem 1rem}.code-tools-button:hover>.bi::before{background-image:url('data:image/svg+xml,')}#quarto-embedded-source-code-modal .code-copy-button>.bi::before{background-image:url('data:image/svg+xml,')}#quarto-embedded-source-code-modal .code-copy-button-checked>.bi::before{background-image:url('data:image/svg+xml,')}.sidebar{will-change:top;transition:top 200ms linear;position:sticky;overflow-y:auto;padding-top:1.2em;max-height:100vh}.sidebar.toc-left,.sidebar.margin-sidebar{top:0px;padding-top:1em}.sidebar.toc-left>*,.sidebar.margin-sidebar>*{padding-top:.5em}.sidebar.quarto-banner-title-block-sidebar>*{padding-top:1.65em}.sidebar nav[role=doc-toc]>h2,.sidebar nav[role=doc-toc]>.h2{font-size:.875rem;font-weight:400;margin-bottom:.5rem;margin-top:.3rem;font-family:inherit;border-bottom:0;padding-bottom:0;padding-top:0px}.sidebar nav[role=doc-toc]>ul a{border-left:1px solid #e9ecef;padding-left:.6rem}.sidebar nav[role=doc-toc]>ul a:empty{display:none}.sidebar nav[role=doc-toc] ul{padding-left:0;list-style:none;font-size:.875rem;font-weight:300}.sidebar nav[role=doc-toc]>ul li a{line-height:1.1rem;padding-bottom:.2rem;padding-top:.2rem;color:inherit}.sidebar nav[role=doc-toc] ul>li>ul>li>a{padding-left:1.2em}.sidebar nav[role=doc-toc] ul>li>ul>li>ul>li>a{padding-left:2.4em}.sidebar nav[role=doc-toc] ul>li>ul>li>ul>li>ul>li>a{padding-left:3.6em}.sidebar nav[role=doc-toc] ul>li>ul>li>ul>li>ul>li>ul>li>a{padding-left:4.8em}.sidebar nav[role=doc-toc] ul>li>ul>li>ul>li>ul>li>ul>li>ul>li>a{padding-left:6em}.sidebar nav[role=doc-toc] ul>li>ul>li>a.active{border-left:1px solid #2780e3;color:#2780e3 !important}.sidebar nav[role=doc-toc] ul>li>a.active{border-left:1px solid #2780e3;color:#2780e3 !important}kbd,.kbd{color:#373a3c;background-color:#f8f9fa;border:1px solid;border-radius:5px;border-color:#dee2e6}div.hanging-indent{margin-left:1em;text-indent:-1em}.citation a,.footnote-ref{text-decoration:none}.footnotes ol{padding-left:1em}.tippy-content>*{margin-bottom:.7em}.tippy-content>*:last-child{margin-bottom:0}.table a{word-break:break-word}.table>:not(:first-child){border-top-width:1px;border-top-color:#dee2e6}.table>thead{border-bottom:1px solid currentColor}.table>tbody{border-top:1px solid #dee2e6}.callout{margin-top:1.25rem;margin-bottom:1.25rem;border-radius:.25rem}.callout.callout-style-simple{padding:.4em .7em;border-left:5px solid;border-right:1px solid #dee2e6;border-top:1px solid #dee2e6;border-bottom:1px solid #dee2e6}.callout.callout-style-default{border-left:5px solid;border-right:1px solid #dee2e6;border-top:1px solid #dee2e6;border-bottom:1px solid #dee2e6}.callout .callout-body-container{flex-grow:1}.callout.callout-style-simple .callout-body{font-size:.9rem;font-weight:400}.callout.callout-style-default .callout-body{font-size:.9rem;font-weight:400}.callout.callout-captioned .callout-body{margin-top:.2em}.callout:not(.no-icon).callout-captioned.callout-style-simple 
.callout-body{padding-left:1.6em}.callout.callout-captioned>.callout-header{padding-top:.2em;margin-bottom:-0.2em}.callout.callout-style-simple>div.callout-header{border-bottom:none;font-size:.9rem;font-weight:600;opacity:75%}.callout.callout-style-default>div.callout-header{border-bottom:none;font-weight:600;opacity:85%;font-size:.9rem;padding-left:.5em;padding-right:.5em}.callout.callout-style-default div.callout-body{padding-left:.5em;padding-right:.5em}.callout.callout-style-default div.callout-body>:first-child{margin-top:.5em}.callout>div.callout-header[data-bs-toggle=collapse]{cursor:pointer}.callout.callout-style-default .callout-header[aria-expanded=false],.callout.callout-style-default .callout-header[aria-expanded=true]{padding-top:0px;margin-bottom:0px;align-items:center}.callout.callout-captioned .callout-body>:last-child:not(.sourceCode),.callout.callout-captioned .callout-body>div>:last-child:not(.sourceCode){margin-bottom:.5rem}.callout:not(.callout-captioned) .callout-body>:first-child,.callout:not(.callout-captioned) .callout-body>div>:first-child{margin-top:.25rem}.callout:not(.callout-captioned) .callout-body>:last-child,.callout:not(.callout-captioned) .callout-body>div>:last-child{margin-bottom:.2rem}.callout.callout-style-simple .callout-icon::before,.callout.callout-style-simple .callout-toggle::before{height:1rem;width:1rem;display:inline-block;content:"";background-repeat:no-repeat;background-size:1rem 1rem}.callout.callout-style-default .callout-icon::before,.callout.callout-style-default .callout-toggle::before{height:.9rem;width:.9rem;display:inline-block;content:"";background-repeat:no-repeat;background-size:.9rem .9rem}.callout.callout-style-default .callout-toggle::before{margin-top:5px}.callout .callout-btn-toggle .callout-toggle::before{transition:transform .2s linear}.callout .callout-header[aria-expanded=false] .callout-toggle::before{transform:rotate(-90deg)}.callout .callout-header[aria-expanded=true] .callout-toggle::before{transform:none}.callout.callout-style-simple:not(.no-icon) div.callout-icon-container{padding-top:.2em;padding-right:.55em}.callout.callout-style-default:not(.no-icon) div.callout-icon-container{padding-top:.1em;padding-right:.35em}.callout.callout-style-default:not(.no-icon) div.callout-caption-container{margin-top:-1px}.callout.callout-style-default.callout-caution:not(.no-icon) div.callout-icon-container{padding-top:.3em;padding-right:.35em}.callout>.callout-body>.callout-icon-container>.no-icon,.callout>.callout-header>.callout-icon-container>.no-icon{display:none}div.callout.callout{border-left-color:#868e96}div.callout.callout-style-default>.callout-header{background-color:#868e96}div.callout-note.callout{border-left-color:#2780e3}div.callout-note.callout-style-default>.callout-header{background-color:#e9f2fc}div.callout-note:not(.callout-captioned) .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-note.callout-captioned .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-note .callout-toggle::before{background-image:url('data:image/svg+xml,')}div.callout-tip.callout{border-left-color:#3fb618}div.callout-tip.callout-style-default>.callout-header{background-color:#ecf8e8}div.callout-tip:not(.callout-captioned) .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-tip.callout-captioned .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-tip 
.callout-toggle::before{background-image:url('data:image/svg+xml,')}div.callout-warning.callout{border-left-color:#ff7518}div.callout-warning.callout-style-default>.callout-header{background-color:#fff1e8}div.callout-warning:not(.callout-captioned) .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-warning.callout-captioned .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-warning .callout-toggle::before{background-image:url('data:image/svg+xml,')}div.callout-caution.callout{border-left-color:#f0ad4e}div.callout-caution.callout-style-default>.callout-header{background-color:#fef7ed}div.callout-caution:not(.callout-captioned) .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-caution.callout-captioned .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-caution .callout-toggle::before{background-image:url('data:image/svg+xml,')}div.callout-important.callout{border-left-color:#ff0039}div.callout-important.callout-style-default>.callout-header{background-color:#ffe6eb}div.callout-important:not(.callout-captioned) .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-important.callout-captioned .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-important .callout-toggle::before{background-image:url('data:image/svg+xml,')}.quarto-toggle-container{display:flex}@media(min-width: 992px){.navbar .quarto-color-scheme-toggle{padding-left:.5rem;padding-right:.5rem}}@media(max-width: 767.98px){.navbar .quarto-color-scheme-toggle{padding-left:0;padding-right:0;padding-bottom:.5em}}.quarto-reader-toggle .bi::before,.quarto-color-scheme-toggle .bi::before{display:inline-block;height:1rem;width:1rem;content:"";background-repeat:no-repeat;background-size:1rem 1rem}.navbar-collapse .quarto-color-scheme-toggle{padding-left:.6rem;padding-right:0;margin-top:-12px}.sidebar-navigation{padding-left:20px}.sidebar-navigation .quarto-color-scheme-toggle .bi::before{padding-top:.2rem;margin-bottom:-0.2rem}.sidebar-tools-main .quarto-color-scheme-toggle .bi::before{padding-top:.2rem;margin-bottom:-0.2rem}.navbar .quarto-color-scheme-toggle .bi::before{padding-top:7px;margin-bottom:-7px;padding-left:2px;margin-right:2px}.navbar .quarto-color-scheme-toggle:not(.alternate) .bi::before{background-image:url('data:image/svg+xml,')}.navbar .quarto-color-scheme-toggle.alternate .bi::before{background-image:url('data:image/svg+xml,')}.sidebar-navigation .quarto-color-scheme-toggle:not(.alternate) .bi::before{background-image:url('data:image/svg+xml,')}.sidebar-navigation .quarto-color-scheme-toggle.alternate .bi::before{background-image:url('data:image/svg+xml,')}.quarto-sidebar-toggle{border-color:#dee2e6;border-bottom-left-radius:.25rem;border-bottom-right-radius:.25rem;border-style:solid;border-width:1px;overflow:hidden;border-top-width:0px;padding-top:0px !important}.quarto-sidebar-toggle-title{cursor:pointer;padding-bottom:2px;margin-left:.25em;text-align:center;font-weight:400;font-size:.775em}#quarto-content .quarto-sidebar-toggle{background:#fafafa}#quarto-content .quarto-sidebar-toggle-title{color:#373a3c}.quarto-sidebar-toggle-icon{color:#dee2e6;margin-right:.5em;float:right;transition:transform .2s ease}.quarto-sidebar-toggle-icon::before{padding-top:5px}.quarto-sidebar-toggle.expanded .quarto-sidebar-toggle-icon{transform:rotate(-180deg)}.quarto-sidebar-toggle.expanded .quarto-sidebar-toggle-title{border-bottom:solid #dee2e6 
1px}.quarto-sidebar-toggle-contents{background-color:#fff;padding-right:10px;padding-left:10px;margin-top:0px !important;transition:max-height .5s ease}.quarto-sidebar-toggle.expanded .quarto-sidebar-toggle-contents{padding-top:1em;padding-bottom:10px}.quarto-sidebar-toggle:not(.expanded) .quarto-sidebar-toggle-contents{padding-top:0px !important;padding-bottom:0px}nav[role=doc-toc]{z-index:1020}#quarto-sidebar>*,nav[role=doc-toc]>*{transition:opacity .1s ease,border .1s ease}#quarto-sidebar.slow>*,nav[role=doc-toc].slow>*{transition:opacity .4s ease,border .4s ease}.quarto-color-scheme-toggle:not(.alternate).top-right .bi::before{background-image:url('data:image/svg+xml,')}.quarto-color-scheme-toggle.alternate.top-right .bi::before{background-image:url('data:image/svg+xml,')}#quarto-appendix.default{border-top:1px solid #dee2e6}#quarto-appendix.default{background-color:#fff;padding-top:1.5em;margin-top:2em;z-index:998}#quarto-appendix.default .quarto-appendix-heading{margin-top:0;line-height:1.4em;font-weight:600;opacity:.9;border-bottom:none;margin-bottom:0}#quarto-appendix.default .footnotes ol,#quarto-appendix.default .footnotes ol li>p:last-of-type,#quarto-appendix.default .quarto-appendix-contents>p:last-of-type{margin-bottom:0}#quarto-appendix.default .quarto-appendix-secondary-label{margin-bottom:.4em}#quarto-appendix.default .quarto-appendix-bibtex{font-size:.7em;padding:1em;border:solid 1px #dee2e6;margin-bottom:1em}#quarto-appendix.default .quarto-appendix-bibtex code.sourceCode{white-space:pre-wrap}#quarto-appendix.default .quarto-appendix-citeas{font-size:.9em;padding:1em;border:solid 1px #dee2e6;margin-bottom:1em}#quarto-appendix.default .quarto-appendix-heading{font-size:1em !important}#quarto-appendix.default *[role=doc-endnotes]>ol,#quarto-appendix.default .quarto-appendix-contents>*:not(h2):not(.h2){font-size:.9em}#quarto-appendix.default section{padding-bottom:1.5em}#quarto-appendix.default section *[role=doc-endnotes],#quarto-appendix.default section>*:not(a){opacity:.9;word-wrap:break-word}.btn.btn-quarto,div.cell-output-display .btn-quarto{color:#cbcccc;background-color:#373a3c;border-color:#373a3c}.btn.btn-quarto:hover,div.cell-output-display .btn-quarto:hover{color:#cbcccc;background-color:#555859;border-color:#4b4e50}.btn-check:focus+.btn.btn-quarto,.btn.btn-quarto:focus,.btn-check:focus+div.cell-output-display .btn-quarto,div.cell-output-display .btn-quarto:focus{color:#cbcccc;background-color:#555859;border-color:#4b4e50;box-shadow:0 0 0 .25rem rgba(77,80,82,.5)}.btn-check:checked+.btn.btn-quarto,.btn-check:active+.btn.btn-quarto,.btn.btn-quarto:active,.btn.btn-quarto.active,.show>.btn.btn-quarto.dropdown-toggle,.btn-check:checked+div.cell-output-display .btn-quarto,.btn-check:active+div.cell-output-display .btn-quarto,div.cell-output-display .btn-quarto:active,div.cell-output-display .btn-quarto.active,.show>div.cell-output-display .btn-quarto.dropdown-toggle{color:#fff;background-color:#5f6163;border-color:#4b4e50}.btn-check:checked+.btn.btn-quarto:focus,.btn-check:active+.btn.btn-quarto:focus,.btn.btn-quarto:active:focus,.btn.btn-quarto.active:focus,.show>.btn.btn-quarto.dropdown-toggle:focus,.btn-check:checked+div.cell-output-display .btn-quarto:focus,.btn-check:active+div.cell-output-display .btn-quarto:focus,div.cell-output-display .btn-quarto:active:focus,div.cell-output-display .btn-quarto.active:focus,.show>div.cell-output-display .btn-quarto.dropdown-toggle:focus{box-shadow:0 0 0 .25rem 
rgba(77,80,82,.5)}.btn.btn-quarto:disabled,.btn.btn-quarto.disabled,div.cell-output-display .btn-quarto:disabled,div.cell-output-display .btn-quarto.disabled{color:#fff;background-color:#373a3c;border-color:#373a3c}nav.quarto-secondary-nav.color-navbar{background-color:#2780e3;color:#fdfeff}nav.quarto-secondary-nav.color-navbar h1,nav.quarto-secondary-nav.color-navbar .h1,nav.quarto-secondary-nav.color-navbar .quarto-btn-toggle{color:#fdfeff}@media(max-width: 991.98px){body.nav-sidebar .quarto-title-banner,body.nav-sidebar .quarto-title-banner{display:none}}p.subtitle{margin-top:.25em;margin-bottom:.5em}code a:any-link{color:inherit;text-decoration-color:#868e96}/*! light */a.external:after{display:inline-block;height:.75rem;width:.75rem;margin-bottom:.15em;margin-left:.25em;content:"";vertical-align:-0.125em;background-image:url('data:image/svg+xml,');background-repeat:no-repeat;background-size:.75rem .75rem}a.external:after:hover{cursor:pointer}.quarto-ext-icon{display:inline-block;font-size:.75em;padding-left:.3em}.quarto-title-banner{margin-bottom:1em;color:#fdfeff;background:#2780e3}.quarto-title-banner .code-tools-button{color:#97cbff}.quarto-title-banner .code-tools-button:hover{color:#fdfeff}.quarto-title-banner .code-tools-button>.bi::before{background-image:url('data:image/svg+xml,')}.quarto-title-banner .code-tools-button:hover>.bi::before{background-image:url('data:image/svg+xml,')}.quarto-title-banner .quarto-title .title{font-weight:600}.quarto-title-banner .quarto-categories{margin-top:.75em}@media(min-width: 992px){.quarto-title-banner{padding-top:2.5em;padding-bottom:2.5em}}@media(max-width: 991.98px){.quarto-title-banner{padding-top:1em;padding-bottom:1em}}main.quarto-banner-title-block section:first-of-type h2:first-of-type,main.quarto-banner-title-block section:first-of-type .h2:first-of-type,main.quarto-banner-title-block section:first-of-type h3:first-of-type,main.quarto-banner-title-block section:first-of-type .h3:first-of-type,main.quarto-banner-title-block section:first-of-type h4:first-of-type,main.quarto-banner-title-block section:first-of-type .h4:first-of-type{margin-top:0}.quarto-title .quarto-categories{display:flex;column-gap:.4em;padding-bottom:.5em;margin-top:.25em}.quarto-title .quarto-categories .quarto-category{padding:.25em .75em;font-size:.65em;text-transform:uppercase;border:solid 1px;border-radius:.25rem;opacity:.6}.quarto-title .quarto-categories .quarto-category a{color:inherit}#title-block-header.quarto-title-block.default .quarto-title-meta{display:grid;grid-template-columns:repeat(2, 1fr)}#title-block-header.quarto-title-block.default .quarto-title .title{margin-bottom:0}#title-block-header.quarto-title-block.default .quarto-title-author-orcid img{margin-top:-5px}#title-block-header.quarto-title-block.default .quarto-description p:last-of-type{margin-bottom:0}#title-block-header.quarto-title-block.default .quarto-title-meta-contents p,#title-block-header.quarto-title-block.default .quarto-title-authors p,#title-block-header.quarto-title-block.default .quarto-title-affiliations p{margin-bottom:.1em}#title-block-header.quarto-title-block.default .quarto-title-meta-heading{text-transform:uppercase;margin-top:1em;font-size:.8em;opacity:.8;font-weight:400}#title-block-header.quarto-title-block.default .quarto-title-meta-contents{font-size:.9em}#title-block-header.quarto-title-block.default .quarto-title-meta-contents a{color:#373a3c}#title-block-header.quarto-title-block.default .quarto-title-meta-contents 
p.affiliation:last-of-type{margin-bottom:.7em}#title-block-header.quarto-title-block.default p.affiliation{margin-bottom:.1em}#title-block-header.quarto-title-block.default .description,#title-block-header.quarto-title-block.default .abstract{margin-top:0}#title-block-header.quarto-title-block.default .description>p,#title-block-header.quarto-title-block.default .abstract>p{font-size:.9em}#title-block-header.quarto-title-block.default .description>p:last-of-type,#title-block-header.quarto-title-block.default .abstract>p:last-of-type{margin-bottom:0}#title-block-header.quarto-title-block.default .description .abstract-title,#title-block-header.quarto-title-block.default .abstract .abstract-title{margin-top:1em;text-transform:uppercase;font-size:.8em;opacity:.8;font-weight:400}#title-block-header.quarto-title-block.default .quarto-title-meta-author{display:grid;grid-template-columns:1fr 1fr}body{-webkit-font-smoothing:antialiased}.badge.bg-light{color:#373a3c}.progress .progress-bar{font-size:8px;line-height:8px}/*# sourceMappingURL=397ef2e52d54cf686e4908b90039e9db.css.map */ diff --git a/worksheets/worksheet_redlining_files/libs/bootstrap/bootstrap.min.js b/worksheets/worksheet_redlining_files/libs/bootstrap/bootstrap.min.js new file mode 100644 index 0000000..cc0a255 --- /dev/null +++ b/worksheets/worksheet_redlining_files/libs/bootstrap/bootstrap.min.js @@ -0,0 +1,7 @@ +/*! + * Bootstrap v5.1.3 (https://getbootstrap.com/) + * Copyright 2011-2021 The Bootstrap Authors (https://github.com/twbs/bootstrap/graphs/contributors) + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE) + */ +!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):(t="undefined"!=typeof globalThis?globalThis:t||self).bootstrap=e()}(this,(function(){"use strict";const t="transitionend",e=t=>{let e=t.getAttribute("data-bs-target");if(!e||"#"===e){let i=t.getAttribute("href");if(!i||!i.includes("#")&&!i.startsWith("."))return null;i.includes("#")&&!i.startsWith("#")&&(i=`#${i.split("#")[1]}`),e=i&&"#"!==i?i.trim():null}return e},i=t=>{const i=e(t);return i&&document.querySelector(i)?i:null},n=t=>{const i=e(t);return i?document.querySelector(i):null},s=e=>{e.dispatchEvent(new Event(t))},o=t=>!(!t||"object"!=typeof t)&&(void 0!==t.jquery&&(t=t[0]),void 0!==t.nodeType),r=t=>o(t)?t.jquery?t[0]:t:"string"==typeof t&&t.length>0?document.querySelector(t):null,a=(t,e,i)=>{Object.keys(i).forEach((n=>{const s=i[n],r=e[n],a=r&&o(r)?"element":null==(l=r)?`${l}`:{}.toString.call(l).match(/\s([a-z]+)/i)[1].toLowerCase();var l;if(!new RegExp(s).test(a))throw new TypeError(`${t.toUpperCase()}: Option "${n}" provided type "${a}" but expected type "${s}".`)}))},l=t=>!(!o(t)||0===t.getClientRects().length)&&"visible"===getComputedStyle(t).getPropertyValue("visibility"),c=t=>!t||t.nodeType!==Node.ELEMENT_NODE||!!t.classList.contains("disabled")||(void 0!==t.disabled?t.disabled:t.hasAttribute("disabled")&&"false"!==t.getAttribute("disabled")),h=t=>{if(!document.documentElement.attachShadow)return null;if("function"==typeof t.getRootNode){const e=t.getRootNode();return e instanceof ShadowRoot?e:null}return t instanceof ShadowRoot?t:t.parentNode?h(t.parentNode):null},d=()=>{},u=t=>{t.offsetHeight},f=()=>{const{jQuery:t}=window;return t&&!document.body.hasAttribute("data-bs-no-jquery")?t:null},p=[],m=()=>"rtl"===document.documentElement.dir,g=t=>{var e;e=()=>{const e=f();if(e){const 
i=t.NAME,n=e.fn[i];e.fn[i]=t.jQueryInterface,e.fn[i].Constructor=t,e.fn[i].noConflict=()=>(e.fn[i]=n,t.jQueryInterface)}},"loading"===document.readyState?(p.length||document.addEventListener("DOMContentLoaded",(()=>{p.forEach((t=>t()))})),p.push(e)):e()},_=t=>{"function"==typeof t&&t()},b=(e,i,n=!0)=>{if(!n)return void _(e);const o=(t=>{if(!t)return 0;let{transitionDuration:e,transitionDelay:i}=window.getComputedStyle(t);const n=Number.parseFloat(e),s=Number.parseFloat(i);return n||s?(e=e.split(",")[0],i=i.split(",")[0],1e3*(Number.parseFloat(e)+Number.parseFloat(i))):0})(i)+5;let r=!1;const a=({target:n})=>{n===i&&(r=!0,i.removeEventListener(t,a),_(e))};i.addEventListener(t,a),setTimeout((()=>{r||s(i)}),o)},v=(t,e,i,n)=>{let s=t.indexOf(e);if(-1===s)return t[!i&&n?t.length-1:0];const o=t.length;return s+=i?1:-1,n&&(s=(s+o)%o),t[Math.max(0,Math.min(s,o-1))]},y=/[^.]*(?=\..*)\.|.*/,w=/\..*/,E=/::\d+$/,A={};let T=1;const O={mouseenter:"mouseover",mouseleave:"mouseout"},C=/^(mouseenter|mouseleave)/i,k=new Set(["click","dblclick","mouseup","mousedown","contextmenu","mousewheel","DOMMouseScroll","mouseover","mouseout","mousemove","selectstart","selectend","keydown","keypress","keyup","orientationchange","touchstart","touchmove","touchend","touchcancel","pointerdown","pointermove","pointerup","pointerleave","pointercancel","gesturestart","gesturechange","gestureend","focus","blur","change","reset","select","submit","focusin","focusout","load","unload","beforeunload","resize","move","DOMContentLoaded","readystatechange","error","abort","scroll"]);function L(t,e){return e&&`${e}::${T++}`||t.uidEvent||T++}function x(t){const e=L(t);return t.uidEvent=e,A[e]=A[e]||{},A[e]}function D(t,e,i=null){const n=Object.keys(t);for(let s=0,o=n.length;sfunction(e){if(!e.relatedTarget||e.relatedTarget!==e.delegateTarget&&!e.delegateTarget.contains(e.relatedTarget))return t.call(this,e)};n?n=t(n):i=t(i)}const[o,r,a]=S(e,i,n),l=x(t),c=l[a]||(l[a]={}),h=D(c,r,o?i:null);if(h)return void(h.oneOff=h.oneOff&&s);const d=L(r,e.replace(y,"")),u=o?function(t,e,i){return function n(s){const o=t.querySelectorAll(e);for(let{target:r}=s;r&&r!==this;r=r.parentNode)for(let a=o.length;a--;)if(o[a]===r)return s.delegateTarget=r,n.oneOff&&j.off(t,s.type,e,i),i.apply(r,[s]);return null}}(t,i,n):function(t,e){return function i(n){return n.delegateTarget=t,i.oneOff&&j.off(t,n.type,e),e.apply(t,[n])}}(t,i);u.delegationSelector=o?i:null,u.originalHandler=r,u.oneOff=s,u.uidEvent=d,c[d]=u,t.addEventListener(a,u,o)}function I(t,e,i,n,s){const o=D(e[i],n,s);o&&(t.removeEventListener(i,o,Boolean(s)),delete e[i][o.uidEvent])}function P(t){return t=t.replace(w,""),O[t]||t}const j={on(t,e,i,n){N(t,e,i,n,!1)},one(t,e,i,n){N(t,e,i,n,!0)},off(t,e,i,n){if("string"!=typeof e||!t)return;const[s,o,r]=S(e,i,n),a=r!==e,l=x(t),c=e.startsWith(".");if(void 0!==o){if(!l||!l[r])return;return void I(t,l,r,o,s?i:null)}c&&Object.keys(l).forEach((i=>{!function(t,e,i,n){const s=e[i]||{};Object.keys(s).forEach((o=>{if(o.includes(n)){const n=s[o];I(t,e,i,n.originalHandler,n.delegationSelector)}}))}(t,l,i,e.slice(1))}));const h=l[r]||{};Object.keys(h).forEach((i=>{const n=i.replace(E,"");if(!a||e.includes(n)){const e=h[i];I(t,l,r,e.originalHandler,e.delegationSelector)}}))},trigger(t,e,i){if("string"!=typeof e||!t)return null;const n=f(),s=P(e),o=e!==s,r=k.has(s);let a,l=!0,c=!0,h=!1,d=null;return 
o&&n&&(a=n.Event(e,i),n(t).trigger(a),l=!a.isPropagationStopped(),c=!a.isImmediatePropagationStopped(),h=a.isDefaultPrevented()),r?(d=document.createEvent("HTMLEvents"),d.initEvent(s,l,!0)):d=new CustomEvent(e,{bubbles:l,cancelable:!0}),void 0!==i&&Object.keys(i).forEach((t=>{Object.defineProperty(d,t,{get:()=>i[t]})})),h&&d.preventDefault(),c&&t.dispatchEvent(d),d.defaultPrevented&&void 0!==a&&a.preventDefault(),d}},M=new Map,H={set(t,e,i){M.has(t)||M.set(t,new Map);const n=M.get(t);n.has(e)||0===n.size?n.set(e,i):console.error(`Bootstrap doesn't allow more than one instance per element. Bound instance: ${Array.from(n.keys())[0]}.`)},get:(t,e)=>M.has(t)&&M.get(t).get(e)||null,remove(t,e){if(!M.has(t))return;const i=M.get(t);i.delete(e),0===i.size&&M.delete(t)}};class B{constructor(t){(t=r(t))&&(this._element=t,H.set(this._element,this.constructor.DATA_KEY,this))}dispose(){H.remove(this._element,this.constructor.DATA_KEY),j.off(this._element,this.constructor.EVENT_KEY),Object.getOwnPropertyNames(this).forEach((t=>{this[t]=null}))}_queueCallback(t,e,i=!0){b(t,e,i)}static getInstance(t){return H.get(r(t),this.DATA_KEY)}static getOrCreateInstance(t,e={}){return this.getInstance(t)||new this(t,"object"==typeof e?e:null)}static get VERSION(){return"5.1.3"}static get NAME(){throw new Error('You have to implement the static method "NAME", for each component!')}static get DATA_KEY(){return`bs.${this.NAME}`}static get EVENT_KEY(){return`.${this.DATA_KEY}`}}const R=(t,e="hide")=>{const i=`click.dismiss${t.EVENT_KEY}`,s=t.NAME;j.on(document,i,`[data-bs-dismiss="${s}"]`,(function(i){if(["A","AREA"].includes(this.tagName)&&i.preventDefault(),c(this))return;const o=n(this)||this.closest(`.${s}`);t.getOrCreateInstance(o)[e]()}))};class W extends B{static get NAME(){return"alert"}close(){if(j.trigger(this._element,"close.bs.alert").defaultPrevented)return;this._element.classList.remove("show");const t=this._element.classList.contains("fade");this._queueCallback((()=>this._destroyElement()),this._element,t)}_destroyElement(){this._element.remove(),j.trigger(this._element,"closed.bs.alert"),this.dispose()}static jQueryInterface(t){return this.each((function(){const e=W.getOrCreateInstance(this);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}R(W,"close"),g(W);const $='[data-bs-toggle="button"]';class z extends B{static get NAME(){return"button"}toggle(){this._element.setAttribute("aria-pressed",this._element.classList.toggle("active"))}static jQueryInterface(t){return this.each((function(){const e=z.getOrCreateInstance(this);"toggle"===t&&e[t]()}))}}function q(t){return"true"===t||"false"!==t&&(t===Number(t).toString()?Number(t):""===t||"null"===t?null:t)}function F(t){return t.replace(/[A-Z]/g,(t=>`-${t.toLowerCase()}`))}j.on(document,"click.bs.button.data-api",$,(t=>{t.preventDefault();const e=t.target.closest($);z.getOrCreateInstance(e).toggle()})),g(z);const U={setDataAttribute(t,e,i){t.setAttribute(`data-bs-${F(e)}`,i)},removeDataAttribute(t,e){t.removeAttribute(`data-bs-${F(e)}`)},getDataAttributes(t){if(!t)return{};const e={};return Object.keys(t.dataset).filter((t=>t.startsWith("bs"))).forEach((i=>{let n=i.replace(/^bs/,"");n=n.charAt(0).toLowerCase()+n.slice(1,n.length),e[n]=q(t.dataset[i])})),e},getDataAttribute:(t,e)=>q(t.getAttribute(`data-bs-${F(e)}`)),offset(t){const 
e=t.getBoundingClientRect();return{top:e.top+window.pageYOffset,left:e.left+window.pageXOffset}},position:t=>({top:t.offsetTop,left:t.offsetLeft})},V={find:(t,e=document.documentElement)=>[].concat(...Element.prototype.querySelectorAll.call(e,t)),findOne:(t,e=document.documentElement)=>Element.prototype.querySelector.call(e,t),children:(t,e)=>[].concat(...t.children).filter((t=>t.matches(e))),parents(t,e){const i=[];let n=t.parentNode;for(;n&&n.nodeType===Node.ELEMENT_NODE&&3!==n.nodeType;)n.matches(e)&&i.push(n),n=n.parentNode;return i},prev(t,e){let i=t.previousElementSibling;for(;i;){if(i.matches(e))return[i];i=i.previousElementSibling}return[]},next(t,e){let i=t.nextElementSibling;for(;i;){if(i.matches(e))return[i];i=i.nextElementSibling}return[]},focusableChildren(t){const e=["a","button","input","textarea","select","details","[tabindex]",'[contenteditable="true"]'].map((t=>`${t}:not([tabindex^="-"])`)).join(", ");return this.find(e,t).filter((t=>!c(t)&&l(t)))}},K="carousel",X={interval:5e3,keyboard:!0,slide:!1,pause:"hover",wrap:!0,touch:!0},Y={interval:"(number|boolean)",keyboard:"boolean",slide:"(boolean|string)",pause:"(string|boolean)",wrap:"boolean",touch:"boolean"},Q="next",G="prev",Z="left",J="right",tt={ArrowLeft:J,ArrowRight:Z},et="slid.bs.carousel",it="active",nt=".active.carousel-item";class st extends B{constructor(t,e){super(t),this._items=null,this._interval=null,this._activeElement=null,this._isPaused=!1,this._isSliding=!1,this.touchTimeout=null,this.touchStartX=0,this.touchDeltaX=0,this._config=this._getConfig(e),this._indicatorsElement=V.findOne(".carousel-indicators",this._element),this._touchSupported="ontouchstart"in document.documentElement||navigator.maxTouchPoints>0,this._pointerEvent=Boolean(window.PointerEvent),this._addEventListeners()}static get Default(){return X}static get NAME(){return K}next(){this._slide(Q)}nextWhenVisible(){!document.hidden&&l(this._element)&&this.next()}prev(){this._slide(G)}pause(t){t||(this._isPaused=!0),V.findOne(".carousel-item-next, .carousel-item-prev",this._element)&&(s(this._element),this.cycle(!0)),clearInterval(this._interval),this._interval=null}cycle(t){t||(this._isPaused=!1),this._interval&&(clearInterval(this._interval),this._interval=null),this._config&&this._config.interval&&!this._isPaused&&(this._updateInterval(),this._interval=setInterval((document.visibilityState?this.nextWhenVisible:this.next).bind(this),this._config.interval))}to(t){this._activeElement=V.findOne(nt,this._element);const e=this._getItemIndex(this._activeElement);if(t>this._items.length-1||t<0)return;if(this._isSliding)return void j.one(this._element,et,(()=>this.to(t)));if(e===t)return this.pause(),void this.cycle();const i=t>e?Q:G;this._slide(i,this._items[t])}_getConfig(t){return t={...X,...U.getDataAttributes(this._element),..."object"==typeof t?t:{}},a(K,t,Y),t}_handleSwipe(){const t=Math.abs(this.touchDeltaX);if(t<=40)return;const e=t/this.touchDeltaX;this.touchDeltaX=0,e&&this._slide(e>0?J:Z)}_addEventListeners(){this._config.keyboard&&j.on(this._element,"keydown.bs.carousel",(t=>this._keydown(t))),"hover"===this._config.pause&&(j.on(this._element,"mouseenter.bs.carousel",(t=>this.pause(t))),j.on(this._element,"mouseleave.bs.carousel",(t=>this.cycle(t)))),this._config.touch&&this._touchSupported&&this._addTouchEventListeners()}_addTouchEventListeners(){const 
t=t=>this._pointerEvent&&("pen"===t.pointerType||"touch"===t.pointerType),e=e=>{t(e)?this.touchStartX=e.clientX:this._pointerEvent||(this.touchStartX=e.touches[0].clientX)},i=t=>{this.touchDeltaX=t.touches&&t.touches.length>1?0:t.touches[0].clientX-this.touchStartX},n=e=>{t(e)&&(this.touchDeltaX=e.clientX-this.touchStartX),this._handleSwipe(),"hover"===this._config.pause&&(this.pause(),this.touchTimeout&&clearTimeout(this.touchTimeout),this.touchTimeout=setTimeout((t=>this.cycle(t)),500+this._config.interval))};V.find(".carousel-item img",this._element).forEach((t=>{j.on(t,"dragstart.bs.carousel",(t=>t.preventDefault()))})),this._pointerEvent?(j.on(this._element,"pointerdown.bs.carousel",(t=>e(t))),j.on(this._element,"pointerup.bs.carousel",(t=>n(t))),this._element.classList.add("pointer-event")):(j.on(this._element,"touchstart.bs.carousel",(t=>e(t))),j.on(this._element,"touchmove.bs.carousel",(t=>i(t))),j.on(this._element,"touchend.bs.carousel",(t=>n(t))))}_keydown(t){if(/input|textarea/i.test(t.target.tagName))return;const e=tt[t.key];e&&(t.preventDefault(),this._slide(e))}_getItemIndex(t){return this._items=t&&t.parentNode?V.find(".carousel-item",t.parentNode):[],this._items.indexOf(t)}_getItemByOrder(t,e){const i=t===Q;return v(this._items,e,i,this._config.wrap)}_triggerSlideEvent(t,e){const i=this._getItemIndex(t),n=this._getItemIndex(V.findOne(nt,this._element));return j.trigger(this._element,"slide.bs.carousel",{relatedTarget:t,direction:e,from:n,to:i})}_setActiveIndicatorElement(t){if(this._indicatorsElement){const e=V.findOne(".active",this._indicatorsElement);e.classList.remove(it),e.removeAttribute("aria-current");const i=V.find("[data-bs-target]",this._indicatorsElement);for(let e=0;e{j.trigger(this._element,et,{relatedTarget:o,direction:d,from:s,to:r})};if(this._element.classList.contains("slide")){o.classList.add(h),u(o),n.classList.add(c),o.classList.add(c);const t=()=>{o.classList.remove(c,h),o.classList.add(it),n.classList.remove(it,h,c),this._isSliding=!1,setTimeout(f,0)};this._queueCallback(t,n,!0)}else n.classList.remove(it),o.classList.add(it),this._isSliding=!1,f();a&&this.cycle()}_directionToOrder(t){return[J,Z].includes(t)?m()?t===Z?G:Q:t===Z?Q:G:t}_orderToDirection(t){return[Q,G].includes(t)?m()?t===G?Z:J:t===G?J:Z:t}static carouselInterface(t,e){const i=st.getOrCreateInstance(t,e);let{_config:n}=i;"object"==typeof e&&(n={...n,...e});const s="string"==typeof e?e:n.slide;if("number"==typeof e)i.to(e);else if("string"==typeof s){if(void 0===i[s])throw new TypeError(`No method named "${s}"`);i[s]()}else n.interval&&n.ride&&(i.pause(),i.cycle())}static jQueryInterface(t){return this.each((function(){st.carouselInterface(this,t)}))}static dataApiClickHandler(t){const e=n(this);if(!e||!e.classList.contains("carousel"))return;const i={...U.getDataAttributes(e),...U.getDataAttributes(this)},s=this.getAttribute("data-bs-slide-to");s&&(i.interval=!1),st.carouselInterface(e,i),s&&st.getInstance(e).to(s),t.preventDefault()}}j.on(document,"click.bs.carousel.data-api","[data-bs-slide], [data-bs-slide-to]",st.dataApiClickHandler),j.on(window,"load.bs.carousel.data-api",(()=>{const t=V.find('[data-bs-ride="carousel"]');for(let e=0,i=t.length;et===this._element));null!==s&&o.length&&(this._selector=s,this._triggerArray.push(e))}this._initializeChildren(),this._config.parent||this._addAriaAndCollapsedClass(this._triggerArray,this._isShown()),this._config.toggle&&this.toggle()}static get Default(){return rt}static get NAME(){return 
ot}toggle(){this._isShown()?this.hide():this.show()}show(){if(this._isTransitioning||this._isShown())return;let t,e=[];if(this._config.parent){const t=V.find(ut,this._config.parent);e=V.find(".collapse.show, .collapse.collapsing",this._config.parent).filter((e=>!t.includes(e)))}const i=V.findOne(this._selector);if(e.length){const n=e.find((t=>i!==t));if(t=n?pt.getInstance(n):null,t&&t._isTransitioning)return}if(j.trigger(this._element,"show.bs.collapse").defaultPrevented)return;e.forEach((e=>{i!==e&&pt.getOrCreateInstance(e,{toggle:!1}).hide(),t||H.set(e,"bs.collapse",null)}));const n=this._getDimension();this._element.classList.remove(ct),this._element.classList.add(ht),this._element.style[n]=0,this._addAriaAndCollapsedClass(this._triggerArray,!0),this._isTransitioning=!0;const s=`scroll${n[0].toUpperCase()+n.slice(1)}`;this._queueCallback((()=>{this._isTransitioning=!1,this._element.classList.remove(ht),this._element.classList.add(ct,lt),this._element.style[n]="",j.trigger(this._element,"shown.bs.collapse")}),this._element,!0),this._element.style[n]=`${this._element[s]}px`}hide(){if(this._isTransitioning||!this._isShown())return;if(j.trigger(this._element,"hide.bs.collapse").defaultPrevented)return;const t=this._getDimension();this._element.style[t]=`${this._element.getBoundingClientRect()[t]}px`,u(this._element),this._element.classList.add(ht),this._element.classList.remove(ct,lt);const e=this._triggerArray.length;for(let t=0;t{this._isTransitioning=!1,this._element.classList.remove(ht),this._element.classList.add(ct),j.trigger(this._element,"hidden.bs.collapse")}),this._element,!0)}_isShown(t=this._element){return t.classList.contains(lt)}_getConfig(t){return(t={...rt,...U.getDataAttributes(this._element),...t}).toggle=Boolean(t.toggle),t.parent=r(t.parent),a(ot,t,at),t}_getDimension(){return this._element.classList.contains("collapse-horizontal")?"width":"height"}_initializeChildren(){if(!this._config.parent)return;const t=V.find(ut,this._config.parent);V.find(ft,this._config.parent).filter((e=>!t.includes(e))).forEach((t=>{const e=n(t);e&&this._addAriaAndCollapsedClass([t],this._isShown(e))}))}_addAriaAndCollapsedClass(t,e){t.length&&t.forEach((t=>{e?t.classList.remove(dt):t.classList.add(dt),t.setAttribute("aria-expanded",e)}))}static jQueryInterface(t){return this.each((function(){const e={};"string"==typeof t&&/show|hide/.test(t)&&(e.toggle=!1);const i=pt.getOrCreateInstance(this,e);if("string"==typeof t){if(void 0===i[t])throw new TypeError(`No method named "${t}"`);i[t]()}}))}}j.on(document,"click.bs.collapse.data-api",ft,(function(t){("A"===t.target.tagName||t.delegateTarget&&"A"===t.delegateTarget.tagName)&&t.preventDefault();const e=i(this);V.find(e).forEach((t=>{pt.getOrCreateInstance(t,{toggle:!1}).toggle()}))})),g(pt);var mt="top",gt="bottom",_t="right",bt="left",vt="auto",yt=[mt,gt,_t,bt],wt="start",Et="end",At="clippingParents",Tt="viewport",Ot="popper",Ct="reference",kt=yt.reduce((function(t,e){return t.concat([e+"-"+wt,e+"-"+Et])}),[]),Lt=[].concat(yt,[vt]).reduce((function(t,e){return t.concat([e,e+"-"+wt,e+"-"+Et])}),[]),xt="beforeRead",Dt="read",St="afterRead",Nt="beforeMain",It="main",Pt="afterMain",jt="beforeWrite",Mt="write",Ht="afterWrite",Bt=[xt,Dt,St,Nt,It,Pt,jt,Mt,Ht];function Rt(t){return t?(t.nodeName||"").toLowerCase():null}function Wt(t){if(null==t)return window;if("[object Window]"!==t.toString()){var e=t.ownerDocument;return e&&e.defaultView||window}return t}function $t(t){return t instanceof Wt(t).Element||t instanceof Element}function zt(t){return t 
instanceof Wt(t).HTMLElement||t instanceof HTMLElement}function qt(t){return"undefined"!=typeof ShadowRoot&&(t instanceof Wt(t).ShadowRoot||t instanceof ShadowRoot)}const Ft={name:"applyStyles",enabled:!0,phase:"write",fn:function(t){var e=t.state;Object.keys(e.elements).forEach((function(t){var i=e.styles[t]||{},n=e.attributes[t]||{},s=e.elements[t];zt(s)&&Rt(s)&&(Object.assign(s.style,i),Object.keys(n).forEach((function(t){var e=n[t];!1===e?s.removeAttribute(t):s.setAttribute(t,!0===e?"":e)})))}))},effect:function(t){var e=t.state,i={popper:{position:e.options.strategy,left:"0",top:"0",margin:"0"},arrow:{position:"absolute"},reference:{}};return Object.assign(e.elements.popper.style,i.popper),e.styles=i,e.elements.arrow&&Object.assign(e.elements.arrow.style,i.arrow),function(){Object.keys(e.elements).forEach((function(t){var n=e.elements[t],s=e.attributes[t]||{},o=Object.keys(e.styles.hasOwnProperty(t)?e.styles[t]:i[t]).reduce((function(t,e){return t[e]="",t}),{});zt(n)&&Rt(n)&&(Object.assign(n.style,o),Object.keys(s).forEach((function(t){n.removeAttribute(t)})))}))}},requires:["computeStyles"]};function Ut(t){return t.split("-")[0]}function Vt(t,e){var i=t.getBoundingClientRect();return{width:i.width/1,height:i.height/1,top:i.top/1,right:i.right/1,bottom:i.bottom/1,left:i.left/1,x:i.left/1,y:i.top/1}}function Kt(t){var e=Vt(t),i=t.offsetWidth,n=t.offsetHeight;return Math.abs(e.width-i)<=1&&(i=e.width),Math.abs(e.height-n)<=1&&(n=e.height),{x:t.offsetLeft,y:t.offsetTop,width:i,height:n}}function Xt(t,e){var i=e.getRootNode&&e.getRootNode();if(t.contains(e))return!0;if(i&&qt(i)){var n=e;do{if(n&&t.isSameNode(n))return!0;n=n.parentNode||n.host}while(n)}return!1}function Yt(t){return Wt(t).getComputedStyle(t)}function Qt(t){return["table","td","th"].indexOf(Rt(t))>=0}function Gt(t){return(($t(t)?t.ownerDocument:t.document)||window.document).documentElement}function Zt(t){return"html"===Rt(t)?t:t.assignedSlot||t.parentNode||(qt(t)?t.host:null)||Gt(t)}function Jt(t){return zt(t)&&"fixed"!==Yt(t).position?t.offsetParent:null}function te(t){for(var e=Wt(t),i=Jt(t);i&&Qt(i)&&"static"===Yt(i).position;)i=Jt(i);return i&&("html"===Rt(i)||"body"===Rt(i)&&"static"===Yt(i).position)?e:i||function(t){var e=-1!==navigator.userAgent.toLowerCase().indexOf("firefox");if(-1!==navigator.userAgent.indexOf("Trident")&&zt(t)&&"fixed"===Yt(t).position)return null;for(var i=Zt(t);zt(i)&&["html","body"].indexOf(Rt(i))<0;){var n=Yt(i);if("none"!==n.transform||"none"!==n.perspective||"paint"===n.contain||-1!==["transform","perspective"].indexOf(n.willChange)||e&&"filter"===n.willChange||e&&n.filter&&"none"!==n.filter)return i;i=i.parentNode}return null}(t)||e}function ee(t){return["top","bottom"].indexOf(t)>=0?"x":"y"}var ie=Math.max,ne=Math.min,se=Math.round;function oe(t,e,i){return ie(t,ne(e,i))}function re(t){return Object.assign({},{top:0,right:0,bottom:0,left:0},t)}function ae(t,e){return e.reduce((function(e,i){return e[i]=t,e}),{})}const le={name:"arrow",enabled:!0,phase:"main",fn:function(t){var e,i=t.state,n=t.name,s=t.options,o=i.elements.arrow,r=i.modifiersData.popperOffsets,a=Ut(i.placement),l=ee(a),c=[bt,_t].indexOf(a)>=0?"height":"width";if(o&&r){var h=function(t,e){return re("number"!=typeof(t="function"==typeof 
t?t(Object.assign({},e.rects,{placement:e.placement})):t)?t:ae(t,yt))}(s.padding,i),d=Kt(o),u="y"===l?mt:bt,f="y"===l?gt:_t,p=i.rects.reference[c]+i.rects.reference[l]-r[l]-i.rects.popper[c],m=r[l]-i.rects.reference[l],g=te(o),_=g?"y"===l?g.clientHeight||0:g.clientWidth||0:0,b=p/2-m/2,v=h[u],y=_-d[c]-h[f],w=_/2-d[c]/2+b,E=oe(v,w,y),A=l;i.modifiersData[n]=((e={})[A]=E,e.centerOffset=E-w,e)}},effect:function(t){var e=t.state,i=t.options.element,n=void 0===i?"[data-popper-arrow]":i;null!=n&&("string"!=typeof n||(n=e.elements.popper.querySelector(n)))&&Xt(e.elements.popper,n)&&(e.elements.arrow=n)},requires:["popperOffsets"],requiresIfExists:["preventOverflow"]};function ce(t){return t.split("-")[1]}var he={top:"auto",right:"auto",bottom:"auto",left:"auto"};function de(t){var e,i=t.popper,n=t.popperRect,s=t.placement,o=t.variation,r=t.offsets,a=t.position,l=t.gpuAcceleration,c=t.adaptive,h=t.roundOffsets,d=!0===h?function(t){var e=t.x,i=t.y,n=window.devicePixelRatio||1;return{x:se(se(e*n)/n)||0,y:se(se(i*n)/n)||0}}(r):"function"==typeof h?h(r):r,u=d.x,f=void 0===u?0:u,p=d.y,m=void 0===p?0:p,g=r.hasOwnProperty("x"),_=r.hasOwnProperty("y"),b=bt,v=mt,y=window;if(c){var w=te(i),E="clientHeight",A="clientWidth";w===Wt(i)&&"static"!==Yt(w=Gt(i)).position&&"absolute"===a&&(E="scrollHeight",A="scrollWidth"),w=w,s!==mt&&(s!==bt&&s!==_t||o!==Et)||(v=gt,m-=w[E]-n.height,m*=l?1:-1),s!==bt&&(s!==mt&&s!==gt||o!==Et)||(b=_t,f-=w[A]-n.width,f*=l?1:-1)}var T,O=Object.assign({position:a},c&&he);return l?Object.assign({},O,((T={})[v]=_?"0":"",T[b]=g?"0":"",T.transform=(y.devicePixelRatio||1)<=1?"translate("+f+"px, "+m+"px)":"translate3d("+f+"px, "+m+"px, 0)",T)):Object.assign({},O,((e={})[v]=_?m+"px":"",e[b]=g?f+"px":"",e.transform="",e))}const ue={name:"computeStyles",enabled:!0,phase:"beforeWrite",fn:function(t){var e=t.state,i=t.options,n=i.gpuAcceleration,s=void 0===n||n,o=i.adaptive,r=void 0===o||o,a=i.roundOffsets,l=void 0===a||a,c={placement:Ut(e.placement),variation:ce(e.placement),popper:e.elements.popper,popperRect:e.rects.popper,gpuAcceleration:s};null!=e.modifiersData.popperOffsets&&(e.styles.popper=Object.assign({},e.styles.popper,de(Object.assign({},c,{offsets:e.modifiersData.popperOffsets,position:e.options.strategy,adaptive:r,roundOffsets:l})))),null!=e.modifiersData.arrow&&(e.styles.arrow=Object.assign({},e.styles.arrow,de(Object.assign({},c,{offsets:e.modifiersData.arrow,position:"absolute",adaptive:!1,roundOffsets:l})))),e.attributes.popper=Object.assign({},e.attributes.popper,{"data-popper-placement":e.placement})},data:{}};var fe={passive:!0};const pe={name:"eventListeners",enabled:!0,phase:"write",fn:function(){},effect:function(t){var e=t.state,i=t.instance,n=t.options,s=n.scroll,o=void 0===s||s,r=n.resize,a=void 0===r||r,l=Wt(e.elements.popper),c=[].concat(e.scrollParents.reference,e.scrollParents.popper);return o&&c.forEach((function(t){t.addEventListener("scroll",i.update,fe)})),a&&l.addEventListener("resize",i.update,fe),function(){o&&c.forEach((function(t){t.removeEventListener("scroll",i.update,fe)})),a&&l.removeEventListener("resize",i.update,fe)}},data:{}};var me={left:"right",right:"left",bottom:"top",top:"bottom"};function ge(t){return t.replace(/left|right|bottom|top/g,(function(t){return me[t]}))}var _e={start:"end",end:"start"};function be(t){return t.replace(/start|end/g,(function(t){return _e[t]}))}function ve(t){var e=Wt(t);return{scrollLeft:e.pageXOffset,scrollTop:e.pageYOffset}}function ye(t){return Vt(Gt(t)).left+ve(t).scrollLeft}function we(t){var 
e=Yt(t),i=e.overflow,n=e.overflowX,s=e.overflowY;return/auto|scroll|overlay|hidden/.test(i+s+n)}function Ee(t){return["html","body","#document"].indexOf(Rt(t))>=0?t.ownerDocument.body:zt(t)&&we(t)?t:Ee(Zt(t))}function Ae(t,e){var i;void 0===e&&(e=[]);var n=Ee(t),s=n===(null==(i=t.ownerDocument)?void 0:i.body),o=Wt(n),r=s?[o].concat(o.visualViewport||[],we(n)?n:[]):n,a=e.concat(r);return s?a:a.concat(Ae(Zt(r)))}function Te(t){return Object.assign({},t,{left:t.x,top:t.y,right:t.x+t.width,bottom:t.y+t.height})}function Oe(t,e){return e===Tt?Te(function(t){var e=Wt(t),i=Gt(t),n=e.visualViewport,s=i.clientWidth,o=i.clientHeight,r=0,a=0;return n&&(s=n.width,o=n.height,/^((?!chrome|android).)*safari/i.test(navigator.userAgent)||(r=n.offsetLeft,a=n.offsetTop)),{width:s,height:o,x:r+ye(t),y:a}}(t)):zt(e)?function(t){var e=Vt(t);return e.top=e.top+t.clientTop,e.left=e.left+t.clientLeft,e.bottom=e.top+t.clientHeight,e.right=e.left+t.clientWidth,e.width=t.clientWidth,e.height=t.clientHeight,e.x=e.left,e.y=e.top,e}(e):Te(function(t){var e,i=Gt(t),n=ve(t),s=null==(e=t.ownerDocument)?void 0:e.body,o=ie(i.scrollWidth,i.clientWidth,s?s.scrollWidth:0,s?s.clientWidth:0),r=ie(i.scrollHeight,i.clientHeight,s?s.scrollHeight:0,s?s.clientHeight:0),a=-n.scrollLeft+ye(t),l=-n.scrollTop;return"rtl"===Yt(s||i).direction&&(a+=ie(i.clientWidth,s?s.clientWidth:0)-o),{width:o,height:r,x:a,y:l}}(Gt(t)))}function Ce(t){var e,i=t.reference,n=t.element,s=t.placement,o=s?Ut(s):null,r=s?ce(s):null,a=i.x+i.width/2-n.width/2,l=i.y+i.height/2-n.height/2;switch(o){case mt:e={x:a,y:i.y-n.height};break;case gt:e={x:a,y:i.y+i.height};break;case _t:e={x:i.x+i.width,y:l};break;case bt:e={x:i.x-n.width,y:l};break;default:e={x:i.x,y:i.y}}var c=o?ee(o):null;if(null!=c){var h="y"===c?"height":"width";switch(r){case wt:e[c]=e[c]-(i[h]/2-n[h]/2);break;case Et:e[c]=e[c]+(i[h]/2-n[h]/2)}}return e}function ke(t,e){void 0===e&&(e={});var i=e,n=i.placement,s=void 0===n?t.placement:n,o=i.boundary,r=void 0===o?At:o,a=i.rootBoundary,l=void 0===a?Tt:a,c=i.elementContext,h=void 0===c?Ot:c,d=i.altBoundary,u=void 0!==d&&d,f=i.padding,p=void 0===f?0:f,m=re("number"!=typeof p?p:ae(p,yt)),g=h===Ot?Ct:Ot,_=t.rects.popper,b=t.elements[u?g:h],v=function(t,e,i){var n="clippingParents"===e?function(t){var e=Ae(Zt(t)),i=["absolute","fixed"].indexOf(Yt(t).position)>=0&&zt(t)?te(t):t;return $t(i)?e.filter((function(t){return $t(t)&&Xt(t,i)&&"body"!==Rt(t)})):[]}(t):[].concat(e),s=[].concat(n,[i]),o=s[0],r=s.reduce((function(e,i){var n=Oe(t,i);return e.top=ie(n.top,e.top),e.right=ne(n.right,e.right),e.bottom=ne(n.bottom,e.bottom),e.left=ie(n.left,e.left),e}),Oe(t,o));return r.width=r.right-r.left,r.height=r.bottom-r.top,r.x=r.left,r.y=r.top,r}($t(b)?b:b.contextElement||Gt(t.elements.popper),r,l),y=Vt(t.elements.reference),w=Ce({reference:y,element:_,strategy:"absolute",placement:s}),E=Te(Object.assign({},_,w)),A=h===Ot?E:y,T={top:v.top-A.top+m.top,bottom:A.bottom-v.bottom+m.bottom,left:v.left-A.left+m.left,right:A.right-v.right+m.right},O=t.modifiersData.offset;if(h===Ot&&O){var C=O[s];Object.keys(T).forEach((function(t){var e=[_t,gt].indexOf(t)>=0?1:-1,i=[mt,gt].indexOf(t)>=0?"y":"x";T[t]+=C[i]*e}))}return T}function Le(t,e){void 0===e&&(e={});var i=e,n=i.placement,s=i.boundary,o=i.rootBoundary,r=i.padding,a=i.flipVariations,l=i.allowedAutoPlacements,c=void 0===l?Lt:l,h=ce(n),d=h?a?kt:kt.filter((function(t){return ce(t)===h})):yt,u=d.filter((function(t){return c.indexOf(t)>=0}));0===u.length&&(u=d);var f=u.reduce((function(e,i){return 
e[i]=ke(t,{placement:i,boundary:s,rootBoundary:o,padding:r})[Ut(i)],e}),{});return Object.keys(f).sort((function(t,e){return f[t]-f[e]}))}const xe={name:"flip",enabled:!0,phase:"main",fn:function(t){var e=t.state,i=t.options,n=t.name;if(!e.modifiersData[n]._skip){for(var s=i.mainAxis,o=void 0===s||s,r=i.altAxis,a=void 0===r||r,l=i.fallbackPlacements,c=i.padding,h=i.boundary,d=i.rootBoundary,u=i.altBoundary,f=i.flipVariations,p=void 0===f||f,m=i.allowedAutoPlacements,g=e.options.placement,_=Ut(g),b=l||(_!==g&&p?function(t){if(Ut(t)===vt)return[];var e=ge(t);return[be(t),e,be(e)]}(g):[ge(g)]),v=[g].concat(b).reduce((function(t,i){return t.concat(Ut(i)===vt?Le(e,{placement:i,boundary:h,rootBoundary:d,padding:c,flipVariations:p,allowedAutoPlacements:m}):i)}),[]),y=e.rects.reference,w=e.rects.popper,E=new Map,A=!0,T=v[0],O=0;O=0,D=x?"width":"height",S=ke(e,{placement:C,boundary:h,rootBoundary:d,altBoundary:u,padding:c}),N=x?L?_t:bt:L?gt:mt;y[D]>w[D]&&(N=ge(N));var I=ge(N),P=[];if(o&&P.push(S[k]<=0),a&&P.push(S[N]<=0,S[I]<=0),P.every((function(t){return t}))){T=C,A=!1;break}E.set(C,P)}if(A)for(var j=function(t){var e=v.find((function(e){var i=E.get(e);if(i)return i.slice(0,t).every((function(t){return t}))}));if(e)return T=e,"break"},M=p?3:1;M>0&&"break"!==j(M);M--);e.placement!==T&&(e.modifiersData[n]._skip=!0,e.placement=T,e.reset=!0)}},requiresIfExists:["offset"],data:{_skip:!1}};function De(t,e,i){return void 0===i&&(i={x:0,y:0}),{top:t.top-e.height-i.y,right:t.right-e.width+i.x,bottom:t.bottom-e.height+i.y,left:t.left-e.width-i.x}}function Se(t){return[mt,_t,gt,bt].some((function(e){return t[e]>=0}))}const Ne={name:"hide",enabled:!0,phase:"main",requiresIfExists:["preventOverflow"],fn:function(t){var e=t.state,i=t.name,n=e.rects.reference,s=e.rects.popper,o=e.modifiersData.preventOverflow,r=ke(e,{elementContext:"reference"}),a=ke(e,{altBoundary:!0}),l=De(r,n),c=De(a,s,o),h=Se(l),d=Se(c);e.modifiersData[i]={referenceClippingOffsets:l,popperEscapeOffsets:c,isReferenceHidden:h,hasPopperEscaped:d},e.attributes.popper=Object.assign({},e.attributes.popper,{"data-popper-reference-hidden":h,"data-popper-escaped":d})}},Ie={name:"offset",enabled:!0,phase:"main",requires:["popperOffsets"],fn:function(t){var e=t.state,i=t.options,n=t.name,s=i.offset,o=void 0===s?[0,0]:s,r=Lt.reduce((function(t,i){return t[i]=function(t,e,i){var n=Ut(t),s=[bt,mt].indexOf(n)>=0?-1:1,o="function"==typeof i?i(Object.assign({},e,{placement:t})):i,r=o[0],a=o[1];return r=r||0,a=(a||0)*s,[bt,_t].indexOf(n)>=0?{x:a,y:r}:{x:r,y:a}}(i,e.rects,o),t}),{}),a=r[e.placement],l=a.x,c=a.y;null!=e.modifiersData.popperOffsets&&(e.modifiersData.popperOffsets.x+=l,e.modifiersData.popperOffsets.y+=c),e.modifiersData[n]=r}},Pe={name:"popperOffsets",enabled:!0,phase:"read",fn:function(t){var e=t.state,i=t.name;e.modifiersData[i]=Ce({reference:e.rects.reference,element:e.rects.popper,strategy:"absolute",placement:e.placement})},data:{}},je={name:"preventOverflow",enabled:!0,phase:"main",fn:function(t){var e=t.state,i=t.options,n=t.name,s=i.mainAxis,o=void 0===s||s,r=i.altAxis,a=void 0!==r&&r,l=i.boundary,c=i.rootBoundary,h=i.altBoundary,d=i.padding,u=i.tether,f=void 0===u||u,p=i.tetherOffset,m=void 0===p?0:p,g=ke(e,{boundary:l,rootBoundary:c,padding:d,altBoundary:h}),_=Ut(e.placement),b=ce(e.placement),v=!b,y=ee(_),w="x"===y?"y":"x",E=e.modifiersData.popperOffsets,A=e.rects.reference,T=e.rects.popper,O="function"==typeof m?m(Object.assign({},e.rects,{placement:e.placement})):m,C={x:0,y:0};if(E){if(o||a){var 
k="y"===y?mt:bt,L="y"===y?gt:_t,x="y"===y?"height":"width",D=E[y],S=E[y]+g[k],N=E[y]-g[L],I=f?-T[x]/2:0,P=b===wt?A[x]:T[x],j=b===wt?-T[x]:-A[x],M=e.elements.arrow,H=f&&M?Kt(M):{width:0,height:0},B=e.modifiersData["arrow#persistent"]?e.modifiersData["arrow#persistent"].padding:{top:0,right:0,bottom:0,left:0},R=B[k],W=B[L],$=oe(0,A[x],H[x]),z=v?A[x]/2-I-$-R-O:P-$-R-O,q=v?-A[x]/2+I+$+W+O:j+$+W+O,F=e.elements.arrow&&te(e.elements.arrow),U=F?"y"===y?F.clientTop||0:F.clientLeft||0:0,V=e.modifiersData.offset?e.modifiersData.offset[e.placement][y]:0,K=E[y]+z-V-U,X=E[y]+q-V;if(o){var Y=oe(f?ne(S,K):S,D,f?ie(N,X):N);E[y]=Y,C[y]=Y-D}if(a){var Q="x"===y?mt:bt,G="x"===y?gt:_t,Z=E[w],J=Z+g[Q],tt=Z-g[G],et=oe(f?ne(J,K):J,Z,f?ie(tt,X):tt);E[w]=et,C[w]=et-Z}}e.modifiersData[n]=C}},requiresIfExists:["offset"]};function Me(t,e,i){void 0===i&&(i=!1);var n=zt(e);zt(e)&&function(t){var e=t.getBoundingClientRect();e.width,t.offsetWidth,e.height,t.offsetHeight}(e);var s,o,r=Gt(e),a=Vt(t),l={scrollLeft:0,scrollTop:0},c={x:0,y:0};return(n||!n&&!i)&&(("body"!==Rt(e)||we(r))&&(l=(s=e)!==Wt(s)&&zt(s)?{scrollLeft:(o=s).scrollLeft,scrollTop:o.scrollTop}:ve(s)),zt(e)?((c=Vt(e)).x+=e.clientLeft,c.y+=e.clientTop):r&&(c.x=ye(r))),{x:a.left+l.scrollLeft-c.x,y:a.top+l.scrollTop-c.y,width:a.width,height:a.height}}function He(t){var e=new Map,i=new Set,n=[];function s(t){i.add(t.name),[].concat(t.requires||[],t.requiresIfExists||[]).forEach((function(t){if(!i.has(t)){var n=e.get(t);n&&s(n)}})),n.push(t)}return t.forEach((function(t){e.set(t.name,t)})),t.forEach((function(t){i.has(t.name)||s(t)})),n}var Be={placement:"bottom",modifiers:[],strategy:"absolute"};function Re(){for(var t=arguments.length,e=new Array(t),i=0;ij.on(t,"mouseover",d))),this._element.focus(),this._element.setAttribute("aria-expanded",!0),this._menu.classList.add(Je),this._element.classList.add(Je),j.trigger(this._element,"shown.bs.dropdown",t)}hide(){if(c(this._element)||!this._isShown(this._menu))return;const t={relatedTarget:this._element};this._completeHide(t)}dispose(){this._popper&&this._popper.destroy(),super.dispose()}update(){this._inNavbar=this._detectNavbar(),this._popper&&this._popper.update()}_completeHide(t){j.trigger(this._element,"hide.bs.dropdown",t).defaultPrevented||("ontouchstart"in document.documentElement&&[].concat(...document.body.children).forEach((t=>j.off(t,"mouseover",d))),this._popper&&this._popper.destroy(),this._menu.classList.remove(Je),this._element.classList.remove(Je),this._element.setAttribute("aria-expanded","false"),U.removeDataAttribute(this._menu,"popper"),j.trigger(this._element,"hidden.bs.dropdown",t))}_getConfig(t){if(t={...this.constructor.Default,...U.getDataAttributes(this._element),...t},a(Ue,t,this.constructor.DefaultType),"object"==typeof t.reference&&!o(t.reference)&&"function"!=typeof t.reference.getBoundingClientRect)throw new TypeError(`${Ue.toUpperCase()}: Option "reference" provided type "object" without a required "getBoundingClientRect" method.`);return t}_createPopper(t){if(void 0===Fe)throw new TypeError("Bootstrap's dropdowns require Popper (https://popper.js.org)");let e=this._element;"parent"===this._config.reference?e=t:o(this._config.reference)?e=r(this._config.reference):"object"==typeof this._config.reference&&(e=this._config.reference);const i=this._getPopperConfig(),n=i.modifiers.find((t=>"applyStyles"===t.name&&!1===t.enabled));this._popper=qe(e,this._menu,i),n&&U.setDataAttribute(this._menu,"popper","static")}_isShown(t=this._element){return 
t.classList.contains(Je)}_getMenuElement(){return V.next(this._element,ei)[0]}_getPlacement(){const t=this._element.parentNode;if(t.classList.contains("dropend"))return ri;if(t.classList.contains("dropstart"))return ai;const e="end"===getComputedStyle(this._menu).getPropertyValue("--bs-position").trim();return t.classList.contains("dropup")?e?ni:ii:e?oi:si}_detectNavbar(){return null!==this._element.closest(".navbar")}_getOffset(){const{offset:t}=this._config;return"string"==typeof t?t.split(",").map((t=>Number.parseInt(t,10))):"function"==typeof t?e=>t(e,this._element):t}_getPopperConfig(){const t={placement:this._getPlacement(),modifiers:[{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"offset",options:{offset:this._getOffset()}}]};return"static"===this._config.display&&(t.modifiers=[{name:"applyStyles",enabled:!1}]),{...t,..."function"==typeof this._config.popperConfig?this._config.popperConfig(t):this._config.popperConfig}}_selectMenuItem({key:t,target:e}){const i=V.find(".dropdown-menu .dropdown-item:not(.disabled):not(:disabled)",this._menu).filter(l);i.length&&v(i,e,t===Ye,!i.includes(e)).focus()}static jQueryInterface(t){return this.each((function(){const e=hi.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}static clearMenus(t){if(t&&(2===t.button||"keyup"===t.type&&"Tab"!==t.key))return;const e=V.find(ti);for(let i=0,n=e.length;ie+t)),this._setElementAttributes(di,"paddingRight",(e=>e+t)),this._setElementAttributes(ui,"marginRight",(e=>e-t))}_disableOverFlow(){this._saveInitialAttribute(this._element,"overflow"),this._element.style.overflow="hidden"}_setElementAttributes(t,e,i){const n=this.getWidth();this._applyManipulationCallback(t,(t=>{if(t!==this._element&&window.innerWidth>t.clientWidth+n)return;this._saveInitialAttribute(t,e);const s=window.getComputedStyle(t)[e];t.style[e]=`${i(Number.parseFloat(s))}px`}))}reset(){this._resetElementAttributes(this._element,"overflow"),this._resetElementAttributes(this._element,"paddingRight"),this._resetElementAttributes(di,"paddingRight"),this._resetElementAttributes(ui,"marginRight")}_saveInitialAttribute(t,e){const i=t.style[e];i&&U.setDataAttribute(t,e,i)}_resetElementAttributes(t,e){this._applyManipulationCallback(t,(t=>{const i=U.getDataAttribute(t,e);void 0===i?t.style.removeProperty(e):(U.removeDataAttribute(t,e),t.style[e]=i)}))}_applyManipulationCallback(t,e){o(t)?e(t):V.find(t,this._element).forEach(e)}isOverflowing(){return this.getWidth()>0}}const pi={className:"modal-backdrop",isVisible:!0,isAnimated:!1,rootElement:"body",clickCallback:null},mi={className:"string",isVisible:"boolean",isAnimated:"boolean",rootElement:"(element|string)",clickCallback:"(function|null)"},gi="show",_i="mousedown.bs.backdrop";class bi{constructor(t){this._config=this._getConfig(t),this._isAppended=!1,this._element=null}show(t){this._config.isVisible?(this._append(),this._config.isAnimated&&u(this._getElement()),this._getElement().classList.add(gi),this._emulateAnimation((()=>{_(t)}))):_(t)}hide(t){this._config.isVisible?(this._getElement().classList.remove(gi),this._emulateAnimation((()=>{this.dispose(),_(t)}))):_(t)}_getElement(){if(!this._element){const t=document.createElement("div");t.className=this._config.className,this._config.isAnimated&&t.classList.add("fade"),this._element=t}return this._element}_getConfig(t){return(t={...pi,..."object"==typeof 
t?t:{}}).rootElement=r(t.rootElement),a("backdrop",t,mi),t}_append(){this._isAppended||(this._config.rootElement.append(this._getElement()),j.on(this._getElement(),_i,(()=>{_(this._config.clickCallback)})),this._isAppended=!0)}dispose(){this._isAppended&&(j.off(this._element,_i),this._element.remove(),this._isAppended=!1)}_emulateAnimation(t){b(t,this._getElement(),this._config.isAnimated)}}const vi={trapElement:null,autofocus:!0},yi={trapElement:"element",autofocus:"boolean"},wi=".bs.focustrap",Ei="backward";class Ai{constructor(t){this._config=this._getConfig(t),this._isActive=!1,this._lastTabNavDirection=null}activate(){const{trapElement:t,autofocus:e}=this._config;this._isActive||(e&&t.focus(),j.off(document,wi),j.on(document,"focusin.bs.focustrap",(t=>this._handleFocusin(t))),j.on(document,"keydown.tab.bs.focustrap",(t=>this._handleKeydown(t))),this._isActive=!0)}deactivate(){this._isActive&&(this._isActive=!1,j.off(document,wi))}_handleFocusin(t){const{target:e}=t,{trapElement:i}=this._config;if(e===document||e===i||i.contains(e))return;const n=V.focusableChildren(i);0===n.length?i.focus():this._lastTabNavDirection===Ei?n[n.length-1].focus():n[0].focus()}_handleKeydown(t){"Tab"===t.key&&(this._lastTabNavDirection=t.shiftKey?Ei:"forward")}_getConfig(t){return t={...vi,..."object"==typeof t?t:{}},a("focustrap",t,yi),t}}const Ti="modal",Oi="Escape",Ci={backdrop:!0,keyboard:!0,focus:!0},ki={backdrop:"(boolean|string)",keyboard:"boolean",focus:"boolean"},Li="hidden.bs.modal",xi="show.bs.modal",Di="resize.bs.modal",Si="click.dismiss.bs.modal",Ni="keydown.dismiss.bs.modal",Ii="mousedown.dismiss.bs.modal",Pi="modal-open",ji="show",Mi="modal-static";class Hi extends B{constructor(t,e){super(t),this._config=this._getConfig(e),this._dialog=V.findOne(".modal-dialog",this._element),this._backdrop=this._initializeBackDrop(),this._focustrap=this._initializeFocusTrap(),this._isShown=!1,this._ignoreBackdropClick=!1,this._isTransitioning=!1,this._scrollBar=new fi}static get Default(){return Ci}static get NAME(){return Ti}toggle(t){return this._isShown?this.hide():this.show(t)}show(t){this._isShown||this._isTransitioning||j.trigger(this._element,xi,{relatedTarget:t}).defaultPrevented||(this._isShown=!0,this._isAnimated()&&(this._isTransitioning=!0),this._scrollBar.hide(),document.body.classList.add(Pi),this._adjustDialog(),this._setEscapeEvent(),this._setResizeEvent(),j.on(this._dialog,Ii,(()=>{j.one(this._element,"mouseup.dismiss.bs.modal",(t=>{t.target===this._element&&(this._ignoreBackdropClick=!0)}))})),this._showBackdrop((()=>this._showElement(t))))}hide(){if(!this._isShown||this._isTransitioning)return;if(j.trigger(this._element,"hide.bs.modal").defaultPrevented)return;this._isShown=!1;const t=this._isAnimated();t&&(this._isTransitioning=!0),this._setEscapeEvent(),this._setResizeEvent(),this._focustrap.deactivate(),this._element.classList.remove(ji),j.off(this._element,Si),j.off(this._dialog,Ii),this._queueCallback((()=>this._hideModal()),this._element,t)}dispose(){[window,this._dialog].forEach((t=>j.off(t,".bs.modal"))),this._backdrop.dispose(),this._focustrap.deactivate(),super.dispose()}handleUpdate(){this._adjustDialog()}_initializeBackDrop(){return new bi({isVisible:Boolean(this._config.backdrop),isAnimated:this._isAnimated()})}_initializeFocusTrap(){return new Ai({trapElement:this._element})}_getConfig(t){return t={...Ci,...U.getDataAttributes(this._element),..."object"==typeof t?t:{}},a(Ti,t,ki),t}_showElement(t){const 
e=this._isAnimated(),i=V.findOne(".modal-body",this._dialog);this._element.parentNode&&this._element.parentNode.nodeType===Node.ELEMENT_NODE||document.body.append(this._element),this._element.style.display="block",this._element.removeAttribute("aria-hidden"),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.scrollTop=0,i&&(i.scrollTop=0),e&&u(this._element),this._element.classList.add(ji),this._queueCallback((()=>{this._config.focus&&this._focustrap.activate(),this._isTransitioning=!1,j.trigger(this._element,"shown.bs.modal",{relatedTarget:t})}),this._dialog,e)}_setEscapeEvent(){this._isShown?j.on(this._element,Ni,(t=>{this._config.keyboard&&t.key===Oi?(t.preventDefault(),this.hide()):this._config.keyboard||t.key!==Oi||this._triggerBackdropTransition()})):j.off(this._element,Ni)}_setResizeEvent(){this._isShown?j.on(window,Di,(()=>this._adjustDialog())):j.off(window,Di)}_hideModal(){this._element.style.display="none",this._element.setAttribute("aria-hidden",!0),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._isTransitioning=!1,this._backdrop.hide((()=>{document.body.classList.remove(Pi),this._resetAdjustments(),this._scrollBar.reset(),j.trigger(this._element,Li)}))}_showBackdrop(t){j.on(this._element,Si,(t=>{this._ignoreBackdropClick?this._ignoreBackdropClick=!1:t.target===t.currentTarget&&(!0===this._config.backdrop?this.hide():"static"===this._config.backdrop&&this._triggerBackdropTransition())})),this._backdrop.show(t)}_isAnimated(){return this._element.classList.contains("fade")}_triggerBackdropTransition(){if(j.trigger(this._element,"hidePrevented.bs.modal").defaultPrevented)return;const{classList:t,scrollHeight:e,style:i}=this._element,n=e>document.documentElement.clientHeight;!n&&"hidden"===i.overflowY||t.contains(Mi)||(n||(i.overflowY="hidden"),t.add(Mi),this._queueCallback((()=>{t.remove(Mi),n||this._queueCallback((()=>{i.overflowY=""}),this._dialog)}),this._dialog),this._element.focus())}_adjustDialog(){const t=this._element.scrollHeight>document.documentElement.clientHeight,e=this._scrollBar.getWidth(),i=e>0;(!i&&t&&!m()||i&&!t&&m())&&(this._element.style.paddingLeft=`${e}px`),(i&&!t&&!m()||!i&&t&&m())&&(this._element.style.paddingRight=`${e}px`)}_resetAdjustments(){this._element.style.paddingLeft="",this._element.style.paddingRight=""}static jQueryInterface(t,e){return this.each((function(){const i=Hi.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===i[t])throw new TypeError(`No method named "${t}"`);i[t](e)}}))}}j.on(document,"click.bs.modal.data-api",'[data-bs-toggle="modal"]',(function(t){const e=n(this);["A","AREA"].includes(this.tagName)&&t.preventDefault(),j.one(e,xi,(t=>{t.defaultPrevented||j.one(e,Li,(()=>{l(this)&&this.focus()}))}));const i=V.findOne(".modal.show");i&&Hi.getInstance(i).hide(),Hi.getOrCreateInstance(e).toggle(this)})),R(Hi),g(Hi);const Bi="offcanvas",Ri={backdrop:!0,keyboard:!0,scroll:!1},Wi={backdrop:"boolean",keyboard:"boolean",scroll:"boolean"},$i="show",zi=".offcanvas.show",qi="hidden.bs.offcanvas";class Fi extends B{constructor(t,e){super(t),this._config=this._getConfig(e),this._isShown=!1,this._backdrop=this._initializeBackDrop(),this._focustrap=this._initializeFocusTrap(),this._addEventListeners()}static get NAME(){return Bi}static get Default(){return Ri}toggle(t){return 
this._isShown?this.hide():this.show(t)}show(t){this._isShown||j.trigger(this._element,"show.bs.offcanvas",{relatedTarget:t}).defaultPrevented||(this._isShown=!0,this._element.style.visibility="visible",this._backdrop.show(),this._config.scroll||(new fi).hide(),this._element.removeAttribute("aria-hidden"),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.classList.add($i),this._queueCallback((()=>{this._config.scroll||this._focustrap.activate(),j.trigger(this._element,"shown.bs.offcanvas",{relatedTarget:t})}),this._element,!0))}hide(){this._isShown&&(j.trigger(this._element,"hide.bs.offcanvas").defaultPrevented||(this._focustrap.deactivate(),this._element.blur(),this._isShown=!1,this._element.classList.remove($i),this._backdrop.hide(),this._queueCallback((()=>{this._element.setAttribute("aria-hidden",!0),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._element.style.visibility="hidden",this._config.scroll||(new fi).reset(),j.trigger(this._element,qi)}),this._element,!0)))}dispose(){this._backdrop.dispose(),this._focustrap.deactivate(),super.dispose()}_getConfig(t){return t={...Ri,...U.getDataAttributes(this._element),..."object"==typeof t?t:{}},a(Bi,t,Wi),t}_initializeBackDrop(){return new bi({className:"offcanvas-backdrop",isVisible:this._config.backdrop,isAnimated:!0,rootElement:this._element.parentNode,clickCallback:()=>this.hide()})}_initializeFocusTrap(){return new Ai({trapElement:this._element})}_addEventListeners(){j.on(this._element,"keydown.dismiss.bs.offcanvas",(t=>{this._config.keyboard&&"Escape"===t.key&&this.hide()}))}static jQueryInterface(t){return this.each((function(){const e=Fi.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}j.on(document,"click.bs.offcanvas.data-api",'[data-bs-toggle="offcanvas"]',(function(t){const e=n(this);if(["A","AREA"].includes(this.tagName)&&t.preventDefault(),c(this))return;j.one(e,qi,(()=>{l(this)&&this.focus()}));const i=V.findOne(zi);i&&i!==e&&Fi.getInstance(i).hide(),Fi.getOrCreateInstance(e).toggle(this)})),j.on(window,"load.bs.offcanvas.data-api",(()=>V.find(zi).forEach((t=>Fi.getOrCreateInstance(t).show())))),R(Fi),g(Fi);const Ui=new Set(["background","cite","href","itemtype","longdesc","poster","src","xlink:href"]),Vi=/^(?:(?:https?|mailto|ftp|tel|file|sms):|[^#&/:?]*(?:[#/?]|$))/i,Ki=/^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[\d+/a-z]+=*$/i,Xi=(t,e)=>{const i=t.nodeName.toLowerCase();if(e.includes(i))return!Ui.has(i)||Boolean(Vi.test(t.nodeValue)||Ki.test(t.nodeValue));const n=e.filter((t=>t instanceof RegExp));for(let t=0,e=n.length;t{Xi(t,r)||i.removeAttribute(t.nodeName)}))}return n.body.innerHTML}const Qi="tooltip",Gi=new Set(["sanitize","allowList","sanitizeFn"]),Zi={animation:"boolean",template:"string",title:"(string|element|function)",trigger:"string",delay:"(number|object)",html:"boolean",selector:"(string|boolean)",placement:"(string|function)",offset:"(array|string|function)",container:"(string|element|boolean)",fallbackPlacements:"array",boundary:"(string|element)",customClass:"(string|function)",sanitize:"boolean",sanitizeFn:"(null|function)",allowList:"object",popperConfig:"(null|object|function)"},Ji={AUTO:"auto",TOP:"top",RIGHT:m()?"left":"right",BOTTOM:"bottom",LEFT:m()?"right":"left"},tn={animation:!0,template:'',trigger:"hover 
focus",title:"",delay:0,html:!1,selector:!1,placement:"top",offset:[0,0],container:!1,fallbackPlacements:["top","right","bottom","left"],boundary:"clippingParents",customClass:"",sanitize:!0,sanitizeFn:null,allowList:{"*":["class","dir","id","lang","role",/^aria-[\w-]*$/i],a:["target","href","title","rel"],area:[],b:[],br:[],col:[],code:[],div:[],em:[],hr:[],h1:[],h2:[],h3:[],h4:[],h5:[],h6:[],i:[],img:["src","srcset","alt","title","width","height"],li:[],ol:[],p:[],pre:[],s:[],small:[],span:[],sub:[],sup:[],strong:[],u:[],ul:[]},popperConfig:null},en={HIDE:"hide.bs.tooltip",HIDDEN:"hidden.bs.tooltip",SHOW:"show.bs.tooltip",SHOWN:"shown.bs.tooltip",INSERTED:"inserted.bs.tooltip",CLICK:"click.bs.tooltip",FOCUSIN:"focusin.bs.tooltip",FOCUSOUT:"focusout.bs.tooltip",MOUSEENTER:"mouseenter.bs.tooltip",MOUSELEAVE:"mouseleave.bs.tooltip"},nn="fade",sn="show",on="show",rn="out",an=".tooltip-inner",ln=".modal",cn="hide.bs.modal",hn="hover",dn="focus";class un extends B{constructor(t,e){if(void 0===Fe)throw new TypeError("Bootstrap's tooltips require Popper (https://popper.js.org)");super(t),this._isEnabled=!0,this._timeout=0,this._hoverState="",this._activeTrigger={},this._popper=null,this._config=this._getConfig(e),this.tip=null,this._setListeners()}static get Default(){return tn}static get NAME(){return Qi}static get Event(){return en}static get DefaultType(){return Zi}enable(){this._isEnabled=!0}disable(){this._isEnabled=!1}toggleEnabled(){this._isEnabled=!this._isEnabled}toggle(t){if(this._isEnabled)if(t){const e=this._initializeOnDelegatedTarget(t);e._activeTrigger.click=!e._activeTrigger.click,e._isWithActiveTrigger()?e._enter(null,e):e._leave(null,e)}else{if(this.getTipElement().classList.contains(sn))return void this._leave(null,this);this._enter(null,this)}}dispose(){clearTimeout(this._timeout),j.off(this._element.closest(ln),cn,this._hideModalHandler),this.tip&&this.tip.remove(),this._disposePopper(),super.dispose()}show(){if("none"===this._element.style.display)throw new Error("Please use show on visible elements");if(!this.isWithContent()||!this._isEnabled)return;const t=j.trigger(this._element,this.constructor.Event.SHOW),e=h(this._element),i=null===e?this._element.ownerDocument.documentElement.contains(this._element):e.contains(this._element);if(t.defaultPrevented||!i)return;"tooltip"===this.constructor.NAME&&this.tip&&this.getTitle()!==this.tip.querySelector(an).innerHTML&&(this._disposePopper(),this.tip.remove(),this.tip=null);const n=this.getTipElement(),s=(t=>{do{t+=Math.floor(1e6*Math.random())}while(document.getElementById(t));return t})(this.constructor.NAME);n.setAttribute("id",s),this._element.setAttribute("aria-describedby",s),this._config.animation&&n.classList.add(nn);const o="function"==typeof this._config.placement?this._config.placement.call(this,n,this._element):this._config.placement,r=this._getAttachment(o);this._addAttachmentClass(r);const{container:a}=this._config;H.set(n,this.constructor.DATA_KEY,this),this._element.ownerDocument.documentElement.contains(this.tip)||(a.append(n),j.trigger(this._element,this.constructor.Event.INSERTED)),this._popper?this._popper.update():this._popper=qe(this._element,n,this._getPopperConfig(r)),n.classList.add(sn);const l=this._resolvePossibleFunction(this._config.customClass);l&&n.classList.add(...l.split(" ")),"ontouchstart"in document.documentElement&&[].concat(...document.body.children).forEach((t=>{j.on(t,"mouseover",d)}));const c=this.tip.classList.contains(nn);this._queueCallback((()=>{const 
t=this._hoverState;this._hoverState=null,j.trigger(this._element,this.constructor.Event.SHOWN),t===rn&&this._leave(null,this)}),this.tip,c)}hide(){if(!this._popper)return;const t=this.getTipElement();if(j.trigger(this._element,this.constructor.Event.HIDE).defaultPrevented)return;t.classList.remove(sn),"ontouchstart"in document.documentElement&&[].concat(...document.body.children).forEach((t=>j.off(t,"mouseover",d))),this._activeTrigger.click=!1,this._activeTrigger.focus=!1,this._activeTrigger.hover=!1;const e=this.tip.classList.contains(nn);this._queueCallback((()=>{this._isWithActiveTrigger()||(this._hoverState!==on&&t.remove(),this._cleanTipClass(),this._element.removeAttribute("aria-describedby"),j.trigger(this._element,this.constructor.Event.HIDDEN),this._disposePopper())}),this.tip,e),this._hoverState=""}update(){null!==this._popper&&this._popper.update()}isWithContent(){return Boolean(this.getTitle())}getTipElement(){if(this.tip)return this.tip;const t=document.createElement("div");t.innerHTML=this._config.template;const e=t.children[0];return this.setContent(e),e.classList.remove(nn,sn),this.tip=e,this.tip}setContent(t){this._sanitizeAndSetContent(t,this.getTitle(),an)}_sanitizeAndSetContent(t,e,i){const n=V.findOne(i,t);e||!n?this.setElementContent(n,e):n.remove()}setElementContent(t,e){if(null!==t)return o(e)?(e=r(e),void(this._config.html?e.parentNode!==t&&(t.innerHTML="",t.append(e)):t.textContent=e.textContent)):void(this._config.html?(this._config.sanitize&&(e=Yi(e,this._config.allowList,this._config.sanitizeFn)),t.innerHTML=e):t.textContent=e)}getTitle(){const t=this._element.getAttribute("data-bs-original-title")||this._config.title;return this._resolvePossibleFunction(t)}updateAttachment(t){return"right"===t?"end":"left"===t?"start":t}_initializeOnDelegatedTarget(t,e){return e||this.constructor.getOrCreateInstance(t.delegateTarget,this._getDelegateConfig())}_getOffset(){const{offset:t}=this._config;return"string"==typeof t?t.split(",").map((t=>Number.parseInt(t,10))):"function"==typeof t?e=>t(e,this._element):t}_resolvePossibleFunction(t){return"function"==typeof t?t.call(this._element):t}_getPopperConfig(t){const e={placement:t,modifiers:[{name:"flip",options:{fallbackPlacements:this._config.fallbackPlacements}},{name:"offset",options:{offset:this._getOffset()}},{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"arrow",options:{element:`.${this.constructor.NAME}-arrow`}},{name:"onChange",enabled:!0,phase:"afterWrite",fn:t=>this._handlePopperPlacementChange(t)}],onFirstUpdate:t=>{t.options.placement!==t.placement&&this._handlePopperPlacementChange(t)}};return{...e,..."function"==typeof this._config.popperConfig?this._config.popperConfig(e):this._config.popperConfig}}_addAttachmentClass(t){this.getTipElement().classList.add(`${this._getBasicClassPrefix()}-${this.updateAttachment(t)}`)}_getAttachment(t){return Ji[t.toUpperCase()]}_setListeners(){this._config.trigger.split(" ").forEach((t=>{if("click"===t)j.on(this._element,this.constructor.Event.CLICK,this._config.selector,(t=>this.toggle(t)));else if("manual"!==t){const 
e=t===hn?this.constructor.Event.MOUSEENTER:this.constructor.Event.FOCUSIN,i=t===hn?this.constructor.Event.MOUSELEAVE:this.constructor.Event.FOCUSOUT;j.on(this._element,e,this._config.selector,(t=>this._enter(t))),j.on(this._element,i,this._config.selector,(t=>this._leave(t)))}})),this._hideModalHandler=()=>{this._element&&this.hide()},j.on(this._element.closest(ln),cn,this._hideModalHandler),this._config.selector?this._config={...this._config,trigger:"manual",selector:""}:this._fixTitle()}_fixTitle(){const t=this._element.getAttribute("title"),e=typeof this._element.getAttribute("data-bs-original-title");(t||"string"!==e)&&(this._element.setAttribute("data-bs-original-title",t||""),!t||this._element.getAttribute("aria-label")||this._element.textContent||this._element.setAttribute("aria-label",t),this._element.setAttribute("title",""))}_enter(t,e){e=this._initializeOnDelegatedTarget(t,e),t&&(e._activeTrigger["focusin"===t.type?dn:hn]=!0),e.getTipElement().classList.contains(sn)||e._hoverState===on?e._hoverState=on:(clearTimeout(e._timeout),e._hoverState=on,e._config.delay&&e._config.delay.show?e._timeout=setTimeout((()=>{e._hoverState===on&&e.show()}),e._config.delay.show):e.show())}_leave(t,e){e=this._initializeOnDelegatedTarget(t,e),t&&(e._activeTrigger["focusout"===t.type?dn:hn]=e._element.contains(t.relatedTarget)),e._isWithActiveTrigger()||(clearTimeout(e._timeout),e._hoverState=rn,e._config.delay&&e._config.delay.hide?e._timeout=setTimeout((()=>{e._hoverState===rn&&e.hide()}),e._config.delay.hide):e.hide())}_isWithActiveTrigger(){for(const t in this._activeTrigger)if(this._activeTrigger[t])return!0;return!1}_getConfig(t){const e=U.getDataAttributes(this._element);return Object.keys(e).forEach((t=>{Gi.has(t)&&delete e[t]})),(t={...this.constructor.Default,...e,..."object"==typeof t&&t?t:{}}).container=!1===t.container?document.body:r(t.container),"number"==typeof t.delay&&(t.delay={show:t.delay,hide:t.delay}),"number"==typeof t.title&&(t.title=t.title.toString()),"number"==typeof t.content&&(t.content=t.content.toString()),a(Qi,t,this.constructor.DefaultType),t.sanitize&&(t.template=Yi(t.template,t.allowList,t.sanitizeFn)),t}_getDelegateConfig(){const t={};for(const e in this._config)this.constructor.Default[e]!==this._config[e]&&(t[e]=this._config[e]);return t}_cleanTipClass(){const t=this.getTipElement(),e=new RegExp(`(^|\\s)${this._getBasicClassPrefix()}\\S+`,"g"),i=t.getAttribute("class").match(e);null!==i&&i.length>0&&i.map((t=>t.trim())).forEach((e=>t.classList.remove(e)))}_getBasicClassPrefix(){return"bs-tooltip"}_handlePopperPlacementChange(t){const{state:e}=t;e&&(this.tip=e.elements.popper,this._cleanTipClass(),this._addAttachmentClass(this._getAttachment(e.placement)))}_disposePopper(){this._popper&&(this._popper.destroy(),this._popper=null)}static jQueryInterface(t){return this.each((function(){const e=un.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}}g(un);const fn={...un.Default,placement:"right",offset:[0,8],trigger:"click",content:"",template:''},pn={...un.DefaultType,content:"(string|element|function)"},mn={HIDE:"hide.bs.popover",HIDDEN:"hidden.bs.popover",SHOW:"show.bs.popover",SHOWN:"shown.bs.popover",INSERTED:"inserted.bs.popover",CLICK:"click.bs.popover",FOCUSIN:"focusin.bs.popover",FOCUSOUT:"focusout.bs.popover",MOUSEENTER:"mouseenter.bs.popover",MOUSELEAVE:"mouseleave.bs.popover"};class gn extends un{static get Default(){return fn}static get NAME(){return"popover"}static get 
Event(){return mn}static get DefaultType(){return pn}isWithContent(){return this.getTitle()||this._getContent()}setContent(t){this._sanitizeAndSetContent(t,this.getTitle(),".popover-header"),this._sanitizeAndSetContent(t,this._getContent(),".popover-body")}_getContent(){return this._resolvePossibleFunction(this._config.content)}_getBasicClassPrefix(){return"bs-popover"}static jQueryInterface(t){return this.each((function(){const e=gn.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}}g(gn);const _n="scrollspy",bn={offset:10,method:"auto",target:""},vn={offset:"number",method:"string",target:"(string|element)"},yn="active",wn=".nav-link, .list-group-item, .dropdown-item",En="position";class An extends B{constructor(t,e){super(t),this._scrollElement="BODY"===this._element.tagName?window:this._element,this._config=this._getConfig(e),this._offsets=[],this._targets=[],this._activeTarget=null,this._scrollHeight=0,j.on(this._scrollElement,"scroll.bs.scrollspy",(()=>this._process())),this.refresh(),this._process()}static get Default(){return bn}static get NAME(){return _n}refresh(){const t=this._scrollElement===this._scrollElement.window?"offset":En,e="auto"===this._config.method?t:this._config.method,n=e===En?this._getScrollTop():0;this._offsets=[],this._targets=[],this._scrollHeight=this._getScrollHeight(),V.find(wn,this._config.target).map((t=>{const s=i(t),o=s?V.findOne(s):null;if(o){const t=o.getBoundingClientRect();if(t.width||t.height)return[U[e](o).top+n,s]}return null})).filter((t=>t)).sort(((t,e)=>t[0]-e[0])).forEach((t=>{this._offsets.push(t[0]),this._targets.push(t[1])}))}dispose(){j.off(this._scrollElement,".bs.scrollspy"),super.dispose()}_getConfig(t){return(t={...bn,...U.getDataAttributes(this._element),..."object"==typeof t&&t?t:{}}).target=r(t.target)||document.documentElement,a(_n,t,vn),t}_getScrollTop(){return this._scrollElement===window?this._scrollElement.pageYOffset:this._scrollElement.scrollTop}_getScrollHeight(){return this._scrollElement.scrollHeight||Math.max(document.body.scrollHeight,document.documentElement.scrollHeight)}_getOffsetHeight(){return this._scrollElement===window?window.innerHeight:this._scrollElement.getBoundingClientRect().height}_process(){const t=this._getScrollTop()+this._config.offset,e=this._getScrollHeight(),i=this._config.offset+e-this._getOffsetHeight();if(this._scrollHeight!==e&&this.refresh(),t>=i){const t=this._targets[this._targets.length-1];this._activeTarget!==t&&this._activate(t)}else{if(this._activeTarget&&t0)return this._activeTarget=null,void this._clear();for(let e=this._offsets.length;e--;)this._activeTarget!==this._targets[e]&&t>=this._offsets[e]&&(void 0===this._offsets[e+1]||t`${e}[data-bs-target="${t}"],${e}[href="${t}"]`)),i=V.findOne(e.join(","),this._config.target);i.classList.add(yn),i.classList.contains("dropdown-item")?V.findOne(".dropdown-toggle",i.closest(".dropdown")).classList.add(yn):V.parents(i,".nav, .list-group").forEach((t=>{V.prev(t,".nav-link, .list-group-item").forEach((t=>t.classList.add(yn))),V.prev(t,".nav-item").forEach((t=>{V.children(t,".nav-link").forEach((t=>t.classList.add(yn)))}))})),j.trigger(this._scrollElement,"activate.bs.scrollspy",{relatedTarget:t})}_clear(){V.find(wn,this._config.target).filter((t=>t.classList.contains(yn))).forEach((t=>t.classList.remove(yn)))}static jQueryInterface(t){return this.each((function(){const e=An.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method 
named "${t}"`);e[t]()}}))}}j.on(window,"load.bs.scrollspy.data-api",(()=>{V.find('[data-bs-spy="scroll"]').forEach((t=>new An(t)))})),g(An);const Tn="active",On="fade",Cn="show",kn=".active",Ln=":scope > li > .active";class xn extends B{static get NAME(){return"tab"}show(){if(this._element.parentNode&&this._element.parentNode.nodeType===Node.ELEMENT_NODE&&this._element.classList.contains(Tn))return;let t;const e=n(this._element),i=this._element.closest(".nav, .list-group");if(i){const e="UL"===i.nodeName||"OL"===i.nodeName?Ln:kn;t=V.find(e,i),t=t[t.length-1]}const s=t?j.trigger(t,"hide.bs.tab",{relatedTarget:this._element}):null;if(j.trigger(this._element,"show.bs.tab",{relatedTarget:t}).defaultPrevented||null!==s&&s.defaultPrevented)return;this._activate(this._element,i);const o=()=>{j.trigger(t,"hidden.bs.tab",{relatedTarget:this._element}),j.trigger(this._element,"shown.bs.tab",{relatedTarget:t})};e?this._activate(e,e.parentNode,o):o()}_activate(t,e,i){const n=(!e||"UL"!==e.nodeName&&"OL"!==e.nodeName?V.children(e,kn):V.find(Ln,e))[0],s=i&&n&&n.classList.contains(On),o=()=>this._transitionComplete(t,n,i);n&&s?(n.classList.remove(Cn),this._queueCallback(o,t,!0)):o()}_transitionComplete(t,e,i){if(e){e.classList.remove(Tn);const t=V.findOne(":scope > .dropdown-menu .active",e.parentNode);t&&t.classList.remove(Tn),"tab"===e.getAttribute("role")&&e.setAttribute("aria-selected",!1)}t.classList.add(Tn),"tab"===t.getAttribute("role")&&t.setAttribute("aria-selected",!0),u(t),t.classList.contains(On)&&t.classList.add(Cn);let n=t.parentNode;if(n&&"LI"===n.nodeName&&(n=n.parentNode),n&&n.classList.contains("dropdown-menu")){const e=t.closest(".dropdown");e&&V.find(".dropdown-toggle",e).forEach((t=>t.classList.add(Tn))),t.setAttribute("aria-expanded",!0)}i&&i()}static jQueryInterface(t){return this.each((function(){const e=xn.getOrCreateInstance(this);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}}j.on(document,"click.bs.tab.data-api",'[data-bs-toggle="tab"], [data-bs-toggle="pill"], [data-bs-toggle="list"]',(function(t){["A","AREA"].includes(this.tagName)&&t.preventDefault(),c(this)||xn.getOrCreateInstance(this).show()})),g(xn);const Dn="toast",Sn="hide",Nn="show",In="showing",Pn={animation:"boolean",autohide:"boolean",delay:"number"},jn={animation:!0,autohide:!0,delay:5e3};class Mn extends B{constructor(t,e){super(t),this._config=this._getConfig(e),this._timeout=null,this._hasMouseInteraction=!1,this._hasKeyboardInteraction=!1,this._setListeners()}static get DefaultType(){return Pn}static get Default(){return jn}static get NAME(){return Dn}show(){j.trigger(this._element,"show.bs.toast").defaultPrevented||(this._clearTimeout(),this._config.animation&&this._element.classList.add("fade"),this._element.classList.remove(Sn),u(this._element),this._element.classList.add(Nn),this._element.classList.add(In),this._queueCallback((()=>{this._element.classList.remove(In),j.trigger(this._element,"shown.bs.toast"),this._maybeScheduleHide()}),this._element,this._config.animation))}hide(){this._element.classList.contains(Nn)&&(j.trigger(this._element,"hide.bs.toast").defaultPrevented||(this._element.classList.add(In),this._queueCallback((()=>{this._element.classList.add(Sn),this._element.classList.remove(In),this._element.classList.remove(Nn),j.trigger(this._element,"hidden.bs.toast")}),this._element,this._config.animation)))}dispose(){this._clearTimeout(),this._element.classList.contains(Nn)&&this._element.classList.remove(Nn),super.dispose()}_getConfig(t){return 
t={...jn,...U.getDataAttributes(this._element),..."object"==typeof t&&t?t:{}},a(Dn,t,this.constructor.DefaultType),t}_maybeScheduleHide(){this._config.autohide&&(this._hasMouseInteraction||this._hasKeyboardInteraction||(this._timeout=setTimeout((()=>{this.hide()}),this._config.delay)))}_onInteraction(t,e){switch(t.type){case"mouseover":case"mouseout":this._hasMouseInteraction=e;break;case"focusin":case"focusout":this._hasKeyboardInteraction=e}if(e)return void this._clearTimeout();const i=t.relatedTarget;this._element===i||this._element.contains(i)||this._maybeScheduleHide()}_setListeners(){j.on(this._element,"mouseover.bs.toast",(t=>this._onInteraction(t,!0))),j.on(this._element,"mouseout.bs.toast",(t=>this._onInteraction(t,!1))),j.on(this._element,"focusin.bs.toast",(t=>this._onInteraction(t,!0))),j.on(this._element,"focusout.bs.toast",(t=>this._onInteraction(t,!1)))}_clearTimeout(){clearTimeout(this._timeout),this._timeout=null}static jQueryInterface(t){return this.each((function(){const e=Mn.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}return R(Mn),g(Mn),{Alert:W,Button:z,Carousel:st,Collapse:pt,Dropdown:hi,Modal:Hi,Offcanvas:Fi,Popover:gn,ScrollSpy:An,Tab:xn,Toast:Mn,Tooltip:un}})); +//# sourceMappingURL=bootstrap.bundle.min.js.map \ No newline at end of file diff --git a/worksheets/worksheet_redlining_files/libs/clipboard/clipboard.min.js b/worksheets/worksheet_redlining_files/libs/clipboard/clipboard.min.js new file mode 100644 index 0000000..41c6a0f --- /dev/null +++ b/worksheets/worksheet_redlining_files/libs/clipboard/clipboard.min.js @@ -0,0 +1,7 @@ +/*! + * clipboard.js v2.0.10 + * https://clipboardjs.com/ + * + * Licensed MIT © Zeno Rocha + */ +!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.ClipboardJS=e():t.ClipboardJS=e()}(this,function(){return n={686:function(t,e,n){"use strict";n.d(e,{default:function(){return o}});var e=n(279),i=n.n(e),e=n(370),u=n.n(e),e=n(817),c=n.n(e);function a(t){try{return document.execCommand(t)}catch(t){return}}var f=function(t){t=c()(t);return a("cut"),t};var l=function(t){var 
e,n,o,r=1.anchorjs-link,.anchorjs-link:focus{opacity:1}",u.sheet.cssRules.length),u.sheet.insertRule("[data-anchorjs-icon]::after{content:attr(data-anchorjs-icon)}",u.sheet.cssRules.length),u.sheet.insertRule('@font-face{font-family:anchorjs-icons;src:url(data:n/a;base64,AAEAAAALAIAAAwAwT1MvMg8yG2cAAAE4AAAAYGNtYXDp3gC3AAABpAAAAExnYXNwAAAAEAAAA9wAAAAIZ2x5ZlQCcfwAAAH4AAABCGhlYWQHFvHyAAAAvAAAADZoaGVhBnACFwAAAPQAAAAkaG10eASAADEAAAGYAAAADGxvY2EACACEAAAB8AAAAAhtYXhwAAYAVwAAARgAAAAgbmFtZQGOH9cAAAMAAAAAunBvc3QAAwAAAAADvAAAACAAAQAAAAEAAHzE2p9fDzz1AAkEAAAAAADRecUWAAAAANQA6R8AAAAAAoACwAAAAAgAAgAAAAAAAAABAAADwP/AAAACgAAA/9MCrQABAAAAAAAAAAAAAAAAAAAAAwABAAAAAwBVAAIAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAMCQAGQAAUAAAKZAswAAACPApkCzAAAAesAMwEJAAAAAAAAAAAAAAAAAAAAARAAAAAAAAAAAAAAAAAAAAAAQAAg//0DwP/AAEADwABAAAAAAQAAAAAAAAAAAAAAIAAAAAAAAAIAAAACgAAxAAAAAwAAAAMAAAAcAAEAAwAAABwAAwABAAAAHAAEADAAAAAIAAgAAgAAACDpy//9//8AAAAg6cv//f///+EWNwADAAEAAAAAAAAAAAAAAAAACACEAAEAAAAAAAAAAAAAAAAxAAACAAQARAKAAsAAKwBUAAABIiYnJjQ3NzY2MzIWFxYUBwcGIicmNDc3NjQnJiYjIgYHBwYUFxYUBwYGIwciJicmNDc3NjIXFhQHBwYUFxYWMzI2Nzc2NCcmNDc2MhcWFAcHBgYjARQGDAUtLXoWOR8fORYtLTgKGwoKCjgaGg0gEhIgDXoaGgkJBQwHdR85Fi0tOAobCgoKOBoaDSASEiANehoaCQkKGwotLXoWOR8BMwUFLYEuehYXFxYugC44CQkKGwo4GkoaDQ0NDXoaShoKGwoFBe8XFi6ALjgJCQobCjgaShoNDQ0NehpKGgobCgoKLYEuehYXAAAADACWAAEAAAAAAAEACAAAAAEAAAAAAAIAAwAIAAEAAAAAAAMACAAAAAEAAAAAAAQACAAAAAEAAAAAAAUAAQALAAEAAAAAAAYACAAAAAMAAQQJAAEAEAAMAAMAAQQJAAIABgAcAAMAAQQJAAMAEAAMAAMAAQQJAAQAEAAMAAMAAQQJAAUAAgAiAAMAAQQJAAYAEAAMYW5jaG9yanM0MDBAAGEAbgBjAGgAbwByAGoAcwA0ADAAMABAAAAAAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAH//wAP) format("truetype")}',u.sheet.cssRules.length)),u=document.querySelectorAll("[id]"),t=[].map.call(u,function(A){return A.id}),i=0;i\]./()*\\\n\t\b\v\u00A0]/g,"-").replace(/-{2,}/g,"-").substring(0,this.options.truncate).replace(/^-+|-+$/gm,"").toLowerCase()},this.hasAnchorJSLink=function(A){var e=A.firstChild&&-1<(" "+A.firstChild.className+" ").indexOf(" anchorjs-link "),A=A.lastChild&&-1<(" "+A.lastChild.className+" ").indexOf(" anchorjs-link ");return e||A||!1}}}); +// @license-end \ No newline at end of file diff --git a/worksheets/worksheet_redlining_files/libs/quarto-html/popper.min.js b/worksheets/worksheet_redlining_files/libs/quarto-html/popper.min.js new file mode 100644 index 0000000..2269d66 --- /dev/null +++ b/worksheets/worksheet_redlining_files/libs/quarto-html/popper.min.js @@ -0,0 +1,6 @@ +/** + * @popperjs/core v2.11.4 - MIT License + */ + +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self).Popper={})}(this,(function(e){"use strict";function t(e){if(null==e)return window;if("[object Window]"!==e.toString()){var t=e.ownerDocument;return t&&t.defaultView||window}return e}function n(e){return e instanceof t(e).Element||e instanceof Element}function r(e){return e instanceof t(e).HTMLElement||e instanceof HTMLElement}function o(e){return"undefined"!=typeof ShadowRoot&&(e instanceof t(e).ShadowRoot||e instanceof ShadowRoot)}var i=Math.max,a=Math.min,s=Math.round;function f(e,t){void 0===t&&(t=!1);var n=e.getBoundingClientRect(),o=1,i=1;if(r(e)&&t){var a=e.offsetHeight,f=e.offsetWidth;f>0&&(o=s(n.width)/f||1),a>0&&(i=s(n.height)/a||1)}return{width:n.width/o,height:n.height/i,top:n.top/i,right:n.right/o,bottom:n.bottom/i,left:n.left/o,x:n.left/o,y:n.top/i}}function c(e){var n=t(e);return{scrollLeft:n.pageXOffset,scrollTop:n.pageYOffset}}function p(e){return 
e?(e.nodeName||"").toLowerCase():null}function u(e){return((n(e)?e.ownerDocument:e.document)||window.document).documentElement}function l(e){return f(u(e)).left+c(e).scrollLeft}function d(e){return t(e).getComputedStyle(e)}function h(e){var t=d(e),n=t.overflow,r=t.overflowX,o=t.overflowY;return/auto|scroll|overlay|hidden/.test(n+o+r)}function m(e,n,o){void 0===o&&(o=!1);var i,a,d=r(n),m=r(n)&&function(e){var t=e.getBoundingClientRect(),n=s(t.width)/e.offsetWidth||1,r=s(t.height)/e.offsetHeight||1;return 1!==n||1!==r}(n),v=u(n),g=f(e,m),y={scrollLeft:0,scrollTop:0},b={x:0,y:0};return(d||!d&&!o)&&(("body"!==p(n)||h(v))&&(y=(i=n)!==t(i)&&r(i)?{scrollLeft:(a=i).scrollLeft,scrollTop:a.scrollTop}:c(i)),r(n)?((b=f(n,!0)).x+=n.clientLeft,b.y+=n.clientTop):v&&(b.x=l(v))),{x:g.left+y.scrollLeft-b.x,y:g.top+y.scrollTop-b.y,width:g.width,height:g.height}}function v(e){var t=f(e),n=e.offsetWidth,r=e.offsetHeight;return Math.abs(t.width-n)<=1&&(n=t.width),Math.abs(t.height-r)<=1&&(r=t.height),{x:e.offsetLeft,y:e.offsetTop,width:n,height:r}}function g(e){return"html"===p(e)?e:e.assignedSlot||e.parentNode||(o(e)?e.host:null)||u(e)}function y(e){return["html","body","#document"].indexOf(p(e))>=0?e.ownerDocument.body:r(e)&&h(e)?e:y(g(e))}function b(e,n){var r;void 0===n&&(n=[]);var o=y(e),i=o===(null==(r=e.ownerDocument)?void 0:r.body),a=t(o),s=i?[a].concat(a.visualViewport||[],h(o)?o:[]):o,f=n.concat(s);return i?f:f.concat(b(g(s)))}function x(e){return["table","td","th"].indexOf(p(e))>=0}function w(e){return r(e)&&"fixed"!==d(e).position?e.offsetParent:null}function O(e){for(var n=t(e),i=w(e);i&&x(i)&&"static"===d(i).position;)i=w(i);return i&&("html"===p(i)||"body"===p(i)&&"static"===d(i).position)?n:i||function(e){var t=-1!==navigator.userAgent.toLowerCase().indexOf("firefox");if(-1!==navigator.userAgent.indexOf("Trident")&&r(e)&&"fixed"===d(e).position)return null;var n=g(e);for(o(n)&&(n=n.host);r(n)&&["html","body"].indexOf(p(n))<0;){var i=d(n);if("none"!==i.transform||"none"!==i.perspective||"paint"===i.contain||-1!==["transform","perspective"].indexOf(i.willChange)||t&&"filter"===i.willChange||t&&i.filter&&"none"!==i.filter)return n;n=n.parentNode}return null}(e)||n}var j="top",E="bottom",D="right",A="left",L="auto",P=[j,E,D,A],M="start",k="end",W="viewport",B="popper",H=P.reduce((function(e,t){return e.concat([t+"-"+M,t+"-"+k])}),[]),T=[].concat(P,[L]).reduce((function(e,t){return e.concat([t,t+"-"+M,t+"-"+k])}),[]),R=["beforeRead","read","afterRead","beforeMain","main","afterMain","beforeWrite","write","afterWrite"];function S(e){var t=new Map,n=new Set,r=[];function o(e){n.add(e.name),[].concat(e.requires||[],e.requiresIfExists||[]).forEach((function(e){if(!n.has(e)){var r=t.get(e);r&&o(r)}})),r.push(e)}return e.forEach((function(e){t.set(e.name,e)})),e.forEach((function(e){n.has(e.name)||o(e)})),r}function C(e){return e.split("-")[0]}function q(e,t){var n=t.getRootNode&&t.getRootNode();if(e.contains(t))return!0;if(n&&o(n)){var r=t;do{if(r&&e.isSameNode(r))return!0;r=r.parentNode||r.host}while(r)}return!1}function V(e){return Object.assign({},e,{left:e.x,top:e.y,right:e.x+e.width,bottom:e.y+e.height})}function N(e,r){return r===W?V(function(e){var n=t(e),r=u(e),o=n.visualViewport,i=r.clientWidth,a=r.clientHeight,s=0,f=0;return o&&(i=o.width,a=o.height,/^((?!chrome|android).)*safari/i.test(navigator.userAgent)||(s=o.offsetLeft,f=o.offsetTop)),{width:i,height:a,x:s+l(e),y:f}}(e)):n(r)?function(e){var t=f(e);return 
t.top=t.top+e.clientTop,t.left=t.left+e.clientLeft,t.bottom=t.top+e.clientHeight,t.right=t.left+e.clientWidth,t.width=e.clientWidth,t.height=e.clientHeight,t.x=t.left,t.y=t.top,t}(r):V(function(e){var t,n=u(e),r=c(e),o=null==(t=e.ownerDocument)?void 0:t.body,a=i(n.scrollWidth,n.clientWidth,o?o.scrollWidth:0,o?o.clientWidth:0),s=i(n.scrollHeight,n.clientHeight,o?o.scrollHeight:0,o?o.clientHeight:0),f=-r.scrollLeft+l(e),p=-r.scrollTop;return"rtl"===d(o||n).direction&&(f+=i(n.clientWidth,o?o.clientWidth:0)-a),{width:a,height:s,x:f,y:p}}(u(e)))}function I(e,t,o){var s="clippingParents"===t?function(e){var t=b(g(e)),o=["absolute","fixed"].indexOf(d(e).position)>=0&&r(e)?O(e):e;return n(o)?t.filter((function(e){return n(e)&&q(e,o)&&"body"!==p(e)})):[]}(e):[].concat(t),f=[].concat(s,[o]),c=f[0],u=f.reduce((function(t,n){var r=N(e,n);return t.top=i(r.top,t.top),t.right=a(r.right,t.right),t.bottom=a(r.bottom,t.bottom),t.left=i(r.left,t.left),t}),N(e,c));return u.width=u.right-u.left,u.height=u.bottom-u.top,u.x=u.left,u.y=u.top,u}function _(e){return e.split("-")[1]}function F(e){return["top","bottom"].indexOf(e)>=0?"x":"y"}function U(e){var t,n=e.reference,r=e.element,o=e.placement,i=o?C(o):null,a=o?_(o):null,s=n.x+n.width/2-r.width/2,f=n.y+n.height/2-r.height/2;switch(i){case j:t={x:s,y:n.y-r.height};break;case E:t={x:s,y:n.y+n.height};break;case D:t={x:n.x+n.width,y:f};break;case A:t={x:n.x-r.width,y:f};break;default:t={x:n.x,y:n.y}}var c=i?F(i):null;if(null!=c){var p="y"===c?"height":"width";switch(a){case M:t[c]=t[c]-(n[p]/2-r[p]/2);break;case k:t[c]=t[c]+(n[p]/2-r[p]/2)}}return t}function z(e){return Object.assign({},{top:0,right:0,bottom:0,left:0},e)}function X(e,t){return t.reduce((function(t,n){return t[n]=e,t}),{})}function Y(e,t){void 0===t&&(t={});var r=t,o=r.placement,i=void 0===o?e.placement:o,a=r.boundary,s=void 0===a?"clippingParents":a,c=r.rootBoundary,p=void 0===c?W:c,l=r.elementContext,d=void 0===l?B:l,h=r.altBoundary,m=void 0!==h&&h,v=r.padding,g=void 0===v?0:v,y=z("number"!=typeof g?g:X(g,P)),b=d===B?"reference":B,x=e.rects.popper,w=e.elements[m?b:d],O=I(n(w)?w:w.contextElement||u(e.elements.popper),s,p),A=f(e.elements.reference),L=U({reference:A,element:x,strategy:"absolute",placement:i}),M=V(Object.assign({},x,L)),k=d===B?M:A,H={top:O.top-k.top+y.top,bottom:k.bottom-O.bottom+y.bottom,left:O.left-k.left+y.left,right:k.right-O.right+y.right},T=e.modifiersData.offset;if(d===B&&T){var R=T[i];Object.keys(H).forEach((function(e){var t=[D,E].indexOf(e)>=0?1:-1,n=[j,E].indexOf(e)>=0?"y":"x";H[e]+=R[n]*t}))}return H}var G={placement:"bottom",modifiers:[],strategy:"absolute"};function J(){for(var e=arguments.length,t=new Array(e),n=0;n=0?-1:1,i="function"==typeof n?n(Object.assign({},t,{placement:e})):n,a=i[0],s=i[1];return a=a||0,s=(s||0)*o,[A,D].indexOf(r)>=0?{x:s,y:a}:{x:a,y:s}}(n,t.rects,i),e}),{}),s=a[t.placement],f=s.x,c=s.y;null!=t.modifiersData.popperOffsets&&(t.modifiersData.popperOffsets.x+=f,t.modifiersData.popperOffsets.y+=c),t.modifiersData[r]=a}},ie={left:"right",right:"left",bottom:"top",top:"bottom"};function ae(e){return e.replace(/left|right|bottom|top/g,(function(e){return ie[e]}))}var se={start:"end",end:"start"};function fe(e){return e.replace(/start|end/g,(function(e){return se[e]}))}function ce(e,t){void 0===t&&(t={});var n=t,r=n.placement,o=n.boundary,i=n.rootBoundary,a=n.padding,s=n.flipVariations,f=n.allowedAutoPlacements,c=void 0===f?T:f,p=_(r),u=p?s?H:H.filter((function(e){return _(e)===p})):P,l=u.filter((function(e){return 
c.indexOf(e)>=0}));0===l.length&&(l=u);var d=l.reduce((function(t,n){return t[n]=Y(e,{placement:n,boundary:o,rootBoundary:i,padding:a})[C(n)],t}),{});return Object.keys(d).sort((function(e,t){return d[e]-d[t]}))}var pe={name:"flip",enabled:!0,phase:"main",fn:function(e){var t=e.state,n=e.options,r=e.name;if(!t.modifiersData[r]._skip){for(var o=n.mainAxis,i=void 0===o||o,a=n.altAxis,s=void 0===a||a,f=n.fallbackPlacements,c=n.padding,p=n.boundary,u=n.rootBoundary,l=n.altBoundary,d=n.flipVariations,h=void 0===d||d,m=n.allowedAutoPlacements,v=t.options.placement,g=C(v),y=f||(g===v||!h?[ae(v)]:function(e){if(C(e)===L)return[];var t=ae(e);return[fe(e),t,fe(t)]}(v)),b=[v].concat(y).reduce((function(e,n){return e.concat(C(n)===L?ce(t,{placement:n,boundary:p,rootBoundary:u,padding:c,flipVariations:h,allowedAutoPlacements:m}):n)}),[]),x=t.rects.reference,w=t.rects.popper,O=new Map,P=!0,k=b[0],W=0;W=0,S=R?"width":"height",q=Y(t,{placement:B,boundary:p,rootBoundary:u,altBoundary:l,padding:c}),V=R?T?D:A:T?E:j;x[S]>w[S]&&(V=ae(V));var N=ae(V),I=[];if(i&&I.push(q[H]<=0),s&&I.push(q[V]<=0,q[N]<=0),I.every((function(e){return e}))){k=B,P=!1;break}O.set(B,I)}if(P)for(var F=function(e){var t=b.find((function(t){var n=O.get(t);if(n)return n.slice(0,e).every((function(e){return e}))}));if(t)return k=t,"break"},U=h?3:1;U>0;U--){if("break"===F(U))break}t.placement!==k&&(t.modifiersData[r]._skip=!0,t.placement=k,t.reset=!0)}},requiresIfExists:["offset"],data:{_skip:!1}};function ue(e,t,n){return i(e,a(t,n))}var le={name:"preventOverflow",enabled:!0,phase:"main",fn:function(e){var t=e.state,n=e.options,r=e.name,o=n.mainAxis,s=void 0===o||o,f=n.altAxis,c=void 0!==f&&f,p=n.boundary,u=n.rootBoundary,l=n.altBoundary,d=n.padding,h=n.tether,m=void 0===h||h,g=n.tetherOffset,y=void 0===g?0:g,b=Y(t,{boundary:p,rootBoundary:u,padding:d,altBoundary:l}),x=C(t.placement),w=_(t.placement),L=!w,P=F(x),k="x"===P?"y":"x",W=t.modifiersData.popperOffsets,B=t.rects.reference,H=t.rects.popper,T="function"==typeof y?y(Object.assign({},t.rects,{placement:t.placement})):y,R="number"==typeof T?{mainAxis:T,altAxis:T}:Object.assign({mainAxis:0,altAxis:0},T),S=t.modifiersData.offset?t.modifiersData.offset[t.placement]:null,q={x:0,y:0};if(W){if(s){var V,N="y"===P?j:A,I="y"===P?E:D,U="y"===P?"height":"width",z=W[P],X=z+b[N],G=z-b[I],J=m?-H[U]/2:0,K=w===M?B[U]:H[U],Q=w===M?-H[U]:-B[U],Z=t.elements.arrow,$=m&&Z?v(Z):{width:0,height:0},ee=t.modifiersData["arrow#persistent"]?t.modifiersData["arrow#persistent"].padding:{top:0,right:0,bottom:0,left:0},te=ee[N],ne=ee[I],re=ue(0,B[U],$[U]),oe=L?B[U]/2-J-re-te-R.mainAxis:K-re-te-R.mainAxis,ie=L?-B[U]/2+J+re+ne+R.mainAxis:Q+re+ne+R.mainAxis,ae=t.elements.arrow&&O(t.elements.arrow),se=ae?"y"===P?ae.clientTop||0:ae.clientLeft||0:0,fe=null!=(V=null==S?void 0:S[P])?V:0,ce=z+ie-fe,pe=ue(m?a(X,z+oe-fe-se):X,z,m?i(G,ce):G);W[P]=pe,q[P]=pe-z}if(c){var le,de="x"===P?j:A,he="x"===P?E:D,me=W[k],ve="y"===k?"height":"width",ge=me+b[de],ye=me-b[he],be=-1!==[j,A].indexOf(x),xe=null!=(le=null==S?void 0:S[k])?le:0,we=be?ge:me-B[ve]-H[ve]-xe+R.altAxis,Oe=be?me+B[ve]+H[ve]-xe-R.altAxis:ye,je=m&&be?function(e,t,n){var r=ue(e,t,n);return r>n?n:r}(we,me,Oe):ue(m?we:ge,me,m?Oe:ye);W[k]=je,q[k]=je-me}t.modifiersData[r]=q}},requiresIfExists:["offset"]};var de={name:"arrow",enabled:!0,phase:"main",fn:function(e){var t,n=e.state,r=e.name,o=e.options,i=n.elements.arrow,a=n.modifiersData.popperOffsets,s=C(n.placement),f=F(s),c=[A,D].indexOf(s)>=0?"height":"width";if(i&&a){var p=function(e,t){return 
z("number"!=typeof(e="function"==typeof e?e(Object.assign({},t.rects,{placement:t.placement})):e)?e:X(e,P))}(o.padding,n),u=v(i),l="y"===f?j:A,d="y"===f?E:D,h=n.rects.reference[c]+n.rects.reference[f]-a[f]-n.rects.popper[c],m=a[f]-n.rects.reference[f],g=O(i),y=g?"y"===f?g.clientHeight||0:g.clientWidth||0:0,b=h/2-m/2,x=p[l],w=y-u[c]-p[d],L=y/2-u[c]/2+b,M=ue(x,L,w),k=f;n.modifiersData[r]=((t={})[k]=M,t.centerOffset=M-L,t)}},effect:function(e){var t=e.state,n=e.options.element,r=void 0===n?"[data-popper-arrow]":n;null!=r&&("string"!=typeof r||(r=t.elements.popper.querySelector(r)))&&q(t.elements.popper,r)&&(t.elements.arrow=r)},requires:["popperOffsets"],requiresIfExists:["preventOverflow"]};function he(e,t,n){return void 0===n&&(n={x:0,y:0}),{top:e.top-t.height-n.y,right:e.right-t.width+n.x,bottom:e.bottom-t.height+n.y,left:e.left-t.width-n.x}}function me(e){return[j,D,E,A].some((function(t){return e[t]>=0}))}var ve={name:"hide",enabled:!0,phase:"main",requiresIfExists:["preventOverflow"],fn:function(e){var t=e.state,n=e.name,r=t.rects.reference,o=t.rects.popper,i=t.modifiersData.preventOverflow,a=Y(t,{elementContext:"reference"}),s=Y(t,{altBoundary:!0}),f=he(a,r),c=he(s,o,i),p=me(f),u=me(c);t.modifiersData[n]={referenceClippingOffsets:f,popperEscapeOffsets:c,isReferenceHidden:p,hasPopperEscaped:u},t.attributes.popper=Object.assign({},t.attributes.popper,{"data-popper-reference-hidden":p,"data-popper-escaped":u})}},ge=K({defaultModifiers:[Z,$,ne,re]}),ye=[Z,$,ne,re,oe,pe,le,de,ve],be=K({defaultModifiers:ye});e.applyStyles=re,e.arrow=de,e.computeStyles=ne,e.createPopper=be,e.createPopperLite=ge,e.defaultModifiers=ye,e.detectOverflow=Y,e.eventListeners=Z,e.flip=pe,e.hide=ve,e.offset=oe,e.popperGenerator=K,e.popperOffsets=$,e.preventOverflow=le,Object.defineProperty(e,"__esModule",{value:!0})})); + diff --git a/worksheets/worksheet_redlining_files/libs/quarto-html/quarto-syntax-highlighting.css b/worksheets/worksheet_redlining_files/libs/quarto-html/quarto-syntax-highlighting.css new file mode 100644 index 0000000..36cb328 --- /dev/null +++ b/worksheets/worksheet_redlining_files/libs/quarto-html/quarto-syntax-highlighting.css @@ -0,0 +1,171 @@ +/* quarto syntax highlight colors */ +:root { + --quarto-hl-ot-color: #003B4F; + --quarto-hl-at-color: #657422; + --quarto-hl-ss-color: #20794D; + --quarto-hl-an-color: #5E5E5E; + --quarto-hl-fu-color: #4758AB; + --quarto-hl-st-color: #20794D; + --quarto-hl-cf-color: #003B4F; + --quarto-hl-op-color: #5E5E5E; + --quarto-hl-er-color: #AD0000; + --quarto-hl-bn-color: #AD0000; + --quarto-hl-al-color: #AD0000; + --quarto-hl-va-color: #111111; + --quarto-hl-bu-color: inherit; + --quarto-hl-ex-color: inherit; + --quarto-hl-pp-color: #AD0000; + --quarto-hl-in-color: #5E5E5E; + --quarto-hl-vs-color: #20794D; + --quarto-hl-wa-color: #5E5E5E; + --quarto-hl-do-color: #5E5E5E; + --quarto-hl-im-color: #00769E; + --quarto-hl-ch-color: #20794D; + --quarto-hl-dt-color: #AD0000; + --quarto-hl-fl-color: #AD0000; + --quarto-hl-co-color: #5E5E5E; + --quarto-hl-cv-color: #5E5E5E; + --quarto-hl-cn-color: #8f5902; + --quarto-hl-sc-color: #5E5E5E; + --quarto-hl-dv-color: #AD0000; + --quarto-hl-kw-color: #003B4F; +} + +/* other quarto variables */ +:root { + --quarto-font-monospace: SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace; +} + +pre > code.sourceCode > span { + color: #003B4F; +} + +code span { + color: #003B4F; +} + +code.sourceCode > span { + color: #003B4F; +} + +div.sourceCode, +div.sourceCode pre.sourceCode { + color: #003B4F; +} 
+ +code span.ot { + color: #003B4F; +} + +code span.at { + color: #657422; +} + +code span.ss { + color: #20794D; +} + +code span.an { + color: #5E5E5E; +} + +code span.fu { + color: #4758AB; +} + +code span.st { + color: #20794D; +} + +code span.cf { + color: #003B4F; +} + +code span.op { + color: #5E5E5E; +} + +code span.er { + color: #AD0000; +} + +code span.bn { + color: #AD0000; +} + +code span.al { + color: #AD0000; +} + +code span.va { + color: #111111; +} + +code span.pp { + color: #AD0000; +} + +code span.in { + color: #5E5E5E; +} + +code span.vs { + color: #20794D; +} + +code span.wa { + color: #5E5E5E; + font-style: italic; +} + +code span.do { + color: #5E5E5E; + font-style: italic; +} + +code span.im { + color: #00769E; +} + +code span.ch { + color: #20794D; +} + +code span.dt { + color: #AD0000; +} + +code span.fl { + color: #AD0000; +} + +code span.co { + color: #5E5E5E; +} + +code span.cv { + color: #5E5E5E; + font-style: italic; +} + +code span.cn { + color: #8f5902; +} + +code span.sc { + color: #5E5E5E; +} + +code span.dv { + color: #AD0000; +} + +code span.kw { + color: #003B4F; +} + +.prevent-inlining { + content: " { + const sibling = el.previousElementSibling; + if (sibling && sibling.tagName === "A") { + return sibling.classList.contains("active"); + } else { + return false; + } + }; + + // fire slideEnter for bootstrap tab activations (for htmlwidget resize behavior) + function fireSlideEnter(e) { + const event = window.document.createEvent("Event"); + event.initEvent("slideenter", true, true); + window.document.dispatchEvent(event); + } + const tabs = window.document.querySelectorAll('a[data-bs-toggle="tab"]'); + tabs.forEach((tab) => { + tab.addEventListener("shown.bs.tab", fireSlideEnter); + }); + + // Track scrolling and mark TOC links as active + // get table of contents and sidebar (bail if we don't have at least one) + const tocLinks = tocEl + ? [...tocEl.querySelectorAll("a[data-scroll-target]")] + : []; + const makeActive = (link) => tocLinks[link].classList.add("active"); + const removeActive = (link) => tocLinks[link].classList.remove("active"); + const removeAllActive = () => + [...Array(tocLinks.length).keys()].forEach((link) => removeActive(link)); + + // activate the anchor for a section associated with this TOC entry + tocLinks.forEach((link) => { + link.addEventListener("click", () => { + if (link.href.indexOf("#") !== -1) { + const anchor = link.href.split("#")[1]; + const heading = window.document.querySelector( + `[data-anchor-id=${anchor}]` + ); + if (heading) { + // Add the class + heading.classList.add("reveal-anchorjs-link"); + + // function to show the anchor + const handleMouseout = () => { + heading.classList.remove("reveal-anchorjs-link"); + heading.removeEventListener("mouseout", handleMouseout); + }; + + // add a function to clear the anchor when the user mouses out of it + heading.addEventListener("mouseout", handleMouseout); + } + } + }); + }); + + const sections = tocLinks.map((link) => { + const target = link.getAttribute("data-scroll-target"); + if (target.startsWith("#")) { + return window.document.getElementById(decodeURI(`${target.slice(1)}`)); + } else { + return window.document.querySelector(decodeURI(`${target}`)); + } + }); + + const sectionMargin = 200; + let currentActive = 0; + // track whether we've initialized state the first time + let init = false; + + const updateActiveLink = () => { + // The index from bottom to top (e.g. 
reversed list) + let sectionIndex = -1; + if ( + window.innerHeight + window.pageYOffset >= + window.document.body.offsetHeight + ) { + sectionIndex = 0; + } else { + sectionIndex = [...sections].reverse().findIndex((section) => { + if (section) { + return window.pageYOffset >= section.offsetTop - sectionMargin; + } else { + return false; + } + }); + } + if (sectionIndex > -1) { + const current = sections.length - sectionIndex - 1; + if (current !== currentActive) { + removeAllActive(); + currentActive = current; + makeActive(current); + if (init) { + window.dispatchEvent(sectionChanged); + } + init = true; + } + } + }; + + const inHiddenRegion = (top, bottom, hiddenRegions) => { + for (const region of hiddenRegions) { + if (top <= region.bottom && bottom >= region.top) { + return true; + } + } + return false; + }; + + const categorySelector = "header.quarto-title-block .quarto-category"; + const activateCategories = (href) => { + // Find any categories + // Surround them with a link pointing back to: + // #category=Authoring + try { + const categoryEls = window.document.querySelectorAll(categorySelector); + for (const categoryEl of categoryEls) { + const categoryText = categoryEl.textContent; + if (categoryText) { + const link = `${href}#category=${encodeURIComponent(categoryText)}`; + const linkEl = window.document.createElement("a"); + linkEl.setAttribute("href", link); + for (const child of categoryEl.childNodes) { + linkEl.append(child); + } + categoryEl.appendChild(linkEl); + } + } + } catch { + // Ignore errors + } + }; + function hasTitleCategories() { + return window.document.querySelector(categorySelector) !== null; + } + + function offsetRelativeUrl(url) { + const offset = getMeta("quarto:offset"); + return offset ? offset + url : url; + } + + function offsetAbsoluteUrl(url) { + const offset = getMeta("quarto:offset"); + const baseUrl = new URL(offset, window.location); + + const projRelativeUrl = url.replace(baseUrl, ""); + if (projRelativeUrl.startsWith("/")) { + return projRelativeUrl; + } else { + return "/" + projRelativeUrl; + } + } + + // read a meta tag value + function getMeta(metaName) { + const metas = window.document.getElementsByTagName("meta"); + for (let i = 0; i < metas.length; i++) { + if (metas[i].getAttribute("name") === metaName) { + return metas[i].getAttribute("content"); + } + } + return ""; + } + + async function findAndActivateCategories() { + const currentPagePath = offsetAbsoluteUrl(window.location.href); + const response = await fetch(offsetRelativeUrl("listings.json")); + if (response.status == 200) { + return response.json().then(function (listingPaths) { + const listingHrefs = []; + for (const listingPath of listingPaths) { + const pathWithoutLeadingSlash = listingPath.listing.substring(1); + for (const item of listingPath.items) { + if ( + item === currentPagePath || + item === currentPagePath + "index.html" + ) { + // Resolve this path against the offset to be sure + // we already are using the correct path to the listing + // (this adjusts the listing urls to be rooted against + // whatever root the page is actually running against) + const relative = offsetRelativeUrl(pathWithoutLeadingSlash); + const baseUrl = window.location; + const resolvedPath = new URL(relative, baseUrl); + listingHrefs.push(resolvedPath.pathname); + break; + } + } + } + + // Look up the tree for a nearby linting and use that if we find one + const nearestListing = findNearestParentListing( + offsetAbsoluteUrl(window.location.pathname), + listingHrefs + ); + if 
(nearestListing) { + activateCategories(nearestListing); + } else { + // See if the referrer is a listing page for this item + const referredRelativePath = offsetAbsoluteUrl(document.referrer); + const referrerListing = listingHrefs.find((listingHref) => { + const isListingReferrer = + listingHref === referredRelativePath || + listingHref === referredRelativePath + "index.html"; + return isListingReferrer; + }); + + if (referrerListing) { + // Try to use the referrer if possible + activateCategories(referrerListing); + } else if (listingHrefs.length > 0) { + // Otherwise, just fall back to the first listing + activateCategories(listingHrefs[0]); + } + } + }); + } + } + if (hasTitleCategories()) { + findAndActivateCategories(); + } + + const findNearestParentListing = (href, listingHrefs) => { + if (!href || !listingHrefs) { + return undefined; + } + // Look up the tree for a nearby linting and use that if we find one + const relativeParts = href.substring(1).split("/"); + while (relativeParts.length > 0) { + const path = relativeParts.join("/"); + for (const listingHref of listingHrefs) { + if (listingHref.startsWith(path)) { + return listingHref; + } + } + relativeParts.pop(); + } + + return undefined; + }; + + const manageSidebarVisiblity = (el, placeholderDescriptor) => { + let isVisible = true; + + return (hiddenRegions) => { + if (el === null) { + return; + } + + // Find the last element of the TOC + const lastChildEl = el.lastElementChild; + + if (lastChildEl) { + // Find the top and bottom o the element that is being managed + const elTop = el.offsetTop; + const elBottom = + elTop + lastChildEl.offsetTop + lastChildEl.offsetHeight; + + // Converts the sidebar to a menu + const convertToMenu = () => { + for (const child of el.children) { + child.style.opacity = 0; + child.style.display = "none"; + } + + const toggleContainer = window.document.createElement("div"); + toggleContainer.style.width = "100%"; + toggleContainer.classList.add("zindex-over-content"); + toggleContainer.classList.add("quarto-sidebar-toggle"); + toggleContainer.classList.add("headroom-target"); // Marks this to be managed by headeroom + toggleContainer.id = placeholderDescriptor.id; + toggleContainer.style.position = "fixed"; + + const toggleIcon = window.document.createElement("i"); + toggleIcon.classList.add("quarto-sidebar-toggle-icon"); + toggleIcon.classList.add("bi"); + toggleIcon.classList.add("bi-caret-down-fill"); + + const toggleTitle = window.document.createElement("div"); + const titleEl = window.document.body.querySelector( + placeholderDescriptor.titleSelector + ); + if (titleEl) { + toggleTitle.append(titleEl.innerText, toggleIcon); + } + toggleTitle.classList.add("zindex-over-content"); + toggleTitle.classList.add("quarto-sidebar-toggle-title"); + toggleContainer.append(toggleTitle); + + const toggleContents = window.document.createElement("div"); + toggleContents.classList = el.classList; + toggleContents.classList.add("zindex-over-content"); + toggleContents.classList.add("quarto-sidebar-toggle-contents"); + for (const child of el.children) { + if (child.id === "toc-title") { + continue; + } + + const clone = child.cloneNode(true); + clone.style.opacity = 1; + clone.style.display = null; + toggleContents.append(clone); + } + toggleContents.style.height = "0px"; + toggleContainer.append(toggleContents); + el.parentElement.prepend(toggleContainer); + + // Process clicks + let tocShowing = false; + // Allow the caller to control whether this is dismissed + // when it is clicked (e.g. 
sidebar navigation supports + // opening and closing the nav tree, so don't dismiss on click) + const clickEl = placeholderDescriptor.dismissOnClick + ? toggleContainer + : toggleTitle; + + const closeToggle = () => { + if (tocShowing) { + toggleContainer.classList.remove("expanded"); + toggleContents.style.height = "0px"; + tocShowing = false; + } + }; + + const positionToggle = () => { + // position the element (top left of parent, same width as parent) + const elRect = el.getBoundingClientRect(); + toggleContainer.style.left = `${elRect.left}px`; + toggleContainer.style.top = `${elRect.top}px`; + toggleContainer.style.width = `${elRect.width}px`; + }; + + // Get rid of any expanded toggle if the user scrolls + window.document.addEventListener( + "scroll", + throttle(() => { + closeToggle(); + }, 50) + ); + + // Handle positioning of the toggle + window.addEventListener( + "resize", + throttle(() => { + positionToggle(); + }, 50) + ); + positionToggle(); + + // Process the click + clickEl.onclick = () => { + if (!tocShowing) { + toggleContainer.classList.add("expanded"); + toggleContents.style.height = null; + tocShowing = true; + } else { + closeToggle(); + } + }; + }; + + // Converts a sidebar from a menu back to a sidebar + const convertToSidebar = () => { + for (const child of el.children) { + child.style.opacity = 1; + clone.style.display = null; + } + + const placeholderEl = window.document.getElementById( + placeholderDescriptor.id + ); + if (placeholderEl) { + placeholderEl.remove(); + } + + el.classList.remove("rollup"); + }; + + if (isReaderMode()) { + convertToMenu(); + isVisible = false; + } else { + if (!isVisible) { + // If the element is current not visible reveal if there are + // no conflicts with overlay regions + if (!inHiddenRegion(elTop, elBottom, hiddenRegions)) { + convertToSidebar(); + isVisible = true; + } + } else { + // If the element is visible, hide it if it conflicts with overlay regions + // and insert a placeholder toggle (or if we're in reader mode) + if (inHiddenRegion(elTop, elBottom, hiddenRegions)) { + convertToMenu(); + isVisible = false; + } + } + } + } + }; + }; + + // Find any conflicting margin elements and add margins to the + // top to prevent overlap + const marginChildren = window.document.querySelectorAll( + ".column-margin.column-container > * " + ); + let lastBottom = 0; + for (const marginChild of marginChildren) { + const top = marginChild.getBoundingClientRect().top; + if (top < lastBottom) { + const margin = lastBottom - top; + marginChild.style.marginTop = `${margin}px`; + } + const styles = window.getComputedStyle(marginChild); + const marginTop = parseFloat(styles["marginTop"]); + + lastBottom = top + marginChild.getBoundingClientRect().height + marginTop; + } + + // Manage the visibility of the toc and the sidebar + const marginScrollVisibility = manageSidebarVisiblity(marginSidebarEl, { + id: "quarto-toc-toggle", + titleSelector: "#toc-title", + dismissOnClick: true, + }); + const sidebarScrollVisiblity = manageSidebarVisiblity(sidebarEl, { + id: "quarto-sidebarnav-toggle", + titleSelector: ".title", + dismissOnClick: false, + }); + let tocLeftScrollVisibility; + if (leftTocEl) { + tocLeftScrollVisibility = manageSidebarVisiblity(leftTocEl, { + id: "quarto-lefttoc-toggle", + titleSelector: "#toc-title", + dismissOnClick: true, + }); + } + + // Find the first element that uses formatting in special columns + const conflictingEls = window.document.body.querySelectorAll( + '[class^="column-"], [class*=" column-"], aside, 
[class*="margin-caption"], [class*=" margin-caption"], [class*="margin-ref"], [class*=" margin-ref"]' + ); + + // Filter all the possibly conflicting elements into ones + // the do conflict on the left or ride side + const arrConflictingEls = Array.from(conflictingEls); + const leftSideConflictEls = arrConflictingEls.filter((el) => { + if (el.tagName === "ASIDE") { + return false; + } + return Array.from(el.classList).find((className) => { + return ( + className !== "column-body" && + className.startsWith("column-") && + !className.endsWith("right") && + !className.endsWith("container") && + className !== "column-margin" + ); + }); + }); + const rightSideConflictEls = arrConflictingEls.filter((el) => { + if (el.tagName === "ASIDE") { + return true; + } + + const hasMarginCaption = Array.from(el.classList).find((className) => { + return className == "margin-caption"; + }); + if (hasMarginCaption) { + return true; + } + + return Array.from(el.classList).find((className) => { + return ( + className !== "column-body" && + !className.endsWith("container") && + className.startsWith("column-") && + !className.endsWith("left") + ); + }); + }); + + const kOverlapPaddingSize = 10; + function toRegions(els) { + return els.map((el) => { + const top = + el.getBoundingClientRect().top + + document.documentElement.scrollTop - + kOverlapPaddingSize; + return { + top, + bottom: top + el.scrollHeight + 2 * kOverlapPaddingSize, + }; + }); + } + + const hideOverlappedSidebars = () => { + marginScrollVisibility(toRegions(rightSideConflictEls)); + sidebarScrollVisiblity(toRegions(leftSideConflictEls)); + if (tocLeftScrollVisibility) { + tocLeftScrollVisibility(toRegions(leftSideConflictEls)); + } + }; + + window.quartoToggleReader = () => { + // Applies a slow class (or removes it) + // to update the transition speed + const slowTransition = (slow) => { + const manageTransition = (id, slow) => { + const el = document.getElementById(id); + if (el) { + if (slow) { + el.classList.add("slow"); + } else { + el.classList.remove("slow"); + } + } + }; + + manageTransition("TOC", slow); + manageTransition("quarto-sidebar", slow); + }; + + const readerMode = !isReaderMode(); + setReaderModeValue(readerMode); + + // If we're entering reader mode, slow the transition + if (readerMode) { + slowTransition(readerMode); + } + highlightReaderToggle(readerMode); + hideOverlappedSidebars(); + + // If we're exiting reader mode, restore the non-slow transition + if (!readerMode) { + slowTransition(!readerMode); + } + }; + + const highlightReaderToggle = (readerMode) => { + const els = document.querySelectorAll(".quarto-reader-toggle"); + if (els) { + els.forEach((el) => { + if (readerMode) { + el.classList.add("reader"); + } else { + el.classList.remove("reader"); + } + }); + } + }; + + const setReaderModeValue = (val) => { + if (window.location.protocol !== "file:") { + window.localStorage.setItem("quarto-reader-mode", val); + } else { + localReaderMode = val; + } + }; + + const isReaderMode = () => { + if (window.location.protocol !== "file:") { + return window.localStorage.getItem("quarto-reader-mode") === "true"; + } else { + return localReaderMode; + } + }; + let localReaderMode = null; + + // Walk the TOC and collapse/expand nodes + // Nodes are expanded if: + // - they are top level + // - they have children that are 'active' links + // - they are directly below an link that is 'active' + const walk = (el, depth) => { + // Tick depth when we enter a UL + if (el.tagName === "UL") { + depth = depth + 1; + } + + // It this is 
active link + let isActiveNode = false; + if (el.tagName === "A" && el.classList.contains("active")) { + isActiveNode = true; + } + + // See if there is an active child to this element + let hasActiveChild = false; + for (child of el.children) { + hasActiveChild = walk(child, depth) || hasActiveChild; + } + + // Process the collapse state if this is an UL + if (el.tagName === "UL") { + if (depth === 1 || hasActiveChild || prevSiblingIsActiveLink(el)) { + el.classList.remove("collapse"); + } else { + el.classList.add("collapse"); + } + + // untick depth when we leave a UL + depth = depth - 1; + } + return hasActiveChild || isActiveNode; + }; + + // walk the TOC and expand / collapse any items that should be shown + + if (tocEl) { + walk(tocEl, 0); + updateActiveLink(); + } + + // Throttle the scroll event and walk peridiocally + window.document.addEventListener( + "scroll", + throttle(() => { + if (tocEl) { + updateActiveLink(); + walk(tocEl, 0); + } + if (!isReaderMode()) { + hideOverlappedSidebars(); + } + }, 5) + ); + window.addEventListener( + "resize", + throttle(() => { + if (!isReaderMode()) { + hideOverlappedSidebars(); + } + }, 10) + ); + hideOverlappedSidebars(); + highlightReaderToggle(isReaderMode()); +}); + +function throttle(func, wait) { + let waiting = false; + return function () { + if (!waiting) { + func.apply(this, arguments); + waiting = true; + setTimeout(function () { + waiting = false; + }, wait); + } + }; +} diff --git a/worksheets/worksheet_redlining_files/libs/quarto-html/tippy.css b/worksheets/worksheet_redlining_files/libs/quarto-html/tippy.css new file mode 100644 index 0000000..e6ae635 --- /dev/null +++ b/worksheets/worksheet_redlining_files/libs/quarto-html/tippy.css @@ -0,0 +1 @@ +.tippy-box[data-animation=fade][data-state=hidden]{opacity:0}[data-tippy-root]{max-width:calc(100vw - 10px)}.tippy-box{position:relative;background-color:#333;color:#fff;border-radius:4px;font-size:14px;line-height:1.4;white-space:normal;outline:0;transition-property:transform,visibility,opacity}.tippy-box[data-placement^=top]>.tippy-arrow{bottom:0}.tippy-box[data-placement^=top]>.tippy-arrow:before{bottom:-7px;left:0;border-width:8px 8px 0;border-top-color:initial;transform-origin:center top}.tippy-box[data-placement^=bottom]>.tippy-arrow{top:0}.tippy-box[data-placement^=bottom]>.tippy-arrow:before{top:-7px;left:0;border-width:0 8px 8px;border-bottom-color:initial;transform-origin:center bottom}.tippy-box[data-placement^=left]>.tippy-arrow{right:0}.tippy-box[data-placement^=left]>.tippy-arrow:before{border-width:8px 0 8px 8px;border-left-color:initial;right:-7px;transform-origin:center left}.tippy-box[data-placement^=right]>.tippy-arrow{left:0}.tippy-box[data-placement^=right]>.tippy-arrow:before{left:-7px;border-width:8px 8px 8px 0;border-right-color:initial;transform-origin:center right}.tippy-box[data-inertia][data-state=visible]{transition-timing-function:cubic-bezier(.54,1.5,.38,1.11)}.tippy-arrow{width:16px;height:16px;color:#333}.tippy-arrow:before{content:"";position:absolute;border-color:transparent;border-style:solid}.tippy-content{position:relative;padding:5px 9px;z-index:1} \ No newline at end of file diff --git a/worksheets/worksheet_redlining_files/libs/quarto-html/tippy.umd.min.js b/worksheets/worksheet_redlining_files/libs/quarto-html/tippy.umd.min.js new file mode 100644 index 0000000..ca292be --- /dev/null +++ b/worksheets/worksheet_redlining_files/libs/quarto-html/tippy.umd.min.js @@ -0,0 +1,2 @@ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof 
module?module.exports=t(require("@popperjs/core")):"function"==typeof define&&define.amd?define(["@popperjs/core"],t):(e=e||self).tippy=t(e.Popper)}(this,(function(e){"use strict";var t={passive:!0,capture:!0},n=function(){return document.body};function r(e,t,n){if(Array.isArray(e)){var r=e[t];return null==r?Array.isArray(n)?n[t]:n:r}return e}function o(e,t){var n={}.toString.call(e);return 0===n.indexOf("[object")&&n.indexOf(t+"]")>-1}function i(e,t){return"function"==typeof e?e.apply(void 0,t):e}function a(e,t){return 0===t?e:function(r){clearTimeout(n),n=setTimeout((function(){e(r)}),t)};var n}function s(e,t){var n=Object.assign({},e);return t.forEach((function(e){delete n[e]})),n}function u(e){return[].concat(e)}function c(e,t){-1===e.indexOf(t)&&e.push(t)}function p(e){return e.split("-")[0]}function f(e){return[].slice.call(e)}function l(e){return Object.keys(e).reduce((function(t,n){return void 0!==e[n]&&(t[n]=e[n]),t}),{})}function d(){return document.createElement("div")}function v(e){return["Element","Fragment"].some((function(t){return o(e,t)}))}function m(e){return o(e,"MouseEvent")}function g(e){return!(!e||!e._tippy||e._tippy.reference!==e)}function h(e){return v(e)?[e]:function(e){return o(e,"NodeList")}(e)?f(e):Array.isArray(e)?e:f(document.querySelectorAll(e))}function b(e,t){e.forEach((function(e){e&&(e.style.transitionDuration=t+"ms")}))}function y(e,t){e.forEach((function(e){e&&e.setAttribute("data-state",t)}))}function w(e){var t,n=u(e)[0];return null!=n&&null!=(t=n.ownerDocument)&&t.body?n.ownerDocument:document}function E(e,t,n){var r=t+"EventListener";["transitionend","webkitTransitionEnd"].forEach((function(t){e[r](t,n)}))}function O(e,t){for(var n=t;n;){var r;if(e.contains(n))return!0;n=null==n.getRootNode||null==(r=n.getRootNode())?void 0:r.host}return!1}var x={isTouch:!1},C=0;function T(){x.isTouch||(x.isTouch=!0,window.performance&&document.addEventListener("mousemove",A))}function A(){var e=performance.now();e-C<20&&(x.isTouch=!1,document.removeEventListener("mousemove",A)),C=e}function L(){var e=document.activeElement;if(g(e)){var t=e._tippy;e.blur&&!t.state.isVisible&&e.blur()}}var D=!!("undefined"!=typeof window&&"undefined"!=typeof document)&&!!window.msCrypto,R=Object.assign({appendTo:n,aria:{content:"auto",expanded:"auto"},delay:0,duration:[300,250],getReferenceClientRect:null,hideOnClick:!0,ignoreAttributes:!1,interactive:!1,interactiveBorder:2,interactiveDebounce:0,moveTransition:"",offset:[0,10],onAfterUpdate:function(){},onBeforeUpdate:function(){},onCreate:function(){},onDestroy:function(){},onHidden:function(){},onHide:function(){},onMount:function(){},onShow:function(){},onShown:function(){},onTrigger:function(){},onUntrigger:function(){},onClickOutside:function(){},placement:"top",plugins:[],popperOptions:{},render:null,showOnCreate:!1,touch:!0,trigger:"mouseenter focus",triggerTarget:null},{animateFill:!1,followCursor:!1,inlinePositioning:!1,sticky:!1},{allowHTML:!1,animation:"fade",arrow:!0,content:"",inertia:!1,maxWidth:350,role:"tooltip",theme:"",zIndex:9999}),k=Object.keys(R);function P(e){var t=(e.plugins||[]).reduce((function(t,n){var r,o=n.name,i=n.defaultValue;o&&(t[o]=void 0!==e[o]?e[o]:null!=(r=R[o])?r:i);return t}),{});return Object.assign({},e,t)}function j(e,t){var n=Object.assign({},t,{content:i(t.content,[e])},t.ignoreAttributes?{}:function(e,t){return(t?Object.keys(P(Object.assign({},R,{plugins:t}))):k).reduce((function(t,n){var r=(e.getAttribute("data-tippy-"+n)||"").trim();if(!r)return t;if("content"===n)t[n]=r;else 
try{t[n]=JSON.parse(r)}catch(e){t[n]=r}return t}),{})}(e,t.plugins));return n.aria=Object.assign({},R.aria,n.aria),n.aria={expanded:"auto"===n.aria.expanded?t.interactive:n.aria.expanded,content:"auto"===n.aria.content?t.interactive?null:"describedby":n.aria.content},n}function M(e,t){e.innerHTML=t}function V(e){var t=d();return!0===e?t.className="tippy-arrow":(t.className="tippy-svg-arrow",v(e)?t.appendChild(e):M(t,e)),t}function I(e,t){v(t.content)?(M(e,""),e.appendChild(t.content)):"function"!=typeof t.content&&(t.allowHTML?M(e,t.content):e.textContent=t.content)}function S(e){var t=e.firstElementChild,n=f(t.children);return{box:t,content:n.find((function(e){return e.classList.contains("tippy-content")})),arrow:n.find((function(e){return e.classList.contains("tippy-arrow")||e.classList.contains("tippy-svg-arrow")})),backdrop:n.find((function(e){return e.classList.contains("tippy-backdrop")}))}}function N(e){var t=d(),n=d();n.className="tippy-box",n.setAttribute("data-state","hidden"),n.setAttribute("tabindex","-1");var r=d();function o(n,r){var o=S(t),i=o.box,a=o.content,s=o.arrow;r.theme?i.setAttribute("data-theme",r.theme):i.removeAttribute("data-theme"),"string"==typeof r.animation?i.setAttribute("data-animation",r.animation):i.removeAttribute("data-animation"),r.inertia?i.setAttribute("data-inertia",""):i.removeAttribute("data-inertia"),i.style.maxWidth="number"==typeof r.maxWidth?r.maxWidth+"px":r.maxWidth,r.role?i.setAttribute("role",r.role):i.removeAttribute("role"),n.content===r.content&&n.allowHTML===r.allowHTML||I(a,e.props),r.arrow?s?n.arrow!==r.arrow&&(i.removeChild(s),i.appendChild(V(r.arrow))):i.appendChild(V(r.arrow)):s&&i.removeChild(s)}return r.className="tippy-content",r.setAttribute("data-state","hidden"),I(r,e.props),t.appendChild(n),n.appendChild(r),o(e.props,e.props),{popper:t,onUpdate:o}}N.$$tippy=!0;var B=1,H=[],U=[];function _(o,s){var v,g,h,C,T,A,L,k,M=j(o,Object.assign({},R,P(l(s)))),V=!1,I=!1,N=!1,_=!1,F=[],W=a(we,M.interactiveDebounce),X=B++,Y=(k=M.plugins).filter((function(e,t){return k.indexOf(e)===t})),$={id:X,reference:o,popper:d(),popperInstance:null,props:M,state:{isEnabled:!0,isVisible:!1,isDestroyed:!1,isMounted:!1,isShown:!1},plugins:Y,clearDelayTimeouts:function(){clearTimeout(v),clearTimeout(g),cancelAnimationFrame(h)},setProps:function(e){if($.state.isDestroyed)return;ae("onBeforeUpdate",[$,e]),be();var t=$.props,n=j(o,Object.assign({},t,l(e),{ignoreAttributes:!0}));$.props=n,he(),t.interactiveDebounce!==n.interactiveDebounce&&(ce(),W=a(we,n.interactiveDebounce));t.triggerTarget&&!n.triggerTarget?u(t.triggerTarget).forEach((function(e){e.removeAttribute("aria-expanded")})):n.triggerTarget&&o.removeAttribute("aria-expanded");ue(),ie(),J&&J(t,n);$.popperInstance&&(Ce(),Ae().forEach((function(e){requestAnimationFrame(e._tippy.popperInstance.forceUpdate)})));ae("onAfterUpdate",[$,e])},setContent:function(e){$.setProps({content:e})},show:function(){var e=$.state.isVisible,t=$.state.isDestroyed,o=!$.state.isEnabled,a=x.isTouch&&!$.props.touch,s=r($.props.duration,0,R.duration);if(e||t||o||a)return;if(te().hasAttribute("disabled"))return;if(ae("onShow",[$],!1),!1===$.props.onShow($))return;$.state.isVisible=!0,ee()&&(z.style.visibility="visible");ie(),de(),$.state.isMounted||(z.style.transition="none");if(ee()){var u=re(),p=u.box,f=u.content;b([p,f],0)}A=function(){var e;if($.state.isVisible&&!_){if(_=!0,z.offsetHeight,z.style.transition=$.props.moveTransition,ee()&&$.props.animation){var 
t=re(),n=t.box,r=t.content;b([n,r],s),y([n,r],"visible")}se(),ue(),c(U,$),null==(e=$.popperInstance)||e.forceUpdate(),ae("onMount",[$]),$.props.animation&&ee()&&function(e,t){me(e,t)}(s,(function(){$.state.isShown=!0,ae("onShown",[$])}))}},function(){var e,t=$.props.appendTo,r=te();e=$.props.interactive&&t===n||"parent"===t?r.parentNode:i(t,[r]);e.contains(z)||e.appendChild(z);$.state.isMounted=!0,Ce()}()},hide:function(){var e=!$.state.isVisible,t=$.state.isDestroyed,n=!$.state.isEnabled,o=r($.props.duration,1,R.duration);if(e||t||n)return;if(ae("onHide",[$],!1),!1===$.props.onHide($))return;$.state.isVisible=!1,$.state.isShown=!1,_=!1,V=!1,ee()&&(z.style.visibility="hidden");if(ce(),ve(),ie(!0),ee()){var i=re(),a=i.box,s=i.content;$.props.animation&&(b([a,s],o),y([a,s],"hidden"))}se(),ue(),$.props.animation?ee()&&function(e,t){me(e,(function(){!$.state.isVisible&&z.parentNode&&z.parentNode.contains(z)&&t()}))}(o,$.unmount):$.unmount()},hideWithInteractivity:function(e){ne().addEventListener("mousemove",W),c(H,W),W(e)},enable:function(){$.state.isEnabled=!0},disable:function(){$.hide(),$.state.isEnabled=!1},unmount:function(){$.state.isVisible&&$.hide();if(!$.state.isMounted)return;Te(),Ae().forEach((function(e){e._tippy.unmount()})),z.parentNode&&z.parentNode.removeChild(z);U=U.filter((function(e){return e!==$})),$.state.isMounted=!1,ae("onHidden",[$])},destroy:function(){if($.state.isDestroyed)return;$.clearDelayTimeouts(),$.unmount(),be(),delete o._tippy,$.state.isDestroyed=!0,ae("onDestroy",[$])}};if(!M.render)return $;var q=M.render($),z=q.popper,J=q.onUpdate;z.setAttribute("data-tippy-root",""),z.id="tippy-"+$.id,$.popper=z,o._tippy=$,z._tippy=$;var G=Y.map((function(e){return e.fn($)})),K=o.hasAttribute("aria-expanded");return he(),ue(),ie(),ae("onCreate",[$]),M.showOnCreate&&Le(),z.addEventListener("mouseenter",(function(){$.props.interactive&&$.state.isVisible&&$.clearDelayTimeouts()})),z.addEventListener("mouseleave",(function(){$.props.interactive&&$.props.trigger.indexOf("mouseenter")>=0&&ne().addEventListener("mousemove",W)})),$;function Q(){var e=$.props.touch;return Array.isArray(e)?e:[e,0]}function Z(){return"hold"===Q()[0]}function ee(){var e;return!(null==(e=$.props.render)||!e.$$tippy)}function te(){return L||o}function ne(){var e=te().parentNode;return e?w(e):document}function re(){return S(z)}function oe(e){return $.state.isMounted&&!$.state.isVisible||x.isTouch||C&&"focus"===C.type?0:r($.props.delay,e?0:1,R.delay)}function ie(e){void 0===e&&(e=!1),z.style.pointerEvents=$.props.interactive&&!e?"":"none",z.style.zIndex=""+$.props.zIndex}function ae(e,t,n){var r;(void 0===n&&(n=!0),G.forEach((function(n){n[e]&&n[e].apply(n,t)})),n)&&(r=$.props)[e].apply(r,t)}function se(){var e=$.props.aria;if(e.content){var t="aria-"+e.content,n=z.id;u($.props.triggerTarget||o).forEach((function(e){var r=e.getAttribute(t);if($.state.isVisible)e.setAttribute(t,r?r+" "+n:n);else{var o=r&&r.replace(n,"").trim();o?e.setAttribute(t,o):e.removeAttribute(t)}}))}}function ue(){!K&&$.props.aria.expanded&&u($.props.triggerTarget||o).forEach((function(e){$.props.interactive?e.setAttribute("aria-expanded",$.state.isVisible&&e===te()?"true":"false"):e.removeAttribute("aria-expanded")}))}function ce(){ne().removeEventListener("mousemove",W),H=H.filter((function(e){return e!==W}))}function pe(e){if(!x.isTouch||!N&&"mousedown"!==e.type){var t=e.composedPath&&e.composedPath()[0]||e.target;if(!$.props.interactive||!O(z,t)){if(u($.props.triggerTarget||o).some((function(e){return 
O(e,t)}))){if(x.isTouch)return;if($.state.isVisible&&$.props.trigger.indexOf("click")>=0)return}else ae("onClickOutside",[$,e]);!0===$.props.hideOnClick&&($.clearDelayTimeouts(),$.hide(),I=!0,setTimeout((function(){I=!1})),$.state.isMounted||ve())}}}function fe(){N=!0}function le(){N=!1}function de(){var e=ne();e.addEventListener("mousedown",pe,!0),e.addEventListener("touchend",pe,t),e.addEventListener("touchstart",le,t),e.addEventListener("touchmove",fe,t)}function ve(){var e=ne();e.removeEventListener("mousedown",pe,!0),e.removeEventListener("touchend",pe,t),e.removeEventListener("touchstart",le,t),e.removeEventListener("touchmove",fe,t)}function me(e,t){var n=re().box;function r(e){e.target===n&&(E(n,"remove",r),t())}if(0===e)return t();E(n,"remove",T),E(n,"add",r),T=r}function ge(e,t,n){void 0===n&&(n=!1),u($.props.triggerTarget||o).forEach((function(r){r.addEventListener(e,t,n),F.push({node:r,eventType:e,handler:t,options:n})}))}function he(){var e;Z()&&(ge("touchstart",ye,{passive:!0}),ge("touchend",Ee,{passive:!0})),(e=$.props.trigger,e.split(/\s+/).filter(Boolean)).forEach((function(e){if("manual"!==e)switch(ge(e,ye),e){case"mouseenter":ge("mouseleave",Ee);break;case"focus":ge(D?"focusout":"blur",Oe);break;case"focusin":ge("focusout",Oe)}}))}function be(){F.forEach((function(e){var t=e.node,n=e.eventType,r=e.handler,o=e.options;t.removeEventListener(n,r,o)})),F=[]}function ye(e){var t,n=!1;if($.state.isEnabled&&!xe(e)&&!I){var r="focus"===(null==(t=C)?void 0:t.type);C=e,L=e.currentTarget,ue(),!$.state.isVisible&&m(e)&&H.forEach((function(t){return t(e)})),"click"===e.type&&($.props.trigger.indexOf("mouseenter")<0||V)&&!1!==$.props.hideOnClick&&$.state.isVisible?n=!0:Le(e),"click"===e.type&&(V=!n),n&&!r&&De(e)}}function we(e){var t=e.target,n=te().contains(t)||z.contains(t);"mousemove"===e.type&&n||function(e,t){var n=t.clientX,r=t.clientY;return e.every((function(e){var t=e.popperRect,o=e.popperState,i=e.props.interactiveBorder,a=p(o.placement),s=o.modifiersData.offset;if(!s)return!0;var u="bottom"===a?s.top.y:0,c="top"===a?s.bottom.y:0,f="right"===a?s.left.x:0,l="left"===a?s.right.x:0,d=t.top-r+u>i,v=r-t.bottom-c>i,m=t.left-n+f>i,g=n-t.right-l>i;return d||v||m||g}))}(Ae().concat(z).map((function(e){var t,n=null==(t=e._tippy.popperInstance)?void 0:t.state;return n?{popperRect:e.getBoundingClientRect(),popperState:n,props:M}:null})).filter(Boolean),e)&&(ce(),De(e))}function Ee(e){xe(e)||$.props.trigger.indexOf("click")>=0&&V||($.props.interactive?$.hideWithInteractivity(e):De(e))}function Oe(e){$.props.trigger.indexOf("focusin")<0&&e.target!==te()||$.props.interactive&&e.relatedTarget&&z.contains(e.relatedTarget)||De(e)}function xe(e){return!!x.isTouch&&Z()!==e.type.indexOf("touch")>=0}function Ce(){Te();var t=$.props,n=t.popperOptions,r=t.placement,i=t.offset,a=t.getReferenceClientRect,s=t.moveTransition,u=ee()?S(z).arrow:null,c=a?{getBoundingClientRect:a,contextElement:a.contextElement||te()}:o,p=[{name:"offset",options:{offset:i}},{name:"preventOverflow",options:{padding:{top:2,bottom:2,left:5,right:5}}},{name:"flip",options:{padding:5}},{name:"computeStyles",options:{adaptive:!s}},{name:"$$tippy",enabled:!0,phase:"beforeWrite",requires:["computeStyles"],fn:function(e){var t=e.state;if(ee()){var 
n=re().box;["placement","reference-hidden","escaped"].forEach((function(e){"placement"===e?n.setAttribute("data-placement",t.placement):t.attributes.popper["data-popper-"+e]?n.setAttribute("data-"+e,""):n.removeAttribute("data-"+e)})),t.attributes.popper={}}}}];ee()&&u&&p.push({name:"arrow",options:{element:u,padding:3}}),p.push.apply(p,(null==n?void 0:n.modifiers)||[]),$.popperInstance=e.createPopper(c,z,Object.assign({},n,{placement:r,onFirstUpdate:A,modifiers:p}))}function Te(){$.popperInstance&&($.popperInstance.destroy(),$.popperInstance=null)}function Ae(){return f(z.querySelectorAll("[data-tippy-root]"))}function Le(e){$.clearDelayTimeouts(),e&&ae("onTrigger",[$,e]),de();var t=oe(!0),n=Q(),r=n[0],o=n[1];x.isTouch&&"hold"===r&&o&&(t=o),t?v=setTimeout((function(){$.show()}),t):$.show()}function De(e){if($.clearDelayTimeouts(),ae("onUntrigger",[$,e]),$.state.isVisible){if(!($.props.trigger.indexOf("mouseenter")>=0&&$.props.trigger.indexOf("click")>=0&&["mouseleave","mousemove"].indexOf(e.type)>=0&&V)){var t=oe(!1);t?g=setTimeout((function(){$.state.isVisible&&$.hide()}),t):h=requestAnimationFrame((function(){$.hide()}))}}else ve()}}function F(e,n){void 0===n&&(n={});var r=R.plugins.concat(n.plugins||[]);document.addEventListener("touchstart",T,t),window.addEventListener("blur",L);var o=Object.assign({},n,{plugins:r}),i=h(e).reduce((function(e,t){var n=t&&_(t,o);return n&&e.push(n),e}),[]);return v(e)?i[0]:i}F.defaultProps=R,F.setDefaultProps=function(e){Object.keys(e).forEach((function(t){R[t]=e[t]}))},F.currentInput=x;var W=Object.assign({},e.applyStyles,{effect:function(e){var t=e.state,n={popper:{position:t.options.strategy,left:"0",top:"0",margin:"0"},arrow:{position:"absolute"},reference:{}};Object.assign(t.elements.popper.style,n.popper),t.styles=n,t.elements.arrow&&Object.assign(t.elements.arrow.style,n.arrow)}}),X={mouseover:"mouseenter",focusin:"focus",click:"click"};var Y={name:"animateFill",defaultValue:!1,fn:function(e){var t;if(null==(t=e.props.render)||!t.$$tippy)return{};var n=S(e.popper),r=n.box,o=n.content,i=e.props.animateFill?function(){var e=d();return e.className="tippy-backdrop",y([e],"hidden"),e}():null;return{onCreate:function(){i&&(r.insertBefore(i,r.firstElementChild),r.setAttribute("data-animatefill",""),r.style.overflow="hidden",e.setProps({arrow:!1,animation:"shift-away"}))},onMount:function(){if(i){var e=r.style.transitionDuration,t=Number(e.replace("ms",""));o.style.transitionDelay=Math.round(t/10)+"ms",i.style.transitionDuration=e,y([i],"visible")}},onShow:function(){i&&(i.style.transitionDuration="0ms")},onHide:function(){i&&y([i],"hidden")}}}};var $={clientX:0,clientY:0},q=[];function z(e){var t=e.clientX,n=e.clientY;$={clientX:t,clientY:n}}var J={name:"followCursor",defaultValue:!1,fn:function(e){var t=e.reference,n=w(e.props.triggerTarget||t),r=!1,o=!1,i=!0,a=e.props;function s(){return"initial"===e.props.followCursor&&e.state.isVisible}function u(){n.addEventListener("mousemove",f)}function c(){n.removeEventListener("mousemove",f)}function p(){r=!0,e.setProps({getReferenceClientRect:null}),r=!1}function f(n){var r=!n.target||t.contains(n.target),o=e.props.followCursor,i=n.clientX,a=n.clientY,s=t.getBoundingClientRect(),u=i-s.left,c=a-s.top;!r&&e.props.interactive||e.setProps({getReferenceClientRect:function(){var e=t.getBoundingClientRect(),n=i,r=a;"initial"===o&&(n=e.left+u,r=e.top+c);var 
s="horizontal"===o?e.top:r,p="vertical"===o?e.right:n,f="horizontal"===o?e.bottom:r,l="vertical"===o?e.left:n;return{width:p-l,height:f-s,top:s,right:p,bottom:f,left:l}}})}function l(){e.props.followCursor&&(q.push({instance:e,doc:n}),function(e){e.addEventListener("mousemove",z)}(n))}function d(){0===(q=q.filter((function(t){return t.instance!==e}))).filter((function(e){return e.doc===n})).length&&function(e){e.removeEventListener("mousemove",z)}(n)}return{onCreate:l,onDestroy:d,onBeforeUpdate:function(){a=e.props},onAfterUpdate:function(t,n){var i=n.followCursor;r||void 0!==i&&a.followCursor!==i&&(d(),i?(l(),!e.state.isMounted||o||s()||u()):(c(),p()))},onMount:function(){e.props.followCursor&&!o&&(i&&(f($),i=!1),s()||u())},onTrigger:function(e,t){m(t)&&($={clientX:t.clientX,clientY:t.clientY}),o="focus"===t.type},onHidden:function(){e.props.followCursor&&(p(),c(),i=!0)}}}};var G={name:"inlinePositioning",defaultValue:!1,fn:function(e){var t,n=e.reference;var r=-1,o=!1,i=[],a={name:"tippyInlinePositioning",enabled:!0,phase:"afterWrite",fn:function(o){var a=o.state;e.props.inlinePositioning&&(-1!==i.indexOf(a.placement)&&(i=[]),t!==a.placement&&-1===i.indexOf(a.placement)&&(i.push(a.placement),e.setProps({getReferenceClientRect:function(){return function(e){return function(e,t,n,r){if(n.length<2||null===e)return t;if(2===n.length&&r>=0&&n[0].left>n[1].right)return n[r]||t;switch(e){case"top":case"bottom":var o=n[0],i=n[n.length-1],a="top"===e,s=o.top,u=i.bottom,c=a?o.left:i.left,p=a?o.right:i.right;return{top:s,bottom:u,left:c,right:p,width:p-c,height:u-s};case"left":case"right":var f=Math.min.apply(Math,n.map((function(e){return e.left}))),l=Math.max.apply(Math,n.map((function(e){return e.right}))),d=n.filter((function(t){return"left"===e?t.left===f:t.right===l})),v=d[0].top,m=d[d.length-1].bottom;return{top:v,bottom:m,left:f,right:l,width:l-f,height:m-v};default:return t}}(p(e),n.getBoundingClientRect(),f(n.getClientRects()),r)}(a.placement)}})),t=a.placement)}};function s(){var t;o||(t=function(e,t){var n;return{popperOptions:Object.assign({},e.popperOptions,{modifiers:[].concat(((null==(n=e.popperOptions)?void 0:n.modifiers)||[]).filter((function(e){return e.name!==t.name})),[t])})}}(e.props,a),o=!0,e.setProps(t),o=!1)}return{onCreate:s,onAfterUpdate:s,onTrigger:function(t,n){if(m(n)){var o=f(e.reference.getClientRects()),i=o.find((function(e){return e.left-2<=n.clientX&&e.right+2>=n.clientX&&e.top-2<=n.clientY&&e.bottom+2>=n.clientY})),a=o.indexOf(i);r=a>-1?a:r}},onHidden:function(){r=-1}}}};var K={name:"sticky",defaultValue:!1,fn:function(e){var t=e.reference,n=e.popper;function r(t){return!0===e.props.sticky||e.props.sticky===t}var o=null,i=null;function a(){var s=r("reference")?(e.popperInstance?e.popperInstance.state.elements.reference:t).getBoundingClientRect():null,u=r("popper")?n.getBoundingClientRect():null;(s&&Q(o,s)||u&&Q(i,u))&&e.popperInstance&&e.popperInstance.update(),o=s,i=u,e.state.isMounted&&requestAnimationFrame(a)}return{onMount:function(){e.props.sticky&&a()}}}};function Q(e,t){return!e||!t||(e.top!==t.top||e.right!==t.right||e.bottom!==t.bottom||e.left!==t.left)}return F.setDefaultProps({plugins:[Y,J,G,K],render:N}),F.createSingleton=function(e,t){var n;void 0===t&&(t={});var r,o=e,i=[],a=[],c=t.overrides,p=[],f=!1;function l(){a=o.map((function(e){return u(e.props.triggerTarget||e.reference)})).reduce((function(e,t){return e.concat(t)}),[])}function v(){i=o.map((function(e){return e.reference}))}function 
m(e){o.forEach((function(t){e?t.enable():t.disable()}))}function g(e){return o.map((function(t){var n=t.setProps;return t.setProps=function(o){n(o),t.reference===r&&e.setProps(o)},function(){t.setProps=n}}))}function h(e,t){var n=a.indexOf(t);if(t!==r){r=t;var s=(c||[]).concat("content").reduce((function(e,t){return e[t]=o[n].props[t],e}),{});e.setProps(Object.assign({},s,{getReferenceClientRect:"function"==typeof s.getReferenceClientRect?s.getReferenceClientRect:function(){var e;return null==(e=i[n])?void 0:e.getBoundingClientRect()}}))}}m(!1),v(),l();var b={fn:function(){return{onDestroy:function(){m(!0)},onHidden:function(){r=null},onClickOutside:function(e){e.props.showOnCreate&&!f&&(f=!0,r=null)},onShow:function(e){e.props.showOnCreate&&!f&&(f=!0,h(e,i[0]))},onTrigger:function(e,t){h(e,t.currentTarget)}}}},y=F(d(),Object.assign({},s(t,["overrides"]),{plugins:[b].concat(t.plugins||[]),triggerTarget:a,popperOptions:Object.assign({},t.popperOptions,{modifiers:[].concat((null==(n=t.popperOptions)?void 0:n.modifiers)||[],[W])})})),w=y.show;y.show=function(e){if(w(),!r&&null==e)return h(y,i[0]);if(!r||null!=e){if("number"==typeof e)return i[e]&&h(y,i[e]);if(o.indexOf(e)>=0){var t=e.reference;return h(y,t)}return i.indexOf(e)>=0?h(y,e):void 0}},y.showNext=function(){var e=i[0];if(!r)return y.show(0);var t=i.indexOf(r);y.show(i[t+1]||e)},y.showPrevious=function(){var e=i[i.length-1];if(!r)return y.show(e);var t=i.indexOf(r),n=i[t-1]||e;y.show(n)};var E=y.setProps;return y.setProps=function(e){c=e.overrides||c,E(e)},y.setInstances=function(e){m(!0),p.forEach((function(e){return e()})),o=e,m(!1),v(),l(),p=g(y),y.setProps({triggerTarget:a})},p=g(y),y},F.delegate=function(e,n){var r=[],o=[],i=!1,a=n.target,c=s(n,["target"]),p=Object.assign({},c,{trigger:"manual",touch:!1}),f=Object.assign({touch:R.touch},c,{showOnCreate:!0}),l=F(e,p);function d(e){if(e.target&&!i){var t=e.target.closest(a);if(t){var r=t.getAttribute("data-tippy-trigger")||n.trigger||R.trigger;if(!t._tippy&&!("touchstart"===e.type&&"boolean"==typeof f.touch||"touchstart"!==e.type&&r.indexOf(X[e.type])<0)){var s=F(t,f);s&&(o=o.concat(s))}}}}function v(e,t,n,o){void 0===o&&(o=!1),e.addEventListener(t,n,o),r.push({node:e,eventType:t,handler:n,options:o})}return u(l).forEach((function(e){var n=e.destroy,a=e.enable,s=e.disable;e.destroy=function(e){void 0===e&&(e=!0),e&&o.forEach((function(e){e.destroy()})),o=[],r.forEach((function(e){var t=e.node,n=e.eventType,r=e.handler,o=e.options;t.removeEventListener(n,r,o)})),r=[],n()},e.enable=function(){a(),o.forEach((function(e){return e.enable()})),i=!1},e.disable=function(){s(),o.forEach((function(e){return e.disable()})),i=!0},function(e){var n=e.reference;v(n,"touchstart",d,t),v(n,"mouseover",d),v(n,"focusin",d),v(n,"click",d)}(e)})),l},F.hideAll=function(e){var t=void 0===e?{}:e,n=t.exclude,r=t.duration;U.forEach((function(e){var t=!1;if(n&&(t=g(n)?e.reference===n:e.popper===n.popper),!t){var o=e.props.duration;e.setProps({duration:r}),e.hide(),e.state.isDestroyed||e.setProps({duration:o})}}))},F.roundArrow='',F})); + diff --git a/worksheets/worksheet_redlining_student_edition.qmd b/worksheets/worksheet_redlining_student_edition.qmd new file mode 100644 index 0000000..1cbd96f --- /dev/null +++ b/worksheets/worksheet_redlining_student_edition.qmd @@ -0,0 +1,659 @@ +--- +title: "student edition" +format: gfm +--- + +# Make map + +The provided R script outlines a comprehensive workflow for mapping and analyzing geographic data using various spatial functions and visualization tools in R. 
This process involves loading, processing, and mapping data with a focus on urban planning and environmental analysis, particularly using the example of redlining data for the city of Denver. + +Overview and Usage +Setup and Dependencies: The script starts by loading necessary R packages like sf for handling spatial data, ggplot2 for plotting, and others like gdalcubes and dplyr for data manipulation and analysis. It also includes installation of custom libraries like basemapR via devtools. + +Data Loading (load_city_redlining_data): This function retrieves redlining data from a specified URL, filters it by city, and reads it into an sf object, which is a standard format for storing geometric location data and associated attributes in R. + +Data Retrieval (get_places): This function is designed to fetch additional geographic data like roads and rivers based on the bounding box of the provided spatial layer. It demonstrates the integration of external data sources into spatial analysis workflows. + +Data Visualization (plot_city_redlining and split_plot): These functions are used to create detailed maps that overlay various data layers such as roads, rivers, and residential zones based on redlining grades. The use of thematic mapping and faceting provides insights into the spatial distribution of urban features. + +Advanced Spatial Analysis (process_and_plot_sf_layers): This function performs complex spatial operations like buffering and intersecting different layers to analyze the interactions between them. It exemplifies how to handle and visualize spatial relationships and dependencies. + +```{r, collapse=TRUE} +library(devtools) +install_github('Chrisjb/basemapR') +library(basemapR) +library(tidytext) +library(sf) +library(ggplot2) +library(ggthemes) +library(dplyr) +library(rstac) +library(gdalcubes) +library(gdalUtils) +library(gdalcubes) +library(colorspace) +library(terra) +library(tidyterra) +library(tidytext) +library(ggwordcloud) +library(osmextract) +library(sf) +library(ggplot2) +library(ggthemes) +library(glue) +library(purrr) +``` + +```{r, collapse=TRUE} +# Function to load and filter redlining data by city +load_city_redlining_data <- function(city_name) { + # URL to the GeoJSON data + url <- "https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson" + + # Read the GeoJSON file into an sf object + redlining_data <- read_sf(url) + + # Filter the data for the specified city and non-empty grades + city_redline <- redlining_data %>% + filter(city == city_name) + + # Return the filtered data + return(city_redline) +} + +``` + +```{r, cache=TRUE, warning=FALSE, message=FALSE} +# Load redlining data for Denver +denver_redlining <- load_city_redlining_data("Denver") +denver_redlining +``` + +```{r, collapse=TRUE} +get_places <- function(polygon_layer, type = "food") { + # Check if the input is an sf object + if (!inherits(polygon_layer, "sf")) { + stop("The provided object is not an sf object.") + } + + # Create a bounding box from the input sf object + bbox_here <- st_bbox(polygon_layer) |> + st_as_sfc() + + if (type == "roads") { + my_layer <- "lines" + my_query <- "SELECT * FROM lines WHERE ( + highway IN ('motorway', 'trunk', 'primary', 'secondary', 'tertiary'))" + title <- "Major roads" + } + + if (type == "rivers") { + my_layer <- "lines" + my_query <- "SELECT * FROM lines WHERE ( + waterway IN ('river'))" + title <- "Major rivers" + } + + # Use the bbox to get data with oe_get(), specifying the desired 
layer and a custom SQL query for fresh food places + tryCatch({ + places <- oe_get( + place = bbox_here, + layer = my_layer, + query = my_query, + quiet = TRUE + ) + + places <- st_make_valid(places) + + # Crop the data to the bounding box + cropped_places <- st_crop(places, bbox_here) + + # Plotting the cropped fresh food places + plot <- ggplot(data = cropped_places) + + geom_sf(fill="cornflowerblue", color="cornflowerblue") + + ggtitle(title) + + theme_tufte() + + theme(legend.position = "none", # Optionally hide the legend + axis.text = element_blank(), # Remove axis text + axis.title = element_blank(), # Remove axis titles + axis.ticks = element_blank(), # Remove axis ticks + plot.background = element_rect(fill = "white", color = NA), # Set the plot background to white + panel.background = element_rect(fill = "white", color = NA), # Set the panel background to white + panel.grid.major = element_blank(), # Remove major grid lines + panel.grid.minor = element_blank()) + + # Save the plot as a PNG file + png_filename <- paste0(title, "_", Sys.Date(), ".png") + ggsave(png_filename, plot, width = 10, height = 8, units = "in") + + # Return the cropped dataset + return(cropped_places) + }, error = function(e) { + stop("Failed to retrieve or plot data: ", e$message) + }) +} +``` + + +```{r, cache=TRUE, warning=FALSE, message=FALSE} +roads <- get_places(denver_redlining, type="roads") + +rivers <- get_places(denver_redlining, type="rivers") +``` + +```{r, warning=FALSE, collapse=TRUE} +plot_city_redlining <- function(redlining_data, filename = "redlining_plot.png") { + # Fetch additional geographic data based on redlining data + roads <- get_places(redlining_data, type = "roads") + rivers <- get_places(redlining_data, type = "rivers") + + # Filter residential zones with valid grades and where city survey is TRUE + residential_zones <- redlining_data %>% + filter(city_survey == TRUE & grade != "") + + # Colors for the grades + colors <- c("#76a865", "#7cb5bd", "#ffff00", "#d9838d") + + # Plot the data using ggplot2 + plot <- ggplot() + + geom_sf(data = roads, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.5, lwd = 1.1) + + geom_sf(data = residential_zones, aes(fill = grade), alpha = 0.5) + + theme_tufte() + + scale_fill_manual(values = colors) + + labs(fill = 'HOLC Categories') + + theme( + plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank(), + legend.position = "right" + ) + + # Save the plot as a high-resolution PNG file + ggsave(filename, plot, width = 10, height = 8, units = "in", dpi = 600) + + # Return the plot object if needed for further manipulation or checking + return(plot) +} +``` + +```{r, collapse=TRUE} +split_plot <- function(sf_data, roads, rivers) { + # Filter for grades A, B, C, and D + sf_data_filtered <- sf_data %>% + filter(grade %in% c('A', 'B', 'C', 'D')) + + # Define a color for each grade + grade_colors <- c("A" = "#76a865", "B" = "#7cb5bd", "C" = "#ffff00", "D" = "#d9838d") + + # Create the plot with panels for each grade + plot <- ggplot(data = sf_data_filtered) + + geom_sf(data = roads, alpha = 0.1, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) + + geom_sf(aes(fill = grade)) + + facet_wrap(~ grade, nrow = 1) + # Free scales for different zoom levels if needed + scale_fill_manual(values = grade_colors) + + theme_minimal() + + labs(fill = 'HOLC Grade') + + theme_tufte() + + 
theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "none", # Optionally hide the legend + axis.text = element_blank(), # Remove axis text + axis.title = element_blank(), # Remove axis titles + axis.ticks = element_blank(), # Remove axis ticks + panel.grid.major = element_blank(), # Remove major grid lines + panel.grid.minor = element_blank()) + + ggsave(plot, filename = "HOLC_grades_individually.png", width = 10, height = 4, units = "in", dpi = 1200) + return(plot) +} +``` + +```{r, cache=TRUE, warning=FALSE, message=FALSE} +denver_plot <- plot_city_redlining(denver_redlining) +denver_plot +``` + + +```{r, cache=TRUE, warning=FALSE, message=FALSE} +plot_row <- split_plot(denver_redlining, roads, rivers) +plot_row +``` + +```{r, collapse=TRUE} +process_and_plot_sf_layers <- function(layer1, layer2, output_file = "output_plot.png") { + # Make geometries valid +layer1 <- st_make_valid(layer1) +layer2 <- st_make_valid(layer2) + +# Optionally, simplify geometries to remove duplicate vertices +layer1 <- st_simplify(layer1, preserveTopology = TRUE) |> + filter(grade != "") + +# Prepare a list to store results +results <- list() + +# Loop through each grade and perform operations +for (grade in c("A", "B", "C", "D")) { + # Filter layer1 for current grade + layer1_grade <- layer1[layer1$grade == grade, ] + + # Buffer the geometries of the current grade + buffered_layer1_grade <- st_buffer(layer1_grade, dist = 500) + + # Intersect with the second layer + intersections <- st_intersects(layer2, buffered_layer1_grade, sparse = FALSE) + selected_polygons <- layer2[rowSums(intersections) > 0, ] + + # Add a new column to store the grade information + selected_polygons$grade <- grade + + # Store the result + results[[grade]] <- selected_polygons +} + +# Combine all selected polygons from different grades into one sf object +final_selected_polygons <- do.call(rbind, results) + + # Define colors for the grades + grade_colors <- c("A" = "grey", "B" = "grey", "C" = "grey", "D" = "grey") + + # Create the plot + plot <- ggplot() + + geom_sf(data = roads, alpha = 0.05, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) + + geom_sf(data = layer1, fill = "grey", color = "grey", size = 0.1) + + facet_wrap(~ grade, nrow = 1) + + geom_sf(data = final_selected_polygons,fill = "green", color = "green", size = 0.1) + + facet_wrap(~ grade, nrow = 1) + + #scale_fill_manual(values = grade_colors) + + #scale_color_manual(values = grade_colors) + + theme_minimal() + + labs(fill = 'HOLC Grade') + + theme_tufte() + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "none", + axis.text = element_blank(), + axis.title = element_blank(), + axis.ticks = element_blank(), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank()) + + # Save the plot as a high-resolution PNG file + ggsave(output_file, plot, width = 10, height = 4, units = "in", dpi = 1200) + + # Return the plot for optional further use + return(list(plot=plot, sf = final_selected_polygons)) +} +``` + +# NDVI +This section pertain to the processing of satellite imagery to calculate the Normalized Difference Vegetation Index (NDVI), a popular remote sensing measurement used to assess the presence and condition of green vegetation. 
The NDVI process helps in understanding the spatial distribution of vegetation, urban heat effects, and environmental management. + +How to Use the NDVI Functions +Process Satellite Data (process_satellite_data): This function takes a polygon layer (e.g., administrative boundaries or any spatial delineation in sf format), a start and end date, and specific satellite assets (e.g., bands of Sentinel-2). It calculates the NDVI for the specified area and period, creating an animated GIF to visually represent changes over time. This is useful for observing phenological changes or assessing vegetation health periodically. + +Yearly Average NDVI (yearly_average_ndvi): This function calculates the yearly average NDVI for a given spatial extent defined by a polygon layer. It filters cloud-free satellite images within a year to compute a median NDVI, providing insights into the annual vegetation status which is crucial for environmental monitoring and urban planning. + +Create Mask and Plot (create_mask_and_plot): After processing NDVI, this function overlays the NDVI data on a map with additional geographical layers (e.g., roads, rivers). It applies a mask to segment the NDVI results by different grades or zones within the area, which is particularly useful for detailed spatial analysis in urban planning or environmental studies. + +```{r, collapse=TRUE} +polygon_layer <- denver_redlining +# Function to process satellite data based on an SF polygon's extent +process_satellite_data <- function(polygon_layer, start_date, end_date, assets, fps = 1, output_file = "anim.gif") { + # Record start time + start_time <- Sys.time() + + # Calculate the bbox from the polygon layer + bbox <- st_bbox(polygon_layer) + + s = stac("https://earth-search.aws.element84.com/v0") + + + # Use stacR to search for Sentinel-2 images within the bbox and date range + items = s |> stac_search( + collections = "sentinel-s2-l2a-cogs", + bbox = c(bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]), + datetime = paste(start_date, end_date, sep = "/"), + limit = 500 + ) %>% + post_request() + + # Define mask for Sentinel-2 image quality + #S2.mask <- image_mask("SCL", values = c(3, 8, 9)) + + # Create a collection of images filtering by cloud cover + col <- stac_image_collection(items$features, asset_names = assets, property_filter = function(x) {x[["eo:cloud_cover"]] < 30}) + + # Define a view for processing the data + v <- cube_view(srs = "EPSG:4326", + extent = list(t0 = start_date, t1 = end_date, + left = bbox["xmin"], right = bbox["xmax"], + top = bbox["ymax"], bottom = bbox["ymin"]), + dx = 0.001, dy = 0.001, dt = "P1M", + aggregation = "median", resampling = "bilinear") + + # Calculate NDVI and create an animation + ndvi_col <- function(n) { + rev(sequential_hcl(n, "Green-Yellow")) + } + + #raster_cube(col, v, mask = S2.mask) %>% + raster_cube(col, v) %>% + select_bands(c("B04", "B08")) %>% + apply_pixel("(B08-B04)/(B08+B04)", "NDVI") %>% + gdalcubes::animate(col = ndvi_col, zlim = c(-0.2, 1), key.pos = 1, save_as = output_file, fps = fps) + + # Calculate processing time + end_time <- Sys.time() + processing_time <- difftime(end_time, start_time) + + # Return processing time + return(processing_time) +} +``` + +```{r, cache=TRUE, warning=FALSE, message=FALSE} +#processing_time <- process_satellite_data(denver_redlining, "2022-05-31", "2023-05-31", c("B04", "B08")) +``` + +![](../worksheets/anim.gif) + + + + + +```{r, collapse=TRUE} +create_mask_and_plot <- function(redlining_sf, background_raster = ndvi$raster, roads = 
NULL, rivers = NULL){ + start_time <- Sys.time() # Start timing + + # Validate and prepare the redlining data + redlining_sf <- redlining_sf %>% + filter(grade != "") %>% + st_make_valid() + + +bbox <- st_bbox(redlining_sf) # Get original bounding box + + +expanded_bbox <- expand_bbox(bbox, 6000, 1000) # + + +expanded_bbox_poly <- st_as_sfc(expanded_bbox, crs = st_crs(redlining_sf)) %>% + st_make_valid() + + # Initialize an empty list to store masks + masks <- list() + + # Iterate over each grade to create masks + unique_grades <- unique(redlining_sf$grade) + for (grade in unique_grades) { + # Filter polygons by grade + grade_polygons <- redlining_sf[redlining_sf$grade == grade, ] + + # Create an "inverted" mask by subtracting these polygons from the background + mask <- st_difference(expanded_bbox_poly, st_union(grade_polygons)) + + # Store the mask in the list with the grade as the name + masks[[grade]] <- st_sf(geometry = mask, grade = grade) + } + + # Combine all masks into a single sf object + mask_sf <- do.call(rbind, masks) + + # Normalize the grades so that C.2 becomes C, but correctly handle other grades + mask_sf$grade <- ifelse(mask_sf$grade == "C.2", "C", mask_sf$grade) + + # Prepare the plot + plot <- ggplot() + + geom_spatraster(data = background_raster, aes(fill = NDVI)) + + scale_fill_viridis_c(name = "NDVI", option = "viridis", direction = -1) + + + geom_sf(data = mask_sf, aes(color = grade), fill = "white", size = 0.1, show.legend = FALSE) + + scale_color_manual(values = c("A" = "white", "B" = "white", "C" = "white", "D" = "white"), name = "Grade") + + facet_wrap(~ grade, nrow = 1) + + geom_sf(data = roads, alpha = 1, lwd = 0.1, color="white") + + geom_sf(data = rivers, color = "white", alpha = 0.5, lwd = 1.1) + + labs(title = "NDVI: Normalized Difference Vegetation Index") + + theme_minimal() + + coord_sf(xlim = c(bbox["xmin"], bbox["xmax"]), + ylim = c(bbox["ymin"], bbox["ymax"]), + expand = FALSE) + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "bottom", + axis.text = element_blank(), + axis.title = element_blank(), + axis.ticks = element_blank(), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank()) + + # Save the plot + ggsave("redlining_mask_ndvi.png", plot, width = 10, height = 4, dpi = 600) + + end_time <- Sys.time() # End timing + runtime <- end_time - start_time + + # Return the plot and runtime + return(list(plot = plot, runtime = runtime, mask_sf = mask_sf)) +} +``` + +```{r, collapse=TRUE} +yearly_average_ndvi <- function(polygon_layer, output_file = "ndvi.png", dx = 0.01, dy = 0.01) { + # Record start time + start_time <- Sys.time() + + # Calculate the bbox from the polygon layer + bbox <- st_bbox(polygon_layer) + + s = stac("https://earth-search.aws.element84.com/v0") + + # Search for Sentinel-2 images within the bbox for June + items <- s |> stac_search( + collections = "sentinel-s2-l2a-cogs", + bbox = c(bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]), + datetime = "2023-01-01/2023-12-31", + limit = 500 + ) %>% + post_request() + + # Create a collection of images filtering by cloud cover + col <- stac_image_collection(items$features, asset_names = c("B04", "B08"), property_filter = function(x) {x[["eo:cloud_cover"]] < 80}) + + # Define a view for processing the data specifically for June + v <- cube_view(srs = "EPSG:4326", + extent = list(t0 = "2023-01-01", t1 = "2023-12-31", + left = bbox["xmin"], right = bbox["xmax"], + 
top = bbox["ymax"], bottom = bbox["ymin"]), + dx = dx, dy = dy, dt = "P1Y", + aggregation = "median", resampling = "bilinear") + + # Process NDVI + ndvi_rast <- raster_cube(col, v) %>% + select_bands(c("B04", "B08")) %>% + apply_pixel("(B08-B04)/(B08+B04)", "NDVI") %>% + write_tif() |> + terra::rast() + + + # Convert terra Raster to ggplot using tidyterra +ndvi_plot <- ggplot() + + geom_spatraster(data = ndvi_rast, aes(fill = NDVI)) + + scale_fill_viridis_c(option = "viridis", direction = -1, name = "NDVI") + + labs(title = "NDVI mean for 2023") + + theme_minimal() + + coord_sf() + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "right", + axis.text = element_blank(), + axis.title = element_blank(), + axis.ticks = element_blank(), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank()) + + # Save the plot as a high-resolution PNG file + ggsave(output_file, ndvi_plot, width = 10, height = 8, dpi = 600) + + # Calculate processing time + end_time <- Sys.time() + processing_time <- difftime(end_time, start_time) + + # Return the plot and processing time + return(list(plot = ndvi_plot, processing_time = processing_time, raster = ndvi_rast)) +} +``` + +```{r, cache=TRUE} +ndvi_background_low <- yearly_average_ndvi(denver_redlining) +ndvi_background_low$plot +``` + + +```{r, cache=TRUE} +ndvi <- create_mask_and_plot(denver_redlining, background_raster = ndvi_background_low$raster, roads = roads, rivers = rivers) +ndvi$plot +``` + +# City of Denver Open Data Portal +This section pulls geographic data from the City of Denver Open Data Portal. It takes a functional approach to accessing, processing, and visualizing the city's inventory datasets: each dataset, from the tree inventory to crime records, is downloaded and analyzed through a single function, process_city_inventory_data, which reads the portal's shapefiles and renders them as density maps over the supplied study area. + +Overview and Usage Instructions: +Function Setup (process_city_inventory_data): This function automates the downloading and reading of shapefiles from specified URLs, processes them according to the geographic area provided (polygon layer), and then plots density maps. These maps can be used to assess various urban factors like tree density or crime rates within specific city zones. + +Choice Function (process_city_inventory_data_choice): To streamline user interaction and selection from multiple datasets, this function allows users to choose a dataset by number and pass a spatial polygon for analysis. It maps user input to specific datasets and triggers data processing for that choice.
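+
+Before defining the full helper, here is a minimal sketch of the one step the description above glosses over: reading a zipped shapefile straight from the portal through GDAL's /vsizip/ and /vsicurl/ virtual file systems, so nothing has to be downloaded or unzipped by hand. The URL and layer name come from the tree inventory entry used later in this section; the variable names are illustrative only, and the chunk relies on the sf and glue packages loaded in the setup chunk and assumes the portal URL is still live.
+
+```{r, eval=FALSE}
+# Minimal sketch (not run): stream a zipped shapefile from the Denver portal.
+# /vsicurl/ reads the archive over HTTP and /vsizip/ looks inside it, which is
+# the same pattern process_city_inventory_data() uses below via glue().
+address    <- "https://www.denvergov.org/media/gis/DataCatalog/tree_inventory/shape/tree_inventory.zip"
+inner_file <- "tree_inventory.shp"
+
+full_path <- glue("/vsizip/vsicurl/{address}/{inner_file}")
+trees_raw <- st_read(full_path, quiet = TRUE) |> st_as_sf()
+
+# Quick sanity checks before handing the layer to the plotting helpers
+nrow(trees_raw)
+st_crs(trees_raw)
+```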
+ +```{r} +process_city_inventory_data <- function(address, inner_file, polygon_layer, output_filename,variable_label= 'Tree Density') { + # Download and read the shapefile + full_path <- glue("/vsizip/vsicurl/{address}/{inner_file}") + shape_data <- st_read(full_path, quiet = TRUE) |> st_as_sf() + + # Process the shape data with the provided polygon layer + processed_data <- process_and_plot_sf_layers(polygon_layer, shape_data, paste0(output_filename, ".png")) + + # Extract trees from the processed data + trees <- processed_data$sf + denver_redlining_residential <- polygon_layer |> filter(grade != "") + + # Generate the density plot + plot <- ggplot() + + geom_sf(data = roads, alpha = 0.05, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) + + geom_sf(data = denver_redlining_residential, fill = "grey", color = "grey", size = 0.1) + + facet_wrap(~ grade, nrow = 1) + + stat_density_2d(data = trees, + mapping = aes(x = map_dbl(geometry, ~.[1]), + y = map_dbl(geometry, ~.[2]), + fill = stat(density)), + geom = 'tile', + contour = FALSE, + alpha = 0.9) + + scale_fill_gradientn(colors = c("transparent", "white", "limegreen"), + values = scales::rescale(c(0, 0.1, 1)), # Adjust these based on your density range + guide = "colourbar") + + theme_minimal() + + labs(fill = variable_label) + + theme_tufte() + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "bottom", + axis.text = element_blank(), + axis.title = element_blank(), + axis.ticks = element_blank(), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank()) + + # Save the plot + ggsave(paste0(output_filename, "_density_plot.png"), plot, width = 10, height = 4, units = "in", dpi = 600) + + # Return the plot and the tree layer + return(list(plot = plot, layer = trees)) +} +``` + + + +```{r} +process_city_inventory_data_choice <- function(choice, polygon_layer) { + # Define the dataset choices + datasets <- list( + list(address = "https://www.denvergov.org/media/gis/DataCatalog/tree_inventory/shape/tree_inventory.zip", + inner_file = "tree_inventory.shp", + output_filename = "Denver_tree_inventory_2023", + variable_label = "Tree Density"), + list(address = "https://www.denvergov.org/media/gis/DataCatalog/traffic_accidents/shape/traffic_accidents.zip", + inner_file = "traffic_accidents.shp", + output_filename = "Denver_traffic_accidents", + variable_label = "Traffic Accidents Density"), + list(address = "https://www.denvergov.org/media/gis/DataCatalog/instream_sampling_sites/shape/instream_sampling_sites.zip", + inner_file = "instream_sampling_sites.shp", + output_filename = "instream_sampling_sites", + variable_label = "Instream Sampling Sites Density"), + list(address = "https://www.denvergov.org/media/gis/DataCatalog/soil_samples/shape/soil_samples.zip", + inner_file = "soil_samples.shp", + output_filename = "Soil_samples", + variable_label = "Soil Samples Density"), + list(address = "https://www.denvergov.org/media/gis/DataCatalog/public_art/shape/public_art.zip", + inner_file = "public_art.shp", + output_filename = "Public_art", + variable_label = "Public Art Density"), + list(address = "https://www.denvergov.org/media/gis/DataCatalog/liquor_licenses/shape/liquor_licenses.zip", + inner_file = "liquor_licenses.shp", + output_filename = "liquor_licenses", + variable_label = "Liquor Licenses Density"), + list(address = "https://www.denvergov.org/media/gis/DataCatalog/crime/shape/crime.zip", + 
inner_file = "crime.shp", + output_filename = "Crime", + variable_label = "Crime Density") + ) + + # Validate input + if (choice < 1 || choice > length(datasets)) { + stop("Invalid choice. Please enter a number between 1 and 7.") + } + + # Get the selected dataset information + dataset <- datasets[[choice]] + + # Call the original function + result <- process_city_inventory_data( + address = dataset$address, + inner_file = dataset$inner_file, + polygon_layer = polygon_layer, + output_filename = dataset$output_filename, + variable_label = dataset$variable_label + ) + + return(result) +} + +``` + + +The function process_city_inventory_data_choice allows users to select from a predefined set of datasets for processing. It takes two arguments: choice, an integer that specifies the dataset to process, and polygon_layer, an sf object that represents the geographic area to be analyzed. The choice argument should be a number between 1 and 7, each corresponding to different types of city data: + +1 = **Tree Density** - Tree inventory data. +2 = **Traffic Accidents Density** - Traffic accidents data. +3 = **Instream Sampling Sites Density** - Environmental sampling sites data. +4 = **Soil Samples Density** - Soil sample data. +5 = **Public Art Density** - Public art locations. +6 = **Liquor Licenses Density** - Liquor license data. +7 = **Crime Density** - City crime data. + +To use this function, simply specify the choice of data and the geographic area as an sf object. The function will process the selected data and return a list containing the generated plot and other relevant data layers. This allows for easy integration and analysis of various city data layers based on spatial parameters. + +```{r} +# Example usage: +result <- process_city_inventory_data_choice(1, denver_redlining) +print(result$plot) # To display the generated plot + +``` + + + + diff --git a/worksheets/worksheet_redlining_student_edition/index.html b/worksheets/worksheet_redlining_student_edition/index.html new file mode 100644 index 0000000..3b2e02f --- /dev/null +++ b/worksheets/worksheet_redlining_student_edition/index.html @@ -0,0 +1,2243 @@ + + + + + + + + + + + + + + + + + + + + + + student edition - ESIIL Innovation Summit 2024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    + + + + + + +
    + + + + + + + +
    + +
    + + + + +
    +
    + + + +
    +
    +
    + + + + + + + + +
    +
    +
    + + + + +
    +
    + + + + + + + + + +

    student edition

    + +
    + +R libraries we use in this analysis + + +
    if (!requireNamespace("tidytext", quietly = TRUE)) {
    +  install.packages("tidytext")
    +}
    +library(tidytext)
    +## Warning: package 'tidytext' was built under R version 4.3.2
    +library(sf)
    +## Warning: package 'sf' was built under R version 4.3.2
    +## Linking to GEOS 3.11.0, GDAL 3.5.3, PROJ 9.1.0; sf_use_s2() is TRUE
    +library(ggplot2)
    +## Warning: package 'ggplot2' was built under R version 4.3.2
    +library(ggthemes)
    +## Warning: package 'ggthemes' was built under R version 4.3.2
    +library(dplyr)
    +## 
    +## Attaching package: 'dplyr'
    +## The following objects are masked from 'package:stats':
    +## 
    +##     filter, lag
    +## The following objects are masked from 'package:base':
    +## 
    +##     intersect, setdiff, setequal, union
    +library(rstac)
    +## Warning: package 'rstac' was built under R version 4.3.2
    +library(gdalcubes)
    +## Warning: package 'gdalcubes' was built under R version 4.3.2
    +library(gdalUtils)
    +## Please note that rgdal will be retired during October 2023,
    +## plan transition to sf/stars/terra functions using GDAL and PROJ
    +## at your earliest convenience.
    +## See https://r-spatial.org/r/2023/05/15/evolution4.html and https://github.com/r-spatial/evolution
    +## rgdal: version: 1.6-7, (SVN revision 1203)
    +## Geospatial Data Abstraction Library extensions to R successfully loaded
    +## Loaded GDAL runtime: GDAL 3.5.3, released 2022/10/21
    +## Path to GDAL shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/rgdal/gdal
    +##  GDAL does not use iconv for recoding strings.
    +## GDAL binary built with GEOS: TRUE 
    +## Loaded PROJ runtime: Rel. 9.1.0, September 1st, 2022, [PJ_VERSION: 910]
    +## Path to PROJ shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/gdalcubes/proj
    +## PROJ CDN enabled: FALSE
    +## Linking to sp version:1.6-1
    +## To mute warnings of possible GDAL/OSR exportToProj4() degradation,
    +## use options("rgdal_show_exportToProj4_warnings"="none") before loading sp or rgdal.
    +## 
    +## Attaching package: 'gdalUtils'
    +## The following object is masked from 'package:sf':
    +## 
    +##     gdal_rasterize
    +library(gdalcubes)
    +library(colorspace)
    +library(terra)
    +## Warning: package 'terra' was built under R version 4.3.2
    +## terra 1.7.71
    +## 
    +## Attaching package: 'terra'
    +## The following object is masked from 'package:colorspace':
    +## 
    +##     RGB
    +## The following objects are masked from 'package:gdalcubes':
    +## 
    +##     animate, crop, size
    +library(tidyterra)
    +## 
    +## Attaching package: 'tidyterra'
    +## The following object is masked from 'package:stats':
    +## 
    +##     filter
    +library(basemapR)
    +library(tidytext)
    +library(ggwordcloud)
    +library(osmextract)
    +## Data (c) OpenStreetMap contributors, ODbL 1.0. https://www.openstreetmap.org/copyright.
    +## Check the package website, https://docs.ropensci.org/osmextract/, for more details.
    +library(sf)
    +library(ggplot2)
    +library(ggthemes)
    +library(glue)
    +## 
    +## Attaching package: 'glue'
    +## The following object is masked from 'package:terra':
    +## 
    +##     trim
    +
    +library(purrr)
    +
    + +
    +
    + +FUNCTION: Stream HOLC data from a city + + +
    # Function to load and filter redlining data by city
    +load_city_redlining_data <- function(city_name) {
    +  # URL to the GeoJSON data
    +  url <- "https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson"
    +
    +  # Read the GeoJSON file into an sf object
    +  redlining_data <- read_sf(url)
    +
    +  # Filter the data for the specified city and non-empty grades
    +
    +  city_redline <- redlining_data %>%
    +    filter(city == city_name )
    +
    +  # Return the filtered data
    +  return(city_redline)
    +}
    +
    + +
    +
    + +Stream HOLC data for Denver, CO + + +
    # Load redlining data for Denver
    +denver_redlining <- load_city_redlining_data("Denver")
    +knitr::kable(head(denver_redlining), format = "markdown")
    +
+
+| area_id | city | state | city_survey | cat | grade | label | res | com | ind | fill | GEOID10 | GISJOIN | calc_area | pct_tract | geometry |
+|--------:|:-------|:------|:------------|:-----|:------|:------|:-----|:------|:------|:---------|:------------|:---------------|-------------:|----------:|:-----------------------------|
+| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004104 | G0800310004104 | 1.525535e+01 | 0.00001 | MULTIPOLYGON (((-104.9125 3… |
+| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004201 | G0800310004201 | 3.987458e+05 | 0.20900 | MULTIPOLYGON (((-104.9246 3… |
+| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004304 | G0800310004304 | 1.554195e+05 | 0.05927 | MULTIPOLYGON (((-104.9125 3… |
+| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004202 | G0800310004202 | 1.117770e+06 | 0.57245 | MULTIPOLYGON (((-104.9125 3… |
+| 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \#76a865 | 08031004302 | G0800310004302 | 3.133415e+05 | 0.28381 | MULTIPOLYGON (((-104.928 39… |
+| 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \#76a865 | 08031004301 | G0800310004301 | 1.221218e+05 | 0.08622 | MULTIPOLYGON (((-104.9305 3… |
+
+
    +
    + +FUNCTION: Get Points-of-Interest from city of interest + + +
    get_places <- function(polygon_layer, type = "food" ) {
    +  # Check if the input is an sf object
    +  if (!inherits(polygon_layer, "sf")) {
    +    stop("The provided object is not an sf object.")
    +  }
    +
    +  # Create a bounding box from the input sf object
    +  bbox_here <- st_bbox(polygon_layer) |>
    +    st_as_sfc()
    +
    +   if(type == "roads"){
    +    my_layer <- "lines"
    +    my_query <- "SELECT * FROM lines WHERE (
    +             highway IN ('motorway', 'trunk', 'primary', 'secondary', 'tertiary') )"
    +    title <- "Major roads"
    +   }
    +
    +  if(type == "rivers"){
    +    my_layer <- "lines"
    +    my_query <- "SELECT * FROM lines WHERE (
    +             waterway IN ('river'))"
    +    title <- "Major rivers"
    +  }
    +
    +
    +
    +  # Use the bbox to get data with oe_get(), specifying the desired layer and a custom SQL query for fresh food places
    +  tryCatch({
    +    places <- oe_get(
    +      place = bbox_here,
    +      layer = my_layer,  # Adjusted layer; change as per actual data availability
    +      query = my_query,
    +      quiet = TRUE
    +    )
    +
    +  places <- st_make_valid(places)
    +
    +    # Crop the data to the bounding box
    +    cropped_places <- st_crop(places, bbox_here)
    +
    +    # Plotting the cropped fresh food places
    +    plot <- ggplot(data = cropped_places) +
    +      geom_sf(fill="cornflowerblue", color="cornflowerblue") +
    +      ggtitle(title) +
    +  theme_tufte()+
    +  theme(legend.position = "none",  # Optionally hide the legend
    +        axis.text = element_blank(),     # Remove axis text
    +        axis.title = element_blank(),    # Remove axis titles
    +        axis.ticks = element_blank(),    # Remove axis ticks
    +         plot.background = element_rect(fill = "white", color = NA),  # Set the plot background to white
    +        panel.background = element_rect(fill = "white", color = NA),  # Set the panel background to white
    +        panel.grid.major = element_blank(),  # Remove major grid lines
    +        panel.grid.minor = element_blank(),
    +        ) 
    +
    +    # Save the plot as a PNG file
    +    png_filename <- paste0(title,"_", Sys.Date(), ".png")
    +    ggsave(png_filename, plot, width = 10, height = 8, units = "in")
    +
    +    # Return the cropped dataset
    +    return(cropped_places)
    +  }, error = function(e) {
    +    stop("Failed to retrieve or plot data: ", e$message)
    +  })
    +}
    +
    + +
    +
    + +Stream amenities by category + + +
    roads <- get_places(denver_redlining, type="roads")
    +
    +rivers <- get_places(denver_redlining, type="rivers")
    +
    + +
    +
    + +FUNCTION: Plot POI over HOLC grades + + +
    plot_city_redlining <- function(redlining_data, filename = "redlining_plot.png") {
    +  # Fetch additional geographic data based on redlining data
    +  roads <- get_places(redlining_data, type = "roads")
    +  rivers <- get_places(redlining_data, type = "rivers")
    +
    +  # Filter residential zones with valid grades and where city survey is TRUE
    +  residential_zones <- redlining_data %>%
    +    filter(city_survey == TRUE & grade != "") 
    +
    +  # Colors for the grades
    +  colors <- c("#76a865", "#7cb5bd", "#ffff00", "#d9838d")
    +
    +  # Plot the data using ggplot2
    +  plot <- ggplot() +
    +    geom_sf(data = roads, lwd = 0.1) +
    +    geom_sf(data = rivers, color = "blue", alpha = 0.5, lwd = 1.1) +
    +    geom_sf(data = residential_zones, aes(fill = grade), alpha = 0.5) +
    +    theme_tufte() +
    +    scale_fill_manual(values = colors) +
    +    labs(fill = 'HOLC Categories') +
    +    theme(
    +      plot.background = element_rect(fill = "white", color = NA),
    +      panel.background = element_rect(fill = "white", color = NA),
    +      panel.grid.major = element_blank(),
    +      panel.grid.minor = element_blank(),
    +      legend.position = "right"
    +    )
    +
    +  # Save the plot as a high-resolution PNG file
    +  ggsave(filename, plot, width = 10, height = 8, units = "in", dpi = 600)
    +
    +  # Return the plot object if needed for further manipulation or checking
    +  return(plot)
    +}
    +
    + +
    +
    + +FUNCTION: Plot the HOLC grades individually + + +
    split_plot <- function(sf_data, roads, rivers) {
    +  # Filter for grades A, B, C, and D
    +  sf_data_filtered <- sf_data %>% 
    +    filter(grade %in% c('A', 'B', 'C', 'D'))
    +
    +  # Define a color for each grade
    +  grade_colors <- c("A" = "#76a865", "B" = "#7cb5bd", "C" = "#ffff00", "D" = "#d9838d")
    +
    +  # Create the plot with panels for each grade
    +  plot <- ggplot(data = sf_data_filtered) +
    +    geom_sf(data = roads, alpha = 0.1, lwd = 0.1) +
    +    geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) +
    +    geom_sf(aes(fill = grade)) +
    +    facet_wrap(~ grade, nrow = 1) +  # Free scales for different zoom levels if needed
    +    scale_fill_manual(values = grade_colors) +
    +    theme_minimal() +
    +    labs(fill = 'HOLC Grade') +
    +    theme_tufte() +
    +    theme(plot.background = element_rect(fill = "white", color = NA),
    +          panel.background = element_rect(fill = "white", color = NA),
    +          legend.position = "none",  # Optionally hide the legend
    +          axis.text = element_blank(),     # Remove axis text
    +          axis.title = element_blank(),    # Remove axis titles
    +          axis.ticks = element_blank(),    # Remove axis ticks
    +          panel.grid.major = element_blank(),  # Remove major grid lines
    +          panel.grid.minor = element_blank())  
    +
    +  ggsave(plot, filename = "HOLC_grades_individually.png", width = 10, height = 4, units = "in", dpi = 1200)
    +  return(plot)
    +}
    +
    + +
    +
    + +Plot Denver Redlining + + +
    denver_plot <- plot_city_redlining(denver_redlining)
    +denver_plot
    +
    + +![](worksheet_redlining_student_edition_files/figure-gfm/unnamed-chunk-8-1.png) + +
    + +

    +
    + +Plot 4 HOLC grades individually + + +
    plot_row <- split_plot(denver_redlining, roads, rivers)
    +plot_row
    +
    + +![](worksheet_redlining_student_edition_files/figure-gfm/unnamed-chunk-9-1.png) + +
    + +

    +
    + +FUNCTION: Map an amenity over each grade individually + + +
    process_and_plot_sf_layers <- function(layer1, layer2, output_file = "output_plot.png") {
    + # Make geometries valid
    +layer1 <- st_make_valid(layer1)
    +layer2 <- st_make_valid(layer2)
    +
    +# Optionally, simplify geometries to remove duplicate vertices
    +layer1 <- st_simplify(layer1, preserveTopology = TRUE) |>
    +  filter(grade != "")
    +
    +# Prepare a list to store results
    +results <- list()
    +
    +# Loop through each grade and perform operations
    +for (grade in c("A", "B", "C", "D")) {
    +  # Filter layer1 for current grade
    +  layer1_grade <- layer1[layer1$grade == grade, ]
    +
    +  # Buffer the geometries of the current grade
    +  buffered_layer1_grade <- st_buffer(layer1_grade, dist = 500)
    +
    +  # Intersect with the second layer
    +  intersections <- st_intersects(layer2, buffered_layer1_grade, sparse = FALSE)
    +  selected_polygons <- layer2[rowSums(intersections) > 0, ]
    +
    +  # Add a new column to store the grade information
    +  selected_polygons$grade <- grade
    +
    +  # Store the result
    +  results[[grade]] <- selected_polygons
    +}
    +
    +# Combine all selected polygons from different grades into one sf object
    +final_selected_polygons <- do.call(rbind, results)
    +
    +  # Define colors for the grades
    +  grade_colors <- c("A" = "grey", "B" = "grey", "C" = "grey", "D" = "grey")
    +
    +  # Create the plot
    +  plot <- ggplot() +
    +    geom_sf(data = roads, alpha = 0.05, lwd = 0.1) +
    +    geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) +
    +    geom_sf(data = layer1, fill = "grey", color = "grey", size = 0.1) +
    +    facet_wrap(~ grade, nrow = 1) +
    +    geom_sf(data = final_selected_polygons,fill = "green", color = "green", size = 0.1) +
    +    facet_wrap(~ grade, nrow = 1) +
    +    #scale_fill_manual(values = grade_colors) +
    +    #scale_color_manual(values = grade_colors) +
    +    theme_minimal() +
    +    labs(fill = 'HOLC Grade') +
    +    theme_tufte() +
    +    theme(plot.background = element_rect(fill = "white", color = NA),
    +      panel.background = element_rect(fill = "white", color = NA),
    +      legend.position = "none",
    +          axis.text = element_blank(),
    +          axis.title = element_blank(),
    +          axis.ticks = element_blank(),
    +          panel.grid.major = element_blank(),
    +          panel.grid.minor = element_blank())
    +
    +  # Save the plot as a high-resolution PNG file
    +  ggsave(output_file, plot, width = 10, height = 4, units = "in", dpi = 1200)
    +
    +  # Return the plot for optional further use
    +  return(list(plot=plot, sf = final_selected_polygons))
    +}
    +
    + +
    + +

    Part 2: Integrating Environmental Data

    +

    Data Processing

    +
      +
    • Use satellite data from 2010 to analyze greenspace using NDVI, an + index that measures the quantity of vegetation in an area.
    • +
    • Apply methods to adjust for potential confounders as described in + the study, ensuring that comparisons of greenspace across HOLC + grades are valid and not biased by historical or socio-demographic + factors.
    • +
    +
    + +FUNCTION: Stream NDVI data + + +
    polygon_layer <- denver_redlining
    +# Function to process satellite data based on an SF polygon's extent
    +process_satellite_data <- function(polygon_layer, start_date, end_date, assets, fps = 1, output_file = "anim.gif") {
    +  # Record start time
    +  start_time <- Sys.time()
    +
    +  # Calculate the bbox from the polygon layer
    +  bbox <- st_bbox(polygon_layer)
    +
    +  s = stac("https://earth-search.aws.element84.com/v0")
    +
    +
    +  # Use stacR to search for Sentinel-2 images within the bbox and date range
    +  items = s |> stac_search(
    +    collections = "sentinel-s2-l2a-cogs",
    +    bbox = c(bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]),
    +    datetime = paste(start_date, end_date, sep = "/"),
    +    limit = 500
    +  ) %>% 
    +  post_request()
    +
    +  # Define mask for Sentinel-2 image quality
    +  #S2.mask <- image_mask("SCL", values = c(3, 8, 9))
    +
    +  # Create a collection of images filtering by cloud cover
    +  col <- stac_image_collection(items$features, asset_names = assets, property_filter = function(x) {x[["eo:cloud_cover"]] < 30})
    +
    +  # Define a view for processing the data
    +  v <- cube_view(srs = "EPSG:4326", 
    +                 extent = list(t0 = start_date, t1 = end_date,
    +                               left = bbox["xmin"], right = bbox["xmax"], 
    +                               top = bbox["ymax"], bottom = bbox["ymin"]),
    +                 dx = 0.001, dy = 0.001, dt = "P1M", 
    +                 aggregation = "median", resampling = "bilinear")
    +
    +  # Calculate NDVI and create an animation
    +  ndvi_col <- function(n) {
    +    rev(sequential_hcl(n, "Green-Yellow"))
    +  }
    +
    +  #raster_cube(col, v, mask = S2.mask) %>%
    +  raster_cube(col, v) %>%
    +    select_bands(c("B04", "B08")) %>%
    +    apply_pixel("(B08-B04)/(B08+B04)", "NDVI") %>%
    +    gdalcubes::animate(col = ndvi_col, zlim = c(-0.2, 1), key.pos = 1, save_as = output_file, fps = fps)
    +
    +  # Calculate processing time
    +  end_time <- Sys.time()
    +  processing_time <- difftime(end_time, start_time)
    +
    +  # Return processing time
    +  return(processing_time)
    +}
    +
    + +
    +
    + +Stream NDVI data: animation + + +
    processing_time <- process_satellite_data(denver_redlining, "2022-05-31", "2023-05-31", c("B04", "B08"))
    +
    + +
    + +

    +
    + +FUNCTION: Map NDVI per HOLC grade individually + + +
    create_mask_and_plot <- function(redlining_sf, background_raster = ndvi$raster, roads = NULL, rivers = NULL){
    +  start_time <- Sys.time()  # Start timing
    +
    +  # Validate and prepare the redlining data
    +  redlining_sf <- redlining_sf %>%
    +    filter(grade != "") %>%
    +    st_make_valid()
    +
    +
    +bbox <- st_bbox(redlining_sf)  # Get original bounding box
    +
    +
    +expanded_bbox <- expand_bbox(bbox, 6000, 1000)  # 
    +
    +
    +expanded_bbox_poly <- st_as_sfc(expanded_bbox, crs = st_crs(redlining_sf)) %>%
    +    st_make_valid()
    +
    +  # Initialize an empty list to store masks
    +  masks <- list()
    +
    +  # Iterate over each grade to create masks
    +  unique_grades <- unique(redlining_sf$grade)
    +  for (grade in unique_grades) {
    +    # Filter polygons by grade
    +    grade_polygons <- redlining_sf[redlining_sf$grade == grade, ]
    +
    +    # Create an "inverted" mask by subtracting these polygons from the background
    +    mask <- st_difference(expanded_bbox_poly, st_union(grade_polygons))
    +
    +    # Store the mask in the list with the grade as the name
    +    masks[[grade]] <- st_sf(geometry = mask, grade = grade)
    +  }
    +
    +  # Combine all masks into a single sf object
    +  mask_sf <- do.call(rbind, masks)
    +
    +  # Normalize the grades so that C.2 becomes C, but correctly handle other grades
    +  mask_sf$grade <- ifelse(mask_sf$grade == "C.2", "C", mask_sf$grade)
    +
    +  # Prepare the plot
    +  plot <- ggplot() +
    +    geom_spatraster(data = background_raster, aes(fill = NDVI)) +
    +  scale_fill_viridis_c(name = "NDVI", option = "viridis", direction = -1) +
    +
    +    geom_sf(data = mask_sf, aes(color = grade), fill = "white", size = 0.1, show.legend = FALSE) +
    +    scale_color_manual(values = c("A" = "white", "B" = "white", "C" = "white", "D" = "white"), name = "Grade") +
    +    facet_wrap(~ grade, nrow = 1) +
    +     geom_sf(data = roads, alpha = 1, lwd = 0.1, color="white") +
    +    geom_sf(data = rivers, color = "white", alpha = 0.5, lwd = 1.1) +
    +    labs(title = "NDVI: Normalized Difference Vegetation Index") +
    +    theme_minimal() +
    +    coord_sf(xlim = c(bbox["xmin"], bbox["xmax"]), 
    +           ylim = c(bbox["ymin"], bbox["ymax"]), 
    +           expand = FALSE) + 
    +    theme(plot.background = element_rect(fill = "white", color = NA),
    +          panel.background = element_rect(fill = "white", color = NA),
    +          legend.position = "bottom",
    +          axis.text = element_blank(),
    +          axis.title = element_blank(),
    +          axis.ticks = element_blank(),
    +          panel.grid.major = element_blank(),
    +          panel.grid.minor = element_blank())
    +
    +  # Save the plot
    +  ggsave("redlining_mask_ndvi.png", plot, width = 10, height = 4, dpi = 600)
    +
    +  end_time <- Sys.time()  # End timing
    +  runtime <- end_time - start_time
    +
    +  # Return the plot and runtime
    +  return(list(plot = plot, runtime = runtime, mask_sf = mask_sf))
    +}
    +
FUNCTION: Stream year average NDVI
    yearly_average_ndvi <- function(polygon_layer, output_file = "ndvi.png", dx = 0.01, dy = 0.01) {
    +  # Record start time
    +  start_time <- Sys.time()
    +
    +  # Calculate the bbox from the polygon layer
    +  bbox <- st_bbox(polygon_layer)
    +
    +  s = stac("https://earth-search.aws.element84.com/v0")
    +
    +  # Search for Sentinel-2 images within the bbox for June
    +  items <- s |> stac_search(
    +    collections = "sentinel-s2-l2a-cogs",
    +    bbox = c(bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]),
    +    datetime = "2023-01-01/2023-12-31",
    +    limit = 500
    +  ) %>% 
    +  post_request()
    +
    +  # Create a collection of images filtering by cloud cover
    +  col <- stac_image_collection(items$features, asset_names = c("B04", "B08"), property_filter = function(x) {x[["eo:cloud_cover"]] < 80})
    +
    +  # Define a view for processing the data specifically for June
    +  v <- cube_view(srs = "EPSG:4326", 
    +                 extent = list(t0 = "2023-01-01", t1 = "2023-12-31",
    +                               left = bbox["xmin"], right = bbox["xmax"], 
    +                               top = bbox["ymax"], bottom = bbox["ymin"]),
    +                 dx = dx, dy = dy, dt = "P1Y", 
    +                 aggregation = "median", resampling = "bilinear")
    +
    +  # Process NDVI
    +  ndvi_rast <- raster_cube(col, v) %>%
    +    select_bands(c("B04", "B08")) %>%
    +    apply_pixel("(B08-B04)/(B08+B04)", "NDVI") %>%
    +    write_tif() |>
    +    terra::rast()
    +
    +
    +  # Convert terra Raster to ggplot using tidyterra
    +ndvi_plot <-   ggplot() +
    +    geom_spatraster(data = ndvi_rast, aes(fill = NDVI)) +
    +    scale_fill_viridis_c(option = "viridis", direction = -1, name = "NDVI") +
    +    labs(title = "NDVI mean for 2023") +
    +    theme_minimal() +
    +    coord_sf() +
    +    theme(plot.background = element_rect(fill = "white", color = NA),
    +      panel.background = element_rect(fill = "white", color = NA),
    +      legend.position = "right",
    +          axis.text = element_blank(),
    +          axis.title = element_blank(),
    +          axis.ticks = element_blank(),
    +          panel.grid.major = element_blank(),
    +          panel.grid.minor = element_blank()) 
    +
    +  # Save the plot as a high-resolution PNG file
    +  ggsave(output_file, ndvi_plot, width = 10, height = 8, dpi = 600)
    +
    +  # Calculate processing time
    +  end_time <- Sys.time()
    +  processing_time <- difftime(end_time, start_time)
    +
    +  # Return the plot and processing time
    +  return(list(plot = ndvi_plot, processing_time = processing_time, raster = ndvi_rast))
    +}
    +
Stream NDVI: low resolution
    ndvi_background_low <- yearly_average_ndvi(denver_redlining)
    +
Map low resolution NDVI per HOLC grade
    ndvi <- create_mask_and_plot(denver_redlining, background_raster = ndvi_background_low$raster, roads = roads, rivers = rivers)
    +
FUNCTION: Map Denver City provided data per HOLC grade
    process_city_inventory_data <- function(address, inner_file, polygon_layer, output_filename,variable_label= 'Tree Density') {
    +  # Download and read the shapefile
    +  full_path <- glue("/vsizip/vsicurl/{address}/{inner_file}")
    +  shape_data <- st_read(full_path, quiet = TRUE) |> st_as_sf()
    +
    +  # Process the shape data with the provided polygon layer
    +  processed_data <- process_and_plot_sf_layers(polygon_layer, shape_data, paste0(output_filename, ".png"))
    +
    +  # Extract trees from the processed data
    +  trees <- processed_data$sf
    +  denver_redlining_residential <- polygon_layer |> filter(grade != "")
    +
    +  # Generate the density plot
    +  plot <- ggplot() +
    +    geom_sf(data = roads, alpha = 0.05, lwd = 0.1) +
    +    geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) +
    +    geom_sf(data = denver_redlining_residential, fill = "grey", color = "grey", size = 0.1) +
    +    facet_wrap(~ grade, nrow = 1) +
    +    stat_density_2d(data = trees, 
    +                    mapping = aes(x = map_dbl(geometry, ~.[1]),
    +                                  y = map_dbl(geometry, ~.[2]),
    +                                  fill = stat(density)),
    +                    geom = 'tile',
    +                    contour = FALSE,
    +                    alpha = 0.9) +
    +    scale_fill_gradientn(colors = c("transparent", "white", "limegreen"),
    +                         values = scales::rescale(c(0, 0.1, 1)),  # Adjust these based on your density range
    +                         guide = "colourbar") +
    +    theme_minimal() +
    +    labs(fill = variable_label) +
    +    theme_tufte() +
    +    theme(plot.background = element_rect(fill = "white", color = NA),
    +          panel.background = element_rect(fill = "white", color = NA),
    +          legend.position = "bottom",
    +          axis.text = element_blank(),
    +          axis.title = element_blank(),
    +          axis.ticks = element_blank(),
    +          panel.grid.major = element_blank(),
    +          panel.grid.minor = element_blank())
    +
    +  # Save the plot
    +  ggsave(paste0(output_filename, "_density_plot.png"), plot, width = 10, height = 4, units = "in", dpi = 600)
    +
    +  # Return the plot and the tree layer
    +  return(list(plot = plot, layer = trees))
    +}
    +
Map tree inventory per HOLC grade
    result <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/tree_inventory/shape/tree_inventory.zip",
    +  "tree_inventory.shp",
    +  denver_redlining,
    +  "Denver_tree_inventory_2023"
    +)
    +
    Warning: `stat(density)` was deprecated in ggplot2 3.4.0.
    ℹ Please use `after_stat(density)` instead.

Map traffic accidents per HOLC grade
    result <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/traffic_accidents/shape/traffic_accidents.zip",
    +  "traffic_accidents.shp",
    +  denver_redlining,
    +  "Denver_traffic_accidents",
    +  variable_label= 'Traffic accidents density'
    +)
    +
Map stream sampling effort per HOLC grade
    instream_sampling_sites <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/instream_sampling_sites/shape/instream_sampling_sites.zip",
    +  "instream_sampling_sites.shp",
    +  denver_redlining,
    +  "instream_sampling_sites",
    +  variable_label= 'Instream sampling sites density'
    +)
    +
Map soil sampling effort per HOLC grade
    soil_samples <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/soil_samples/shape/soil_samples.zip",
    +  "soil_samples.shp",
    +  denver_redlining,
    +  "Soil samples",
    +  variable_label= 'soil samples density'
    +)
    +
Map public art density per HOLC grade
    public_art <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/public_art/shape/public_art.zip",
    +  "public_art.shp",
    +  denver_redlining,
    +  "Public art ",
    +  variable_label= 'Public art density'
    +)
    +
Map liquor licenses density per HOLC grade
    liquor_licenses <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/liquor_licenses/shape/liquor_licenses.zip",
    +  "liquor_licenses.shp",
    +  denver_redlining,
    +  "liquor licenses ",
    +  variable_label= 'liquor licenses density'
    +)
    +
Map crime density per HOLC grade
    Crime <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/crime/shape/crime.zip",
    +  "crime.shp",
    +  denver_redlining,
    +  "crime",
    +  variable_label= 'Crime density'
    +)
    +
Map police shooting density per HOLC grade
    Denver_police_shootings <- process_city_inventory_data(
    +  "https://www.denvergov.org/media/gis/DataCatalog/denver_police_officer_involved_shootings/shape/denver_police_officer_involved_shootings.zip",
    +  "denver_police_officer_involved_shootings.shp",
    +  denver_redlining,
    +  "Police shootings",
    +  variable_label= 'Police shootings density'
    +)
    +

## Part 3: Comparative Analysis and Visualization

### Statistical Analysis

- Conduct a detailed statistical analysis to compare greenspace across
  different HOLC grades, using techniques like Targeted Maximum Likelihood
  Estimation (TMLE) to assess the association between historical redlining and
  current greenspace levels; a minimal comparison sketch follows this list.
- Visualize the disparities in greenspace distribution using GIS tools,
  highlighting how redlining has shaped urban ecological landscapes.
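The chunk below is a minimal, illustrative comparison only, not the confounder-adjusted TMLE analysis described above. It assumes the `denver_redlining` polygons and the `ndvi_background_low` raster produced earlier in this worksheet, extracts a mean NDVI per HOLC polygon, and applies a simple Kruskal-Wallis test across grades.

``` r
# Sketch only: zonal mean NDVI per HOLC polygon, then a simple test across grades.
# Assumes denver_redlining (sf) and ndvi_background_low$raster (SpatRaster, layer "NDVI")
# from earlier chunks; this is NOT the TMLE workflow of Nardone et al. (2021).
library(dplyr)
library(sf)
library(terra)

holc <- denver_redlining |> filter(grade %in% c("A", "B", "C", "D"))

# Mean NDVI inside each redlined polygon
holc$mean_ndvi <- terra::extract(ndvi_background_low$raster, terra::vect(holc),
                                 fun = mean, na.rm = TRUE)[, "NDVI"]

# Summarise greenspace by HOLC grade
holc |>
  st_drop_geometry() |>
  group_by(grade) |>
  summarise(mean_ndvi = mean(mean_ndvi, na.rm = TRUE), n_polygons = n())

# Simple non-parametric comparison across grades
kruskal.test(mean_ndvi ~ grade, data = st_drop_geometry(holc))
```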

## Conclusion

    +

    This tutorial provides tools and methodologies to explore the lingering +effects of historic redlining on urban greenspace, offering insights +into the intersection of urban planning, environmental justice, and +public health.

    +

### References

    +
- Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. (2021).
  Redlines and Greenspace: The Relationship between Historical Redlining and
  2010 Greenspace across the United States. *Environmental Health Perspectives*,
  129(1), 017006. DOI:10.1289/EHP7495.
  [Available online](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7839347/pdf/ehp7495.pdf)
\ No newline at end of file
diff --git a/worksheets/worksheet_redlining_student_edition/worksheet_redlining_student_edition.md
new file mode 100644
index 0000000..087ff3e
--- /dev/null
+++ b/worksheets/worksheet_redlining_student_edition/worksheet_redlining_student_edition.md
@@ -0,0 +1,946 @@
+student edition
+================
+
    + +R libraries we use in this analysis + + +``` r +if (!requireNamespace("tidytext", quietly = TRUE)) { + install.packages("tidytext") +} +library(tidytext) +## Warning: package 'tidytext' was built under R version 4.3.2 +library(sf) +## Warning: package 'sf' was built under R version 4.3.2 +## Linking to GEOS 3.11.0, GDAL 3.5.3, PROJ 9.1.0; sf_use_s2() is TRUE +library(ggplot2) +## Warning: package 'ggplot2' was built under R version 4.3.2 +library(ggthemes) +## Warning: package 'ggthemes' was built under R version 4.3.2 +library(dplyr) +## +## Attaching package: 'dplyr' +## The following objects are masked from 'package:stats': +## +## filter, lag +## The following objects are masked from 'package:base': +## +## intersect, setdiff, setequal, union +library(rstac) +## Warning: package 'rstac' was built under R version 4.3.2 +library(gdalcubes) +## Warning: package 'gdalcubes' was built under R version 4.3.2 +library(gdalUtils) +## Please note that rgdal will be retired during October 2023, +## plan transition to sf/stars/terra functions using GDAL and PROJ +## at your earliest convenience. +## See https://r-spatial.org/r/2023/05/15/evolution4.html and https://github.com/r-spatial/evolution +## rgdal: version: 1.6-7, (SVN revision 1203) +## Geospatial Data Abstraction Library extensions to R successfully loaded +## Loaded GDAL runtime: GDAL 3.5.3, released 2022/10/21 +## Path to GDAL shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/rgdal/gdal +## GDAL does not use iconv for recoding strings. +## GDAL binary built with GEOS: TRUE +## Loaded PROJ runtime: Rel. 9.1.0, September 1st, 2022, [PJ_VERSION: 910] +## Path to PROJ shared files: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/library/gdalcubes/proj +## PROJ CDN enabled: FALSE +## Linking to sp version:1.6-1 +## To mute warnings of possible GDAL/OSR exportToProj4() degradation, +## use options("rgdal_show_exportToProj4_warnings"="none") before loading sp or rgdal. +## +## Attaching package: 'gdalUtils' +## The following object is masked from 'package:sf': +## +## gdal_rasterize +library(gdalcubes) +library(colorspace) +library(terra) +## Warning: package 'terra' was built under R version 4.3.2 +## terra 1.7.71 +## +## Attaching package: 'terra' +## The following object is masked from 'package:colorspace': +## +## RGB +## The following objects are masked from 'package:gdalcubes': +## +## animate, crop, size +library(tidyterra) +## +## Attaching package: 'tidyterra' +## The following object is masked from 'package:stats': +## +## filter +library(basemapR) +library(tidytext) +library(ggwordcloud) +library(osmextract) +## Data (c) OpenStreetMap contributors, ODbL 1.0. https://www.openstreetmap.org/copyright. +## Check the package website, https://docs.ropensci.org/osmextract/, for more details. +library(sf) +library(ggplot2) +library(ggthemes) +library(glue) +## +## Attaching package: 'glue' +## The following object is masked from 'package:terra': +## +## trim + +library(purrr) +``` + +
    +
    + +FUNCTION: Stream HOLC data from a city + + +``` r +# Function to load and filter redlining data by city +load_city_redlining_data <- function(city_name) { + # URL to the GeoJSON data + url <- "https://raw.githubusercontent.com/americanpanorama/mapping-inequality-census-crosswalk/main/MIv3Areas_2010TractCrosswalk.geojson" + + # Read the GeoJSON file into an sf object + redlining_data <- read_sf(url) + + # Filter the data for the specified city and non-empty grades + + city_redline <- redlining_data %>% + filter(city == city_name ) + + # Return the filtered data + return(city_redline) +} +``` + +
    +
    + +Stream HOLC data for Denver, CO + + +``` r +# Load redlining data for Denver +denver_redlining <- load_city_redlining_data("Denver") +knitr::kable(head(denver_redlining), format = "markdown") +``` + +| area_id | city | state | city_survey | cat | grade | label | res | com | ind | fill | GEOID10 | GISJOIN | calc_area | pct_tract | geometry | +|--------:|:-------|:------|:------------|:-----|:------|:------|:-----|:------|:------|:---------|:------------|:---------------|-------------:|----------:|:-----------------------------| +| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004104 | G0800310004104 | 1.525535e+01 | 0.00001 | MULTIPOLYGON (((-104.9125 3… | +| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004201 | G0800310004201 | 3.987458e+05 | 0.20900 | MULTIPOLYGON (((-104.9246 3… | +| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004304 | G0800310004304 | 1.554195e+05 | 0.05927 | MULTIPOLYGON (((-104.9125 3… | +| 6525 | Denver | CO | TRUE | Best | A | A1 | TRUE | FALSE | FALSE | \#76a865 | 08031004202 | G0800310004202 | 1.117770e+06 | 0.57245 | MULTIPOLYGON (((-104.9125 3… | +| 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \#76a865 | 08031004302 | G0800310004302 | 3.133415e+05 | 0.28381 | MULTIPOLYGON (((-104.928 39… | +| 6529 | Denver | CO | TRUE | Best | A | A2 | TRUE | FALSE | FALSE | \#76a865 | 08031004301 | G0800310004301 | 1.221218e+05 | 0.08622 | MULTIPOLYGON (((-104.9305 3… | + +
    +
    + +FUNCTION: Get Points-of-Interest from city of interest + + +``` r + + +get_places <- function(polygon_layer, type = "food" ) { + # Check if the input is an sf object + if (!inherits(polygon_layer, "sf")) { + stop("The provided object is not an sf object.") + } + + # Create a bounding box from the input sf object + bbox_here <- st_bbox(polygon_layer) |> + st_as_sfc() + + if(type == "roads"){ + my_layer <- "lines" + my_query <- "SELECT * FROM lines WHERE ( + highway IN ('motorway', 'trunk', 'primary', 'secondary', 'tertiary') )" + title <- "Major roads" + } + + if(type == "rivers"){ + my_layer <- "lines" + my_query <- "SELECT * FROM lines WHERE ( + waterway IN ('river'))" + title <- "Major rivers" + } + + + + # Use the bbox to get data with oe_get(), specifying the desired layer and a custom SQL query for fresh food places + tryCatch({ + places <- oe_get( + place = bbox_here, + layer = my_layer, # Adjusted layer; change as per actual data availability + query = my_query, + quiet = TRUE + ) + + places <- st_make_valid(places) + + # Crop the data to the bounding box + cropped_places <- st_crop(places, bbox_here) + + # Plotting the cropped fresh food places + plot <- ggplot(data = cropped_places) + + geom_sf(fill="cornflowerblue", color="cornflowerblue") + + ggtitle(title) + + theme_tufte()+ + theme(legend.position = "none", # Optionally hide the legend + axis.text = element_blank(), # Remove axis text + axis.title = element_blank(), # Remove axis titles + axis.ticks = element_blank(), # Remove axis ticks + plot.background = element_rect(fill = "white", color = NA), # Set the plot background to white + panel.background = element_rect(fill = "white", color = NA), # Set the panel background to white + panel.grid.major = element_blank(), # Remove major grid lines + panel.grid.minor = element_blank(), + ) + + # Save the plot as a PNG file + png_filename <- paste0(title,"_", Sys.Date(), ".png") + ggsave(png_filename, plot, width = 10, height = 8, units = "in") + + # Return the cropped dataset + return(cropped_places) + }, error = function(e) { + stop("Failed to retrieve or plot data: ", e$message) + }) +} +``` + +
    +
    + +Stream amenities by category + + +``` r +roads <- get_places(denver_redlining, type="roads") + +rivers <- get_places(denver_redlining, type="rivers") +``` + +
    +
    + +FUNCTION: Plot POI over HOLC grades + + +``` r + + +plot_city_redlining <- function(redlining_data, filename = "redlining_plot.png") { + # Fetch additional geographic data based on redlining data + roads <- get_places(redlining_data, type = "roads") + rivers <- get_places(redlining_data, type = "rivers") + + # Filter residential zones with valid grades and where city survey is TRUE + residential_zones <- redlining_data %>% + filter(city_survey == TRUE & grade != "") + + # Colors for the grades + colors <- c("#76a865", "#7cb5bd", "#ffff00", "#d9838d") + + # Plot the data using ggplot2 + plot <- ggplot() + + geom_sf(data = roads, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.5, lwd = 1.1) + + geom_sf(data = residential_zones, aes(fill = grade), alpha = 0.5) + + theme_tufte() + + scale_fill_manual(values = colors) + + labs(fill = 'HOLC Categories') + + theme( + plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank(), + legend.position = "right" + ) + + # Save the plot as a high-resolution PNG file + ggsave(filename, plot, width = 10, height = 8, units = "in", dpi = 600) + + # Return the plot object if needed for further manipulation or checking + return(plot) +} +``` + +
    +
    + +FUNCTION: Plot the HOLC grades individually + + +``` r +split_plot <- function(sf_data, roads, rivers) { + # Filter for grades A, B, C, and D + sf_data_filtered <- sf_data %>% + filter(grade %in% c('A', 'B', 'C', 'D')) + + # Define a color for each grade + grade_colors <- c("A" = "#76a865", "B" = "#7cb5bd", "C" = "#ffff00", "D" = "#d9838d") + + # Create the plot with panels for each grade + plot <- ggplot(data = sf_data_filtered) + + geom_sf(data = roads, alpha = 0.1, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) + + geom_sf(aes(fill = grade)) + + facet_wrap(~ grade, nrow = 1) + # Free scales for different zoom levels if needed + scale_fill_manual(values = grade_colors) + + theme_minimal() + + labs(fill = 'HOLC Grade') + + theme_tufte() + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "none", # Optionally hide the legend + axis.text = element_blank(), # Remove axis text + axis.title = element_blank(), # Remove axis titles + axis.ticks = element_blank(), # Remove axis ticks + panel.grid.major = element_blank(), # Remove major grid lines + panel.grid.minor = element_blank()) + + ggsave(plot, filename = "HOLC_grades_individually.png", width = 10, height = 4, units = "in", dpi = 1200) + return(plot) +} +``` + +
    +
    + +Plot Denver Redlining + + +``` r +denver_plot <- plot_city_redlining(denver_redlining) +denver_plot +``` + +![](worksheet_redlining_student_edition_files/figure-gfm/unnamed-chunk-8-1.png) + +
+![](../worksheets/redlining_plot.png)
    + +Plot 4 HOLC grades individually + + +``` r +plot_row <- split_plot(denver_redlining, roads, rivers) +plot_row +``` + +![](worksheet_redlining_student_edition_files/figure-gfm/unnamed-chunk-9-1.png) + +
+![](../worksheets/HOLC_grades_individually.png)
    + +FUNCTION: Map an amenity over each grade individually + + +``` r + +process_and_plot_sf_layers <- function(layer1, layer2, output_file = "output_plot.png") { + # Make geometries valid +layer1 <- st_make_valid(layer1) +layer2 <- st_make_valid(layer2) + +# Optionally, simplify geometries to remove duplicate vertices +layer1 <- st_simplify(layer1, preserveTopology = TRUE) |> + filter(grade != "") + +# Prepare a list to store results +results <- list() + +# Loop through each grade and perform operations +for (grade in c("A", "B", "C", "D")) { + # Filter layer1 for current grade + layer1_grade <- layer1[layer1$grade == grade, ] + + # Buffer the geometries of the current grade + buffered_layer1_grade <- st_buffer(layer1_grade, dist = 500) + + # Intersect with the second layer + intersections <- st_intersects(layer2, buffered_layer1_grade, sparse = FALSE) + selected_polygons <- layer2[rowSums(intersections) > 0, ] + + # Add a new column to store the grade information + selected_polygons$grade <- grade + + # Store the result + results[[grade]] <- selected_polygons +} + +# Combine all selected polygons from different grades into one sf object +final_selected_polygons <- do.call(rbind, results) + + # Define colors for the grades + grade_colors <- c("A" = "grey", "B" = "grey", "C" = "grey", "D" = "grey") + + # Create the plot + plot <- ggplot() + + geom_sf(data = roads, alpha = 0.05, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) + + geom_sf(data = layer1, fill = "grey", color = "grey", size = 0.1) + + facet_wrap(~ grade, nrow = 1) + + geom_sf(data = final_selected_polygons,fill = "green", color = "green", size = 0.1) + + facet_wrap(~ grade, nrow = 1) + + #scale_fill_manual(values = grade_colors) + + #scale_color_manual(values = grade_colors) + + theme_minimal() + + labs(fill = 'HOLC Grade') + + theme_tufte() + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "none", + axis.text = element_blank(), + axis.title = element_blank(), + axis.ticks = element_blank(), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank()) + + # Save the plot as a high-resolution PNG file + ggsave(output_file, plot, width = 10, height = 4, units = "in", dpi = 1200) + + # Return the plot for optional further use + return(list(plot=plot, sf = final_selected_polygons)) +} +``` + +
+
+## Part 2: Integrating Environmental Data
+
+### Data Processing
+
+- Use satellite data to analyze greenspace using NDVI, an index that measures
+  the quantity of vegetation in an area (Nardone et al. analyzed 2010
+  greenspace; this worksheet streams recent Sentinel-2 scenes). A short NDVI
+  sketch follows below.
+- Apply methods to adjust for potential confounders as described in the study,
+  ensuring that comparisons of greenspace across HOLC grades are valid and not
+  biased by historical or socio-demographic factors.
+
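As a quick reference before streaming real imagery, here is a tiny, self-contained sketch of the NDVI formula used throughout this worksheet, NDVI = (NIR − Red) / (NIR + Red); the raster values below are made up for illustration and are not Sentinel-2 measurements.

``` r
# Toy NDVI computation: two 2x2 rasters standing in for the red (B04) and
# near-infrared (B08) bands; values are invented for illustration only.
library(terra)

red <- rast(nrows = 2, ncols = 2, vals = c(0.10, 0.20, 0.30, 0.40))
nir <- rast(nrows = 2, ncols = 2, vals = c(0.50, 0.40, 0.35, 0.45))

ndvi <- (nir - red) / (nir + red)   # NDVI = (NIR - Red) / (NIR + Red)
names(ndvi) <- "NDVI"
values(ndvi)   # higher values indicate denser vegetation
```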
    + +FUNCTION: Stream NDVI data + + +``` r +polygon_layer <- denver_redlining +# Function to process satellite data based on an SF polygon's extent +process_satellite_data <- function(polygon_layer, start_date, end_date, assets, fps = 1, output_file = "anim.gif") { + # Record start time + start_time <- Sys.time() + + # Calculate the bbox from the polygon layer + bbox <- st_bbox(polygon_layer) + + s = stac("https://earth-search.aws.element84.com/v0") + + + # Use stacR to search for Sentinel-2 images within the bbox and date range + items = s |> stac_search( + collections = "sentinel-s2-l2a-cogs", + bbox = c(bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]), + datetime = paste(start_date, end_date, sep = "/"), + limit = 500 + ) %>% + post_request() + + # Define mask for Sentinel-2 image quality + #S2.mask <- image_mask("SCL", values = c(3, 8, 9)) + + # Create a collection of images filtering by cloud cover + col <- stac_image_collection(items$features, asset_names = assets, property_filter = function(x) {x[["eo:cloud_cover"]] < 30}) + + # Define a view for processing the data + v <- cube_view(srs = "EPSG:4326", + extent = list(t0 = start_date, t1 = end_date, + left = bbox["xmin"], right = bbox["xmax"], + top = bbox["ymax"], bottom = bbox["ymin"]), + dx = 0.001, dy = 0.001, dt = "P1M", + aggregation = "median", resampling = "bilinear") + + # Calculate NDVI and create an animation + ndvi_col <- function(n) { + rev(sequential_hcl(n, "Green-Yellow")) + } + + #raster_cube(col, v, mask = S2.mask) %>% + raster_cube(col, v) %>% + select_bands(c("B04", "B08")) %>% + apply_pixel("(B08-B04)/(B08+B04)", "NDVI") %>% + gdalcubes::animate(col = ndvi_col, zlim = c(-0.2, 1), key.pos = 1, save_as = output_file, fps = fps) + + # Calculate processing time + end_time <- Sys.time() + processing_time <- difftime(end_time, start_time) + + # Return processing time + return(processing_time) +} +``` + +
    +
    + +Stream NDVI data: animation + + +``` r +processing_time <- process_satellite_data(denver_redlining, "2022-05-31", "2023-05-31", c("B04", "B08")) +``` + +
+![](../worksheets/anim.gif)
    + +FUNCTION: Map NDVI per HOLC grade individually + + +``` r + + +create_mask_and_plot <- function(redlining_sf, background_raster = ndvi$raster, roads = NULL, rivers = NULL){ + start_time <- Sys.time() # Start timing + + # Validate and prepare the redlining data + redlining_sf <- redlining_sf %>% + filter(grade != "") %>% + st_make_valid() + + +bbox <- st_bbox(redlining_sf) # Get original bounding box + + +expanded_bbox <- expand_bbox(bbox, 6000, 1000) # + + +expanded_bbox_poly <- st_as_sfc(expanded_bbox, crs = st_crs(redlining_sf)) %>% + st_make_valid() + + # Initialize an empty list to store masks + masks <- list() + + # Iterate over each grade to create masks + unique_grades <- unique(redlining_sf$grade) + for (grade in unique_grades) { + # Filter polygons by grade + grade_polygons <- redlining_sf[redlining_sf$grade == grade, ] + + # Create an "inverted" mask by subtracting these polygons from the background + mask <- st_difference(expanded_bbox_poly, st_union(grade_polygons)) + + # Store the mask in the list with the grade as the name + masks[[grade]] <- st_sf(geometry = mask, grade = grade) + } + + # Combine all masks into a single sf object + mask_sf <- do.call(rbind, masks) + + # Normalize the grades so that C.2 becomes C, but correctly handle other grades + mask_sf$grade <- ifelse(mask_sf$grade == "C.2", "C", mask_sf$grade) + + # Prepare the plot + plot <- ggplot() + + geom_spatraster(data = background_raster, aes(fill = NDVI)) + + scale_fill_viridis_c(name = "NDVI", option = "viridis", direction = -1) + + + geom_sf(data = mask_sf, aes(color = grade), fill = "white", size = 0.1, show.legend = FALSE) + + scale_color_manual(values = c("A" = "white", "B" = "white", "C" = "white", "D" = "white"), name = "Grade") + + facet_wrap(~ grade, nrow = 1) + + geom_sf(data = roads, alpha = 1, lwd = 0.1, color="white") + + geom_sf(data = rivers, color = "white", alpha = 0.5, lwd = 1.1) + + labs(title = "NDVI: Normalized Difference Vegetation Index") + + theme_minimal() + + coord_sf(xlim = c(bbox["xmin"], bbox["xmax"]), + ylim = c(bbox["ymin"], bbox["ymax"]), + expand = FALSE) + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "bottom", + axis.text = element_blank(), + axis.title = element_blank(), + axis.ticks = element_blank(), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank()) + + # Save the plot + ggsave("redlining_mask_ndvi.png", plot, width = 10, height = 4, dpi = 600) + + end_time <- Sys.time() # End timing + runtime <- end_time - start_time + + # Return the plot and runtime + return(list(plot = plot, runtime = runtime, mask_sf = mask_sf)) +} +``` + +
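The core trick in `create_mask_and_plot()` above is the "inverted" mask: each grade's polygons are subtracted from an expanded bounding box, so that when the mask is drawn in white over the NDVI raster only that grade's neighborhoods remain visible. A minimal toy sketch of that step, using made-up geometry rather than the Denver data:

``` r
# Toy illustration of the inverted-mask step: background bbox minus one grade's polygons.
library(sf)

background <- st_as_sfc(st_bbox(c(xmin = 0, ymin = 0, xmax = 10, ymax = 10)))
grade_poly <- st_buffer(st_sfc(st_point(c(5, 5))), dist = 2)    # stand-in for one HOLC grade
mask       <- st_difference(background, st_union(grade_poly))   # everything except the grade

plot(background, border = "grey")
plot(mask, col = "white", add = TRUE)   # the uncovered "hole" is where the grade's NDVI would show
```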
    +
    + +FUNCTION: Stream year average NDVI + + +``` r + + + +yearly_average_ndvi <- function(polygon_layer, output_file = "ndvi.png", dx = 0.01, dy = 0.01) { + # Record start time + start_time <- Sys.time() + + # Calculate the bbox from the polygon layer + bbox <- st_bbox(polygon_layer) + + s = stac("https://earth-search.aws.element84.com/v0") + + # Search for Sentinel-2 images within the bbox for June + items <- s |> stac_search( + collections = "sentinel-s2-l2a-cogs", + bbox = c(bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]), + datetime = "2023-01-01/2023-12-31", + limit = 500 + ) %>% + post_request() + + # Create a collection of images filtering by cloud cover + col <- stac_image_collection(items$features, asset_names = c("B04", "B08"), property_filter = function(x) {x[["eo:cloud_cover"]] < 80}) + + # Define a view for processing the data specifically for June + v <- cube_view(srs = "EPSG:4326", + extent = list(t0 = "2023-01-01", t1 = "2023-12-31", + left = bbox["xmin"], right = bbox["xmax"], + top = bbox["ymax"], bottom = bbox["ymin"]), + dx = dx, dy = dy, dt = "P1Y", + aggregation = "median", resampling = "bilinear") + + # Process NDVI + ndvi_rast <- raster_cube(col, v) %>% + select_bands(c("B04", "B08")) %>% + apply_pixel("(B08-B04)/(B08+B04)", "NDVI") %>% + write_tif() |> + terra::rast() + + + # Convert terra Raster to ggplot using tidyterra +ndvi_plot <- ggplot() + + geom_spatraster(data = ndvi_rast, aes(fill = NDVI)) + + scale_fill_viridis_c(option = "viridis", direction = -1, name = "NDVI") + + labs(title = "NDVI mean for 2023") + + theme_minimal() + + coord_sf() + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "right", + axis.text = element_blank(), + axis.title = element_blank(), + axis.ticks = element_blank(), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank()) + + # Save the plot as a high-resolution PNG file + ggsave(output_file, ndvi_plot, width = 10, height = 8, dpi = 600) + + # Calculate processing time + end_time <- Sys.time() + processing_time <- difftime(end_time, start_time) + + # Return the plot and processing time + return(list(plot = ndvi_plot, processing_time = processing_time, raster = ndvi_rast)) +} +``` + +
    +
    + +Stream NDVI: low resolution + + +``` r +ndvi_background_low <- yearly_average_ndvi(denver_redlining) +``` + +
+![](../worksheets/ndvi.png)
    + +Map low resolution NDVI per HOLC grade + + +``` r +ndvi <- create_mask_and_plot(denver_redlining, background_raster = ndvi_background_low$raster, roads = roads, rivers = rivers) +``` + +
+![](../worksheets/redlining_mask_ndvi.png)
    + +FUNCTION: Map Denver City provided data per HOLC grade + + +``` r +process_city_inventory_data <- function(address, inner_file, polygon_layer, output_filename,variable_label= 'Tree Density') { + # Download and read the shapefile + full_path <- glue("/vsizip/vsicurl/{address}/{inner_file}") + shape_data <- st_read(full_path, quiet = TRUE) |> st_as_sf() + + # Process the shape data with the provided polygon layer + processed_data <- process_and_plot_sf_layers(polygon_layer, shape_data, paste0(output_filename, ".png")) + + # Extract trees from the processed data + trees <- processed_data$sf + denver_redlining_residential <- polygon_layer |> filter(grade != "") + + # Generate the density plot + plot <- ggplot() + + geom_sf(data = roads, alpha = 0.05, lwd = 0.1) + + geom_sf(data = rivers, color = "blue", alpha = 0.1, lwd = 1.1) + + geom_sf(data = denver_redlining_residential, fill = "grey", color = "grey", size = 0.1) + + facet_wrap(~ grade, nrow = 1) + + stat_density_2d(data = trees, + mapping = aes(x = map_dbl(geometry, ~.[1]), + y = map_dbl(geometry, ~.[2]), + fill = stat(density)), + geom = 'tile', + contour = FALSE, + alpha = 0.9) + + scale_fill_gradientn(colors = c("transparent", "white", "limegreen"), + values = scales::rescale(c(0, 0.1, 1)), # Adjust these based on your density range + guide = "colourbar") + + theme_minimal() + + labs(fill = variable_label) + + theme_tufte() + + theme(plot.background = element_rect(fill = "white", color = NA), + panel.background = element_rect(fill = "white", color = NA), + legend.position = "bottom", + axis.text = element_blank(), + axis.title = element_blank(), + axis.ticks = element_blank(), + panel.grid.major = element_blank(), + panel.grid.minor = element_blank()) + + # Save the plot + ggsave(paste0(output_filename, "_density_plot.png"), plot, width = 10, height = 4, units = "in", dpi = 600) + + # Return the plot and the tree layer + return(list(plot = plot, layer = trees)) +} +``` + +
    +
    + +Map tree inventory per HOLC grade + + +``` r +result <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/tree_inventory/shape/tree_inventory.zip", + "tree_inventory.shp", + denver_redlining, + "Denver_tree_inventory_2023" +) +``` + + Warning: `stat(density)` was deprecated in ggplot2 3.4.0. + ℹ Please use `after_stat(density)` instead. + +
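The warning above is triggered by the `stat(density)` aesthetic inside `process_city_inventory_data()`; since ggplot2 3.4.0 the recommended spelling is `after_stat(density)`. The short, self-contained demo below (toy points, not the Denver tree inventory) shows the newer syntax; swapping that single aesthetic in the function should silence the warning.

``` r
# Self-contained demo of after_stat(): same density-tile idea as in
# process_city_inventory_data(), but on made-up points.
library(ggplot2)

set.seed(1)
pts <- data.frame(x = rnorm(200), y = rnorm(200))

ggplot(pts, aes(x, y)) +
  stat_density_2d(aes(fill = after_stat(density)), geom = "tile", contour = FALSE) +
  scale_fill_viridis_c(name = "Density") +
  theme_minimal()
```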
+![](../worksheets/Denver_tree_inventory_2023.png)
+![](../worksheets/Denver_tree_inventory_2023_density_plot.png)
    + +Map traffic accidents per HOLC grade + + +``` r +result <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/traffic_accidents/shape/traffic_accidents.zip", + "traffic_accidents.shp", + denver_redlining, + "Denver_traffic_accidents", + variable_label= 'Traffic accidents density' +) +``` + +
+![](../worksheets/Denver_traffic_accidents.png)
+![](../worksheets/Denver_traffic_accidents_density_plot.png)
    + +Map stream sampling effort per HOLC grade + + +``` r +instream_sampling_sites <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/instream_sampling_sites/shape/instream_sampling_sites.zip", + "instream_sampling_sites.shp", + denver_redlining, + "instream_sampling_sites", + variable_label= 'Instream sampling sites density' +) +``` + +
+![](../worksheets/instream_sampling_sites.png)
+![](../worksheets/instream_sampling_sites_density_plot.png)
    + +Map soil sampling effort per HOLC grade + + +``` r +soil_samples <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/soil_samples/shape/soil_samples.zip", + "soil_samples.shp", + denver_redlining, + "Soil samples", + variable_label= 'soil samples density' +) +``` + +
+![](../worksheets/Soil%20samples.png)
+![](../worksheets/Soil%20samples_density_plot.png)
    + +Map public art density per HOLC grade + + +``` r +public_art <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/public_art/shape/public_art.zip", + "public_art.shp", + denver_redlining, + "Public art ", + variable_label= 'Public art density' +) +``` + +
+![](../worksheets/Public%20art%20.png)
+![](../worksheets/Public%20art%20_density_plot.png)
    + +Map liquor licenses density per HOLC grade + + +``` r +liquor_licenses <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/liquor_licenses/shape/liquor_licenses.zip", + "liquor_licenses.shp", + denver_redlining, + "liquor licenses ", + variable_label= 'liquor licenses density' +) +``` + +
+![](../worksheets/liquor%20licenses%20.png)
+![](../worksheets/liquor%20licenses%20_density_plot.png)
    + +Map crime density per HOLC grade + + +``` r +Crime <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/crime/shape/crime.zip", + "crime.shp", + denver_redlining, + "crime", + variable_label= 'Crime density' +) +``` + +
+![](../worksheets/crime.png) ![](../worksheets/crime_density_plot.png)
    + +Map police shooting density per HOLC grade + + +``` r +Denver_police_shootings <- process_city_inventory_data( + "https://www.denvergov.org/media/gis/DataCatalog/denver_police_officer_involved_shootings/shape/denver_police_officer_involved_shootings.zip", + "denver_police_officer_involved_shootings.shp", + denver_redlining, + "Police shootings", + variable_label= 'Police shootings density' +) +``` + +
    + +![](../worksheets/Police%20shootings.png) + +## Part 3: Comparative Analysis and Visualization + +### Statistical Analysis + +- Conduct a detailed statistical analysis to compare greenspace across + different HOLC grades, using techniques like Targeted Maximum + Likelihood Estimation (TMLE) to assess the association between + historical redlining and current greenspace levels. +- Visualize the disparities in greenspace distribution using GIS + tools, highlighting how redlining has shaped urban ecological + landscapes. + +## Conclusion + +This tutorial provides tools and methodologies to explore the lingering +effects of historic redlining on urban greenspace, offering insights +into the intersection of urban planning, environmental justice, and +public health. + +### References + +- Nardone, A., Rudolph, K. E., Morello-Frosch, R., & Casey, J. A. + (2021). Redlines and Greenspace: The Relationship between Historical + Redlining and 2010 Greenspace across the United States. + *Environmental Health Perspectives*, 129(1), 017006. + DOI:10.1289/EHP7495. [Available + online](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7839347/pdf/ehp7495.pdf) diff --git a/worksheets/worksheet_redlining_student_edition_files/figure-gfm/unnamed-chunk-7-1.png b/worksheets/worksheet_redlining_student_edition_files/figure-gfm/unnamed-chunk-7-1.png new file mode 100644 index 0000000..0a5e0a7 Binary files /dev/null and b/worksheets/worksheet_redlining_student_edition_files/figure-gfm/unnamed-chunk-7-1.png differ diff --git a/worksheets/worksheet_redlining_student_edition_files/figure-gfm/unnamed-chunk-8-1.png b/worksheets/worksheet_redlining_student_edition_files/figure-gfm/unnamed-chunk-8-1.png new file mode 100644 index 0000000..0a5e0a7 Binary files /dev/null and b/worksheets/worksheet_redlining_student_edition_files/figure-gfm/unnamed-chunk-8-1.png differ diff --git a/worksheets/worksheet_redlining_student_edition_files/figure-gfm/unnamed-chunk-9-1.png b/worksheets/worksheet_redlining_student_edition_files/figure-gfm/unnamed-chunk-9-1.png new file mode 100644 index 0000000..8992977 Binary files /dev/null and b/worksheets/worksheet_redlining_student_edition_files/figure-gfm/unnamed-chunk-9-1.png differ