From 9cb1ea36ccec11ade08cbd396a0318dfe0dfba14 Mon Sep 17 00:00:00 2001 From: Jothi Prakash Date: Wed, 14 Aug 2024 14:56:49 +0530 Subject: [PATCH 01/19] Modified the .gitignore file to ignore the .idea folder --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index a1fe5bbd..2ae38dbc 100644 --- a/.gitignore +++ b/.gitignore @@ -195,7 +195,7 @@ cython_debug/ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. -#.idea/ +.idea/ # End of https://www.toptal.com/developers/gitignore/api/python,macos From 4099939e40784f7e578c73ff86906e935b6dbf98 Mon Sep 17 00:00:00 2001 From: Jothi Prakash Date: Tue, 24 Sep 2024 10:32:10 +0530 Subject: [PATCH 02/19] [PECO-1803] Splitting the PySql connector into the core and the non-core part (#417) * Implemented ColumnQueue to test fetchall without pyarrow; removed token * Order of fields in row corrected * Changed the folder structure and tested the basic setup to work * Refactored the code to make the connector work * Basic setup of connector, core and sqlalchemy is working * Basic integration of core, connect and sqlalchemy is working * Setup working dynamic change from ColumnQueue to ArrowQueue * Refactored the test code and moved it to the respective folders * Added the unit test for column_queue; fixed __version__ * venv_main added to git ignore * Added code for merging columnar table * Merging code for columnar * Fixed the retry_close session test issue with logging * Fixed the databricks_sqlalchemy tests and introduced pytest.ini for the sqla_testing * Added pyarrow_test mark on pytest * Fixed databricks.sqlalchemy to databricks_sqlalchemy imports * Added poetry.lock * Added dist folder * Changed the pyproject.toml * Minor fix * Added the pyarrow skip tag on unit tests and tested their working * Fixed the Decimal and timestamp conversion issue in the non-arrow pipeline * Removed files that were not required and reformatted * Fixed test_retry error * Changed the folder structure to src / databricks * Moved the columnar non-arrow flow to another PR * Moved the README to the root * Removed columnQueue instance * Removed databricks_sqlalchemy dependency in core * Changed the pysql_supports_arrow predicate, introduced changes in the pyproject.toml * Ran the black formatter with the original version * Extra .py removed from all the __init__.py file names * Undo formatting check * Check * Check * Check * Check * Check * Check * Check * Check * Check * Check * Check * Check * Check * Check * BIG UPDATE * Refactor code * Refactor * Fixed versioning * Minor refactoring * Minor refactoring --- .../databricks_sql_connector}/__init__.py | 0 databricks_sql_connector/pyproject.toml | 23 + .../poetry.lock | 0 .../pyproject.toml | 24 +- .../src}/databricks/__init__.py | 0 .../src}/databricks/sql/__init__.py | 0 .../src/databricks/sql/auth}/__init__.py | 0 .../src}/databricks/sql/auth/auth.py | 0 .../databricks/sql/auth/authenticators.py | 0 .../src}/databricks/sql/auth/endpoint.py | 0 .../src}/databricks/sql/auth/oauth.py | 0 .../databricks/sql/auth/oauth_http_handler.py | 0 .../src}/databricks/sql/auth/retry.py | 0 .../databricks/sql/auth/thrift_http_client.py | 0 .../src}/databricks/sql/client.py | 13 +- .../sql/cloudfetch/download_manager.py | 0 .../databricks/sql/cloudfetch/downloader.py | 0 
.../src}/databricks/sql/exc.py | 0 .../databricks/sql/experimental}/__init__.py | 0 .../sql/experimental/oauth_persistence.py | 0 .../databricks/sql/parameters/__init__.py | 0 .../src}/databricks/sql/parameters/native.py | 0 .../src}/databricks/sql/parameters/py.typed | 0 .../src}/databricks/sql/py.typed | 0 .../thrift_api/TCLIService/TCLIService-remote | 0 .../sql/thrift_api/TCLIService/TCLIService.py | 0 .../sql/thrift_api/TCLIService/__init__.py | 0 .../sql/thrift_api/TCLIService/constants.py | 0 .../sql/thrift_api/TCLIService/ttypes.py | 0 .../databricks/sql/thrift_api}/__init__.py | 0 .../src}/databricks/sql/thrift_backend.py | 14 +- .../src}/databricks/sql/types.py | 0 .../src}/databricks/sql/utils.py | 28 +- .../src/databricks/sqlalchemy/__init__.py | 6 + .../tests}/__init__.py | 0 .../tests/conftest.py | 0 .../tests/e2e}/__init__.py | 0 .../tests/e2e/common}/__init__.py | 0 .../tests}/e2e/common/core_tests.py | 0 .../tests}/e2e/common/decimal_tests.py | 30 +- .../tests}/e2e/common/large_queries_mixin.py | 5 + .../tests}/e2e/common/predicates.py | 10 +- .../tests}/e2e/common/retry_test_mixins.py | 0 .../e2e/common/staging_ingestion_tests.py | 0 .../tests}/e2e/common/timestamp_tests.py | 0 .../tests}/e2e/common/uc_volume_tests.py | 0 .../tests}/e2e/test_complex_types.py | 3 +- .../tests}/e2e/test_driver.py | 21 +- .../tests}/e2e/test_parameterized_queries.py | 3 + .../tests/unit/__init__.py | 0 .../tests}/unit/test_arrow_queue.py | 9 +- .../tests}/unit/test_auth.py | 0 .../tests}/unit/test_client.py | 0 .../tests}/unit/test_cloud_fetch_queue.py | 9 +- .../tests}/unit/test_download_manager.py | 3 + .../tests}/unit/test_downloader.py | 0 .../tests}/unit/test_endpoint.py | 0 .../tests}/unit/test_fetches.py | 9 +- .../tests}/unit/test_fetches_bench.py | 7 +- .../tests}/unit/test_init_file.py | 0 .../tests}/unit/test_oauth_persistence.py | 0 .../tests}/unit/test_param_escaper.py | 0 .../tests}/unit/test_parameters.py | 0 .../tests}/unit/test_retry.py | 0 .../tests}/unit/test_thrift_backend.py | 10 +- .../sqlalchemy/README.sqlalchemy.md | 203 ------- src/databricks/sqlalchemy/README.tests.md | 44 -- src/databricks/sqlalchemy/__init__.py | 4 - src/databricks/sqlalchemy/_ddl.py | 100 ---- src/databricks/sqlalchemy/_parse.py | 385 ------------- src/databricks/sqlalchemy/_types.py | 323 ----------- src/databricks/sqlalchemy/base.py | 436 -------------- src/databricks/sqlalchemy/requirements.py | 249 -------- src/databricks/sqlalchemy/setup.cfg | 4 - src/databricks/sqlalchemy/test/_extra.py | 70 --- src/databricks/sqlalchemy/test/_future.py | 331 ----------- src/databricks/sqlalchemy/test/_regression.py | 311 ---------- .../sqlalchemy/test/_unsupported.py | 450 --------------- src/databricks/sqlalchemy/test/conftest.py | 13 - .../overrides/_componentreflectiontest.py | 189 ------ .../sqlalchemy/test/overrides/_ctetest.py | 33 -- src/databricks/sqlalchemy/test/test_suite.py | 13 - .../sqlalchemy/test_local/__init__.py | 5 - .../sqlalchemy/test_local/conftest.py | 44 -- .../sqlalchemy/test_local/e2e/MOCK_DATA.xlsx | Bin 59837 -> 0 bytes .../sqlalchemy/test_local/e2e/test_basic.py | 543 ------------------ .../sqlalchemy/test_local/test_ddl.py | 96 ---- .../sqlalchemy/test_local/test_parsing.py | 160 ------ .../sqlalchemy/test_local/test_types.py | 161 ------ 89 files changed, 162 insertions(+), 4232 deletions(-) rename {src/databricks/sql/auth => databricks_sql_connector/databricks_sql_connector}/__init__.py (100%) create mode 100644 databricks_sql_connector/pyproject.toml rename poetry.lock => 
databricks_sql_connector_core/poetry.lock (100%) rename pyproject.toml => databricks_sql_connector_core/pyproject.toml (67%) rename {src => databricks_sql_connector_core/src}/databricks/__init__.py (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/__init__.py (100%) rename {src/databricks/sql/experimental => databricks_sql_connector_core/src/databricks/sql/auth}/__init__.py (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/auth/auth.py (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/auth/authenticators.py (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/auth/endpoint.py (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/auth/oauth.py (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/auth/oauth_http_handler.py (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/auth/retry.py (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/auth/thrift_http_client.py (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/client.py (99%) rename {src => databricks_sql_connector_core/src}/databricks/sql/cloudfetch/download_manager.py (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/cloudfetch/downloader.py (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/exc.py (100%) rename {src/databricks/sql/thrift_api => databricks_sql_connector_core/src/databricks/sql/experimental}/__init__.py (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/experimental/oauth_persistence.py (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/parameters/__init__.py (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/parameters/native.py (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/parameters/py.typed (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/py.typed (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/thrift_api/TCLIService/TCLIService-remote (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/thrift_api/TCLIService/TCLIService.py (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/thrift_api/TCLIService/__init__.py (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/thrift_api/TCLIService/constants.py (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/thrift_api/TCLIService/ttypes.py (100%) rename {tests => databricks_sql_connector_core/src/databricks/sql/thrift_api}/__init__.py (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/thrift_backend.py (99%) rename {src => databricks_sql_connector_core/src}/databricks/sql/types.py (100%) rename {src => databricks_sql_connector_core/src}/databricks/sql/utils.py (97%) create mode 100644 databricks_sql_connector_core/src/databricks/sqlalchemy/__init__.py rename {tests/e2e => databricks_sql_connector_core/tests}/__init__.py (100%) rename conftest.py => databricks_sql_connector_core/tests/conftest.py (100%) rename {tests/e2e/common => databricks_sql_connector_core/tests/e2e}/__init__.py (100%) rename {tests/unit => databricks_sql_connector_core/tests/e2e/common}/__init__.py (100%) rename {tests => databricks_sql_connector_core/tests}/e2e/common/core_tests.py (100%) rename {tests => databricks_sql_connector_core/tests}/e2e/common/decimal_tests.py (79%) rename {tests => 
databricks_sql_connector_core/tests}/e2e/common/large_queries_mixin.py (95%) rename {tests => databricks_sql_connector_core/tests}/e2e/common/predicates.py (95%) rename {tests => databricks_sql_connector_core/tests}/e2e/common/retry_test_mixins.py (100%) rename {tests => databricks_sql_connector_core/tests}/e2e/common/staging_ingestion_tests.py (100%) rename {tests => databricks_sql_connector_core/tests}/e2e/common/timestamp_tests.py (100%) rename {tests => databricks_sql_connector_core/tests}/e2e/common/uc_volume_tests.py (100%) rename {tests => databricks_sql_connector_core/tests}/e2e/test_complex_types.py (93%) rename {tests => databricks_sql_connector_core/tests}/e2e/test_driver.py (97%) rename {tests => databricks_sql_connector_core/tests}/e2e/test_parameterized_queries.py (98%) rename src/databricks/sqlalchemy/py.typed => databricks_sql_connector_core/tests/unit/__init__.py (100%) mode change 100755 => 100644 rename {tests => databricks_sql_connector_core/tests}/unit/test_arrow_queue.py (82%) rename {tests => databricks_sql_connector_core/tests}/unit/test_auth.py (100%) rename {tests => databricks_sql_connector_core/tests}/unit/test_client.py (100%) rename {tests => databricks_sql_connector_core/tests}/unit/test_cloud_fetch_queue.py (98%) rename {tests => databricks_sql_connector_core/tests}/unit/test_download_manager.py (93%) rename {tests => databricks_sql_connector_core/tests}/unit/test_downloader.py (100%) rename {tests => databricks_sql_connector_core/tests}/unit/test_endpoint.py (100%) rename {tests => databricks_sql_connector_core/tests}/unit/test_fetches.py (97%) rename {tests => databricks_sql_connector_core/tests}/unit/test_fetches_bench.py (90%) rename {tests => databricks_sql_connector_core/tests}/unit/test_init_file.py (100%) rename {tests => databricks_sql_connector_core/tests}/unit/test_oauth_persistence.py (100%) rename {tests => databricks_sql_connector_core/tests}/unit/test_param_escaper.py (100%) rename {tests => databricks_sql_connector_core/tests}/unit/test_parameters.py (100%) rename {tests => databricks_sql_connector_core/tests}/unit/test_retry.py (100%) rename {tests => databricks_sql_connector_core/tests}/unit/test_thrift_backend.py (99%) delete mode 100644 src/databricks/sqlalchemy/README.sqlalchemy.md delete mode 100644 src/databricks/sqlalchemy/README.tests.md delete mode 100644 src/databricks/sqlalchemy/__init__.py delete mode 100644 src/databricks/sqlalchemy/_ddl.py delete mode 100644 src/databricks/sqlalchemy/_parse.py delete mode 100644 src/databricks/sqlalchemy/_types.py delete mode 100644 src/databricks/sqlalchemy/base.py delete mode 100644 src/databricks/sqlalchemy/requirements.py delete mode 100644 src/databricks/sqlalchemy/setup.cfg delete mode 100644 src/databricks/sqlalchemy/test/_extra.py delete mode 100644 src/databricks/sqlalchemy/test/_future.py delete mode 100644 src/databricks/sqlalchemy/test/_regression.py delete mode 100644 src/databricks/sqlalchemy/test/_unsupported.py delete mode 100644 src/databricks/sqlalchemy/test/conftest.py delete mode 100644 src/databricks/sqlalchemy/test/overrides/_componentreflectiontest.py delete mode 100644 src/databricks/sqlalchemy/test/overrides/_ctetest.py delete mode 100644 src/databricks/sqlalchemy/test/test_suite.py delete mode 100644 src/databricks/sqlalchemy/test_local/__init__.py delete mode 100644 src/databricks/sqlalchemy/test_local/conftest.py delete mode 100644 src/databricks/sqlalchemy/test_local/e2e/MOCK_DATA.xlsx delete mode 100644 src/databricks/sqlalchemy/test_local/e2e/test_basic.py delete 
mode 100644 src/databricks/sqlalchemy/test_local/test_ddl.py delete mode 100644 src/databricks/sqlalchemy/test_local/test_parsing.py delete mode 100644 src/databricks/sqlalchemy/test_local/test_types.py diff --git a/src/databricks/sql/auth/__init__.py b/databricks_sql_connector/databricks_sql_connector/__init__.py similarity index 100% rename from src/databricks/sql/auth/__init__.py rename to databricks_sql_connector/databricks_sql_connector/__init__.py diff --git a/databricks_sql_connector/pyproject.toml b/databricks_sql_connector/pyproject.toml new file mode 100644 index 00000000..6e7297d1 --- /dev/null +++ b/databricks_sql_connector/pyproject.toml @@ -0,0 +1,23 @@ +[tool.poetry] +name = "databricks-sql-connector" +version = "3.5.0" +description = "Databricks SQL Connector for Python" +authors = ["Databricks "] +license = "Apache-2.0" + + +[tool.poetry.dependencies] +databricks_sql_connector_core = { version = ">=1.0.0", extras=["all"]} +databricks_sqlalchemy = { version = ">=1.0.0", optional = true } + +[tool.poetry.extras] +databricks_sqlalchemy = ["databricks_sqlalchemy"] + +[tool.poetry.urls] +"Homepage" = "https://github.com/databricks/databricks-sql-python" +"Bug Tracker" = "https://github.com/databricks/databricks-sql-python/issues" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + diff --git a/poetry.lock b/databricks_sql_connector_core/poetry.lock similarity index 100% rename from poetry.lock rename to databricks_sql_connector_core/poetry.lock diff --git a/pyproject.toml b/databricks_sql_connector_core/pyproject.toml similarity index 67% rename from pyproject.toml rename to databricks_sql_connector_core/pyproject.toml index 44d25ef9..a6e36091 100644 --- a/pyproject.toml +++ b/databricks_sql_connector_core/pyproject.toml @@ -1,12 +1,9 @@ [tool.poetry] -name = "databricks-sql-connector" -version = "3.3.0" -description = "Databricks SQL Connector for Python" +name = "databricks-sql-connector-core" +version = "1.0.0" +description = "Databricks SQL Connector core for Python" authors = ["Databricks "] -license = "Apache-2.0" -readme = "README.md" packages = [{ include = "databricks", from = "src" }] -include = ["CHANGELOG.md"] [tool.poetry.dependencies] python = "^3.8.0" @@ -14,23 +11,16 @@ thrift = ">=0.16.0,<0.21.0" pandas = [ { version = ">=1.2.5,<2.3.0", python = ">=3.8" } ] -pyarrow = ">=14.0.1,<17" - lz4 = "^4.0.2" requests = "^2.18.1" oauthlib = "^3.1.0" -numpy = [ - { version = "^1.16.6", python = ">=3.8,<3.11" }, - { version = "^1.23.4", python = ">=3.11" }, -] -sqlalchemy = { version = ">=2.0.21", optional = true } openpyxl = "^3.0.10" alembic = { version = "^1.0.11", optional = true } urllib3 = ">=1.26" +pyarrow = {version = ">=14.0.1,<17", optional = true} [tool.poetry.extras] -sqlalchemy = ["sqlalchemy"] -alembic = ["sqlalchemy", "alembic"] +pyarrow = ["pyarrow"] [tool.poetry.dev-dependencies] pytest = "^7.1.2" @@ -43,8 +33,6 @@ pytest-dotenv = "^0.5.2" "Homepage" = "https://github.com/databricks/databricks-sql-python" "Bug Tracker" = "https://github.com/databricks/databricks-sql-python/issues" -[tool.poetry.plugins."sqlalchemy.dialects"] -"databricks" = "databricks.sqlalchemy:DatabricksDialect" [build-system] requires = ["poetry-core>=1.0.0"] @@ -62,5 +50,5 @@ markers = {"reviewed" = "Test case has been reviewed by Databricks"} minversion = "6.0" log_cli = "false" log_cli_level = "INFO" -testpaths = ["tests", "src/databricks/sqlalchemy/test_local"] +testpaths = ["tests", "databricks_sql_connector_core/tests"] env_files = 
["test.env"] diff --git a/src/databricks/__init__.py b/databricks_sql_connector_core/src/databricks/__init__.py similarity index 100% rename from src/databricks/__init__.py rename to databricks_sql_connector_core/src/databricks/__init__.py diff --git a/src/databricks/sql/__init__.py b/databricks_sql_connector_core/src/databricks/sql/__init__.py similarity index 100% rename from src/databricks/sql/__init__.py rename to databricks_sql_connector_core/src/databricks/sql/__init__.py diff --git a/src/databricks/sql/experimental/__init__.py b/databricks_sql_connector_core/src/databricks/sql/auth/__init__.py similarity index 100% rename from src/databricks/sql/experimental/__init__.py rename to databricks_sql_connector_core/src/databricks/sql/auth/__init__.py diff --git a/src/databricks/sql/auth/auth.py b/databricks_sql_connector_core/src/databricks/sql/auth/auth.py similarity index 100% rename from src/databricks/sql/auth/auth.py rename to databricks_sql_connector_core/src/databricks/sql/auth/auth.py diff --git a/src/databricks/sql/auth/authenticators.py b/databricks_sql_connector_core/src/databricks/sql/auth/authenticators.py similarity index 100% rename from src/databricks/sql/auth/authenticators.py rename to databricks_sql_connector_core/src/databricks/sql/auth/authenticators.py diff --git a/src/databricks/sql/auth/endpoint.py b/databricks_sql_connector_core/src/databricks/sql/auth/endpoint.py similarity index 100% rename from src/databricks/sql/auth/endpoint.py rename to databricks_sql_connector_core/src/databricks/sql/auth/endpoint.py diff --git a/src/databricks/sql/auth/oauth.py b/databricks_sql_connector_core/src/databricks/sql/auth/oauth.py similarity index 100% rename from src/databricks/sql/auth/oauth.py rename to databricks_sql_connector_core/src/databricks/sql/auth/oauth.py diff --git a/src/databricks/sql/auth/oauth_http_handler.py b/databricks_sql_connector_core/src/databricks/sql/auth/oauth_http_handler.py similarity index 100% rename from src/databricks/sql/auth/oauth_http_handler.py rename to databricks_sql_connector_core/src/databricks/sql/auth/oauth_http_handler.py diff --git a/src/databricks/sql/auth/retry.py b/databricks_sql_connector_core/src/databricks/sql/auth/retry.py similarity index 100% rename from src/databricks/sql/auth/retry.py rename to databricks_sql_connector_core/src/databricks/sql/auth/retry.py diff --git a/src/databricks/sql/auth/thrift_http_client.py b/databricks_sql_connector_core/src/databricks/sql/auth/thrift_http_client.py similarity index 100% rename from src/databricks/sql/auth/thrift_http_client.py rename to databricks_sql_connector_core/src/databricks/sql/auth/thrift_http_client.py diff --git a/src/databricks/sql/client.py b/databricks_sql_connector_core/src/databricks/sql/client.py similarity index 99% rename from src/databricks/sql/client.py rename to databricks_sql_connector_core/src/databricks/sql/client.py index c0bf534d..72811628 100755 --- a/src/databricks/sql/client.py +++ b/databricks_sql_connector_core/src/databricks/sql/client.py @@ -1,7 +1,6 @@ from typing import Dict, Tuple, List, Optional, Any, Union, Sequence import pandas -import pyarrow import requests import json import os @@ -43,6 +42,10 @@ TSparkParameter, ) +try: + import pyarrow +except ImportError: + pyarrow = None logger = logging.getLogger(__name__) @@ -977,14 +980,14 @@ def fetchmany(self, size: int) -> List[Row]: else: raise Error("There is no active result set") - def fetchall_arrow(self) -> pyarrow.Table: + def fetchall_arrow(self) -> "pyarrow.Table": 
self._check_not_closed() if self.active_result_set: return self.active_result_set.fetchall_arrow() else: raise Error("There is no active result set") - def fetchmany_arrow(self, size) -> pyarrow.Table: + def fetchmany_arrow(self, size) -> "pyarrow.Table": self._check_not_closed() if self.active_result_set: return self.active_result_set.fetchmany_arrow(size) @@ -1171,7 +1174,7 @@ def _convert_arrow_table(self, table): def rownumber(self): return self._next_row_index - def fetchmany_arrow(self, size: int) -> pyarrow.Table: + def fetchmany_arrow(self, size: int) -> "pyarrow.Table": """ Fetch the next set of rows of a query result, returning a PyArrow table. @@ -1196,7 +1199,7 @@ def fetchmany_arrow(self, size: int) -> pyarrow.Table: return results - def fetchall_arrow(self) -> pyarrow.Table: + def fetchall_arrow(self) -> "pyarrow.Table": """Fetch all (remaining) rows of a query result, returning them as a PyArrow table.""" results = self.results.remaining_rows() self._next_row_index += results.num_rows diff --git a/src/databricks/sql/cloudfetch/download_manager.py b/databricks_sql_connector_core/src/databricks/sql/cloudfetch/download_manager.py similarity index 100% rename from src/databricks/sql/cloudfetch/download_manager.py rename to databricks_sql_connector_core/src/databricks/sql/cloudfetch/download_manager.py diff --git a/src/databricks/sql/cloudfetch/downloader.py b/databricks_sql_connector_core/src/databricks/sql/cloudfetch/downloader.py similarity index 100% rename from src/databricks/sql/cloudfetch/downloader.py rename to databricks_sql_connector_core/src/databricks/sql/cloudfetch/downloader.py diff --git a/src/databricks/sql/exc.py b/databricks_sql_connector_core/src/databricks/sql/exc.py similarity index 100% rename from src/databricks/sql/exc.py rename to databricks_sql_connector_core/src/databricks/sql/exc.py diff --git a/src/databricks/sql/thrift_api/__init__.py b/databricks_sql_connector_core/src/databricks/sql/experimental/__init__.py similarity index 100% rename from src/databricks/sql/thrift_api/__init__.py rename to databricks_sql_connector_core/src/databricks/sql/experimental/__init__.py diff --git a/src/databricks/sql/experimental/oauth_persistence.py b/databricks_sql_connector_core/src/databricks/sql/experimental/oauth_persistence.py similarity index 100% rename from src/databricks/sql/experimental/oauth_persistence.py rename to databricks_sql_connector_core/src/databricks/sql/experimental/oauth_persistence.py diff --git a/src/databricks/sql/parameters/__init__.py b/databricks_sql_connector_core/src/databricks/sql/parameters/__init__.py similarity index 100% rename from src/databricks/sql/parameters/__init__.py rename to databricks_sql_connector_core/src/databricks/sql/parameters/__init__.py diff --git a/src/databricks/sql/parameters/native.py b/databricks_sql_connector_core/src/databricks/sql/parameters/native.py similarity index 100% rename from src/databricks/sql/parameters/native.py rename to databricks_sql_connector_core/src/databricks/sql/parameters/native.py diff --git a/src/databricks/sql/parameters/py.typed b/databricks_sql_connector_core/src/databricks/sql/parameters/py.typed similarity index 100% rename from src/databricks/sql/parameters/py.typed rename to databricks_sql_connector_core/src/databricks/sql/parameters/py.typed diff --git a/src/databricks/sql/py.typed b/databricks_sql_connector_core/src/databricks/sql/py.typed similarity index 100% rename from src/databricks/sql/py.typed rename to databricks_sql_connector_core/src/databricks/sql/py.typed diff 
--git a/src/databricks/sql/thrift_api/TCLIService/TCLIService-remote b/databricks_sql_connector_core/src/databricks/sql/thrift_api/TCLIService/TCLIService-remote similarity index 100% rename from src/databricks/sql/thrift_api/TCLIService/TCLIService-remote rename to databricks_sql_connector_core/src/databricks/sql/thrift_api/TCLIService/TCLIService-remote diff --git a/src/databricks/sql/thrift_api/TCLIService/TCLIService.py b/databricks_sql_connector_core/src/databricks/sql/thrift_api/TCLIService/TCLIService.py similarity index 100% rename from src/databricks/sql/thrift_api/TCLIService/TCLIService.py rename to databricks_sql_connector_core/src/databricks/sql/thrift_api/TCLIService/TCLIService.py diff --git a/src/databricks/sql/thrift_api/TCLIService/__init__.py b/databricks_sql_connector_core/src/databricks/sql/thrift_api/TCLIService/__init__.py similarity index 100% rename from src/databricks/sql/thrift_api/TCLIService/__init__.py rename to databricks_sql_connector_core/src/databricks/sql/thrift_api/TCLIService/__init__.py diff --git a/src/databricks/sql/thrift_api/TCLIService/constants.py b/databricks_sql_connector_core/src/databricks/sql/thrift_api/TCLIService/constants.py similarity index 100% rename from src/databricks/sql/thrift_api/TCLIService/constants.py rename to databricks_sql_connector_core/src/databricks/sql/thrift_api/TCLIService/constants.py diff --git a/src/databricks/sql/thrift_api/TCLIService/ttypes.py b/databricks_sql_connector_core/src/databricks/sql/thrift_api/TCLIService/ttypes.py similarity index 100% rename from src/databricks/sql/thrift_api/TCLIService/ttypes.py rename to databricks_sql_connector_core/src/databricks/sql/thrift_api/TCLIService/ttypes.py diff --git a/tests/__init__.py b/databricks_sql_connector_core/src/databricks/sql/thrift_api/__init__.py similarity index 100% rename from tests/__init__.py rename to databricks_sql_connector_core/src/databricks/sql/thrift_api/__init__.py diff --git a/src/databricks/sql/thrift_backend.py b/databricks_sql_connector_core/src/databricks/sql/thrift_backend.py similarity index 99% rename from src/databricks/sql/thrift_backend.py rename to databricks_sql_connector_core/src/databricks/sql/thrift_backend.py index 56412fce..42daf85e 100644 --- a/src/databricks/sql/thrift_backend.py +++ b/databricks_sql_connector_core/src/databricks/sql/thrift_backend.py @@ -8,7 +8,6 @@ from ssl import CERT_NONE, CERT_REQUIRED, create_default_context from typing import List, Union -import pyarrow import thrift.transport.THttpClient import thrift.protocol.TBinaryProtocol import thrift.transport.TSocket @@ -37,6 +36,11 @@ convert_column_based_set_to_arrow_table, ) +try: + import pyarrow +except ImportError: + pyarrow = None + logger = logging.getLogger(__name__) unsafe_logger = logging.getLogger("databricks.sql.unsafe") @@ -652,6 +656,12 @@ def _get_metadata_resp(self, op_handle): @staticmethod def _hive_schema_to_arrow_schema(t_table_schema): + + if pyarrow is None: + raise ImportError( + "pyarrow is required to convert Hive schema to Arrow schema" + ) + def map_type(t_type_entry): if t_type_entry.primitiveEntry: return { @@ -858,7 +868,7 @@ def execute_command( getDirectResults=ttypes.TSparkGetDirectResults( maxRows=max_rows, maxBytes=max_bytes ), - canReadArrowResult=True, + canReadArrowResult=True if pyarrow else False, canDecompressLZ4Result=lz4_compression, canDownloadResult=use_cloud_fetch, confOverlay={ diff --git a/src/databricks/sql/types.py b/databricks_sql_connector_core/src/databricks/sql/types.py similarity index 100% rename from 
src/databricks/sql/types.py rename to databricks_sql_connector_core/src/databricks/sql/types.py diff --git a/src/databricks/sql/utils.py b/databricks_sql_connector_core/src/databricks/sql/utils.py similarity index 97% rename from src/databricks/sql/utils.py rename to databricks_sql_connector_core/src/databricks/sql/utils.py index c22688bb..1bcc8a88 100644 --- a/src/databricks/sql/utils.py +++ b/databricks_sql_connector_core/src/databricks/sql/utils.py @@ -12,7 +12,6 @@ from ssl import SSLContext import lz4.frame -import pyarrow from databricks.sql import OperationalError, exc from databricks.sql.cloudfetch.download_manager import ResultFileDownloadManager @@ -28,16 +27,21 @@ import logging +try: + import pyarrow +except ImportError: + pyarrow = None + logger = logging.getLogger(__name__) class ResultSetQueue(ABC): @abstractmethod - def next_n_rows(self, num_rows: int) -> pyarrow.Table: + def next_n_rows(self, num_rows: int): pass @abstractmethod - def remaining_rows(self) -> pyarrow.Table: + def remaining_rows(self): pass @@ -100,7 +104,7 @@ def build_queue( class ArrowQueue(ResultSetQueue): def __init__( self, - arrow_table: pyarrow.Table, + arrow_table: "pyarrow.Table", n_valid_rows: int, start_row_index: int = 0, ): @@ -115,7 +119,7 @@ def __init__( self.arrow_table = arrow_table self.n_valid_rows = n_valid_rows - def next_n_rows(self, num_rows: int) -> pyarrow.Table: + def next_n_rows(self, num_rows: int) -> "pyarrow.Table": """Get upto the next n rows of the Arrow dataframe""" length = min(num_rows, self.n_valid_rows - self.cur_row_index) # Note that the table.slice API is not the same as Python's slice @@ -124,7 +128,7 @@ def next_n_rows(self, num_rows: int) -> pyarrow.Table: self.cur_row_index += slice.num_rows return slice - def remaining_rows(self) -> pyarrow.Table: + def remaining_rows(self) -> "pyarrow.Table": slice = self.arrow_table.slice( self.cur_row_index, self.n_valid_rows - self.cur_row_index ) @@ -184,7 +188,7 @@ def __init__( self.table = self._create_next_table() self.table_row_index = 0 - def next_n_rows(self, num_rows: int) -> pyarrow.Table: + def next_n_rows(self, num_rows: int) -> "pyarrow.Table": """ Get up to the next n rows of the cloud fetch Arrow dataframes. @@ -216,7 +220,7 @@ def next_n_rows(self, num_rows: int) -> pyarrow.Table: logger.debug("CloudFetchQueue: collected {} next rows".format(results.num_rows)) return results - def remaining_rows(self) -> pyarrow.Table: + def remaining_rows(self) -> "pyarrow.Table": """ Get all remaining rows of the cloud fetch Arrow dataframes. 
@@ -237,7 +241,7 @@ def remaining_rows(self) -> pyarrow.Table: self.table_row_index = 0 return results - def _create_next_table(self) -> Union[pyarrow.Table, None]: + def _create_next_table(self) -> Union["pyarrow.Table", None]: logger.debug( "CloudFetchQueue: Trying to get downloaded file for row {}".format( self.start_row_index ) @@ -276,7 +280,7 @@ def _create_next_table(self) -> Union[pyarrow.Table, None]: return arrow_table - def _create_empty_table(self) -> pyarrow.Table: + def _create_empty_table(self) -> "pyarrow.Table": # Create a 0-row table with just the schema bytes return create_arrow_table_from_arrow_file(self.schema_bytes, self.description) @@ -515,7 +519,7 @@ def transform_paramstyle( return output -def create_arrow_table_from_arrow_file(file_bytes: bytes, description) -> pyarrow.Table: +def create_arrow_table_from_arrow_file(file_bytes: bytes, description) -> "pyarrow.Table": arrow_table = convert_arrow_based_file_to_arrow_table(file_bytes) return convert_decimals_in_arrow_table(arrow_table, description) @@ -542,7 +546,7 @@ def convert_arrow_based_set_to_arrow_table(arrow_batches, lz4_compressed, schema return arrow_table, n_rows -def convert_decimals_in_arrow_table(table, description) -> pyarrow.Table: +def convert_decimals_in_arrow_table(table, description) -> "pyarrow.Table": for i, col in enumerate(table.itercolumns()): if description[i][1] == "decimal": decimal_col = col.to_pandas().apply( diff --git a/databricks_sql_connector_core/src/databricks/sqlalchemy/__init__.py b/databricks_sql_connector_core/src/databricks/sqlalchemy/__init__.py new file mode 100644 index 00000000..f79d4c20 --- /dev/null +++ b/databricks_sql_connector_core/src/databricks/sqlalchemy/__init__.py @@ -0,0 +1,6 @@ +try: + from databricks_sqlalchemy import * +except ImportError: + import warnings + + warnings.warn("Install the databricks-sqlalchemy plugin before using this module") \ No newline at end of file diff --git a/tests/e2e/__init__.py b/databricks_sql_connector_core/tests/__init__.py similarity index 100% rename from tests/e2e/__init__.py rename to databricks_sql_connector_core/tests/__init__.py diff --git a/conftest.py b/databricks_sql_connector_core/tests/conftest.py similarity index 100% rename from conftest.py rename to databricks_sql_connector_core/tests/conftest.py diff --git a/tests/e2e/common/__init__.py b/databricks_sql_connector_core/tests/e2e/__init__.py similarity index 100% rename from tests/e2e/common/__init__.py rename to databricks_sql_connector_core/tests/e2e/__init__.py diff --git a/tests/unit/__init__.py b/databricks_sql_connector_core/tests/e2e/common/__init__.py similarity index 100% rename from tests/unit/__init__.py rename to databricks_sql_connector_core/tests/e2e/common/__init__.py diff --git a/tests/e2e/common/core_tests.py b/databricks_sql_connector_core/tests/e2e/common/core_tests.py similarity index 100% rename from tests/e2e/common/core_tests.py rename to databricks_sql_connector_core/tests/e2e/common/core_tests.py diff --git a/tests/e2e/common/decimal_tests.py b/databricks_sql_connector_core/tests/e2e/common/decimal_tests.py similarity index 79% rename from tests/e2e/common/decimal_tests.py rename to databricks_sql_connector_core/tests/e2e/common/decimal_tests.py index 5005cdf1..47fc2070 100644 --- a/tests/e2e/common/decimal_tests.py +++ b/databricks_sql_connector_core/tests/e2e/common/decimal_tests.py @@ -1,11 +1,20 @@ from decimal import Decimal -import pyarrow import pytest +try: + import pyarrow +except ImportError: + pyarrow = None -class DecimalTestsMixin: - 
decimal_and_expected_results = [ +from tests.e2e.common.predicates import pysql_supports_arrow + +def decimal_and_expected_results(): + + if pyarrow is None: + return [] + + return [ ("100.001 AS DECIMAL(6, 3)", Decimal("100.001"), pyarrow.decimal128(6, 3)), ("1000000.0000 AS DECIMAL(11, 4)", Decimal("1000000.0000"), pyarrow.decimal128(11, 4)), ("-10.2343 AS DECIMAL(10, 6)", Decimal("-10.234300"), pyarrow.decimal128(10, 6)), @@ -17,7 +26,12 @@ class DecimalTestsMixin: ("1e-3 AS DECIMAL(38, 3)", Decimal("0.001"), pyarrow.decimal128(38, 3)), ] - multi_decimals_and_expected_results = [ +def multi_decimals_and_expected_results(): + + if pyarrow is None: + return [] + + return [ ( ["1 AS DECIMAL(6, 3)", "100.001 AS DECIMAL(6, 3)", "NULL AS DECIMAL(6, 3)"], [Decimal("1.00"), Decimal("100.001"), None], @@ -30,7 +44,9 @@ class DecimalTestsMixin: ), ] - @pytest.mark.parametrize("decimal, expected_value, expected_type", decimal_and_expected_results) +@pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") +class DecimalTestsMixin: + @pytest.mark.parametrize("decimal, expected_value, expected_type", decimal_and_expected_results()) def test_decimals(self, decimal, expected_value, expected_type): with self.cursor({}) as cursor: query = "SELECT CAST ({})".format(decimal) @@ -39,9 +55,7 @@ def test_decimals(self, decimal, expected_value, expected_type): assert table.field(0).type == expected_type assert table.to_pydict().popitem()[1][0] == expected_value - @pytest.mark.parametrize( - "decimals, expected_values, expected_type", multi_decimals_and_expected_results - ) + @pytest.mark.parametrize("decimals, expected_values, expected_type", multi_decimals_and_expected_results()) def test_multi_decimals(self, decimals, expected_values, expected_type): with self.cursor({}) as cursor: union_str = " UNION ".join(["(SELECT CAST ({}))".format(dec) for dec in decimals]) diff --git a/tests/e2e/common/large_queries_mixin.py b/databricks_sql_connector_core/tests/e2e/common/large_queries_mixin.py similarity index 95% rename from tests/e2e/common/large_queries_mixin.py rename to databricks_sql_connector_core/tests/e2e/common/large_queries_mixin.py index 9ebc3f01..07d02447 100644 --- a/tests/e2e/common/large_queries_mixin.py +++ b/databricks_sql_connector_core/tests/e2e/common/large_queries_mixin.py @@ -1,6 +1,10 @@ import logging import math import time +from unittest import skipUnless + +import pytest +from tests.e2e.common.predicates import pysql_supports_arrow log = logging.getLogger(__name__) @@ -40,6 +44,7 @@ def fetch_rows(self, cursor, row_count, fetchmany_size): + "assuming 10K fetch size." 
) + @pytest.mark.skipif(not pysql_supports_arrow(), reason="Without pyarrow lz4 compression is not supported") def test_query_with_large_wide_result_set(self): resultSize = 300 * 1000 * 1000 # 300 MB width = 8192 # B diff --git a/tests/e2e/common/predicates.py b/databricks_sql_connector_core/tests/e2e/common/predicates.py similarity index 95% rename from tests/e2e/common/predicates.py rename to databricks_sql_connector_core/tests/e2e/common/predicates.py index 88b14961..99e6f701 100644 --- a/tests/e2e/common/predicates.py +++ b/databricks_sql_connector_core/tests/e2e/common/predicates.py @@ -8,9 +8,13 @@ def pysql_supports_arrow(): - """Import databricks.sql and test whether Cursor has fetchall_arrow.""" - from databricks.sql.client import Cursor - return hasattr(Cursor, 'fetchall_arrow') + """Checks if the pyarrow library is installed or not""" + try: + import pyarrow + + return True + except ImportError: + return False def pysql_has_version(compare, version): diff --git a/tests/e2e/common/retry_test_mixins.py b/databricks_sql_connector_core/tests/e2e/common/retry_test_mixins.py similarity index 100% rename from tests/e2e/common/retry_test_mixins.py rename to databricks_sql_connector_core/tests/e2e/common/retry_test_mixins.py diff --git a/tests/e2e/common/staging_ingestion_tests.py b/databricks_sql_connector_core/tests/e2e/common/staging_ingestion_tests.py similarity index 100% rename from tests/e2e/common/staging_ingestion_tests.py rename to databricks_sql_connector_core/tests/e2e/common/staging_ingestion_tests.py diff --git a/tests/e2e/common/timestamp_tests.py b/databricks_sql_connector_core/tests/e2e/common/timestamp_tests.py similarity index 100% rename from tests/e2e/common/timestamp_tests.py rename to databricks_sql_connector_core/tests/e2e/common/timestamp_tests.py diff --git a/tests/e2e/common/uc_volume_tests.py b/databricks_sql_connector_core/tests/e2e/common/uc_volume_tests.py similarity index 100% rename from tests/e2e/common/uc_volume_tests.py rename to databricks_sql_connector_core/tests/e2e/common/uc_volume_tests.py diff --git a/tests/e2e/test_complex_types.py b/databricks_sql_connector_core/tests/e2e/test_complex_types.py similarity index 93% rename from tests/e2e/test_complex_types.py rename to databricks_sql_connector_core/tests/e2e/test_complex_types.py index 0a7f514a..acac4e44 100644 --- a/tests/e2e/test_complex_types.py +++ b/databricks_sql_connector_core/tests/e2e/test_complex_types.py @@ -2,8 +2,9 @@ from numpy import ndarray from tests.e2e.test_driver import PySQLPytestTestCase +from tests.e2e.common.predicates import pysql_supports_arrow - +@pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") class TestComplexTypes(PySQLPytestTestCase): @pytest.fixture(scope="class") def table_fixture(self, connection_details): diff --git a/tests/e2e/test_driver.py b/databricks_sql_connector_core/tests/e2e/test_driver.py similarity index 97% rename from tests/e2e/test_driver.py rename to databricks_sql_connector_core/tests/e2e/test_driver.py index c23e4f79..6fa686e9 100644 --- a/tests/e2e/test_driver.py +++ b/databricks_sql_connector_core/tests/e2e/test_driver.py @@ -12,7 +12,6 @@ from uuid import uuid4 import numpy as np -import pyarrow import pytz import thrift import pytest @@ -35,6 +34,7 @@ pysql_supports_arrow, compare_dbr_versions, is_thrift_v5_plus, + pysql_supports_arrow ) from tests.e2e.common.core_tests import CoreTestMixin, SmokeTestMixin from tests.e2e.common.large_queries_mixin import LargeQueriesMixin @@ -48,6 +48,11 @@ from 
databricks.sql.exc import SessionAlreadyClosedError +try: + import pyarrow +except ImportError: + pyarrow = None + log = logging.getLogger(__name__) unsafe_logger = logging.getLogger("databricks.sql.unsafe") @@ -591,7 +596,7 @@ def test_ssp_passthrough(self): cursor.execute("SET ansi_mode") assert list(cursor.fetchone()) == ["ansi_mode", str(enable_ansi)] - @skipUnless(pysql_supports_arrow(), "arrow test needs arrow support") + @pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") def test_timestamps_arrow(self): with self.cursor({"session_configuration": {"ansi_mode": False}}) as cursor: for timestamp, expected in self.timestamp_and_expected_results: @@ -611,7 +616,7 @@ def test_timestamps_arrow(self): aware_timestamp and aware_timestamp.timestamp() * 1000000 ), "timestamp {} did not match {}".format(timestamp, expected) - @skipUnless(pysql_supports_arrow(), "arrow test needs arrow support") + @pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") def test_multi_timestamps_arrow(self): with self.cursor({"session_configuration": {"ansi_mode": False}}) as cursor: query, expected = self.multi_query() @@ -627,7 +632,7 @@ def test_multi_timestamps_arrow(self): ] assert result == expected - @skipUnless(pysql_supports_arrow(), "arrow test needs arrow support") + @pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") def test_timezone_with_timestamp(self): if self.should_add_timezone(): with self.cursor() as cursor: @@ -646,7 +651,7 @@ def test_timezone_with_timestamp(self): assert arrow_result_table.field(0).type == ts_type assert arrow_result_value == expected.timestamp() * 1000000 - @skipUnless(pysql_supports_arrow(), "arrow test needs arrow support") + @pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") def test_can_flip_compression(self): with self.cursor() as cursor: cursor.execute("SELECT array(1,2,3,4)") @@ -663,7 +668,7 @@ def test_can_flip_compression(self): def _should_have_native_complex_types(self): return pysql_has_version(">=", 2) and is_thrift_v5_plus(self.arguments) - @skipUnless(pysql_supports_arrow(), "arrow test needs arrow support") + @pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") def test_arrays_are_not_returned_as_strings_arrow(self): if self._should_have_native_complex_types(): with self.cursor() as cursor: @@ -674,7 +679,7 @@ def test_arrays_are_not_returned_as_strings_arrow(self): assert pyarrow.types.is_list(list_type) assert pyarrow.types.is_integer(list_type.value_type) - @skipUnless(pysql_supports_arrow(), "arrow test needs arrow support") + @pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") def test_structs_are_not_returned_as_strings_arrow(self): if self._should_have_native_complex_types(): with self.cursor() as cursor: @@ -684,7 +689,7 @@ def test_structs_are_not_returned_as_strings_arrow(self): struct_type = arrow_df.field(0).type assert pyarrow.types.is_struct(struct_type) - @skipUnless(pysql_supports_arrow(), "arrow test needs arrow support") + @pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") def test_decimal_not_returned_as_strings_arrow(self): if self._should_have_native_complex_types(): with self.cursor() as cursor: diff --git a/tests/e2e/test_parameterized_queries.py b/databricks_sql_connector_core/tests/e2e/test_parameterized_queries.py 
similarity index 98% rename from tests/e2e/test_parameterized_queries.py rename to databricks_sql_connector_core/tests/e2e/test_parameterized_queries.py index 47dfc38c..e2eac174 100644 --- a/tests/e2e/test_parameterized_queries.py +++ b/databricks_sql_connector_core/tests/e2e/test_parameterized_queries.py @@ -28,6 +28,7 @@ VoidParameter, ) from tests.e2e.test_driver import PySQLPytestTestCase +from tests.e2e.common.predicates import pysql_supports_arrow class ParamStyle(Enum): @@ -284,6 +285,8 @@ def test_primitive_single( (PrimitiveExtra.TINYINT, TinyIntParameter), ], ) + + @pytest.mark.skipif(not pysql_supports_arrow(), reason="Without pyarrow TIMESTAMP_NTZ datatype cannot be inferred",) def test_dbsqlparameter_single( self, primitive: Primitive, diff --git a/src/databricks/sqlalchemy/py.typed b/databricks_sql_connector_core/tests/unit/__init__.py old mode 100755 new mode 100644 similarity index 100% rename from src/databricks/sqlalchemy/py.typed rename to databricks_sql_connector_core/tests/unit/__init__.py diff --git a/tests/unit/test_arrow_queue.py b/databricks_sql_connector_core/tests/unit/test_arrow_queue.py similarity index 82% rename from tests/unit/test_arrow_queue.py rename to databricks_sql_connector_core/tests/unit/test_arrow_queue.py index 6834cc9c..ac98e137 100644 --- a/tests/unit/test_arrow_queue.py +++ b/databricks_sql_connector_core/tests/unit/test_arrow_queue.py @@ -1,10 +1,17 @@ import unittest -import pyarrow as pa +import pytest from databricks.sql.utils import ArrowQueue +try: + import pyarrow as pa +except ImportError: + pa = None +from tests.e2e.common.predicates import pysql_supports_arrow + +@pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") class ArrowQueueSuite(unittest.TestCase): @staticmethod def make_arrow_table(batch): diff --git a/tests/unit/test_auth.py b/databricks_sql_connector_core/tests/unit/test_auth.py similarity index 100% rename from tests/unit/test_auth.py rename to databricks_sql_connector_core/tests/unit/test_auth.py diff --git a/tests/unit/test_client.py b/databricks_sql_connector_core/tests/unit/test_client.py similarity index 100% rename from tests/unit/test_client.py rename to databricks_sql_connector_core/tests/unit/test_client.py diff --git a/tests/unit/test_cloud_fetch_queue.py b/databricks_sql_connector_core/tests/unit/test_cloud_fetch_queue.py similarity index 98% rename from tests/unit/test_cloud_fetch_queue.py rename to databricks_sql_connector_core/tests/unit/test_cloud_fetch_queue.py index cd14c676..def6b8aa 100644 --- a/tests/unit/test_cloud_fetch_queue.py +++ b/databricks_sql_connector_core/tests/unit/test_cloud_fetch_queue.py @@ -1,11 +1,18 @@ -import pyarrow +import pytest import unittest from unittest.mock import MagicMock, patch from ssl import create_default_context from databricks.sql.thrift_api.TCLIService.ttypes import TSparkArrowResultLink import databricks.sql.utils as utils +from tests.e2e.common.predicates import pysql_supports_arrow +try: + import pyarrow +except ImportError: + pyarrow = None + +@pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") class CloudFetchQueueSuite(unittest.TestCase): def create_result_link( diff --git a/tests/unit/test_download_manager.py b/databricks_sql_connector_core/tests/unit/test_download_manager.py similarity index 93% rename from tests/unit/test_download_manager.py rename to databricks_sql_connector_core/tests/unit/test_download_manager.py index c084d8e4..f17049e8 100644 --- 
a/tests/unit/test_download_manager.py +++ b/databricks_sql_connector_core/tests/unit/test_download_manager.py @@ -1,12 +1,15 @@ import unittest from unittest.mock import patch, MagicMock +import pytest from ssl import create_default_context import databricks.sql.cloudfetch.download_manager as download_manager from databricks.sql.thrift_api.TCLIService.ttypes import TSparkArrowResultLink +from tests.e2e.common.predicates import pysql_supports_arrow +@pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") class DownloadManagerTests(unittest.TestCase): """ Unit tests for checking download manager logic. diff --git a/tests/unit/test_downloader.py b/databricks_sql_connector_core/tests/unit/test_downloader.py similarity index 100% rename from tests/unit/test_downloader.py rename to databricks_sql_connector_core/tests/unit/test_downloader.py diff --git a/tests/unit/test_endpoint.py b/databricks_sql_connector_core/tests/unit/test_endpoint.py similarity index 100% rename from tests/unit/test_endpoint.py rename to databricks_sql_connector_core/tests/unit/test_endpoint.py diff --git a/tests/unit/test_fetches.py b/databricks_sql_connector_core/tests/unit/test_fetches.py similarity index 97% rename from tests/unit/test_fetches.py rename to databricks_sql_connector_core/tests/unit/test_fetches.py index 7d5686f8..c1aeadca 100644 --- a/tests/unit/test_fetches.py +++ b/databricks_sql_connector_core/tests/unit/test_fetches.py @@ -1,12 +1,17 @@ import unittest from unittest.mock import Mock - -import pyarrow as pa +import pytest import databricks.sql.client as client from databricks.sql.utils import ExecuteResponse, ArrowQueue +from tests.e2e.common.predicates import pysql_supports_arrow +try: + import pyarrow as pa +except ImportError: + pa = None +@pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") class FetchTests(unittest.TestCase): """ Unit tests for checking the fetch logic. diff --git a/tests/unit/test_fetches_bench.py b/databricks_sql_connector_core/tests/unit/test_fetches_bench.py similarity index 90% rename from tests/unit/test_fetches_bench.py rename to databricks_sql_connector_core/tests/unit/test_fetches_bench.py index e322b44a..bba18247 100644 --- a/tests/unit/test_fetches_bench.py +++ b/databricks_sql_connector_core/tests/unit/test_fetches_bench.py @@ -1,15 +1,20 @@ import unittest from unittest.mock import Mock -import pyarrow as pa import uuid import time import pytest import databricks.sql.client as client from databricks.sql.utils import ExecuteResponse, ArrowQueue +from tests.e2e.common.predicates import pysql_supports_arrow +try: + import pyarrow as pa +except ImportError: + pa = None +@pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") class FetchBenchmarkTests(unittest.TestCase): """ Micro benchmark test for Arrow result handling. 
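The change repeated across these unit-test diffs is the same optional-dependency pattern the library code now uses: guard the `pyarrow` import, expose a small predicate, and gate Arrow-specific suites with `pytest.mark.skipif`. A minimal standalone sketch of that pattern follows; the module and test names here are illustrative, not part of this patch:

```python
import unittest

import pytest

# Guarded import: the core package must keep working when pyarrow is absent.
try:
    import pyarrow
except ImportError:
    pyarrow = None


def pysql_supports_arrow() -> bool:
    """True when the optional pyarrow dependency is installed."""
    return pyarrow is not None


@pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed")
class ExampleArrowSuite(unittest.TestCase):
    # The class-level marker skips every test here when pyarrow is missing,
    # so the test bodies can use pyarrow unconditionally.
    def test_roundtrip(self):
        table = pyarrow.table({"a": [1, 2, 3]})
        self.assertEqual(table.num_rows, 3)
```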
diff --git a/tests/unit/test_init_file.py b/databricks_sql_connector_core/tests/unit/test_init_file.py similarity index 100% rename from tests/unit/test_init_file.py rename to databricks_sql_connector_core/tests/unit/test_init_file.py diff --git a/tests/unit/test_oauth_persistence.py b/databricks_sql_connector_core/tests/unit/test_oauth_persistence.py similarity index 100% rename from tests/unit/test_oauth_persistence.py rename to databricks_sql_connector_core/tests/unit/test_oauth_persistence.py diff --git a/tests/unit/test_param_escaper.py b/databricks_sql_connector_core/tests/unit/test_param_escaper.py similarity index 100% rename from tests/unit/test_param_escaper.py rename to databricks_sql_connector_core/tests/unit/test_param_escaper.py diff --git a/tests/unit/test_parameters.py b/databricks_sql_connector_core/tests/unit/test_parameters.py similarity index 100% rename from tests/unit/test_parameters.py rename to databricks_sql_connector_core/tests/unit/test_parameters.py diff --git a/tests/unit/test_retry.py b/databricks_sql_connector_core/tests/unit/test_retry.py similarity index 100% rename from tests/unit/test_retry.py rename to databricks_sql_connector_core/tests/unit/test_retry.py diff --git a/tests/unit/test_thrift_backend.py b/databricks_sql_connector_core/tests/unit/test_thrift_backend.py similarity index 99% rename from tests/unit/test_thrift_backend.py rename to databricks_sql_connector_core/tests/unit/test_thrift_backend.py index 4bcf84d2..9b53a17e 100644 --- a/tests/unit/test_thrift_backend.py +++ b/databricks_sql_connector_core/tests/unit/test_thrift_backend.py @@ -2,18 +2,22 @@ from decimal import Decimal import itertools import unittest +import pytest from unittest.mock import patch, MagicMock, Mock from ssl import CERT_NONE, CERT_REQUIRED -import pyarrow - import databricks.sql from databricks.sql import utils from databricks.sql.thrift_api.TCLIService import ttypes from databricks.sql import * from databricks.sql.auth.authenticators import AuthProvider from databricks.sql.thrift_backend import ThriftBackend +from tests.e2e.common.predicates import pysql_supports_arrow +try: + import pyarrow +except ImportError: + pyarrow = None def retry_policy_factory(): return { # (type, default, min, max) @@ -24,7 +28,7 @@ def retry_policy_factory(): "_retry_delay_default": (float, 5, 1, 60), } - +@pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") class ThriftBackendTestSuite(unittest.TestCase): okay_status = ttypes.TStatus(statusCode=ttypes.TStatusCode.SUCCESS_STATUS) diff --git a/src/databricks/sqlalchemy/README.sqlalchemy.md b/src/databricks/sqlalchemy/README.sqlalchemy.md deleted file mode 100644 index 8aa51973..00000000 --- a/src/databricks/sqlalchemy/README.sqlalchemy.md +++ /dev/null @@ -1,203 +0,0 @@ -## Databricks dialect for SQLAlchemy 2.0 - -The Databricks dialect for SQLAlchemy serves as a bridge between [SQLAlchemy](https://www.sqlalchemy.org/) and the Databricks SQL Python driver. The dialect is included with `databricks-sql-connector==3.0.0` and above. A working example demonstrating usage can be found in `examples/sqlalchemy.py`. - -## Usage with SQLAlchemy <= 2.0 -A SQLAlchemy 1.4 compatible dialect was first released in connector [version 2.4](https://github.com/databricks/databricks-sql-python/releases/tag/v2.4.0). Support for SQLAlchemy 1.4 was dropped from the dialect as part of `databricks-sql-connector==3.0.0`. To continue using the dialect with SQLAlchemy 1.x, you can use `databricks-sql-connector^2.4.0`. 
- - -## Installation - -To install the dialect and its dependencies: - -```shell -pip install databricks-sql-connector[sqlalchemy] -``` - -If you also plan to use `alembic` you can alternatively run: - -```shell -pip install databricks-sql-connector[alembic] -``` - -## Connection String - -Every SQLAlchemy application that connects to a database needs to use an [Engine](https://docs.sqlalchemy.org/en/20/tutorial/engine.html#tutorial-engine), which you can create by passing a connection string to `create_engine`. The connection string must include these components: - -1. Host -2. HTTP Path for a compute resource -3. API access token -4. Initial catalog for the connection -5. Initial schema for the connection - -**Note: Our dialect is built and tested on workspaces with Unity Catalog enabled. Support for the `hive_metastore` catalog is untested.** - -For example: - -```python -import os -from sqlalchemy import create_engine - -host = os.getenv("DATABRICKS_SERVER_HOSTNAME") -http_path = os.getenv("DATABRICKS_HTTP_PATH") -access_token = os.getenv("DATABRICKS_TOKEN") -catalog = os.getenv("DATABRICKS_CATALOG") -schema = os.getenv("DATABRICKS_SCHEMA") - -engine = create_engine( - f"databricks://token:{access_token}@{host}?http_path={http_path}&catalog={catalog}&schema={schema}" - ) -``` - -## Types - -The [SQLAlchemy type hierarchy](https://docs.sqlalchemy.org/en/20/core/type_basics.html) contains backend-agnostic type implementations (represented in CamelCase) and backend-specific types (represented in UPPERCASE). The majority of SQLAlchemy's [CamelCase](https://docs.sqlalchemy.org/en/20/core/type_basics.html#the-camelcase-datatypes) types are supported. This means that a SQLAlchemy application using these types should "just work" with Databricks. - -|SQLAlchemy Type|Databricks SQL Type| -|-|-| -[`BigInteger`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.BigInteger)| [`BIGINT`](https://docs.databricks.com/en/sql/language-manual/data-types/bigint-type.html) -[`LargeBinary`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.LargeBinary)| (not supported)| -[`Boolean`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Boolean)| [`BOOLEAN`](https://docs.databricks.com/en/sql/language-manual/data-types/boolean-type.html) -[`Date`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Date)| [`DATE`](https://docs.databricks.com/en/sql/language-manual/data-types/date-type.html) -[`DateTime`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.DateTime)| [`TIMESTAMP_NTZ`](https://docs.databricks.com/en/sql/language-manual/data-types/timestamp-ntz-type.html)| -[`Double`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Double)| [`DOUBLE`](https://docs.databricks.com/en/sql/language-manual/data-types/double-type.html) -[`Enum`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Enum)| (not supported)| -[`Float`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Float)| [`FLOAT`](https://docs.databricks.com/en/sql/language-manual/data-types/float-type.html) -[`Integer`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Integer)| [`INT`](https://docs.databricks.com/en/sql/language-manual/data-types/int-type.html) -[`Numeric`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Numeric)| [`DECIMAL`](https://docs.databricks.com/en/sql/language-manual/data-types/decimal-type.html)| 
-[`PickleType`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.PickleType)| (not supported)|
-[`SmallInteger`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.SmallInteger)| [`SMALLINT`](https://docs.databricks.com/en/sql/language-manual/data-types/smallint-type.html)
-[`String`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.String)| [`STRING`](https://docs.databricks.com/en/sql/language-manual/data-types/string-type.html)|
-[`Text`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Text)| [`STRING`](https://docs.databricks.com/en/sql/language-manual/data-types/string-type.html)|
-[`Time`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Time)| [`STRING`](https://docs.databricks.com/en/sql/language-manual/data-types/string-type.html)|
-[`Unicode`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Unicode)| [`STRING`](https://docs.databricks.com/en/sql/language-manual/data-types/string-type.html)|
-[`UnicodeText`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.UnicodeText)| [`STRING`](https://docs.databricks.com/en/sql/language-manual/data-types/string-type.html)|
-[`Uuid`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Uuid)| [`STRING`](https://docs.databricks.com/en/sql/language-manual/data-types/string-type.html)
-
-In addition, the dialect exposes three UPPERCASE SQLAlchemy types which are specific to Databricks:
-
-- [`databricks.sqlalchemy.TINYINT`](https://docs.databricks.com/en/sql/language-manual/data-types/tinyint-type.html)
-- [`databricks.sqlalchemy.TIMESTAMP`](https://docs.databricks.com/en/sql/language-manual/data-types/timestamp-type.html)
-- [`databricks.sqlalchemy.TIMESTAMP_NTZ`](https://docs.databricks.com/en/sql/language-manual/data-types/timestamp-ntz-type.html)
-
-
-### `LargeBinary()` and `PickleType()`
-
-Databricks Runtime doesn't currently support binding of binary values in SQL queries, which is a prerequisite for this functionality in SQLAlchemy.
-
-### `Enum()` and `CHECK` constraints
-
-Support for `CHECK` constraints is not implemented in this dialect. Support is planned for a future release.
-
-SQLAlchemy's `Enum()` type depends on `CHECK` constraints and is therefore not yet supported.
-
-### `DateTime()`, `TIMESTAMP_NTZ()`, and `TIMESTAMP()`
-
-Databricks Runtime provides two datetime-like types: `TIMESTAMP`, which is always timezone-aware, and `TIMESTAMP_NTZ`, which is timezone-agnostic. Both types can be imported from `databricks.sqlalchemy` and used in your models.
-
-The SQLAlchemy documentation indicates that `DateTime()` is not timezone-aware by default. So our dialect maps this type to `TIMESTAMP_NTZ()`. In practice, you should never need to use `TIMESTAMP_NTZ()` directly. Just use `DateTime()`.
-
-If you need your field to be timezone-aware, you can import `TIMESTAMP()` and use it instead.
-
-_Note that SQLAlchemy documentation suggests that you can declare a `DateTime()` with `timezone=True` on supported backends. However, if you do this with the Databricks dialect, the `timezone` argument will be ignored._
-
-```python
-from sqlalchemy import Column, DateTime
-from databricks.sqlalchemy import TIMESTAMP
-
-class SomeModel(Base):
-    some_date_without_timezone = Column(DateTime())
-    some_date_with_timezone = Column(TIMESTAMP())
-```
-
-### `String()`, `Text()`, `Unicode()`, and `UnicodeText()`
-
-Databricks Runtime doesn't support length limitations for `STRING` fields. Therefore `String()`, `String(1)`, and `String(255)` all produce identical DDL. Since `Text()`, `Unicode()`, and `UnicodeText()` all use the same underlying type in Databricks SQL, they generate equivalent DDL.
-
-### `Time()`
-
-Databricks Runtime doesn't have a native time-like data type. To implement this type in SQLAlchemy, our dialect stores SQLAlchemy `Time()` values in a `STRING` field. Unlike `DateTime` above, this type can optionally support timezone awareness (since the dialect is in complete control of the strings that we write to the Delta table).
-
-```python
-from sqlalchemy import Column, Time
-
-class SomeModel(Base):
-    time_tz = Column(Time(timezone=True))
-    time_ntz = Column(Time())
-```
-
-
-# Usage Notes
-
-## `Identity()` and `autoincrement`
-
-Identity and generated value support is currently limited in this dialect.
-
-When defining models, SQLAlchemy types can accept an [`autoincrement`](https://docs.sqlalchemy.org/en/20/core/metadata.html#sqlalchemy.schema.Column.params.autoincrement) argument. In our dialect, this argument is currently ignored. To create an auto-incrementing field in your model you can pass in an explicit [`Identity()`](https://docs.sqlalchemy.org/en/20/core/defaults.html#identity-ddl) instead.
-
-Furthermore, in Databricks Runtime, only `BIGINT` fields can be configured to auto-increment. So in SQLAlchemy, you must use the `BigInteger()` type.
-
-```python
-from sqlalchemy import BigInteger, Column, Identity, String
-
-class SomeModel(Base):
-    id = Column(BigInteger, Identity())
-    value = Column(String())
-```
-
-When calling `Base.metadata.create_all()`, the executed DDL will include `GENERATED ALWAYS AS IDENTITY` for the `id` column. This is useful when using SQLAlchemy to generate tables. However, as of this writing, `Identity()` constructs are not captured when SQLAlchemy reflects a table's metadata (support for this is planned).
-
-## Parameters
-
-`databricks-sql-connector` supports two approaches to parameterizing SQL queries: native and inline. Our SQLAlchemy 2.0 dialect always uses the native approach and is therefore limited to DBR 14.2 and above. If you are writing parameterized queries to be executed by SQLAlchemy, you must use the "named" paramstyle (`:param`). Read more about parameterization in `docs/parameters.md`.
-
-## Usage with pandas
-
-Use [`pandas.DataFrame.to_sql`](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_sql.html) and [`pandas.read_sql`](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.read_sql.html#pandas.read_sql) to write and read from Databricks SQL. These methods both accept a SQLAlchemy connection to interact with Databricks.
-
-### Read from Databricks SQL into pandas
-```python
-from sqlalchemy import create_engine
-import pandas as pd
-
-engine = create_engine("databricks://token:dapi***@***.cloud.databricks.com?http_path=***&catalog=main&schema=test")
-with engine.connect() as conn:
-    # This will read the contents of `main.test.some_table`
-    df = pd.read_sql("some_table", conn)
-```
-
-### Write to Databricks SQL from pandas
-
-```python
-from sqlalchemy import create_engine
-import pandas as pd
-
-engine = create_engine("databricks://token:dapi***@***.cloud.databricks.com?http_path=***&catalog=main&schema=test")
-squares = [(i, i * i) for i in range(100)]
-df = pd.DataFrame(data=squares, columns=['x', 'x_squared'])
-
-with engine.connect() as conn:
-    # This will write the contents of `df` to `main.test.squares`
-    df.to_sql('squares', conn)
-```
-
-## [`PrimaryKey()`](https://docs.sqlalchemy.org/en/20/core/constraints.html#sqlalchemy.schema.PrimaryKeyConstraint) and [`ForeignKey()`](https://docs.sqlalchemy.org/en/20/core/constraints.html#defining-foreign-keys)
-
-Unity Catalog workspaces in Databricks support PRIMARY KEY and FOREIGN KEY constraints. _Note that Databricks Runtime does not enforce the integrity of FOREIGN KEY constraints_. You can establish a primary key by setting `primary_key=True` when defining a column.
-
-When building `ForeignKey` or `ForeignKeyConstraint` objects, you must specify a `name` for the constraint.
-
-If your model definition requires a self-referential FOREIGN KEY constraint, you must include `use_alter=True` when defining the relationship.
-
-```python
-from sqlalchemy import Table, Column, ForeignKey, BigInteger, String
-
-users = Table(
-    "users",
-    metadata_obj,
-    Column("id", BigInteger, primary_key=True),
-    Column("name", String(), nullable=False),
-    Column("email", String()),
-    Column("manager_id", ForeignKey("users.id", name="fk_users_manager_id_x_users_id", use_alter=True))
-)
-```
diff --git a/src/databricks/sqlalchemy/README.tests.md b/src/databricks/sqlalchemy/README.tests.md
deleted file mode 100644
index 3ed92aba..00000000
--- a/src/databricks/sqlalchemy/README.tests.md
+++ /dev/null
@@ -1,44 +0,0 @@
-## SQLAlchemy Dialect Compliance Test Suite with Databricks
-
-The contents of the `test/` directory follow the SQLAlchemy developers' [guidance] for running the reusable dialect compliance test suite. Since not every test in the suite is applicable to every dialect, two options are provided to skip tests:
-
-- Any test can be skipped by subclassing its parent class, re-declaring the test-case and adding a `pytest.mark.skip` directive.
-- Any test that is decorated with a `@requires` decorator can be skipped by marking the indicated requirement as `.closed()` in `requirements.py`
-
-We prefer to skip test cases directly with the first method wherever possible. We only mark requirements as `closed()` if there is no easier option to avoid a test failure. This principally occurs in test cases where the same test in the suite is parametrized, and some parameter combinations are conditionally skipped depending on `requirements.py`. If we skip the entire test method, then we skip _all_ permutations, not just the combinations we don't support.
-
-## Regression, Unsupported, and Future test cases
-
-We maintain three files of test cases that we import from the SQLAlchemy source code:
-
-* **`_regression.py`** contains all the test cases that we expect to pass for our dialect. Each one is marked with `pytest.mark.reviewed` to indicate that we've evaluated it for relevance. This file only contains base class declarations.
-* **`_unsupported.py`** contains test cases that fail because of missing features in Databricks. We mark them as skipped with a `SkipReason` enumeration. If Databricks comes to support these features, those tests or entire classes can be moved to `_regression.py`.
-* **`_future.py`** contains test cases that fail because of missing features in the dialect itself, but which _are_ supported by Databricks generally. We mark them as skipped with a `FutureFeature` enumeration. These are features that have not been prioritised or that do not violate our acceptance criteria. All of these test cases will eventually move to `_regression.py`.
-
-In some cases, only certain tests in a class should be skipped with a `SkipReason` or `FutureFeature` justification. In those cases, we import the class into `_regression.py`, then import it from there into one or both of `_future.py` and `_unsupported.py`. If a class needs to be "touched" by regression, unsupported, and future, the class will be imported in that order. If an entire class should be skipped, then we do not import it into `_regression.py` at all.
-
-We maintain `_extra.py` with test cases that depend on SQLAlchemy's reusable dialect test fixtures but which are specific to Databricks (e.g. `TinyIntegerTest`).
-
-## Running the reusable dialect tests
-
-```
-poetry shell
-cd src/databricks/sqlalchemy/test
-python -m pytest test_suite.py --dburi \
-    "databricks://token:$access_token@$host?http_path=$http_path&catalog=$catalog&schema=$schema"
-```
-
-Whatever schema you pass in the `dburi` argument should be empty. Some tests also require the presence of an empty schema named `test_schema`. Note that we plan to implement our own `provision.py` which SQLAlchemy can automatically use to create an empty schema for testing. But for now this is a manual process.
-
-You can run only reviewed tests by appending `-m "reviewed"` to the test runner invocation.
-
-You can run only the unreviewed tests by appending `-m "not reviewed"` instead.
-
-Note that because these tests depend on SQLAlchemy's custom pytest plugin, they are not discoverable by IDE-based test runners like VSCode or PyCharm and must be invoked from a CLI.
-
-## Running local unit and e2e tests
-
-Apart from the SQLAlchemy reusable suite, we maintain our own unit and e2e tests under the `test_local/` directory. These can be invoked from VSCode or PyCharm since they don't depend on a custom pytest plugin. Due to pytest's configuration lookup order, the `pytest.ini` that is required for running the reusable dialect tests conflicts with VSCode's and PyCharm's default pytest integration and overrides the settings in `pyproject.toml`. So to run these tests, you can delete or rename `pytest.ini`.
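-
-For example, a sketch of a local test run (the `pytest.ini` path and the use of `poetry run` are assumptions; adjust them to match your checkout):
-
-```
-mv pytest.ini pytest.ini.renamed
-poetry run python -m pytest src/databricks/sqlalchemy/test_local
-```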
-
-
-[guidance]: https://github.com/sqlalchemy/sqlalchemy/blob/rel_2_0_22/README.dialects.rst
diff --git a/src/databricks/sqlalchemy/__init__.py b/src/databricks/sqlalchemy/__init__.py
deleted file mode 100644
index 2a17ac3e..00000000
--- a/src/databricks/sqlalchemy/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from databricks.sqlalchemy.base import DatabricksDialect
-from databricks.sqlalchemy._types import TINYINT, TIMESTAMP, TIMESTAMP_NTZ
-
-__all__ = ["TINYINT", "TIMESTAMP", "TIMESTAMP_NTZ"]
diff --git a/src/databricks/sqlalchemy/_ddl.py b/src/databricks/sqlalchemy/_ddl.py
deleted file mode 100644
index d5d0bf87..00000000
--- a/src/databricks/sqlalchemy/_ddl.py
+++ /dev/null
@@ -1,100 +0,0 @@
-import re
-from sqlalchemy.sql import compiler, sqltypes
-import logging
-
-logger = logging.getLogger(__name__)
-
-
-class DatabricksIdentifierPreparer(compiler.IdentifierPreparer):
-    """https://docs.databricks.com/en/sql/language-manual/sql-ref-identifiers.html"""
-
-    legal_characters = re.compile(r"^[A-Z0-9_]+$", re.I)
-
-    def __init__(self, dialect):
-        super().__init__(dialect, initial_quote="`")
-
-
-class DatabricksDDLCompiler(compiler.DDLCompiler):
-    def post_create_table(self, table):
-        post = [" USING DELTA"]
-        if table.comment:
-            comment = self.sql_compiler.render_literal_value(
-                table.comment, sqltypes.String()
-            )
-            post.append("COMMENT " + comment)
-
-        post.append("TBLPROPERTIES('delta.feature.allowColumnDefaults' = 'enabled')")
-        return "\n".join(post)
-
-    def visit_unique_constraint(self, constraint, **kw):
-        logger.warning("Databricks does not support unique constraints")
-        pass
-
-    def visit_check_constraint(self, constraint, **kw):
-        logger.warning("This dialect does not support check constraints")
-        pass
-
-    def visit_identity_column(self, identity, **kw):
-        """When configuring an Identity() with Databricks, only the always option is supported.
-        All other options are ignored.
-
-        Note: IDENTITY columns must always be defined as BIGINT. An exception will be raised if INT is used.
-
-        https://www.databricks.com/blog/2022/08/08/identity-columns-to-generate-surrogate-keys-are-now-available-in-a-lakehouse-near-you.html
-        """
-        text = "GENERATED %s AS IDENTITY" % (
-            "ALWAYS" if identity.always else "BY DEFAULT",
-        )
-        return text
-
-    def visit_set_column_comment(self, create, **kw):
-        return "ALTER TABLE %s ALTER COLUMN %s COMMENT %s" % (
-            self.preparer.format_table(create.element.table),
-            self.preparer.format_column(create.element),
-            self.sql_compiler.render_literal_value(
-                create.element.comment, sqltypes.String()
-            ),
-        )
-
-    def visit_drop_column_comment(self, create, **kw):
-        return "ALTER TABLE %s ALTER COLUMN %s COMMENT ''" % (
-            self.preparer.format_table(create.element.table),
-            self.preparer.format_column(create.element),
-        )
-
-    def get_column_specification(self, column, **kwargs):
-        """
-        Emit a log message if a user attempts to set autoincrement=True on a column.
-        See comments in test_suite.py. We may implement implicit IDENTITY using this
-        feature in the future, similar to the Microsoft SQL Server dialect.
-        """
-        if column is column.table._autoincrement_column or column.autoincrement is True:
-            logger.warning(
-                "Databricks dialect ignores SQLAlchemy's autoincrement semantics. Use explicit Identity() instead."
- ) - - colspec = super().get_column_specification(column, **kwargs) - if column.comment is not None: - literal = self.sql_compiler.render_literal_value( - column.comment, sqltypes.STRINGTYPE - ) - colspec += " COMMENT " + literal - - return colspec - - -class DatabricksStatementCompiler(compiler.SQLCompiler): - def limit_clause(self, select, **kw): - """Identical to the default implementation of SQLCompiler.limit_clause except it writes LIMIT ALL instead of LIMIT -1, - since Databricks SQL doesn't support the latter. - - https://docs.databricks.com/en/sql/language-manual/sql-ref-syntax-qry-select-limit.html - """ - text = "" - if select._limit_clause is not None: - text += "\n LIMIT " + self.process(select._limit_clause, **kw) - if select._offset_clause is not None: - if select._limit_clause is None: - text += "\n LIMIT ALL" - text += " OFFSET " + self.process(select._offset_clause, **kw) - return text diff --git a/src/databricks/sqlalchemy/_parse.py b/src/databricks/sqlalchemy/_parse.py deleted file mode 100644 index 6d38e1e6..00000000 --- a/src/databricks/sqlalchemy/_parse.py +++ /dev/null @@ -1,385 +0,0 @@ -from typing import List, Optional, Dict -import re - -import sqlalchemy -from sqlalchemy.engine import CursorResult -from sqlalchemy.engine.interfaces import ReflectedColumn - -from databricks.sqlalchemy import _types as type_overrides - -""" -This module contains helper functions that can parse the contents -of metadata and exceptions received from DBR. These are mostly just -wrappers around regexes. -""" - - -class DatabricksSqlAlchemyParseException(Exception): - pass - - -def _match_table_not_found_string(message: str) -> bool: - """Return True if the message contains a substring indicating that a table was not found""" - - DBR_LTE_12_NOT_FOUND_STRING = "Table or view not found" - DBR_GT_12_NOT_FOUND_STRING = "TABLE_OR_VIEW_NOT_FOUND" - return any( - [ - DBR_LTE_12_NOT_FOUND_STRING in message, - DBR_GT_12_NOT_FOUND_STRING in message, - ] - ) - - -def _describe_table_extended_result_to_dict_list( - result: CursorResult, -) -> List[Dict[str, str]]: - """Transform the CursorResult of DESCRIBE TABLE EXTENDED into a list of Dictionaries""" - - rows_to_return = [] - for row in result.all(): - this_row = {"col_name": row.col_name, "data_type": row.data_type} - rows_to_return.append(this_row) - - return rows_to_return - - -def extract_identifiers_from_string(input_str: str) -> List[str]: - """For a string input resembling (`a`, `b`, `c`) return a list of identifiers ['a', 'b', 'c']""" - - # This matches the valid character list contained in DatabricksIdentifierPreparer - pattern = re.compile(r"`([A-Za-z0-9_]+)`") - matches = pattern.findall(input_str) - return [i for i in matches] - - -def extract_identifier_groups_from_string(input_str: str) -> List[str]: - """For a string input resembling : - - FOREIGN KEY (`pname`, `pid`, `pattr`) REFERENCES `main`.`pysql_sqlalchemy`.`tb1` (`name`, `id`, `attr`) - - Return ['(`pname`, `pid`, `pattr`)', '(`name`, `id`, `attr`)'] - """ - pattern = re.compile(r"\([`A-Za-z0-9_,\s]*\)") - matches = pattern.findall(input_str) - return [i for i in matches] - - -def extract_three_level_identifier_from_constraint_string(input_str: str) -> dict: - """For a string input resembling : - FOREIGN KEY (`parent_user_id`) REFERENCES `main`.`pysql_dialect_compliance`.`users` (`user_id`) - - Return a dict like - { - "catalog": "main", - "schema": "pysql_dialect_compliance", - "table": "users" - } - - Raise a DatabricksSqlAlchemyParseException if a 3L namespace isn't 
found
-    """
-    pat = re.compile(r"REFERENCES\s+(.*?)\s*\(")
-    matches = pat.findall(input_str)
-
-    if not matches:
-        raise DatabricksSqlAlchemyParseException(
-            "3L namespace not found in constraint string"
-        )
-
-    first_match = matches[0]
-    parts = first_match.split(".")
-
-    def strip_backticks(input: str):
-        return input.replace("`", "")
-
-    try:
-        return {
-            "catalog": strip_backticks(parts[0]),
-            "schema": strip_backticks(parts[1]),
-            "table": strip_backticks(parts[2]),
-        }
-    except IndexError:
-        raise DatabricksSqlAlchemyParseException(
-            "Incomplete 3L namespace found in constraint string: " + ".".join(parts)
-        )
-
-
-def _parse_fk_from_constraint_string(constraint_str: str) -> dict:
-    """Build a dictionary of foreign key constraint information from a constraint string.
-
-    For example:
-
-    ```
-    FOREIGN KEY (`pname`, `pid`, `pattr`) REFERENCES `main`.`pysql_dialect_compliance`.`tb1` (`name`, `id`, `attr`)
-    ```
-
-    Return a dictionary like:
-
-    ```
-    {
-        "constrained_columns": ["pname", "pid", "pattr"],
-        "referred_table": "tb1",
-        "referred_schema": "pysql_dialect_compliance",
-        "referred_columns": ["name", "id", "attr"]
-    }
-    ```
-
-    Note that the constraint name doesn't appear in the constraint string so it will not
-    be present in the output of this function.
-    """
-
-    referred_table_dict = extract_three_level_identifier_from_constraint_string(
-        constraint_str
-    )
-    referred_table = referred_table_dict["table"]
-    referred_schema = referred_table_dict["schema"]
-
-    # _extracted is a tuple of two lists of identifiers
-    # we assume the first immediately follows "FOREIGN KEY" and the second
-    # immediately follows REFERENCES $tableName
-    _extracted = extract_identifier_groups_from_string(constraint_str)
-    constrained_columns_str, referred_columns_str = (
-        _extracted[0],
-        _extracted[1],
-    )
-
-    constrained_columns = extract_identifiers_from_string(constrained_columns_str)
-    referred_columns = extract_identifiers_from_string(referred_columns_str)
-
-    return {
-        "constrained_columns": constrained_columns,
-        "referred_table": referred_table,
-        "referred_columns": referred_columns,
-        "referred_schema": referred_schema,
-    }
-
-
-def build_fk_dict(
-    fk_name: str, fk_constraint_string: str, schema_name: Optional[str]
-) -> dict:
-    """
-    Given a foreign key name and a foreign key constraint string, return a dictionary
-    with the following keys:
-
-    name - the name of the foreign key constraint
-    constrained_columns - a list of column names that make up the foreign key
-    referred_table - the name of the table that the foreign key references
-    referred_columns - a list of column names that are referenced by the foreign key
-    referred_schema - the name of the schema that the foreign key references.
-
-    referred_schema will be None if the schema_name argument is None.
-    This is required by SQLAlchemy's ComponentReflectionTest::test_get_foreign_keys
-    """
-
-    # The foreign key name is not contained in the constraint string so we
-    # need to add it manually
-    base_fk_dict = _parse_fk_from_constraint_string(fk_constraint_string)
-
-    if not schema_name:
-        schema_override_dict = dict(referred_schema=None)
-    else:
-        schema_override_dict = {}
-
-    # mypy doesn't like this method of conditionally adding a key to a dictionary
-    # while keeping everything immutable
-    complete_foreign_key_dict = {
-        "name": fk_name,
-        **base_fk_dict,
-        **schema_override_dict,  # type: ignore
-    }
-
-    return complete_foreign_key_dict
-
-
-def _parse_pk_columns_from_constraint_string(constraint_str: str) -> List[str]:
-    """Build a list of constrained columns from a constraint string returned by DESCRIBE TABLE EXTENDED
-
-    For example:
-
-    PRIMARY KEY (`id`, `name`, `email_address`)
-
-    Returns a list like
-
-    ["id", "name", "email_address"]
-    """
-
-    _extracted = extract_identifiers_from_string(constraint_str)
-
-    return _extracted
-
-
-def build_pk_dict(pk_name: str, pk_constraint_string: str) -> dict:
-    """Given a primary key name and a primary key constraint string, return a dictionary
-    with the following keys:
-
-    constrained_columns - A list of string column names that make up the primary key
-
-    name - The name of the primary key constraint
-    """
-
-    constrained_columns = _parse_pk_columns_from_constraint_string(pk_constraint_string)
-
-    return {"constrained_columns": constrained_columns, "name": pk_name}
-
-
-def match_dte_rows_by_value(dte_output: List[Dict[str, str]], match: str) -> List[dict]:
-    """Return a list of dictionaries containing only the col_name:data_type pairs where the `data_type`
-    value contains the match argument.
-
-    Today, DESCRIBE TABLE EXTENDED doesn't give a deterministic name to the fields in which a
-    constraint will be found in its output. So we cycle through its output looking
-    for a match. This is brittle. We could optionally make two roundtrips: the first
-    would query information_schema for the name of the constraint on this table, and
-    a second to DESCRIBE TABLE EXTENDED, at which point we would know the name of the
-    constraint. But for now we instead assume that a Python list comprehension is faster
-    than a network roundtrip.
-    """
-
-    output_rows = []
-
-    for row_dict in dte_output:
-        if match in row_dict["data_type"]:
-            output_rows.append(row_dict)
-
-    return output_rows
-
-
-def match_dte_rows_by_key(dte_output: List[Dict[str, str]], match: str) -> List[dict]:
-    """Return a list of dictionaries containing only the col_name:data_type pairs where the `col_name`
-    value contains the match argument.
-    """
-
-    output_rows = []
-
-    for row_dict in dte_output:
-        if match in row_dict["col_name"]:
-            output_rows.append(row_dict)
-
-    return output_rows
-
-
-def get_fk_strings_from_dte_output(dte_output: List[Dict[str, str]]) -> List[dict]:
-    """If the DESCRIBE TABLE EXTENDED output contains foreign key constraints, return a list of dictionaries,
-    one dictionary per defined constraint
-    """
-
-    output = match_dte_rows_by_value(dte_output, "FOREIGN KEY")
-
-    return output
-
-
-def get_pk_strings_from_dte_output(
-    dte_output: List[Dict[str, str]]
-) -> Optional[List[dict]]:
-    """If the DESCRIBE TABLE EXTENDED output contains primary key constraints, return a list of dictionaries,
-    one dictionary per defined constraint.
-
-    Returns an empty list if no primary key constraints are found.
-    """
-
-    output = match_dte_rows_by_value(dte_output, "PRIMARY KEY")
-
-    return output
-
-
-def get_comment_from_dte_output(dte_output: List[Dict[str, str]]) -> Optional[str]:
-    """Returns the value of the first "Comment" col_name data in dte_output"""
-    output = match_dte_rows_by_key(dte_output, "Comment")
-    if not output:
-        return None
-    else:
-        return output[0]["data_type"]
-
-
-# The keys of this dictionary are the values we expect to see in a
-# TGetColumnsRequest's .TYPE_NAME attribute.
-# These are enumerated in ttypes.py as class TTypeId.
-# TODO: confirm that all types in TTypeId are included here.
-GET_COLUMNS_TYPE_MAP = {
-    "boolean": sqlalchemy.types.Boolean,
-    "smallint": sqlalchemy.types.SmallInteger,
-    "tinyint": type_overrides.TINYINT,
-    "int": sqlalchemy.types.Integer,
-    "bigint": sqlalchemy.types.BigInteger,
-    "float": sqlalchemy.types.Float,
-    "double": sqlalchemy.types.Float,
-    "string": sqlalchemy.types.String,
-    "varchar": sqlalchemy.types.String,
-    "char": sqlalchemy.types.String,
-    "binary": sqlalchemy.types.String,
-    "array": sqlalchemy.types.String,
-    "map": sqlalchemy.types.String,
-    "struct": sqlalchemy.types.String,
-    "uniontype": sqlalchemy.types.String,
-    "decimal": sqlalchemy.types.Numeric,
-    "timestamp": type_overrides.TIMESTAMP,
-    "timestamp_ntz": type_overrides.TIMESTAMP_NTZ,
-    "date": sqlalchemy.types.Date,
-}
-
-
-def parse_numeric_type_precision_and_scale(type_name_str):
-    """Return an instantiated sqlalchemy Numeric() type that preserves the precision and scale indicated
-    in the output from TGetColumnsRequest.
-
-    type_name_str - The value of TGetColumnsReq.TYPE_NAME.
-
-    If type_name_str is "DECIMAL(18,5)" returns sqlalchemy.types.Numeric(18,5)
-    """
-
-    pattern = re.compile(r"DECIMAL\((\d+,\d+)\)")
-    match = re.search(pattern, type_name_str)
-    precision_and_scale = match.group(1)
-    precision, scale = tuple(precision_and_scale.split(","))
-
-    return sqlalchemy.types.Numeric(int(precision), int(scale))
-
-
-def parse_column_info_from_tgetcolumnsresponse(thrift_resp_row) -> ReflectedColumn:
-    """Returns a dictionary of the ReflectedColumn schema parsed from
-    a single row of the result of a TGetColumnsRequest thrift RPC
-    """
-
-    pat = re.compile(r"^\w+")
-
-    # This method assumes a valid TYPE_NAME field in the response.
-    # TODO: add error handling in case TGetColumnsResponse format changes
-
-    _raw_col_type = re.search(pat, thrift_resp_row.TYPE_NAME).group(0).lower()  # type: ignore
-    _col_type = GET_COLUMNS_TYPE_MAP[_raw_col_type]
-
-    if _raw_col_type == "decimal":
-        final_col_type = parse_numeric_type_precision_and_scale(
-            thrift_resp_row.TYPE_NAME
-        )
-    else:
-        final_col_type = _col_type
-
-    # See comments about autoincrement in test_suite.py
-    # Since Databricks SQL doesn't currently support inline AUTOINCREMENT declarations
-    # the autoincrement must be manually declared with an Identity() construct in SQLAlchemy
-    # Other dialects can perform this extra Identity() step automatically. But that is not
-    # implemented in the Databricks dialect right now. So autoincrement is currently always False.
-    # It's not clear what IS_AUTO_INCREMENT in the thrift response actually reflects or whether
-    # it ever returns a `YES`.
-
-    # Per the guidance in SQLAlchemy's docstrings, we prefer to not even include an autoincrement
-    # key in this dictionary.
- this_column = { - "name": thrift_resp_row.COLUMN_NAME, - "type": final_col_type, - "nullable": bool(thrift_resp_row.NULLABLE), - "default": thrift_resp_row.COLUMN_DEF, - "comment": thrift_resp_row.REMARKS or None, - } - - # TODO: figure out how to return sqlalchemy.interfaces in a way that mypy respects - return this_column # type: ignore diff --git a/src/databricks/sqlalchemy/_types.py b/src/databricks/sqlalchemy/_types.py deleted file mode 100644 index 5fc14a70..00000000 --- a/src/databricks/sqlalchemy/_types.py +++ /dev/null @@ -1,323 +0,0 @@ -from datetime import datetime, time, timezone -from itertools import product -from typing import Any, Union, Optional - -import sqlalchemy -from sqlalchemy.engine.interfaces import Dialect -from sqlalchemy.ext.compiler import compiles - -from databricks.sql.utils import ParamEscaper - - -def process_literal_param_hack(value: Any): - """This method is supposed to accept a Python type and return a string representation of that type. - But due to some weirdness in the way SQLAlchemy's literal rendering works, we have to return - the value itself because, by the time it reaches our custom type code, it's already been converted - into a string. - - TimeTest - DateTimeTest - DateTimeTZTest - - This dynamic only seems to affect the literal rendering of datetime and time objects. - - All fail without this hack in-place. I'm not sure why. But it works. - """ - return value - - -@compiles(sqlalchemy.types.Enum, "databricks") -@compiles(sqlalchemy.types.String, "databricks") -@compiles(sqlalchemy.types.Text, "databricks") -@compiles(sqlalchemy.types.Time, "databricks") -@compiles(sqlalchemy.types.Unicode, "databricks") -@compiles(sqlalchemy.types.UnicodeText, "databricks") -@compiles(sqlalchemy.types.Uuid, "databricks") -def compile_string_databricks(type_, compiler, **kw): - """ - We override the default compilation for Enum(), String(), Text(), and Time() because SQLAlchemy - defaults to incompatible / abnormal compiled names - - Enum -> VARCHAR - String -> VARCHAR[LENGTH] - Text -> VARCHAR[LENGTH] - Time -> TIME - Unicode -> VARCHAR[LENGTH] - UnicodeText -> TEXT - Uuid -> CHAR[32] - - But all of these types will be compiled to STRING in Databricks SQL - """ - return "STRING" - - -@compiles(sqlalchemy.types.Integer, "databricks") -def compile_integer_databricks(type_, compiler, **kw): - """ - We need to override the default Integer compilation rendering because Databricks uses "INT" instead of "INTEGER" - """ - return "INT" - - -@compiles(sqlalchemy.types.LargeBinary, "databricks") -def compile_binary_databricks(type_, compiler, **kw): - """ - We need to override the default LargeBinary compilation rendering because Databricks uses "BINARY" instead of "BLOB" - """ - return "BINARY" - - -@compiles(sqlalchemy.types.Numeric, "databricks") -def compile_numeric_databricks(type_, compiler, **kw): - """ - We need to override the default Numeric compilation rendering because Databricks uses "DECIMAL" instead of "NUMERIC" - - The built-in visit_DECIMAL behaviour captures the precision and scale. 
Here we're just mapping calls to compile Numeric
-    to SQLAlchemy's DECIMAL rendering (visit_DECIMAL)
-    """
-    return compiler.visit_DECIMAL(type_, **kw)
-
-
-@compiles(sqlalchemy.types.DateTime, "databricks")
-def compile_datetime_databricks(type_, compiler, **kw):
-    """
-    We need to override the default DateTime compilation rendering because Databricks uses "TIMESTAMP_NTZ" instead of "DATETIME"
-    """
-    return "TIMESTAMP_NTZ"
-
-
-@compiles(sqlalchemy.types.ARRAY, "databricks")
-def compile_array_databricks(type_, compiler, **kw):
-    """
-    SQLAlchemy's default ARRAY can't compile as it's only implemented for Postgresql.
-    The Postgres implementation works for Databricks SQL, so we duplicate that here.
-
-    :type_:
-        This is an instance of sqlalchemy.types.ARRAY which always includes an item_type attribute
-        which is itself an instance of TypeEngine
-
-    https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.ARRAY
-    """
-
-    inner = compiler.process(type_.item_type, **kw)
-
-    return f"ARRAY<{inner}>"
-
-
-class TIMESTAMP_NTZ(sqlalchemy.types.TypeDecorator):
-    """Represents values comprising values of fields year, month, day, hour, minute, and second.
-    All operations are performed without taking any time zone into account.
-
-    Our dialect maps sqlalchemy.types.DateTime() to this type, which means that all DateTime()
-    objects are stored without tzinfo. To read and write timezone-aware datetimes use
-    databricks.sql.TIMESTAMP instead.
-
-    https://docs.databricks.com/en/sql/language-manual/data-types/timestamp-ntz-type.html
-    """
-
-    impl = sqlalchemy.types.DateTime
-
-    cache_ok = True
-
-    def process_result_value(self, value: Union[None, datetime], dialect):
-        if value is None:
-            return None
-        return value.replace(tzinfo=None)
-
-
-class TIMESTAMP(sqlalchemy.types.TypeDecorator):
-    """Represents values comprising values of fields year, month, day, hour, minute, and second,
-    with the session local time-zone.
-
-    Our dialect maps sqlalchemy.types.DateTime() to TIMESTAMP_NTZ, which means that all DateTime()
-    objects are stored without tzinfo. To read and write timezone-aware datetimes use
-    this type instead.
-
-    ```python
-    # This won't work
-    `Column(sqlalchemy.DateTime(timezone=True))`
-
-    # But this does
-    `Column(TIMESTAMP)`
-    ```
-
-    https://docs.databricks.com/en/sql/language-manual/data-types/timestamp-type.html
-    """
-
-    impl = sqlalchemy.types.DateTime
-
-    cache_ok = True
-
-    def process_result_value(self, value: Union[None, datetime], dialect):
-        if value is None:
-            return None
-
-        if not value.tzinfo:
-            return value.replace(tzinfo=timezone.utc)
-        return value
-
-    def process_bind_param(
-        self, value: Union[datetime, None], dialect
-    ) -> Optional[datetime]:
-        """pysql can pass datetime.datetime() objects directly to DBR"""
-        return value
-
-    def process_literal_param(
-        self, value: Union[datetime, None], dialect: Dialect
-    ) -> str:
-        """Delegate to process_literal_param_hack; see its docstring for why."""
-        return process_literal_param_hack(value)
-
-
-@compiles(TIMESTAMP, "databricks")
-def compile_timestamp_databricks(type_, compiler, **kw):
-    """
-    We need to override the compilation of this custom type because Databricks renders
-    timezone-aware datetimes as "TIMESTAMP", not "DATETIME"
-    """
-    return "TIMESTAMP"
-
-
-class DatabricksTimeType(sqlalchemy.types.TypeDecorator):
-    """Databricks has no native TIME type. So we store it as a string."""
-
-    impl = sqlalchemy.types.Time
-    cache_ok = True
-
-    BASE_FMT = "%H:%M:%S"
-    MICROSEC_PART = ".%f"
-    TIMEZONE_PART = "%z"
-
-    def _generate_fmt_string(self, ms: bool, tz: bool) -> str:
-        """Return a format string for datetime.strptime() that includes or excludes microseconds and timezone."""
-        _ = lambda x, y: x if y else ""
-        return f"{self.BASE_FMT}{_(self.MICROSEC_PART,ms)}{_(self.TIMEZONE_PART,tz)}"
-
-    @property
-    def allowed_fmt_strings(self):
-        """Time strings can be read with or without microseconds and with or without a timezone."""
-
-        if not hasattr(self, "_allowed_fmt_strings"):
-            ms_switch = tz_switch = [True, False]
-            self._allowed_fmt_strings = [
-                self._generate_fmt_string(x, y)
-                for x, y in product(ms_switch, tz_switch)
-            ]
-
-        return self._allowed_fmt_strings
-
-    def _parse_result_string(self, value: str) -> time:
-        """Parse a string into a time object. Try all allowed formats until one works."""
-        for fmt in self.allowed_fmt_strings:
-            try:
-                # We use timetz() here because we want to preserve the timezone information
-                # Calling .time() will strip the timezone information
-                return datetime.strptime(value, fmt).timetz()
-            except ValueError:
-                pass
-
-        raise ValueError(f"Could not parse time string {value}")
-
-    def _determine_fmt_string(self, value: time) -> str:
-        """Determine which format string to use to render a time object as a string."""
-        ms_bool = value.microsecond > 0
-        tz_bool = value.tzinfo is not None
-        return self._generate_fmt_string(ms_bool, tz_bool)
-
-    def process_bind_param(self, value: Union[time, None], dialect) -> Union[None, str]:
-        """Values sent to the database are converted to %H:%M:%S strings."""
-        if value is None:
-            return None
-        fmt_string = self._determine_fmt_string(value)
-        return value.strftime(fmt_string)
-
-    # mypy doesn't like this workaround because TypeEngine wants process_literal_param to return a string
-    def process_literal_param(self, value, dialect) -> time:  # type: ignore
-        """Delegate to process_literal_param_hack; see its docstring for why."""
-        return process_literal_param_hack(value)
-
-    def process_result_value(
-        self, value: Union[None, str], dialect
-    ) -> Union[time, None]:
-        """Values received from the database are parsed into datetime.time() objects"""
-        if value is None:
-            return None
-
-        return self._parse_result_string(value)
-
-
-class DatabricksStringType(sqlalchemy.types.TypeDecorator):
-    """We have to implement our own String() type because SQLAlchemy's default implementation
-    wants to escape single-quotes with a doubled single-quote. Databricks uses a backslash for
-    escaping of literal strings. And SQLAlchemy's default escaping breaks Databricks SQL.
-    """
-
-    impl = sqlalchemy.types.String
-    cache_ok = True
-    pe = ParamEscaper()
-
-    def process_literal_param(self, value, dialect) -> str:
-        """SQLAlchemy's default string escaping for backslashes doesn't work for Databricks. The logic here
-        implements the same logic as our legacy inline escaping logic.
-        """
-
-        return self.pe.escape_string(value)
-
-    def literal_processor(self, dialect):
-        """We manually override this method to prevent further processing of the string literal beyond
-        what happens in the process_literal_param() method.
-
-        The SQLAlchemy docs _specifically_ say to not override this method.
-
-        It appears that any processing that happens from TypeEngine.process_literal_param happens _before_
-        and _in addition to_ whatever the class's impl.literal_processor() method does.
The String.literal_processor() - method performs a string replacement that doubles any single-quote in the contained string. This raises a syntax - error in Databricks. And it's not necessary because ParamEscaper() already implements all the escaping we need. - - We should consider opening an issue on the SQLAlchemy project to see if I'm using it wrong. - - See type_api.py::TypeEngine.literal_processor: - - ```python - def process(value: Any) -> str: - return fixed_impl_processor( - fixed_process_literal_param(value, dialect) - ) - ``` - - That call to fixed_impl_processor wraps the result of fixed_process_literal_param (which is the - process_literal_param defined in our Databricks dialect) - - https://docs.sqlalchemy.org/en/20/core/custom_types.html#sqlalchemy.types.TypeDecorator.literal_processor - """ - - def process(value): - """This is a copy of the default String.literal_processor() method but stripping away - its double-escaping behaviour for single-quotes. - """ - - _step1 = self.process_literal_param(value, dialect="databricks") - if dialect.identifier_preparer._double_percents: - _step2 = _step1.replace("%", "%%") - else: - _step2 = _step1 - - return "%s" % _step2 - - return process - - -class TINYINT(sqlalchemy.types.TypeDecorator): - """Represents 1-byte signed integers - - Acts like a sqlalchemy SmallInteger() in Python but writes to a TINYINT field in Databricks - - https://docs.databricks.com/en/sql/language-manual/data-types/tinyint-type.html - """ - - impl = sqlalchemy.types.SmallInteger - cache_ok = True - - -@compiles(TINYINT, "databricks") -def compile_tinyint(type_, compiler, **kw): - return "TINYINT" diff --git a/src/databricks/sqlalchemy/base.py b/src/databricks/sqlalchemy/base.py deleted file mode 100644 index 9148de7f..00000000 --- a/src/databricks/sqlalchemy/base.py +++ /dev/null @@ -1,436 +0,0 @@ -from typing import Any, List, Optional, Dict, Union - -import databricks.sqlalchemy._ddl as dialect_ddl_impl -import databricks.sqlalchemy._types as dialect_type_impl -from databricks import sql -from databricks.sqlalchemy._parse import ( - _describe_table_extended_result_to_dict_list, - _match_table_not_found_string, - build_fk_dict, - build_pk_dict, - get_fk_strings_from_dte_output, - get_pk_strings_from_dte_output, - get_comment_from_dte_output, - parse_column_info_from_tgetcolumnsresponse, -) - -import sqlalchemy -from sqlalchemy import DDL, event -from sqlalchemy.engine import Connection, Engine, default, reflection -from sqlalchemy.engine.interfaces import ( - ReflectedForeignKeyConstraint, - ReflectedPrimaryKeyConstraint, - ReflectedColumn, - ReflectedTableComment, -) -from sqlalchemy.engine.reflection import ReflectionDefaults -from sqlalchemy.exc import DatabaseError, SQLAlchemyError - -try: - import alembic -except ImportError: - pass -else: - from alembic.ddl import DefaultImpl - - class DatabricksImpl(DefaultImpl): - __dialect__ = "databricks" - - -import logging - -logger = logging.getLogger(__name__) - - -class DatabricksDialect(default.DefaultDialect): - """This dialect implements only those methods required to pass our e2e tests""" - - # See sqlalchemy.engine.interfaces for descriptions of each of these properties - name: str = "databricks" - driver: str = "databricks" - default_schema_name: str = "default" - preparer = dialect_ddl_impl.DatabricksIdentifierPreparer # type: ignore - ddl_compiler = dialect_ddl_impl.DatabricksDDLCompiler - statement_compiler = dialect_ddl_impl.DatabricksStatementCompiler - supports_statement_cache: bool = True - 
supports_multivalues_insert: bool = True - supports_native_decimal: bool = True - supports_sane_rowcount: bool = False - non_native_boolean_check_constraint: bool = False - supports_identity_columns: bool = True - supports_schemas: bool = True - default_paramstyle: str = "named" - div_is_floordiv: bool = False - supports_default_values: bool = False - supports_server_side_cursors: bool = False - supports_sequences: bool = False - supports_native_boolean: bool = True - - colspecs = { - sqlalchemy.types.DateTime: dialect_type_impl.TIMESTAMP_NTZ, - sqlalchemy.types.Time: dialect_type_impl.DatabricksTimeType, - sqlalchemy.types.String: dialect_type_impl.DatabricksStringType, - } - - # SQLAlchemy requires that a table with no primary key - # constraint return a dictionary that looks like this. - EMPTY_PK: Dict[str, Any] = {"constrained_columns": [], "name": None} - - # SQLAlchemy requires that a table with no foreign keys - # defined return an empty list. Same for indexes. - EMPTY_FK: List - EMPTY_INDEX: List - EMPTY_FK = EMPTY_INDEX = [] - - @classmethod - def import_dbapi(cls): - return sql - - def _force_paramstyle_to_native_mode(self): - """This method can be removed after databricks-sql-connector wholly switches to NATIVE ParamApproach. - - This is a hack to trick SQLAlchemy into using a different paramstyle - than the one declared by this module in src/databricks/sql/__init__.py - - This method is called _after_ the dialect has been initialised, which is important because otherwise - our users would need to include a `paramstyle` argument in their SQLAlchemy connection string. - - This dialect is written to support NATIVE queries. Although the INLINE approach can technically work, - the same behaviour can be achieved within SQLAlchemy itself using its literal_processor methods. - """ - - self.paramstyle = self.default_paramstyle - - def create_connect_args(self, url): - # TODO: can schema be provided after HOST? - # Expected URI format is: databricks+thrift://token:dapi***@***.cloud.databricks.com?http_path=/sql/*** - - kwargs = { - "server_hostname": url.host, - "access_token": url.password, - "http_path": url.query.get("http_path"), - "catalog": url.query.get("catalog"), - "schema": url.query.get("schema"), - "use_inline_params": False, - } - - self.schema = kwargs["schema"] - self.catalog = kwargs["catalog"] - - self._force_paramstyle_to_native_mode() - - return [], kwargs - - def get_columns( - self, connection, table_name, schema=None, **kwargs - ) -> List[ReflectedColumn]: - """Return information about columns in `table_name`.""" - - with self.get_connection_cursor(connection) as cur: - resp = cur.columns( - catalog_name=self.catalog, - schema_name=schema or self.schema, - table_name=table_name, - ).fetchall() - - if not resp: - # TGetColumnsRequest will not raise an exception if passed a table that doesn't exist - # But Databricks supports tables with no columns. So if the result is an empty list, - # we need to check if the table exists (and raise an exception if not) or simply return - # an empty list. 
self._describe_table_extended(
-                connection,
-                table_name,
-                self.catalog,
-                schema or self.schema,
-                expect_result=False,
-            )
-            return resp
-        columns = []
-        for col in resp:
-            row_dict = parse_column_info_from_tgetcolumnsresponse(col)
-            columns.append(row_dict)
-
-        return columns
-
-    def _describe_table_extended(
-        self,
-        connection: Connection,
-        table_name: str,
-        catalog_name: Optional[str] = None,
-        schema_name: Optional[str] = None,
-        expect_result=True,
-    ) -> Union[List[Dict[str, str]], None]:
-        """Run DESCRIBE TABLE EXTENDED on a table and return a list of dictionaries of the result.
-
-        This method is the fastest way to check for the presence of a table in a schema.
-
-        If expect_result is False, this method returns None as the output dict isn't required.
-
-        Raises NoSuchTableError if the table is not present in the schema.
-        """
-
-        _target_catalog = catalog_name or self.catalog
-        _target_schema = schema_name or self.schema
-        _target = f"`{_target_catalog}`.`{_target_schema}`.`{table_name}`"
-
-        # sql injection risk?
-        # DESCRIBE TABLE EXTENDED in DBR doesn't support parameterised inputs :(
-        stmt = DDL(f"DESCRIBE TABLE EXTENDED {_target}")
-
-        try:
-            result = connection.execute(stmt)
-        except DatabaseError as e:
-            if _match_table_not_found_string(str(e)):
-                raise sqlalchemy.exc.NoSuchTableError(
-                    f"No such table {table_name}"
-                ) from e
-            raise e
-
-        if not expect_result:
-            return None
-
-        fmt_result = _describe_table_extended_result_to_dict_list(result)
-        return fmt_result
-
-    @reflection.cache
-    def get_pk_constraint(
-        self,
-        connection,
-        table_name: str,
-        schema: Optional[str] = None,
-        **kw: Any,
-    ) -> ReflectedPrimaryKeyConstraint:
-        """Fetch information about the primary key constraint on table_name.
-
-        Returns a dictionary with these keys:
-
-        constrained_columns - a list of column names that make up the primary key. The result is
-        an empty list if no PRIMARY KEY is defined.
-
-        name - the name of the primary key constraint
-        """
-
-        result = self._describe_table_extended(
-            connection=connection,
-            table_name=table_name,
-            schema_name=schema,
-        )
-
-        # Type ignore is because mypy knows that self._describe_table_extended *can*
-        # return None (even though it never will since expect_result defaults to True)
-        raw_pk_constraints: List = get_pk_strings_from_dte_output(result)  # type: ignore
-        if not any(raw_pk_constraints):
-            return self.EMPTY_PK  # type: ignore
-
-        if len(raw_pk_constraints) > 1:
-            logger.warning(
-                "Found more than one primary key constraint in DESCRIBE TABLE EXTENDED output. "
-                "This is unexpected. Please report this as a bug. "
-                "Only the first primary key constraint will be returned."
- ) - - first_pk_constraint = raw_pk_constraints[0] - pk_name = first_pk_constraint.get("col_name") - pk_constraint_string = first_pk_constraint.get("data_type") - - # TODO: figure out how to return sqlalchemy.interfaces in a way that mypy respects - return build_pk_dict(pk_name, pk_constraint_string) # type: ignore - - def get_foreign_keys( - self, connection, table_name, schema=None, **kw - ) -> List[ReflectedForeignKeyConstraint]: - """Return information about foreign_keys in `table_name`.""" - - result = self._describe_table_extended( - connection=connection, - table_name=table_name, - schema_name=schema, - ) - - # Type ignore is because mypy knows that self._describe_table_extended *can* - # return None (even though it never will since expect_result defaults to True) - raw_fk_constraints: List = get_fk_strings_from_dte_output(result) # type: ignore - - if not any(raw_fk_constraints): - return self.EMPTY_FK - - fk_constraints = [] - for constraint_dict in raw_fk_constraints: - fk_name = constraint_dict.get("col_name") - fk_constraint_string = constraint_dict.get("data_type") - this_constraint_dict = build_fk_dict( - fk_name, fk_constraint_string, schema_name=schema - ) - fk_constraints.append(this_constraint_dict) - - # TODO: figure out how to return sqlalchemy.interfaces in a way that mypy respects - return fk_constraints # type: ignore - - def get_indexes(self, connection, table_name, schema=None, **kw): - """SQLAlchemy requires this method. Databricks doesn't support indexes.""" - return self.EMPTY_INDEX - - @reflection.cache - def get_table_names(self, connection: Connection, schema=None, **kwargs): - """Return a list of tables in the current schema.""" - - _target_catalog = self.catalog - _target_schema = schema or self.schema - _target = f"`{_target_catalog}`.`{_target_schema}`" - - stmt = DDL(f"SHOW TABLES FROM {_target}") - - tables_result = connection.execute(stmt).all() - views_result = self.get_view_names(connection=connection, schema=schema) - - # In Databricks, SHOW TABLES FROM returns both tables and views. 
- # Potential optimisation: rewrite this to instead query information_schema - tables_minus_views = [ - row.tableName for row in tables_result if row.tableName not in views_result - ] - - return tables_minus_views - - @reflection.cache - def get_view_names( - self, - connection, - schema=None, - only_materialized=False, - only_temp=False, - **kwargs, - ) -> List[str]: - """Returns a list of string view names contained in the schema, if any.""" - - _target_catalog = self.catalog - _target_schema = schema or self.schema - _target = f"`{_target_catalog}`.`{_target_schema}`" - - stmt = DDL(f"SHOW VIEWS FROM {_target}") - result = connection.execute(stmt).all() - - return [ - row.viewName - for row in result - if (not only_materialized or row.isMaterialized) - and (not only_temp or row.isTemporary) - ] - - @reflection.cache - def get_materialized_view_names( - self, connection: Connection, schema: Optional[str] = None, **kw: Any - ) -> List[str]: - """A wrapper around get_view_names that fetches only the names of materialized views""" - return self.get_view_names(connection, schema, only_materialized=True) - - @reflection.cache - def get_temp_view_names( - self, connection: Connection, schema: Optional[str] = None, **kw: Any - ) -> List[str]: - """A wrapper around get_view_names that fetches only the names of temporary views""" - return self.get_view_names(connection, schema, only_temp=True) - - def do_rollback(self, dbapi_connection): - # Databricks SQL Does not support transactions - pass - - @reflection.cache - def has_table( - self, connection, table_name, schema=None, catalog=None, **kwargs - ) -> bool: - """For internal dialect use, check the existence of a particular table - or view in the database. - """ - - try: - self._describe_table_extended( - connection=connection, - table_name=table_name, - catalog_name=catalog, - schema_name=schema, - ) - return True - except sqlalchemy.exc.NoSuchTableError as e: - return False - - def get_connection_cursor(self, connection): - """Added for backwards compatibility with 1.3.x""" - if hasattr(connection, "_dbapi_connection"): - return connection._dbapi_connection.dbapi_connection.cursor() - elif hasattr(connection, "raw_connection"): - return connection.raw_connection().cursor() - elif hasattr(connection, "connection"): - return connection.connection.cursor() - - raise SQLAlchemyError( - "Databricks dialect can't obtain a cursor context manager from the dbapi" - ) - - @reflection.cache - def get_schema_names(self, connection, **kw): - """Return a list of all schema names available in the database.""" - stmt = DDL("SHOW SCHEMAS") - result = connection.execute(stmt) - schema_list = [row[0] for row in result] - return schema_list - - @reflection.cache - def get_table_comment( - self, - connection: Connection, - table_name: str, - schema: Optional[str] = None, - **kw: Any, - ) -> ReflectedTableComment: - result = self._describe_table_extended( - connection=connection, - table_name=table_name, - schema_name=schema, - ) - - if result is None: - return ReflectionDefaults.table_comment() - - comment = get_comment_from_dte_output(result) - - if comment: - return dict(text=comment) - else: - return ReflectionDefaults.table_comment() - - -@event.listens_for(Engine, "do_connect") -def receive_do_connect(dialect, conn_rec, cargs, cparams): - """Helpful for DS on traffic from clients using SQLAlchemy in particular""" - - # Ignore connect invocations that don't use our dialect - if not dialect.name == "databricks": - return - - ua = 
cparams.get("_user_agent_entry", "")
-
-    def add_sqla_tag_if_not_present(val: str):
-        if not val:
-            output = "sqlalchemy"
-        elif "sqlalchemy" in val:
-            output = val
-        else:
-            output = f"sqlalchemy + {val}"
-
-        return output
-
-    cparams["_user_agent_entry"] = add_sqla_tag_if_not_present(ua)
-
-    if sqlalchemy.__version__.startswith("1.3"):
-        # SQLAlchemy 1.3.x fails to parse the http_path, catalog, and schema from our connection string
-        # These should be passed in as connect_args when building the Engine
-
-        if "schema" in cparams:
-            dialect.schema = cparams["schema"]
-
-        if "catalog" in cparams:
-            dialect.catalog = cparams["catalog"]
diff --git a/src/databricks/sqlalchemy/requirements.py b/src/databricks/sqlalchemy/requirements.py
deleted file mode 100644
index 5c70c029..00000000
--- a/src/databricks/sqlalchemy/requirements.py
+++ /dev/null
@@ -1,249 +0,0 @@
-"""
-The complete list of requirements is provided by SQLAlchemy here:
-
-https://github.com/sqlalchemy/sqlalchemy/blob/main/lib/sqlalchemy/testing/requirements.py
-
-When SQLAlchemy skips a test because a requirement is closed() it gives a generic skip message.
-To make these failures more actionable, we only define requirements in this file that we wish to
-force to be open(). If a test should be skipped on Databricks, it will be specifically marked skip
-in test_suite.py with a Databricks-specific reason.
-
-See the special note about the array_type exclusion below.
-See special note about has_temp_table exclusion below.
-"""
-
-import sqlalchemy.testing.requirements
-import sqlalchemy.testing.exclusions
-
-
-class Requirements(sqlalchemy.testing.requirements.SuiteRequirements):
-    @property
-    def date_historic(self):
-        """target dialect supports representation of Python
-        datetime.date() objects with historic (pre 1970) values."""
-
-        return sqlalchemy.testing.exclusions.open()
-
-    @property
-    def datetime_historic(self):
-        """target dialect supports representation of Python
-        datetime.datetime() objects with historic (pre 1970) values."""
-
-        return sqlalchemy.testing.exclusions.open()
-
-    @property
-    def datetime_literals(self):
-        """target dialect supports rendering of a date, time, or datetime as a
-        literal string, e.g. via the TypeEngine.literal_processor() method.
-
-        """
-
-        return sqlalchemy.testing.exclusions.open()
-
-    @property
-    def timestamp_microseconds(self):
-        """target dialect supports representation of Python
-        datetime.datetime() with microsecond objects but only
-        if TIMESTAMP is used."""
-
-        return sqlalchemy.testing.exclusions.open()
-
-    @property
-    def time_microseconds(self):
-        """target dialect supports representation of Python
-        datetime.time() with microsecond objects.
-
-        This requirement declaration isn't needed but I've included it here for completeness.
-        Since Databricks doesn't have a TIME type, SQLAlchemy will compile Time() columns
-        as STRING Databricks data types. And we use a custom time type to render those strings
-        between str() and datetime.time() representations. Therefore we can store _any_ precision
-        that SQLAlchemy needs. The time_microseconds requirement defaults to ON for all dialects
-        except mssql, mysql, mariadb, and oracle.
- """ - - return sqlalchemy.testing.exclusions.open() - - @property - def infinity_floats(self): - """The Float type can persist and load float('inf'), float('-inf').""" - - return sqlalchemy.testing.exclusions.open() - - @property - def precision_numerics_retains_significant_digits(self): - """A precision numeric type will return empty significant digits, - i.e. a value such as 10.000 will come back in Decimal form with - the .000 maintained.""" - - return sqlalchemy.testing.exclusions.open() - - @property - def precision_numerics_many_significant_digits(self): - """target backend supports values with many digits on both sides, - such as 319438950232418390.273596, 87673.594069654243 - - """ - return sqlalchemy.testing.exclusions.open() - - @property - def array_type(self): - """While Databricks does support ARRAY types, pysql cannot bind them. So - we cannot use them with SQLAlchemy - - Due to a bug in SQLAlchemy, we _must_ define this exclusion as closed() here or else the - test runner will crash the pytest process due to an AttributeError - """ - - # TODO: Implement array type using inline? - return sqlalchemy.testing.exclusions.closed() - - @property - def table_ddl_if_exists(self): - """target platform supports IF NOT EXISTS / IF EXISTS for tables.""" - - return sqlalchemy.testing.exclusions.open() - - @property - def identity_columns(self): - """If a backend supports GENERATED { ALWAYS | BY DEFAULT } - AS IDENTITY""" - return sqlalchemy.testing.exclusions.open() - - @property - def identity_columns_standard(self): - """If a backend supports GENERATED { ALWAYS | BY DEFAULT } - AS IDENTITY with a standard syntax. - This is mainly to exclude MSSql. - """ - return sqlalchemy.testing.exclusions.open() - - @property - def has_temp_table(self): - """target dialect supports checking a single temp table name - - unfortunately this is not the same as temp_table_names - - SQLAlchemy's HasTableTest is not normalised in such a way that temp table tests - are separate from temp view and normal table tests. If those tests were split out, - we would just add detailed skip markers in test_suite.py. But since we'd like to - run the HasTableTest group for the features we support, we must set this exclusinon - to closed(). - - It would be ideal if there were a separate requirement for has_temp_view. Without it, - we're in a bind. - """ - return sqlalchemy.testing.exclusions.closed() - - @property - def temporary_views(self): - """target database supports temporary views""" - return sqlalchemy.testing.exclusions.open() - - @property - def views(self): - """Target database must support VIEWs.""" - - return sqlalchemy.testing.exclusions.open() - - @property - def temporary_tables(self): - """target database supports temporary tables - - ComponentReflection test is intricate and simply cannot function without this exclusion being defined here. - This happens because we cannot skip individual combinations used in ComponentReflection test. - """ - return sqlalchemy.testing.exclusions.closed() - - @property - def table_reflection(self): - """target database has general support for table reflection""" - return sqlalchemy.testing.exclusions.open() - - @property - def comment_reflection(self): - """Indicates if the database support table comment reflection""" - return sqlalchemy.testing.exclusions.open() - - @property - def comment_reflection_full_unicode(self): - """Indicates if the database support table comment reflection in the - full unicode range, including emoji etc. 
- """ - return sqlalchemy.testing.exclusions.open() - - @property - def temp_table_reflection(self): - """ComponentReflection test is intricate and simply cannot function without this exclusion being defined here. - This happens because we cannot skip individual combinations used in ComponentReflection test. - """ - return sqlalchemy.testing.exclusions.closed() - - @property - def index_reflection(self): - """ComponentReflection test is intricate and simply cannot function without this exclusion being defined here. - This happens because we cannot skip individual combinations used in ComponentReflection test. - """ - return sqlalchemy.testing.exclusions.closed() - - @property - def unique_constraint_reflection(self): - """ComponentReflection test is intricate and simply cannot function without this exclusion being defined here. - This happens because we cannot skip individual combinations used in ComponentReflection test. - - Databricks doesn't support UNIQUE constraints. - """ - return sqlalchemy.testing.exclusions.closed() - - @property - def reflects_pk_names(self): - """Target driver reflects the name of primary key constraints.""" - - return sqlalchemy.testing.exclusions.open() - - @property - def datetime_implicit_bound(self): - """target dialect when given a datetime object will bind it such - that the database server knows the object is a date, and not - a plain string. - """ - - return sqlalchemy.testing.exclusions.open() - - @property - def tuple_in(self): - return sqlalchemy.testing.exclusions.open() - - @property - def ctes(self): - return sqlalchemy.testing.exclusions.open() - - @property - def ctes_with_update_delete(self): - return sqlalchemy.testing.exclusions.open() - - @property - def delete_from(self): - """Target must support DELETE FROM..FROM or DELETE..USING syntax""" - return sqlalchemy.testing.exclusions.open() - - @property - def table_value_constructor(self): - return sqlalchemy.testing.exclusions.open() - - @property - def reflect_tables_no_columns(self): - return sqlalchemy.testing.exclusions.open() - - @property - def denormalized_names(self): - """Target database must have 'denormalized', i.e. 
-        UPPERCASE as case insensitive names."""
-
-        return sqlalchemy.testing.exclusions.open()
-
-    @property
-    def time_timezone(self):
-        """target dialect supports representation of Python
-        datetime.time() with tzinfo with Time(timezone=True)."""
-
-        return sqlalchemy.testing.exclusions.open()
diff --git a/src/databricks/sqlalchemy/setup.cfg b/src/databricks/sqlalchemy/setup.cfg
deleted file mode 100644
index ab89d17d..00000000
--- a/src/databricks/sqlalchemy/setup.cfg
+++ /dev/null
@@ -1,4 +0,0 @@
-
-[sqla_testing]
-requirement_cls=databricks.sqlalchemy.requirements:Requirements
-profile_file=profiles.txt
diff --git a/src/databricks/sqlalchemy/test/_extra.py b/src/databricks/sqlalchemy/test/_extra.py
deleted file mode 100644
index 2f3e7a7d..00000000
--- a/src/databricks/sqlalchemy/test/_extra.py
+++ /dev/null
@@ -1,70 +0,0 @@
-"""Additional tests authored by Databricks that use SQLAlchemy's test fixtures
-"""
-
-import datetime
-
-from sqlalchemy.testing.suite.test_types import (
-    _LiteralRoundTripFixture,
-    fixtures,
-    testing,
-    eq_,
-    select,
-    Table,
-    Column,
-    config,
-    _DateFixture,
-    literal,
-)
-from databricks.sqlalchemy import TINYINT, TIMESTAMP
-
-
-class TinyIntegerTest(_LiteralRoundTripFixture, fixtures.TestBase):
-    __backend__ = True
-
-    def test_literal(self, literal_round_trip):
-        literal_round_trip(TINYINT, [5], [5])
-
-    @testing.fixture
-    def integer_round_trip(self, metadata, connection):
-        def run(datatype, data):
-            int_table = Table(
-                "tiny_integer_table",
-                metadata,
-                Column(
-                    "id",
-                    TINYINT,
-                    primary_key=True,
-                    test_needs_autoincrement=False,
-                ),
-                Column("integer_data", datatype),
-            )
-
-            metadata.create_all(config.db)
-
-            connection.execute(int_table.insert(), {"id": 1, "integer_data": data})
-
-            row = connection.execute(select(int_table.c.integer_data)).first()
-
-            eq_(row, (data,))
-
-            assert isinstance(row[0], int)
-
-        return run
-
-
-class DateTimeTZTestCustom(_DateFixture, fixtures.TablesTest):
-    """This test confirms that when a user uses the TIMESTAMP
-    type to store a datetime object, it retains its timezone
-    """
-
-    __backend__ = True
-    datatype = TIMESTAMP
-    data = datetime.datetime(2012, 10, 15, 12, 57, 18, tzinfo=datetime.timezone.utc)
-
-    @testing.requires.datetime_implicit_bound
-    def test_select_direct(self, connection):
-
-        # We need to pass the TIMESTAMP type to the literal function
-        # so that the value is processed correctly.
-        result = connection.scalar(select(literal(self.data, TIMESTAMP)))
-        eq_(result, self.data)
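(Editor's note: as a hedged illustration of the round trip DateTimeTZTestCustom exercises above. The TIMESTAMP import is the dialect's public type; the table name and the commented-out engine usage are assumptions, not part of this patch.)

```python
import datetime

import sqlalchemy as sa
from databricks.sqlalchemy import TIMESTAMP  # tz-aware Databricks TIMESTAMP type

metadata = sa.MetaData()
events = sa.Table(
    "events",  # illustrative table name
    metadata,
    sa.Column("created_at", TIMESTAMP),  # should retain tzinfo on round trip
)

ts = datetime.datetime(2012, 10, 15, 12, 57, 18, tzinfo=datetime.timezone.utc)

# Assuming `engine` was built from a databricks:// URL:
# with engine.begin() as conn:
#     metadata.create_all(conn)
#     conn.execute(events.insert().values(created_at=ts))
#     assert conn.execute(sa.select(events.c.created_at)).scalar() == ts
```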
diff --git a/src/databricks/sqlalchemy/test/_future.py b/src/databricks/sqlalchemy/test/_future.py
deleted file mode 100644
index 6e470f60..00000000
--- a/src/databricks/sqlalchemy/test/_future.py
+++ /dev/null
@@ -1,331 +0,0 @@
-# type: ignore
-
-from enum import Enum
-
-import pytest
-from databricks.sqlalchemy.test._regression import (
-    ExpandingBoundInTest,
-    IdentityAutoincrementTest,
-    LikeFunctionsTest,
-    NormalizedNameTest,
-)
-from databricks.sqlalchemy.test._unsupported import (
-    ComponentReflectionTest,
-    ComponentReflectionTestExtra,
-    CTETest,
-    InsertBehaviorTest,
-)
-from sqlalchemy.testing.suite import (
-    ArrayTest,
-    BinaryTest,
-    BizarroCharacterFKResolutionTest,
-    CollateTest,
-    ComputedColumnTest,
-    ComputedReflectionTest,
-    DifficultParametersTest,
-    FutureWeCanSetDefaultSchemaWEventsTest,
-    IdentityColumnTest,
-    IdentityReflectionTest,
-    JSONLegacyStringCastIndexTest,
-    JSONTest,
-    NativeUUIDTest,
-    QuotedNameArgumentTest,
-    RowCountTest,
-    SimpleUpdateDeleteTest,
-    WeCanSetDefaultSchemaWEventsTest,
-)
-
-
-class FutureFeature(Enum):
-    ARRAY = "ARRAY column type handling"
-    BINARY = "BINARY column type handling"
-    CHECK = "CHECK constraint handling"
-    COLLATE = "COLLATE DDL generation"
-    CTE_FEAT = "required CTE features"
-    EMPTY_INSERT = "empty INSERT support"
-    FK_OPTS = "foreign key option checking"
-    GENERATED_COLUMNS = "Delta computed / generated columns support"
-    IDENTITY = "identity reflection"
-    JSON = "JSON column type handling"
-    MULTI_PK = "get_multi_pk_constraint method"
-    PROVISION = "event-driven engine configuration"
-    REGEXP = "_visit_regexp"
-    SANE_ROWCOUNT = "sane_rowcount support"
-    TBL_OPTS = "get_table_options method"
-    TEST_DESIGN = "required test-fixture overrides"
-    TUPLE_LITERAL = "tuple-like IN markers completely"
-    UUID = "native Uuid() type"
-    VIEW_DEF = "get_view_definition method"
-
-
-def render_future_feature(rsn: FutureFeature, extra=False) -> str:
-    postfix = " More detail in _future.py" if extra else ""
-    return f"[FUTURE][{rsn.name}]: This dialect doesn't implement {rsn.value}.{postfix}"
-
-
-@pytest.mark.reviewed
-@pytest.mark.skip(render_future_feature(FutureFeature.BINARY))
-class BinaryTest(BinaryTest):
-    """Databricks doesn't support binding of BINARY type values. When DBR supports this, we can implement
-    it in this dialect.
-    """
-
-    pass
-
-
-class ExpandingBoundInTest(ExpandingBoundInTest):
-    @pytest.mark.skip(render_future_feature(FutureFeature.TUPLE_LITERAL))
-    def test_empty_heterogeneous_tuples_bindparam(self):
-        pass
-
-    @pytest.mark.skip(render_future_feature(FutureFeature.TUPLE_LITERAL))
-    def test_empty_heterogeneous_tuples_direct(self):
-        pass
-
-    @pytest.mark.skip(render_future_feature(FutureFeature.TUPLE_LITERAL))
-    def test_empty_homogeneous_tuples_bindparam(self):
-        pass
-
-    @pytest.mark.skip(render_future_feature(FutureFeature.TUPLE_LITERAL))
-    def test_empty_homogeneous_tuples_direct(self):
-        pass
-
-
-class NormalizedNameTest(NormalizedNameTest):
-    @pytest.mark.skip(render_future_feature(FutureFeature.TEST_DESIGN, True))
-    def test_get_table_names(self):
-        """I'm not clear how this test can ever pass given that its assertion looks like this:
-
-        ```python
-        eq_(tablenames[0].upper(), tablenames[0].lower())
-        eq_(tablenames[1].upper(), tablenames[1].lower())
-        ```
-
-        It's forcibly calling .upper() and .lower() on the same string and expecting them to be equal.
- """ - pass - - -class CTETest(CTETest): - @pytest.mark.skip(render_future_feature(FutureFeature.CTE_FEAT, True)) - def test_delete_from_round_trip(self): - """Databricks dialect doesn't implement multiple-table criteria within DELETE""" - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.TEST_DESIGN, True)) -class IdentityColumnTest(IdentityColumnTest): - """Identity works. Test needs rewrite for Databricks. See comments in test_suite.py - - The setup for these tests tries to create a table with a DELTA IDENTITY column but has two problems: - 1. It uses an Integer() type for the column. Whereas DELTA IDENTITY columns must be BIGINT. - 2. It tries to set the start == 42, which Databricks doesn't support - - I can get the tests to _run_ by patching the table fixture to use BigInteger(). But it asserts that the - identity of two rows are 42 and 43, which is not possible since they will be rows 1 and 2 instead. - - I'm satisified through manual testing that our implementation of visit_identity_column works but a better test is needed. - """ - - pass - - -class IdentityAutoincrementTest(IdentityAutoincrementTest): - @pytest.mark.skip(render_future_feature(FutureFeature.TEST_DESIGN, True)) - def test_autoincrement_with_identity(self): - """This test has the same issue as IdentityColumnTest.test_select_all in that it creates a table with identity - using an Integer() rather than a BigInteger(). If I override this behaviour to use a BigInteger() instead, the - test passes. - """ - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.TEST_DESIGN)) -class BizarroCharacterFKResolutionTest(BizarroCharacterFKResolutionTest): - """Some of the combinations in this test pass. Others fail. Given the esoteric nature of these failures, - we have opted to defer implementing fixes to a later time, guided by customer feedback. Passage of - these tests is not an acceptance criteria for our dialect. - """ - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.TEST_DESIGN)) -class DifficultParametersTest(DifficultParametersTest): - """Some of the combinations in this test pass. Others fail. Given the esoteric nature of these failures, - we have opted to defer implementing fixes to a later time, guided by customer feedback. Passage of - these tests is not an acceptance criteria for our dialect. - """ - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.IDENTITY, True)) -class IdentityReflectionTest(IdentityReflectionTest): - """It's not clear _how_ to implement this for SQLAlchemy. Columns created with GENERATED ALWAYS AS IDENTITY - are not specially demarked in the output of TGetColumnsResponse or DESCRIBE TABLE EXTENDED. - - We could theoretically parse this from the contents of `SHOW CREATE TABLE` but that feels like a hack. 
- """ - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.JSON)) -class JSONTest(JSONTest): - """Databricks supports JSON path expressions in queries it's just not implemented in this dialect.""" - - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.JSON)) -class JSONLegacyStringCastIndexTest(JSONLegacyStringCastIndexTest): - """Same comment applies as JSONTest""" - - pass - - -class LikeFunctionsTest(LikeFunctionsTest): - @pytest.mark.skip(render_future_feature(FutureFeature.REGEXP)) - def test_not_regexp_match(self): - """The defaul dialect doesn't implement _visit_regexp methods so we don't get them automatically.""" - pass - - @pytest.mark.skip(render_future_feature(FutureFeature.REGEXP)) - def test_regexp_match(self): - """The defaul dialect doesn't implement _visit_regexp methods so we don't get them automatically.""" - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.COLLATE)) -class CollateTest(CollateTest): - """This is supported in Databricks. Not implemented here.""" - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.UUID, True)) -class NativeUUIDTest(NativeUUIDTest): - """Type implementation will be straightforward. Since Databricks doesn't have a native UUID type we can use - a STRING field, create a custom TypeDecorator for sqlalchemy.types.Uuid and add it to the dialect's colspecs. - - Then mark requirements.uuid_data_type as open() so this test can run. - """ - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.SANE_ROWCOUNT)) -class RowCountTest(RowCountTest): - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.SANE_ROWCOUNT)) -class SimpleUpdateDeleteTest(SimpleUpdateDeleteTest): - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.PROVISION, True)) -class WeCanSetDefaultSchemaWEventsTest(WeCanSetDefaultSchemaWEventsTest): - """provision.py allows us to define event listeners that emit DDL for things like setting up a test schema - or, in this case, changing the default schema for the connection after it's been built. This would override - the schema defined in the sqlalchemy connection string. This support is possible but is not implemented - in the dialect. Deferred for now. - """ - - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.PROVISION, True)) -class FutureWeCanSetDefaultSchemaWEventsTest(FutureWeCanSetDefaultSchemaWEventsTest): - """provision.py allows us to define event listeners that emit DDL for things like setting up a test schema - or, in this case, changing the default schema for the connection after it's been built. This would override - the schema defined in the sqlalchemy connection string. This support is possible but is not implemented - in the dialect. Deferred for now. - """ - - pass - - -class ComponentReflectionTest(ComponentReflectionTest): - @pytest.mark.skip(reason=render_future_feature(FutureFeature.TBL_OPTS, True)) - def test_multi_get_table_options_tables(self): - """It's not clear what the expected ouput from this method would even _be_. 
-        Requires research."""
-        pass
-
-    @pytest.mark.skip(render_future_feature(FutureFeature.VIEW_DEF))
-    def test_get_view_definition(self):
-        pass
-
-    @pytest.mark.skip(render_future_feature(FutureFeature.VIEW_DEF))
-    def test_get_view_definition_does_not_exist(self):
-        pass
-
-    @pytest.mark.skip(render_future_feature(FutureFeature.MULTI_PK))
-    def test_get_multi_pk_constraint(self):
-        pass
-
-    @pytest.mark.skip(render_future_feature(FutureFeature.CHECK))
-    def test_get_multi_check_constraints(self):
-        pass
-
-
-class ComponentReflectionTestExtra(ComponentReflectionTestExtra):
-    @pytest.mark.skip(render_future_feature(FutureFeature.CHECK))
-    def test_get_check_constraints(self):
-        pass
-
-    @pytest.mark.skip(render_future_feature(FutureFeature.FK_OPTS))
-    def test_get_foreign_key_options(self):
-        """It's not clear from the test code what the expected output is here. Further research required."""
-        pass
-
-
-class InsertBehaviorTest(InsertBehaviorTest):
-    @pytest.mark.skip(render_future_feature(FutureFeature.EMPTY_INSERT, True))
-    def test_empty_insert(self):
-        """Empty inserts are possible using DEFAULT VALUES on Databricks. To implement it, we need
-        to hook into the SQLCompiler to render a no-op column list. With SQLAlchemy's default implementation
-        the request fails with a syntax error.
-        """
-        pass
-
-    @pytest.mark.skip(render_future_feature(FutureFeature.EMPTY_INSERT, True))
-    def test_empty_insert_multiple(self):
-        """Empty inserts are possible using DEFAULT VALUES on Databricks. To implement it, we need
-        to hook into the SQLCompiler to render a no-op column list. With SQLAlchemy's default implementation
-        the request fails with a syntax error.
-        """
-        pass
-
-
-@pytest.mark.reviewed
-@pytest.mark.skip(render_future_feature(FutureFeature.ARRAY))
-class ArrayTest(ArrayTest):
-    """While Databricks supports ARRAY types, DBR cannot handle bound parameters of this type.
-    This makes them unusable to SQLAlchemy without some workaround. Potentially we could inline
-    the values of these parameters (which risks sql injection).
-    """
-
-
-@pytest.mark.reviewed
-@pytest.mark.skip(render_future_feature(FutureFeature.TEST_DESIGN, True))
-class QuotedNameArgumentTest(QuotedNameArgumentTest):
-    """These tests are challenging. The whole test setup depends on a table with a name like `quote ' one`
-    which will never work on Databricks because table names can't contain spaces. But QuotedNameArgumentTest
-    also checks the behaviour of the DDL identifier preparation process. We need to override some IdentifierPreparer
-    methods because these are the ultimate control for whether or not CHECK and UNIQUE constraints are emitted.
-    """
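(Editor's note: for context on the empty-insert skips above. SQLAlchemy's stock dialects opt into DEFAULT VALUES rendering via the supports_default_values flag; whether that flag alone would satisfy Databricks' SQL is an open question, so treat this as a sketch rather than the planned fix.)

```python
from sqlalchemy.engine.default import DefaultDialect


class IllustrativeDialect(DefaultDialect):
    # With this flag set, table.insert() with no values compiles to
    # "INSERT INTO t DEFAULT VALUES" instead of raising a compile error.
    supports_default_values = True
```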
- """ - - -@pytest.mark.reviewed -@pytest.mark.skip(reason=render_future_feature(FutureFeature.GENERATED_COLUMNS)) -class ComputedColumnTest(ComputedColumnTest): - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(reason=render_future_feature(FutureFeature.GENERATED_COLUMNS)) -class ComputedReflectionTest(ComputedReflectionTest): - pass diff --git a/src/databricks/sqlalchemy/test/_regression.py b/src/databricks/sqlalchemy/test/_regression.py deleted file mode 100644 index 4dbc5ec2..00000000 --- a/src/databricks/sqlalchemy/test/_regression.py +++ /dev/null @@ -1,311 +0,0 @@ -# type: ignore - -import pytest -from sqlalchemy.testing.suite import ( - ArgSignatureTest, - BooleanTest, - CastTypeDecoratorTest, - ComponentReflectionTestExtra, - CompositeKeyReflectionTest, - CompoundSelectTest, - DateHistoricTest, - DateTest, - DateTimeCoercedToDateTimeTest, - DateTimeHistoricTest, - DateTimeMicrosecondsTest, - DateTimeTest, - DeprecatedCompoundSelectTest, - DistinctOnTest, - EscapingTest, - ExistsTest, - ExpandingBoundInTest, - FetchLimitOffsetTest, - FutureTableDDLTest, - HasTableTest, - IdentityAutoincrementTest, - InsertBehaviorTest, - IntegerTest, - IsOrIsNotDistinctFromTest, - JoinTest, - LikeFunctionsTest, - NormalizedNameTest, - NumericTest, - OrderByLabelTest, - PingTest, - PostCompileParamsTest, - ReturningGuardsTest, - RowFetchTest, - SameNamedSchemaTableTest, - StringTest, - TableDDLTest, - TableNoColumnsTest, - TextTest, - TimeMicrosecondsTest, - TimestampMicrosecondsTest, - TimeTest, - TimeTZTest, - TrueDivTest, - UnicodeTextTest, - UnicodeVarcharTest, - UuidTest, - ValuesExpressionTest, -) - -from databricks.sqlalchemy.test.overrides._ctetest import CTETest -from databricks.sqlalchemy.test.overrides._componentreflectiontest import ( - ComponentReflectionTest, -) - - -@pytest.mark.reviewed -class NumericTest(NumericTest): - pass - - -@pytest.mark.reviewed -class HasTableTest(HasTableTest): - pass - - -@pytest.mark.reviewed -class ComponentReflectionTestExtra(ComponentReflectionTestExtra): - pass - - -@pytest.mark.reviewed -class InsertBehaviorTest(InsertBehaviorTest): - pass - - -@pytest.mark.reviewed -class ComponentReflectionTest(ComponentReflectionTest): - """This test requires two schemas be present in the target Databricks workspace: - - The schema set in --dburi - - A second schema named "test_schema" - - Note that test_get_multi_foreign keys is flaky because DBR does not guarantee the order of data returned in DESCRIBE TABLE EXTENDED - - _Most_ of these tests pass if we manually override the bad test setup. 
- """ - - pass - - -@pytest.mark.reviewed -class TableDDLTest(TableDDLTest): - pass - - -@pytest.mark.reviewed -class FutureTableDDLTest(FutureTableDDLTest): - pass - - -@pytest.mark.reviewed -class FetchLimitOffsetTest(FetchLimitOffsetTest): - pass - - -@pytest.mark.reviewed -class UuidTest(UuidTest): - pass - - -@pytest.mark.reviewed -class ValuesExpressionTest(ValuesExpressionTest): - pass - - -@pytest.mark.reviewed -class BooleanTest(BooleanTest): - pass - - -@pytest.mark.reviewed -class PostCompileParamsTest(PostCompileParamsTest): - pass - - -@pytest.mark.reviewed -class TimeMicrosecondsTest(TimeMicrosecondsTest): - pass - - -@pytest.mark.reviewed -class TextTest(TextTest): - pass - - -@pytest.mark.reviewed -class StringTest(StringTest): - pass - - -@pytest.mark.reviewed -class DateTimeMicrosecondsTest(DateTimeMicrosecondsTest): - pass - - -@pytest.mark.reviewed -class TimestampMicrosecondsTest(TimestampMicrosecondsTest): - pass - - -@pytest.mark.reviewed -class DateTimeCoercedToDateTimeTest(DateTimeCoercedToDateTimeTest): - pass - - -@pytest.mark.reviewed -class TimeTest(TimeTest): - pass - - -@pytest.mark.reviewed -class DateTimeTest(DateTimeTest): - pass - - -@pytest.mark.reviewed -class DateTimeHistoricTest(DateTimeHistoricTest): - pass - - -@pytest.mark.reviewed -class DateTest(DateTest): - pass - - -@pytest.mark.reviewed -class DateHistoricTest(DateHistoricTest): - pass - - -@pytest.mark.reviewed -class RowFetchTest(RowFetchTest): - pass - - -@pytest.mark.reviewed -class CompositeKeyReflectionTest(CompositeKeyReflectionTest): - pass - - -@pytest.mark.reviewed -class TrueDivTest(TrueDivTest): - pass - - -@pytest.mark.reviewed -class ArgSignatureTest(ArgSignatureTest): - pass - - -@pytest.mark.reviewed -class CompoundSelectTest(CompoundSelectTest): - pass - - -@pytest.mark.reviewed -class DeprecatedCompoundSelectTest(DeprecatedCompoundSelectTest): - pass - - -@pytest.mark.reviewed -class CastTypeDecoratorTest(CastTypeDecoratorTest): - pass - - -@pytest.mark.reviewed -class DistinctOnTest(DistinctOnTest): - pass - - -@pytest.mark.reviewed -class EscapingTest(EscapingTest): - pass - - -@pytest.mark.reviewed -class ExistsTest(ExistsTest): - pass - - -@pytest.mark.reviewed -class IntegerTest(IntegerTest): - pass - - -@pytest.mark.reviewed -class IsOrIsNotDistinctFromTest(IsOrIsNotDistinctFromTest): - pass - - -@pytest.mark.reviewed -class JoinTest(JoinTest): - pass - - -@pytest.mark.reviewed -class OrderByLabelTest(OrderByLabelTest): - pass - - -@pytest.mark.reviewed -class PingTest(PingTest): - pass - - -@pytest.mark.reviewed -class ReturningGuardsTest(ReturningGuardsTest): - pass - - -@pytest.mark.reviewed -class SameNamedSchemaTableTest(SameNamedSchemaTableTest): - pass - - -@pytest.mark.reviewed -class UnicodeTextTest(UnicodeTextTest): - pass - - -@pytest.mark.reviewed -class UnicodeVarcharTest(UnicodeVarcharTest): - pass - - -@pytest.mark.reviewed -class TableNoColumnsTest(TableNoColumnsTest): - pass - - -@pytest.mark.reviewed -class ExpandingBoundInTest(ExpandingBoundInTest): - pass - - -@pytest.mark.reviewed -class CTETest(CTETest): - pass - - -@pytest.mark.reviewed -class NormalizedNameTest(NormalizedNameTest): - pass - - -@pytest.mark.reviewed -class IdentityAutoincrementTest(IdentityAutoincrementTest): - pass - - -@pytest.mark.reviewed -class LikeFunctionsTest(LikeFunctionsTest): - pass - - -@pytest.mark.reviewed -class TimeTZTest(TimeTZTest): - pass diff --git a/src/databricks/sqlalchemy/test/_unsupported.py b/src/databricks/sqlalchemy/test/_unsupported.py deleted file 
 mode 100644
index c1f81205..00000000
--- a/src/databricks/sqlalchemy/test/_unsupported.py
+++ /dev/null
@@ -1,450 +0,0 @@
-# type: ignore
-
-from enum import Enum
-
-import pytest
-from databricks.sqlalchemy.test._regression import (
-    ComponentReflectionTest,
-    ComponentReflectionTestExtra,
-    CTETest,
-    FetchLimitOffsetTest,
-    FutureTableDDLTest,
-    HasTableTest,
-    InsertBehaviorTest,
-    NumericTest,
-    TableDDLTest,
-    UuidTest,
-)
-
-# These are test suites that are fully skipped with a SkipReason
-from sqlalchemy.testing.suite import (
-    AutocommitIsolationTest,
-    DateTimeTZTest,
-    ExceptionTest,
-    HasIndexTest,
-    HasSequenceTest,
-    HasSequenceTestEmpty,
-    IsolationLevelTest,
-    LastrowidTest,
-    LongNameBlowoutTest,
-    PercentSchemaNamesTest,
-    ReturningTest,
-    SequenceCompilerTest,
-    SequenceTest,
-    ServerSideCursorsTest,
-    UnicodeSchemaTest,
-)
-
-
-class SkipReason(Enum):
-    AUTO_INC = "implicit AUTO_INCREMENT"
-    CTE_FEAT = "required CTE features"
-    CURSORS = "server-side cursors"
-    DECIMAL_FEAT = "required decimal features"
-    ENFORCE_KEYS = "enforcing primary or foreign key constraints"
-    FETCH = "fetch clauses"
-    IDENTIFIER_LENGTH = "identifiers > 255 characters"
-    IMPL_FLOAT_PREC = "required implicit float precision"
-    IMPLICIT_ORDER = "deterministic return order if ORDER BY is not present"
-    INDEXES = "SQL INDEXes"
-    RETURNING = "INSERT ... RETURNING syntax"
-    SEQUENCES = "SQL SEQUENCES"
-    STRING_FEAT = "required STRING type features"
-    SYMBOL_CHARSET = "symbols expected by test"
-    TEMP_TBL = "temporary tables"
-    TIMEZONE_OPT = "timezone-optional TIMESTAMP fields"
-    TRANSACTIONS = "transactions"
-    UNIQUE = "UNIQUE constraints"
-
-
-def render_skip_reason(rsn: SkipReason, setup_error=False, extra=False) -> str:
-    prefix = "[BADSETUP]" if setup_error else ""
-    postfix = " More detail in _unsupported.py" if extra else ""
-    return f"[UNSUPPORTED]{prefix}[{rsn.name}]: Databricks does not support {rsn.value}.{postfix}"
-
-
-@pytest.mark.reviewed
-@pytest.mark.skip(reason=render_skip_reason(SkipReason.ENFORCE_KEYS))
-class ExceptionTest(ExceptionTest):
-    """Per Databricks documentation, primary and foreign key constraints are informational only
-    and are not enforced.
-
-    https://docs.databricks.com/api/workspace/tableconstraints
-    """
-
-    pass
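(Editor's note: to make the skip markers below easier to scan, these are the strings render_skip_reason produces, derived directly from the function as written above.)

```python
# Assuming the SkipReason enum and render_skip_reason defined above:
render_skip_reason(SkipReason.INDEXES)
# -> "[UNSUPPORTED][INDEXES]: Databricks does not support SQL INDEXes."

render_skip_reason(SkipReason.TEMP_TBL, setup_error=True, extra=True)
# -> "[UNSUPPORTED][BADSETUP][TEMP_TBL]: Databricks does not support temporary tables. More detail in _unsupported.py"
```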
-
-
-@pytest.mark.reviewed
-@pytest.mark.skip(reason=render_skip_reason(SkipReason.IDENTIFIER_LENGTH))
-class LongNameBlowoutTest(LongNameBlowoutTest):
-    """These tests all include assertions that the tested name is > 255 characters"""
-
-    pass
-
-
-@pytest.mark.reviewed
-@pytest.mark.skip(reason=render_skip_reason(SkipReason.SEQUENCES))
-class HasSequenceTest(HasSequenceTest):
-    pass
-
-
-@pytest.mark.reviewed
-@pytest.mark.skip(reason=render_skip_reason(SkipReason.SEQUENCES))
-class HasSequenceTestEmpty(HasSequenceTestEmpty):
-    pass
-
-
-@pytest.mark.reviewed
-@pytest.mark.skip(reason=render_skip_reason(SkipReason.INDEXES))
-class HasIndexTest(HasIndexTest):
-    pass
-
-
-@pytest.mark.reviewed
-@pytest.mark.skip(reason=render_skip_reason(SkipReason.SYMBOL_CHARSET))
-class UnicodeSchemaTest(UnicodeSchemaTest):
-    pass
-
-
-@pytest.mark.reviewed
-@pytest.mark.skip(reason=render_skip_reason(SkipReason.CURSORS))
-class ServerSideCursorsTest(ServerSideCursorsTest):
-    pass
-
-
-@pytest.mark.reviewed
-@pytest.mark.skip(reason=render_skip_reason(SkipReason.SYMBOL_CHARSET))
-class PercentSchemaNamesTest(PercentSchemaNamesTest):
-    pass
-
-
-@pytest.mark.reviewed
-@pytest.mark.skip(reason=render_skip_reason(SkipReason.TRANSACTIONS))
-class IsolationLevelTest(IsolationLevelTest):
-    pass
-
-
-@pytest.mark.reviewed
-@pytest.mark.skip(reason=render_skip_reason(SkipReason.TRANSACTIONS))
-class AutocommitIsolationTest(AutocommitIsolationTest):
-    pass
-
-
-@pytest.mark.reviewed
-@pytest.mark.skip(reason=render_skip_reason(SkipReason.RETURNING))
-class ReturningTest(ReturningTest):
-    pass
-
-
-@pytest.mark.reviewed
-@pytest.mark.skip(reason=render_skip_reason(SkipReason.SEQUENCES))
-class SequenceTest(SequenceTest):
-    pass
-
-
-@pytest.mark.reviewed
-@pytest.mark.skip(reason=render_skip_reason(SkipReason.SEQUENCES))
-class SequenceCompilerTest(SequenceCompilerTest):
-    pass
-
-
-class FetchLimitOffsetTest(FetchLimitOffsetTest):
-    @pytest.mark.flaky
-    @pytest.mark.skip(reason=render_skip_reason(SkipReason.IMPLICIT_ORDER, extra=True))
-    def test_limit_render_multiple_times(self):
-        """This test depends on the order that records are inserted into the table. Its passing criterion requires that
-        a record inserted with id=1 is the first record returned when no ORDER BY clause is specified. But Databricks occasionally
-        inserts in a different order, which makes this test seem to fail. The test is flaky, but the underlying functionality
-        (can multiple LIMIT clauses be rendered) is not broken.
-
-        Unclear if this is a bug in Databricks, Delta, or some race condition in the test itself.
- """ - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_bound_fetch_offset(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_fetch_offset_no_order(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_fetch_offset_nobinds(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_simple_fetch(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_simple_fetch_offset(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_simple_fetch_percent(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_simple_fetch_percent_ties(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_simple_fetch_ties(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_expr_fetch_offset(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_fetch_offset_percent(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_fetch_offset_percent_ties(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_fetch_offset_ties(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_fetch_offset_ties_exact_number(self): - pass - - -class UuidTest(UuidTest): - @pytest.mark.skip(reason=render_skip_reason(SkipReason.RETURNING)) - def test_uuid_returning(self): - pass - - -class FutureTableDDLTest(FutureTableDDLTest): - @pytest.mark.skip(render_skip_reason(SkipReason.INDEXES)) - def test_create_index_if_not_exists(self): - """We could use requirements.index_reflection and requirements.index_ddl_if_exists - here to disable this but prefer a more meaningful skip message - """ - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.INDEXES)) - def test_drop_index_if_exists(self): - """We could use requirements.index_reflection and requirements.index_ddl_if_exists - here to disable this but prefer a more meaningful skip message - """ - pass - - -class TableDDLTest(TableDDLTest): - @pytest.mark.skip(reason=render_skip_reason(SkipReason.INDEXES)) - def test_create_index_if_not_exists(self, connection): - """We could use requirements.index_reflection and requirements.index_ddl_if_exists - here to disable this but prefer a more meaningful skip message - """ - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.INDEXES)) - def test_drop_index_if_exists(self, connection): - """We could use requirements.index_reflection and requirements.index_ddl_if_exists - here to disable this but prefer a more meaningful skip message - """ - pass - - -class ComponentReflectionTest(ComponentReflectionTest): - """This test requires two schemas be present in the target Databricks workspace: - - The schema set in --dburi - - A second schema named "test_schema" - - Note that test_get_multi_foreign keys is flaky because DBR does not guarantee the order of data returned in DESCRIBE TABLE EXTENDED - """ - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.UNIQUE)) - def test_get_multi_unique_constraints(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.TEMP_TBL, True, True)) - def test_get_temp_view_names(self): - """While Databricks supports temporary views, this test creates a temp view aimed at a temp table. 
-        Databricks doesn't support temp tables. So the test can never pass.
-        """
-        pass
-
-    @pytest.mark.skip(reason=render_skip_reason(SkipReason.TEMP_TBL))
-    def test_get_temp_table_columns(self):
-        pass
-
-    @pytest.mark.skip(reason=render_skip_reason(SkipReason.TEMP_TBL))
-    def test_get_temp_table_indexes(self):
-        pass
-
-    @pytest.mark.skip(reason=render_skip_reason(SkipReason.TEMP_TBL))
-    def test_get_temp_table_names(self):
-        pass
-
-    @pytest.mark.skip(reason=render_skip_reason(SkipReason.TEMP_TBL))
-    def test_get_temp_table_unique_constraints(self):
-        pass
-
-    @pytest.mark.skip(reason=render_skip_reason(SkipReason.TEMP_TBL))
-    def test_reflect_table_temp_table(self):
-        pass
-
-    @pytest.mark.skip(render_skip_reason(SkipReason.INDEXES))
-    def test_get_indexes(self):
-        pass
-
-    @pytest.mark.skip(render_skip_reason(SkipReason.INDEXES))
-    def test_multi_indexes(self):
-        pass
-
-    @pytest.mark.skip(render_skip_reason(SkipReason.INDEXES))
-    def get_noncol_index(self):
-        pass
-
-    @pytest.mark.skip(render_skip_reason(SkipReason.UNIQUE))
-    def test_get_unique_constraints(self):
-        pass
-
-
-class NumericTest(NumericTest):
-    @pytest.mark.skip(render_skip_reason(SkipReason.DECIMAL_FEAT))
-    def test_enotation_decimal(self):
-        """This test automatically runs if requirements.precision_numerics_enotation_large is open()"""
-        pass
-
-    @pytest.mark.skip(render_skip_reason(SkipReason.DECIMAL_FEAT))
-    def test_enotation_decimal_large(self):
-        """This test automatically runs if requirements.precision_numerics_enotation_large is open()"""
-        pass
-
-    @pytest.mark.skip(render_skip_reason(SkipReason.IMPL_FLOAT_PREC, extra=True))
-    def test_float_coerce_round_trip(self):
-        """
-        This automatically runs if requirements.literal_float_coercion is open()
-
-        Without additional work, Databricks returns 15.75629997253418 when you SELECT 15.7563.
-        This is a potential area where we could override the Float literal processor to add a CAST.
-        Will leave to a PM to decide if we should do so.
-        """
-        pass
-
-    @pytest.mark.skip(render_skip_reason(SkipReason.IMPL_FLOAT_PREC, extra=True))
-    def test_float_custom_scale(self):
-        """This test automatically runs if requirements.precision_generic_float_type is open()"""
-        pass
-
-
-class HasTableTest(HasTableTest):
-    """Databricks does not support temporary tables."""
-
-    @pytest.mark.skip(render_skip_reason(SkipReason.TEMP_TBL))
-    def test_has_table_temp_table(self):
-        pass
-
-    @pytest.mark.skip(render_skip_reason(SkipReason.TEMP_TBL, True, True))
-    def test_has_table_temp_view(self):
-        """Databricks supports temporary views but this test depends on requirements.has_temp_table, which we
-        explicitly close so that we can run other tests in this group. See the comment under has_temp_table in
-        requirements.py for details.
-
-        From what I can see, there is no way to run this test since it will fail during setup if we mark has_temp_table
-        open(). It _might_ be possible to hijack this behaviour by implementing temp_table_keyword_args in our own
-        provision.py. Doing so would mean creating a real table during this class setup instead of a temp table. Then
-        we could just skip the temp table tests but run the temp view tests. But this test fixture doesn't clean up its
-        temp tables and has no hook to do so.
-
-        It would be ideal for SQLAlchemy to define a separate requirements.has_temp_views.
- """ - pass - - -class ComponentReflectionTestExtra(ComponentReflectionTestExtra): - @pytest.mark.skip(render_skip_reason(SkipReason.INDEXES)) - def test_reflect_covering_index(self): - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.INDEXES)) - def test_reflect_expression_based_indexes(self): - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.STRING_FEAT, extra=True)) - def test_varchar_reflection(self): - """Databricks doesn't enforce string length limitations like STRING(255).""" - pass - - -class InsertBehaviorTest(InsertBehaviorTest): - @pytest.mark.skip(render_skip_reason(SkipReason.AUTO_INC, True, True)) - def test_autoclose_on_insert(self): - """The setup for this test creates a column with implicit autoincrement enabled. - This dialect does not implement implicit autoincrement - users must declare Identity() explicitly. - """ - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.AUTO_INC, True, True)) - def test_insert_from_select_autoinc(self): - """Implicit autoincrement is not implemented in this dialect.""" - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.AUTO_INC, True, True)) - def test_insert_from_select_autoinc_no_rows(self): - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.RETURNING)) - def test_autoclose_on_insert_implicit_returning(self): - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(render_skip_reason(SkipReason.AUTO_INC, extra=True)) -class LastrowidTest(LastrowidTest): - """SQLAlchemy docs describe that a column without an explicit Identity() may implicitly create one if autoincrement=True. - That is what this method tests. Databricks supports auto-incrementing IDENTITY columns but they must be explicitly - declared. This limitation is present in our dialect as well. Which means that SQLAlchemy's autoincrement setting of a column - is ignored. We emit a logging.WARN message if you try it. - - In the future we could handle this autoincrement by implicitly calling the visit_identity_column() method of our DDLCompiler - when autoincrement=True. There is an example of this in the Microsoft SQL Server dialect: MSSDDLCompiler.get_column_specification - - For now, if you need to create a SQLAlchemy column with an auto-incrementing identity, you must set this explicitly in your column - definition by passing an Identity() to the column constructor. - """ - - pass - - -class CTETest(CTETest): - """During the teardown for this test block, it tries to drop a constraint that it never named which raises - a compilation error. This could point to poor constraint reflection but our other constraint reflection - tests pass. Requires investigation. - """ - - @pytest.mark.skip(render_skip_reason(SkipReason.CTE_FEAT, extra=True)) - def test_select_recursive_round_trip(self): - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.CTE_FEAT, extra=True)) - def test_delete_scalar_subq_round_trip(self): - """Error received is [UNSUPPORTED_SUBQUERY_EXPRESSION_CATEGORY.MUST_AGGREGATE_CORRELATED_SCALAR_SUBQUERY] - - This suggests a limitation of the platform. But a workaround may be possible if customers require it. - """ - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(render_skip_reason(SkipReason.TIMEZONE_OPT, True)) -class DateTimeTZTest(DateTimeTZTest): - """Test whether the sqlalchemy.DateTime() type can _optionally_ include timezone info. - This dialect maps DateTime() → TIMESTAMP, which _always_ includes tzinfo. - - Users can use databricks.sqlalchemy.TIMESTAMP_NTZ for a tzinfo-less timestamp. 
diff --git a/src/databricks/sqlalchemy/test/conftest.py b/src/databricks/sqlalchemy/test/conftest.py
deleted file mode 100644
index ea43e8d3..00000000
--- a/src/databricks/sqlalchemy/test/conftest.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from sqlalchemy.dialects import registry
-import pytest
-
-registry.register("databricks", "databricks.sqlalchemy", "DatabricksDialect")
-# sqlalchemy's dialect-testing machinery wants an entry like this.
-# This seems to be based around dialects maybe having multiple drivers
-# and wanting to test driver-specific URLs, but doesn't seem to make
-# much sense for dialects with only one driver.
-registry.register("databricks.databricks", "databricks.sqlalchemy", "DatabricksDialect")
-
-pytest.register_assert_rewrite("sqlalchemy.testing.assertions")
-
-from sqlalchemy.testing.plugin.pytestplugin import *
diff --git a/src/databricks/sqlalchemy/test/overrides/_componentreflectiontest.py b/src/databricks/sqlalchemy/test/overrides/_componentreflectiontest.py
deleted file mode 100644
index a1f58fa6..00000000
--- a/src/databricks/sqlalchemy/test/overrides/_componentreflectiontest.py
+++ /dev/null
@@ -1,189 +0,0 @@
-"""The default test setup uses self-referential foreign keys and indexes for a test table.
-We override to remove these assumptions.
-
-Note that test_multi_foreign_keys currently does not pass for all combinations due to
-an ordering issue. The dialect returns the expected information. But this test makes assertions
-on the order of the returned results. We can't guarantee that order at the moment.
-
-The test fixture actually tries to sort the outputs, but this sort isn't working. Will need
-to follow up on this later.
-"""
-import sqlalchemy as sa
-from sqlalchemy.testing import config
-from sqlalchemy.testing.schema import Column
-from sqlalchemy.testing.schema import Table
-from sqlalchemy import ForeignKey
-from sqlalchemy import Index
-from sqlalchemy import testing
-
-from sqlalchemy.testing.suite.test_reflection import ComponentReflectionTest
-
-
-class ComponentReflectionTest(ComponentReflectionTest):  # type: ignore
-    @classmethod
-    def define_reflected_tables(cls, metadata, schema):
-        if schema:
-            schema_prefix = schema + "."
- else: - schema_prefix = "" - - if testing.requires.self_referential_foreign_keys.enabled: - parent_id_args = ( - ForeignKey( - "%susers.user_id" % schema_prefix, name="user_id_fk", use_alter=True - ), - ) - else: - parent_id_args = () - users = Table( - "users", - metadata, - Column("user_id", sa.INT, primary_key=True), - Column("test1", sa.CHAR(5), nullable=False), - Column("test2", sa.Float(), nullable=False), - Column("parent_user_id", sa.Integer, *parent_id_args), - sa.CheckConstraint( - "test2 > 0", - name="zz_test2_gt_zero", - comment="users check constraint", - ), - sa.CheckConstraint("test2 <= 1000"), - schema=schema, - test_needs_fk=True, - ) - - Table( - "dingalings", - metadata, - Column("dingaling_id", sa.Integer, primary_key=True), - Column( - "address_id", - sa.Integer, - ForeignKey( - "%semail_addresses.address_id" % schema_prefix, - name="zz_email_add_id_fg", - comment="di fk comment", - ), - ), - Column( - "id_user", - sa.Integer, - ForeignKey("%susers.user_id" % schema_prefix), - ), - Column("data", sa.String(30), unique=True), - sa.CheckConstraint( - "address_id > 0 AND address_id < 1000", - name="address_id_gt_zero", - ), - sa.UniqueConstraint( - "address_id", - "dingaling_id", - name="zz_dingalings_multiple", - comment="di unique comment", - ), - schema=schema, - test_needs_fk=True, - ) - Table( - "email_addresses", - metadata, - Column("address_id", sa.Integer), - Column("remote_user_id", sa.Integer, ForeignKey(users.c.user_id)), - Column("email_address", sa.String(20)), - sa.PrimaryKeyConstraint( - "address_id", name="email_ad_pk", comment="ea pk comment" - ), - schema=schema, - test_needs_fk=True, - ) - Table( - "comment_test", - metadata, - Column("id", sa.Integer, primary_key=True, comment="id comment"), - Column("data", sa.String(20), comment="data % comment"), - Column( - "d2", - sa.String(20), - comment=r"""Comment types type speedily ' " \ '' Fun!""", - ), - Column("d3", sa.String(42), comment="Comment\nwith\rescapes"), - schema=schema, - comment=r"""the test % ' " \ table comment""", - ) - Table( - "no_constraints", - metadata, - Column("data", sa.String(20)), - schema=schema, - comment="no\nconstraints\rhas\fescaped\vcomment", - ) - - if testing.requires.cross_schema_fk_reflection.enabled: - if schema is None: - Table( - "local_table", - metadata, - Column("id", sa.Integer, primary_key=True), - Column("data", sa.String(20)), - Column( - "remote_id", - ForeignKey("%s.remote_table_2.id" % testing.config.test_schema), - ), - test_needs_fk=True, - schema=config.db.dialect.default_schema_name, - ) - else: - Table( - "remote_table", - metadata, - Column("id", sa.Integer, primary_key=True), - Column( - "local_id", - ForeignKey( - "%s.local_table.id" % config.db.dialect.default_schema_name - ), - ), - Column("data", sa.String(20)), - schema=schema, - test_needs_fk=True, - ) - Table( - "remote_table_2", - metadata, - Column("id", sa.Integer, primary_key=True), - Column("data", sa.String(20)), - schema=schema, - test_needs_fk=True, - ) - - if testing.requires.index_reflection.enabled: - Index("users_t_idx", users.c.test1, users.c.test2, unique=True) - Index("users_all_idx", users.c.user_id, users.c.test2, users.c.test1) - - if not schema: - # test_needs_fk is at the moment to force MySQL InnoDB - noncol_idx_test_nopk = Table( - "noncol_idx_test_nopk", - metadata, - Column("q", sa.String(5)), - test_needs_fk=True, - ) - - noncol_idx_test_pk = Table( - "noncol_idx_test_pk", - metadata, - Column("id", sa.Integer, primary_key=True), - Column("q", sa.String(5)), - 
test_needs_fk=True, - ) - - if ( - testing.requires.indexes_with_ascdesc.enabled - and testing.requires.reflect_indexes_with_ascdesc.enabled - ): - Index("noncol_idx_nopk", noncol_idx_test_nopk.c.q.desc()) - Index("noncol_idx_pk", noncol_idx_test_pk.c.q.desc()) - - if testing.requires.view_column_reflection.enabled: - cls.define_views(metadata, schema) - if not schema and testing.requires.temp_table_reflection.enabled: - cls.define_temp_tables(metadata) diff --git a/src/databricks/sqlalchemy/test/overrides/_ctetest.py b/src/databricks/sqlalchemy/test/overrides/_ctetest.py deleted file mode 100644 index 3cdae036..00000000 --- a/src/databricks/sqlalchemy/test/overrides/_ctetest.py +++ /dev/null @@ -1,33 +0,0 @@ -"""The default test setup uses a self-referential foreign key. With our dialect this requires -`use_alter=True` and the fk constraint to be named. So we override this to make the test pass. -""" - -from sqlalchemy.testing.suite import CTETest - -from sqlalchemy.testing.schema import Column -from sqlalchemy.testing.schema import Table -from sqlalchemy import ForeignKey -from sqlalchemy import Integer -from sqlalchemy import String - - -class CTETest(CTETest): # type: ignore - @classmethod - def define_tables(cls, metadata): - Table( - "some_table", - metadata, - Column("id", Integer, primary_key=True), - Column("data", String(50)), - Column( - "parent_id", ForeignKey("some_table.id", name="fk_test", use_alter=True) - ), - ) - - Table( - "some_other_table", - metadata, - Column("id", Integer, primary_key=True), - Column("data", String(50)), - Column("parent_id", Integer), - ) diff --git a/src/databricks/sqlalchemy/test/test_suite.py b/src/databricks/sqlalchemy/test/test_suite.py deleted file mode 100644 index 2b40a432..00000000 --- a/src/databricks/sqlalchemy/test/test_suite.py +++ /dev/null @@ -1,13 +0,0 @@ -""" -The order of these imports is important. Test cases are imported first from SQLAlchemy, -then are overridden by our local skip markers in _regression, _unsupported, and _future. -""" - - -# type: ignore -# fmt: off -from sqlalchemy.testing.suite import * -from databricks.sqlalchemy.test._regression import * -from databricks.sqlalchemy.test._unsupported import * -from databricks.sqlalchemy.test._future import * -from databricks.sqlalchemy.test._extra import TinyIntegerTest, DateTimeTZTestCustom diff --git a/src/databricks/sqlalchemy/test_local/__init__.py b/src/databricks/sqlalchemy/test_local/__init__.py deleted file mode 100644 index eca1cf55..00000000 --- a/src/databricks/sqlalchemy/test_local/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -""" -This module contains tests entirely maintained by Databricks. - -These tests do not rely on SQLAlchemy's custom test runner. 
-""" diff --git a/src/databricks/sqlalchemy/test_local/conftest.py b/src/databricks/sqlalchemy/test_local/conftest.py deleted file mode 100644 index c8b350be..00000000 --- a/src/databricks/sqlalchemy/test_local/conftest.py +++ /dev/null @@ -1,44 +0,0 @@ -import os -import pytest - - -@pytest.fixture(scope="session") -def host(): - return os.getenv("DATABRICKS_SERVER_HOSTNAME") - - -@pytest.fixture(scope="session") -def http_path(): - return os.getenv("DATABRICKS_HTTP_PATH") - - -@pytest.fixture(scope="session") -def access_token(): - return os.getenv("DATABRICKS_TOKEN") - - -@pytest.fixture(scope="session") -def ingestion_user(): - return os.getenv("DATABRICKS_USER") - - -@pytest.fixture(scope="session") -def catalog(): - return os.getenv("DATABRICKS_CATALOG") - - -@pytest.fixture(scope="session") -def schema(): - return os.getenv("DATABRICKS_SCHEMA", "default") - - -@pytest.fixture(scope="session", autouse=True) -def connection_details(host, http_path, access_token, ingestion_user, catalog, schema): - return { - "host": host, - "http_path": http_path, - "access_token": access_token, - "ingestion_user": ingestion_user, - "catalog": catalog, - "schema": schema, - } diff --git a/src/databricks/sqlalchemy/test_local/e2e/MOCK_DATA.xlsx b/src/databricks/sqlalchemy/test_local/e2e/MOCK_DATA.xlsx deleted file mode 100644 index e080689a9d978891664c1848474f64401a453165..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 59837 zcmZU)WmH^2w=LR0kl+L-xCaUD5FA2qPjIJ$HZ<UI-7eob@4b7^ zeLrgNQ9st$YtFT5R@JK7O0sYtZ~y=R5&)2td1TCMIkWiQ6NLo;P~PAA_NG=~maqSL zmc@1|bhDxco(W2BKc(1wN6I#BCeK^NWWX@^)DW@M6N8=ABJ>B&2#AC(pmqE9vfjD; z^2zK*ELKPw?x+A>an}S7NIid5kx8Gs;LohMR4rc~r!`Yi)2CzU5P5 zq9gLw@!{u%vjHedB{*t(g;0{a0=(=9g6toWb1&0Kx zDwRkk^|+7ggny>=KNvV0%Iz6|0RTS20|03647gaafE`?|Ou=7WtgZ6ZmMvCTF+I;T zkvv6Aty4tshm~Lh#jueF#r5kfU9Bg2zuQ+{-4M!@GmDES%wUvEE`rCxrHm8D##ICz z#auYQ6q9tP-*v}eI-mOgJ1rsf5A@bD#=Wbo!~ciC=(6SeUB;e1R3OKt!cP5Ad3H#^ zfjjpWl}rOEf5*I=l^HoacD42Qhi(o*i7!)}1To&o`|hwaj?(#&W0Ew#rZi10uQlL% zKLRYvKe%niZj#mSee&yTrS+Mrw1gh+w2;G)%Mc`PEXzU#bO8|ijWv`K%Y`f&uMET@ zwa?5xlI&uvVZ(@1VLKc`D{Y05C~6Mu^;k}ts=k;YmA@1!wWPg_<_pdjLt|1zVaGc? zAYJ?mnNidgNbO60v(~3U^@CAR8EjFu;8{eIM(o1kK>{Gf4o~G9e4_Bl>1`n%jRFB( z<)Rs5WR9Nr{=p~TlbJ3>o8(U>Ydo1(8B3nA>F_V!VY;~2%R!_b1l!NNkhn@mk5MCB zSH!vQevMl%zt_9opuCe4#nt;gS2*8qaGe~6RU zxsQ==)n0cjeJrKSueee%Yn#lfyh;floR%9Z9xoC8XUL6RJp|F;L%#dou--#%Vr#5q zZwmsm7~9&L{wM6#G0KYFDf*k)5f89q~^DjC7J7Pk1E$674=vWf+`L^)>YUjcF(E?%} z-1S3*Ml)6hgR;I2#}nHv5}(A&?S26ekLu>6^OWXMN}35_W&p?TeWCf%T0w8>Z0RKH zlzU|4#UB*pM{29_DKc?KO?M5_@&NZg$(v|XER1|7?caOD{6EPv1cCm;U3z2{au+L( zq;q?Yi7hsX#E;aky^GmRiQ9E(Nq1tGOgCo{NKFEIV>_S}o2xscfFxo=#@qN$@L3c` zAe&xC-bA{|gWXSGL`)Ei2fXFVekO@Co3f+av#Apr(_%{s+h^D%aLSK~DDG~;{389! 
z`T}9raqqB^1D^08_zI-$oxi1P_xn0`dVMg{(-?(ZzH-`983;0#wJ9-IC^A1SG9Qy& zu%Cq5#v+wEmMgw&B&S{#LCURKSemXj&UVIe6gv)hQ?;4ogwldnlLo9WMm}DQkX($o zqPQ~)Wh3CX6uFVtu+|5!tTO{WSZlh>8m{h6&BN%(B@;^V&U8NB8faA3*McAXuurs4 zJRTV+tvnw@zOiBzx#K}if*S=$^1j%FwjP$4Sv=naDv7^>sMvpgU2a0A)7}&Oi6XF8 z4mxzU|A$sPNj#su6%mRq!;WYa*5Sr#;-M+H4|064$|(jt_FcR~&`e>(Bx8 zN(t5Go~9IdG148jfL^#25f>+WpD>@QCSStS2q^6d@HuV5(pBnSu^e7+v}W+|T`H%4 zhYlgmTh=eqnO8<0H#WBwuMdU@ELEx9ZP#G`VF_xZ3mc#SF`*G=wwXo{*X^W2$iE_O z#TTa;L|)Ydl9d_@{42M8M@KofJbz{Z{9BlLwXMC#y@l_1VS-Na{Cdwr<~*O$_((I- zeGPTb2p$>5Vf(3p%`tZC=V&Co&NX4+X_=*{FWIXU#Q~RIAFyyYt3B&@{m5(-Re0Fg zVW&Scy5|0y-XaKvUr9;TU@cGe!?Vn*LHd%YGp<^D47S4tsG3k~_IR+B;)6H;h~F1g z4OcVvjnjz(7Qt`$rna$6&sE-QwDkY3*HZp<8os?u z?C&u^|M@Pg$9r*eQ~go|hHc!wGLpRg;uM%=(|v`0ga&h_ju&t$lUCdt(l&ITA{Tp+ z(H##k30!7vBK%=71KaUH0WFfp=^p-D*-BRvnL9sR%}zmNcVwp1GM64lQNi4M4A;*> zC>^ z2YZokbMfs8W|Z}rDBAMy*#%w7sxZ!+LNCNVWEj=_^|V0=b2L8lTk9Tz%y-zs#e8gW zgcIcSyg}#rjYdG5t$!FH2wk`8qW2Jxhx+-_ZTnt9YD#^6kPG0AUr(@Mm;7?vOLXhu zjA*4$T?YOP*CO=jN#qwc7_G^xl$OG+7Fqije0FJWZ)JI>UfBfGRX^n7`eEH2>1BJ>y zBQ2}J8$!C+)L~R?Z&ntu?TZ?(A6YgOoQdQl3?sp=7%VwbjX(XCB#Vp73H&KC-^f1H zry=;KAxkU#=Olm{-GLI}pO>oR>NLJ2mD>BF^kb;jDo=Il_J((!Z(mCdeg|_e^T>5C zbwQ06(SjEdrWX;f7g3hbK&v?$z-Kx8Ao7~0MY6P%Y?GRapDQ9>&>h^Oq%9|nXoAPI z7YYkr3n=j|M|2Z7^p8b|j74Zkc$q1V`Ia3vpeUr<0dF^&*GbLL_e#e9@73L2NkFI3 zm0i2shAWXVG#aCUIR}O!=MPm542!Yw5%jC>Ek;cH4V~hmu*4b~N=lF9Y{BKykl9GSxrj&H4~E6M>M35Xv81UT*%y&86WF)K6sTEo?)V-Z6tUM2i zP!R#0mOp+e1{GIFhWOmJoJdv}Xh;}n#2EiE<5F_R{OsSFD(sF=00UWKOwE%7Lt54M z-VdG3Y3ZXZ(YK`LgKOqT5dTKipF{x2?%*ikm$$zMlVFrd<=R0W?11RmO5FcTMKR||}Lzs5Q z@^ng_u8H`7y<`^Ae=Q+&dPi1>$12tasiq=HrW;A79jS9An==mA7xw;b8scY*34pvM z7N81VKJhiDTA1g2;wuMUkDr_{k;iadlY8)Jc1;`Uo6y*!?DKW-l~S+IzD-a8bE%r}Q#B!fHQ{C7&6x8O zE3EK5WYk5>uU~Ll~L>plV_5SIOA<*JqZxZNl5*TR`*aB*` zBpb2gQe?pQ5hh8=zDbW42MKuQW2@JKkIE)Oi7ur{(vW6FqvB}xDK`Y5%&8OA zJ!2YMntunH?*q+|A_IpGMQrom4`roJoGkVQ-|r#3+TZ`6Lj>#ZcZaU%xdkj;IJ+qk z&Tm%*1B;`ErBoR~p8309?u?0ttwcXAe3Of})z4bh00Fl7qFG6SI|rwQx^-uq@1t z=K2pj* zQYtc1s^f@=9|qsxv7N>fi;Yro%a_)iWWP#v#0rbEM^FTYkjee=D1Su1i&jmdK32*< zRw^=9YGc7%KOrL-<$n?NRZ(qqUAs6GiWPS&ZiNJQC|2Byw?J_xxJz*>#U;2GC{TiH zaR>=k+={zH@sszv_|F*gc8%<;u`}mdkA;41>;N?|F)$8(@wU3r`nyz|4!}2qM26F( zXY#3_H-*TCrw{rM&sog&y|C?;ur1nqZ9K4N6|U^s=!wc9m6mki@@ASE20+H<{vQCH z*mZ~TkEZw8xK-GN#Iim-S`$931&{Ww&wq#%w{MYIAM-ja-g zjzs7sm8a#qoMl&DgBpg8Bnem~3DhJBAwvRA9+J~>JV;OTAXAM$R?CbaA+rT59{X8A zy=j>0e7;jrh^m^Kg+zbmhf6-G@p%?XLKaF97D|RRD%z7_Dr17yZ^bT}m|4wf4B!1z zcVB03{KW;%q)G#33o^pUj;1Ue<7`!Xuwp`e@=HPh^2WtYmBa(Q;2}upU_|I(V(4H;)YIQev8a}eaP6&xJ>&OY+hHvE zo+VG)Zd#nGw-{DkC5vV+8;33Z5Z}QF-@!!R!B27?aJn3x{f^KdK`7~+tXtxuY6gP7 zY=f6FN&C+gQqvO1WY{&*JK69MDF@o69SWiy3b7rE@a2WFgQ!TZfd-i)l{-VvCf-i! z9=W?T^)BM4r(SP#Li-^)cUQ4JQB!-v%#>$BW97GyYXHj?!0QUIqjm%}nKB4)Cb(P8 zO)DKEWV&)~YuLXx#>@T<+lUi&)g;V8m|zP;#PnkR4@H%s8A*Yrzj#20` zv#qq|lsG*7S!VjDZRF(ISbkO*j&Ngc(~p$$3KSzpP8(G=nXz&TvTN8e#q@?(8BZoztnkpY!u$oWQR> zgJUeguXsQ4j@M||XYUcx^iVyu?tSG#8XZlxGt0g2TEcC7fpE{fAp;eF7ei2rW!R-M zmsYa~R(0vs?-~*KIlt=e!3Lmzp-1s%O7Nk2O<$Bdo~r=86w^t)(ZlGfWQ`Ek!bHbX(7^20PQ`>!+9zMKs({ zcexNV=PKYW`<_f*rzVfm{{iSdGVn={0RRf3T{i8yUXj-OIab_YsLxtLT9uP7b*qq^ z+@iLS?0>K&*2&AsEISdZVG*iP6RPCmhmYiL>4?e}ri|EpxY>K63+4J%i7jBkUjb-kr1t`>ipq?$~D%qBoj6Ck>OK~=V&-wDcalFC6t+c2izBG%dO ze?s}RaWb^K%5FO3aBw&5(j=QZ=Y81;z@Nc6dC@xijdixDb@oNdjsJA>mh%I~H#(U6 znUxgkMUV;n7k8Vr(49(nm~~4n-U*HXt4l`mWfWA$SVc!xMMqIZXB8S$?BM52&@8gY zT4>WqS0Ly4Wm-pzZ=b7x#A_PmXBqfK)G*7DF^CS>^cBb)0rZRjZYLa-Z)|08Uo0!? 
zw2uJF&a41Dwu>awGI^>J!2;^5@uYC_xdGp)Wy5c*W1S=gNeJfR4NJBtOLiEI+P+91 zN=t^EIR{LgP5O2_P3=MU_EgK${PF2A>q`t$MkZ0goD-v97IPCikQoi=i3Z&Ma)dv7 z4F>FlJfq9=74V9C(O%n2t58L ze7qikT_OwU)$IQz33-GMD^D`v&~N+7Oc(A=Jm-?avB;dkK*dpLjPdvOffGY3NQiYr zh;?F!bw_lkZdqy{SdyVj=IZbnRKyyE#C02~IwYI^BwjPDU{b$wpz`W@p}J5NBQkA#TGJ$aE&Suid`^=g*MVt36$quES=nEQr$1^T^legB zAane^fLE8}`mlIS*tixfUZ`#@r}mOuihgZ+x2QR~UCQoVUei8o)T-%U_|wtv{E&Dk z7MLqT594uvXZ#N|9u6I!guXtqz6swY^?l!U#XN#s%b^yz9Ox!=IydYB{EH4xxg^C{d%Ea zkjQpok9|mZwzuXhmLOS#kSq~M7P5nGm+=^ZFaj;4u-ia8LI}+EZ5lPZ7N?bC3P_xV zH%0=SMzy&V*HQEX`ImrzJ|JKS2>AKMR-F%6K4nmZvcA;%ho+|T794}EF?P=@9>_Lo zy=IU81^XI7UR%C~Wu4`8YItDt?Klnp|6Qrq2>%gKjG#C3_DkJE)P`DBx`{<~^PlEK zLIr!|jY7<DY{0jyg9k1fQVX!-Wp_axxr)v(%Hp7@+N>P4*)+_A?0 zUxt{6MxNtS4p9N0bux0vu)z*Lw#O}Z@a6|+V;l94HYXo#@c8X+^M35%I0{8SHBpt6 zNFizhuGHR&vFVSjB6`DSvsrOsw>%^ zDA`WnER@Ryr&w-|WTT!bwl3ZAb0LW)7zoD2=4pG{@i1JoLP7>xP7-4Y<;UVzmdS_c zx}i>V&{uR&8aimNvZe&&UcUwD-c0?&q|gXzg9NuNX|P<4=0Ao2K1Fo5@aJ@e;6?PL33u4>cE$GAeHMwH2PB$bcn1P^5Y{O}i+U{C25_dgSVF>Q{ThrT{ z(h$aO&u>fa*+lNyV(!_OV2IwAQ)L;hYuP<9)>UqzTHw15&GiQ5^KUd0M zhd5awA>G5n!PVcS1=0aRfW9}zyn*)|eJWo{gU5_vXc&__*~@lC-)Wk$x;VS1vbgXr zoy!%yRvuvEeWLbLF97Khik0h6mIxgwOnP1lbx!+QOhgJ**Q#P+dR7|2U}{0q>NGhm z##Wee)8t!;!!ND^W(atzq6NvKGWW_~1$X+k{HNkAs`^jG%j;mQETv0$t4-5ZR(uO2 zFwRJF)hFRL??e|Exc1V0>)pw3th{4XkxRAr+tU_vrD?`B(4i86s6xLOEx^eFoe3V= zP7n1{{JpViZVd#hp#m+4Sd-CvGhCxAXvEa_xKPUN%W1;STy-)P{>W#cW_pTRriY>R zScjG_lDYD_qOw;zW+t!-{KRinQM|NH@)aj_I60Mn9Z#0c-W+wTytMv_Rbo&@ZyTz? z?|K;WS~hiBZhYi=gH3=$kV5=X^f49lxVZQTr2GUT>}aHcbqo169E-oc=2@+B0z zYI#WCH;1{&BM$rVIvwgZ?*EwOcg12o`4%)%5w_6WOLU?oDH-zGLU^yDb@Yob_0ewqIKOL8Dc1`=kZ&4{Y#g?pCO%x zDzv>Kn{N3un&tbg{J% zd?v-X{B~)(m%}(kaNc#-tHsn8cdq&3Khd!E>ThX-WZCs3cMV!5`DMHE-Xp3B>R$Yw zcn(`3Pv-HRYi+*>V|<*iPBnc^`cLZ3=Rp@7_WK~)8L78{F!8XLWYDYE{dqi;Yl-DCD;-J8N`8NQwOzK(gWkhdEKH*xyR(Z2JTI*_g*GhFVvrGpf8v>V7N zp@lgR;djqS)<4UNYaRR?SSimzyN4`rG|kM+()V*6J74wuXFYW zfwK>?$`mBmRH?FjEyt25df|HMYpf)H$=xnUw=3%ImNJ$@wi_gE^v%Bfiu}7L9UhXb zx$)n*xApYVVQE>&o*t)@-b+k;27Hd59N1Z~U}A#zRK;+()hV0cgXz(e^-cbLed}M@ z5Z9#(3W5s?;R}k80Fx(jCMg8-NCyk|PRD`=bu=slYK^EvtUCvmhGdLY=`c&!slGU) zVEwu)KPvM5v}7YaZ-c-=dW`4m{M(A132ckZGVa4C-F7#aPjK=InUpKogUvDhORTL3 zC*4uv4W9qMP}ZuCqwWIrLh2S%K+S5vIRd=HnG?t}vs+p`UH%z?3QI-BbcD9Be80Gj zjR-w1K;9?_-7G*pwd~IHdjxNwEAJhywo9SSpXeTw3H!g(2CkZWZ;R7BEj$#45c}E- z7N}`S6L=Xbpvo$sDk`8N;RkKqDk>WnU!<9oU=Z?B zL2D~QI}VZZYGwYZ2c*{py6OUv9txb+e7q2vq5XIGHFa>Du63TO~Hz_5X3ym;IP+u0Ry?G>^IF%dcFFU)R6if7sP> zo7tda6;U0x;JN1s2*u20hZ1mCzCS8q{-pcFmgb!JcFxUAfi8F32@cJk|j_XSKA~FG#4Am$&XS4*TSMUm1Mj(FPgd7Ggf?U>lA*eWU6?kU+08_ye?-w~p9 z>D=14(2^7tPizj|xJYJvI;HzHAfb%tkME3m(BStlmzO9=XkqnJ8+Sx2Et8uB!7HZ(I)&di&9}>2D#{DvZuQ6Ak#P3kqur7zzstiwhWBEQ6Ia zq;~6@%gd%b9Q@eHer9Yk6BKN@|lgU3P;q{Zr zWQ`i`+50-4NG6FC>>BB8pUR(@a_8hL@fxaAy`?CD=;~z1izZo+WigBNwt14l;hU-G zvnlxb6l-0PHN)mim9@qyZNhqz5p!UYZV1n{`8bgt$wAz+SWT(gEdNMcfrLvPX0CD( zQ5Jl~OoJZa)e++;mVhOeKrNQwqvF5jPwI`_OmXjaC83AxUq!nSV3P+rSD*<%U9CWs zfI_*&cKX_=Q?s#|mNc3AG?|7pnOs?P@tA2m!4IYo>-d6jywPLCiydHYk}Wg)>?0*6 zPnO&(I=iU?l4#?ZH}tn^w|n1Wx7A|TkF!`;b8w&DwS-k440Pfwk1=yYa}4&4xRm3& z9qYQPDnTdtwRO5Dt@hn2En)`^Dc^;7!Ge;|f>OkS@;n=o*B+ZFw3tJU@tx^C%o@q$ zv}?*&NA=0~s;)OA{Sg^+r0qPVzlGRge4!&_IDI-gd3yNUG}f`ls=t_sMv(39G>{E8 zkHSMQA=P(hdY;AJ8jeskQJY-8;nq`zGQKvi=8Ja}NWc)F8j90a44%)=s=;X)LVNx6XmTmj+yrF&1$6%fL>X!8GI|}d zl-bl*SZXkXyoB(etdd^K8F)WWn-nmg5Mqx51-27)zDta1O#PqQw?_sRe(|2#v2s*h z3Z&2bIS-$J11&hR1?6rk=ZikD;KRseo^u z)=8b!Zv-cVPFLcJMnVy7Z{==uXSax5W0es7*WV<8mV<#A!9;_1`D~CT0Lw3c*Dt^h z?opooAgV+>>P6rhK58I7hV0UG8uWz=VdGL6F;M;^tiWBU%M((r!mZ#D3_JwaAA|We z!1bG8K4JO~yFE?>w>cg0(}i@yQ?HHR&;2&{ktB`^qD>QWAp~xp?u%zcsW@3mwET=~ 
zt#<=AVLRaye(6qa=ygaS@pw|L0(@0Y>TU zhlBK|V(A}myfT0gJs^uN&`TG%6L_=@zXgY4y!vE%o4=oVY2|AK58x?_Y7|v@X<)RC zjuRe+nJN(^zD{SX>osxGZ zG58}UDMyN4I`ANIC-v42Pq=Rbu@n)5gLDix_!R>;l-@8=j-Wpu~YmW zji-0R69BC(t?y9|AMbFzDGDJiz34NXQ!J@9?v3c9k;^v|5oZ(q=Mxb^KMl$U4XCk` zbHXWE*BE$ustyR#l@X@t&fSqa$78guYySJREXbLFG0~D3QJ)ymkQmWw-xaW4yGxa{ z=Xvo@yMiyC8}dzUaKslQ>YdK2hwqM}gR$V6!jRWRj!kCju@|1kI{(v+5-G|eqzfM{ z_`GYq1Ol~*e@a0*ZC0p5+-;PyNGt)B-~UcTh5b*7i-KSsQYD2}VR(E0AJ!GNs_fnx zdBEJlWFAVPHAZR`SqJ z_cP%((f!y|h z+!h-z3UYtjmLq<9$avq}wQ>jfie!Xqpr1YYqkbSmfizPOG>HcMf>x#Mx^!<^P1j zC^4z{U_%ik^Rdp|3TmXoPKGZDRc}JAGnKLFp&uDl6}l@m`giS0AwgvkL1l?SW$*mc zRkxPuUB%{_cE62;dp9wZaxCtEef7iI)$TlYLP)K>eOvS8-v)Fz9jd-MRHZpol{xtV zE=mPkrQ(K}H-DHUa!1nlmm6J4LBe*HMpe5TA_h?deZ*79i>_K`NMWf2uo*&Fs`on& z?da7|)b*(6!poIdRQN@}m+h{KgHfp;;!9%4SrR82Kdw8H*FLE`ovXe&SEV^u^*uG( z`3maj<%28eUmU3@q5T9?NKRe!7+hiE-aQBlzfdr9N@Qa0Sc4=v`5M0hm?8k45&tQ? zJ6}m?&^yFC5!|`k5-U3sE~MjdhkF=8brfCxy9i)y+U)#-9q>YUf4b0=~4&cHqKx8P~%1tED^w%je?TR zj*`vb!`x6SVCWEG?)emav;;ZN<;UD>X>YjrD1LQ0X^w*(i1reVH@r+-+d;bihOk}j z+rr;0!oOJ}f3waptVug1aAHCN%%6%*B&zV?`?{WyX~#mjt_pO&sX$=B9IRYL&*Fw6 zdtoPwa3@P-C#!Hu5;ZRptEJg5ajq(=zEWa@D}*-*kwB&!KNMA1m#OuXg@@fznkbuE zI}ZBRT_MoO3}ku>^mq&0OgLIG5dizkA>EzB@uyRm)uX`WOXs{MUIW6=@&>&|F#KQm z%lL|B5BGP^#(t6zoFag+5>RK%|P?ti^R0;wVpZwdnxivdc_0QKSZUy3j0y(KM_Tf47{M7r|nXt?ND_V&^? zQt51cU%-75I{JO*Q*P^9%vJQf1%+%vg=`Up>`-Ep%Pa{U^7V$Q>fLbG1Nv9Zh;zAJ$Lz^{MATdYTDzDOi(N)C^Zw*N5FrnqSvDvj_`x-US>T8HAjml z;&4P+{D@~4XfI1)71(>R8ANykB2lKSqO!4VeM1_!-HkOoWHkVa8i36m~F&Q3{$bXw+kY4x#Ea%3>L8r>Eq)`mJUPFKuqL#S&w?j!+yB>|<8fch#w zuwnWw;JBV<`)e?gAt^RiER9tD6K8ut`Z;MTBHl@*(n8ykxpTJQLxqLPIlBmr;*dX4t}?Ik+;rx{fj% zWSR$nto=amexRcJ+&5Ft@Se3Vw<}_^%ahf7NRlgKwE{l_*zi1#S0<65u1UI}cffc1 ztz$ZRXL51JWg;78t(#>c!pr;Y|L}jR1xs>jgz-p!@%*w)miY$P=r1GQ7!kTdeH*Ei zB|?;Kr~c9I)92T~ zt88P8=>LT;l-wue+Fpj;=on7@#z8T^v5JR8JAA-R|EvYsS+hUW*L#n9=P$$vrO4oF zO&`W>acZ}vnLqa`)kWxaU0lgjrV{~o1o9=7tbqrZvj=u`G(N81#s=&Q;$u%X-e#?% zN?79^FzRnjerT^0Sts$h%&?_dob;akPt6(#V!$!Pq4|7T?X>0zcGx8d5Z1d+5*$y9Y2<|lymx2jRDk4V;=wK8;z%d(lwQcmZGdEuG(yJl2*U-?n>h7 zr1Vy+8Jxd2idR!-SuxGTf1IqJ zCW(@(@jv=&#Lq-vCDBOM=PTs}K1+{`nzFoy(_s@AG1buw9)z>-z9#omc*#;>_wAK^ zGW!Gk;V(4&;XA`vN5!~uuuFH48JOqqxGB7vO8~EbP^5A5sLkeAmnW>ej*+x!F=~22 zNMvjB<>9O1PiY>)Zn89U z3Zgne$Q5?T+|P8KWwtOSzISIH(erO1)xym88~B$B-KQEaDl0pBF^~CrF-s^$8m~Eo z7I= zL(j@f9)J*papfrX32MraSGc~`4Uv#yl;(eBvsKiIGO zF(M)x4%Ra(P7dt+I(x#@RFWdN!C?d%8`0(K$A@|n)9vC2mMLR4DxBI#+2)ARM6p`* zB{(DY9sry?14o7DXi zbA3Jk=Y`zgHYK3J-@evN&g9b`&;WVMs(#$!gzk=cz#8GenowL1$wyG zKVxsELhG_yvKBgvYL1p5(>OaxKAmw5->n)$9y2Fn%56Y;)W?PzgMBJZNn~N98cBp2 zFhULdJxfmNY^Jl{@0ls*BbTwb6NS7Cb}w11MPV0G!QHe2S0Er5GO=eRT}s-(EVp8O zXkH14_KT19ON{nw%Un5*XbCr25dnEJ<+>pMQPnuB9ohYuF#t->ekv^dl^JeUlv=6M zOhV{WPvr+m@{3RMOHA@x_xk?aV<5Zid3B;y0sGoBMbO#~<2BX!qrJ*TDIeFY?x!LG zOtLQs$6 z2O|^sK~ntUQ~VNB5WCj74@1o}$I+o~a;lt{qB2G!W%|}Gm{0cIOS4PCyrwkdD!mIE zoYWQ-(dASsX;})kEajCP_FyBkyLeF;+1N##`SV%t&A zj1nBO#kS`dd9t0?_IACU+8Ray#!`U_Qh|z6fu_CBT#HQYqzFM((|*Y`x3pO~`pQ$9 z!A@b85Z3_P+Q*RkEfVzY4IY8_ox-Nf2m7rqIpwg-^3%-nx^}XaX&eL6kVpRI%X5r@ z*ujk;q30zsZd;&JN;qcGTd5#-{|_a+91zfbJm@|Vbl(=xxgQJY7D8J$Xmpr2ogn=- z@T;Jm{WP+0Eh$4GYr8I8U0NGQe(~*ud7dBTHy5#Tm~i>2aCzOs9G&%6Tpi*e0cpr2 zU80^BpDXD_k0ObNZ`{z!Ze@wkuoxKH#zJYFD=`oPf}+5XVFlL+TD zEb+;)!l9Ml7ifOUf*DEplcxnS$%zw@bSpQT2S9ws!`I}8S2TwK=j_%}tCH9;BuJ0C z@%WDBzqlF4oMpV{9v!U?Bc#kSS3-<;1obvA3}XCbY3I8r-gQw3cTt2?!z{M)C7I4( zWlm$EQhg}4Zk_e*r@phQ+uKn-syG{3Xx31BiXg`e0v^i1#_-nkjp=#QXx+I7?D2O^F zh`Q1dv=5!ZTFXXSA!ZG-#A+$a?OjQFG{UcrlsM>=ld~JG5Zgcp$P`SzVi-f^q6(HRxD8+i#;;DO}6# zA0*U@UZW3sO@z2j^teq(ll{b3sq~P{M=8izJPX_=@ZpN#Lanjjq?9u=3~}}Aja_^_ 
zjptT#ANYyNria{So!q7%Zmuk|>@VY%tRulwC7To$2HLQ%lo>75l7go*(!gK+(+Qk@ z@#@bAy&>~P3QqQcL+o)5@@7uxW)5--izCz*?C@yE{VBs@kZ6qot^Q$!$VA~zcfy=C z7QM4pf#>g@Z)`>+XZ{~|&+I6OTV%W&MxtBSKVol_v8R|-ddKHg31Zm}WFt=@Nc^QS zQSRMo(g$s+N41CoRA*nJ_olA=ATRR86AJzl3egjagEu!#B0wD>UY-C+#)7pRGu!NF zr2}1&;x8$%r3^U-eWv_hD3Raf@XP`yp5>a1D#;QlU5zE>6eQ&oCFL@O$Nq#WEI*7* z5R*YdMP!q%6S#HYs#LfoQY(reg1WslO+gJW&&D^42Ne7V6ru+d2OKwLP75~SRA|lz zMNVok2>AkICT02B9)El1gpudtG|9VU@e!F??m%ztxDTVSDc%gZC26@qX}KY3xf0hW z`JW8ZK?s$^00h3k_{+h?(Qs?7==j&6`oBXJK1CPt&DoSweQXmE;oU@TNnD`xDjninafw+^%|j`dC}0nJvx+c{B~dyoiOLO%$_*Hh|5{K&$`dpUgNN z)!LWb8ioV}S62Bl?`+~_hyM|R0Pj9?(Z+f(NG)N?+3{hXN`>#~6MLI2=n>*N5#mx1 z;?fY}_C9DllapyShUq(24{$TW+R?dh?8X&9s4|#634-&S6-3REK?b_(w)UTqX=E*T#*y6x$KQ^EaM# zZx&W5_*N-IRw)YKFSrev2_t%D?+j|N9CFWvl9i!D+C7}S;0;?JyKk6dzTemW1C^Lz zQdl%0Y?u&+wd|KD$NNP9^$H4dE8h`3PP zR;N5d%e-%vd4$7h+=G9?bxK__N4pkBl$H6^Vdc{RQ`*ahwUMOTUg=Ru^km&A=F)a+ zcraVM<7GVgWjys|Jm}?K|Ekl~1aH_|zPo%BvoE&x_b#HDKwhd`QOGsGXKZUXe;hck z4DL4zh9R)%df0G1OsTm(DM`Y7jUx0kt{Mcw8HSj_8^%uKbSCqyH_6DZ4a@^kQl>^S z-tIc0cPTnyCXiz$P-7;5&~v=!`Z6je0>RCAOzlqdj3Do}`Y$iM%S7?aFgH!#VdFL( zc@zjU$1szSl*?w?{-cRNZ6-i-Lw@!E=8Z7w2bkg9g`kH*c;{0#L`H0g=K-3@WXMkCQ={CVbBm2DMq%j)!_I-MAaRhs`1a^8(_>#lsnRl6E%&Qi5R3)We~d*v&*yG|FCxDJ_|c_bEO+us&ypz$o;DM> zd3}|S50~_pe};V9%4E7T(NgiKBB{G6#IiJ&v{aC^RFt$_={i{i2fB94tmjy#_SJ-! z(KiHS%A0o`pji%wVyNk-xTcM#Xh`H;eO>l~X1YP=-Jl;cYTLcufTvN;j$MzjpRX@T z>#c9mGUkk*V0jH1zD>?3QxQvALsII=GG&s*4(_r-@4~XRMlN8s3-AO z!}TtD)H?jzuPi(sv0Vyv7l)BcK+}6!M}M1fADbf|o7W{yW_nb<<t<&EFQABF_1tyZk&d z4WuAjzaXJ*c`JVtIL9#W_L&^s4QC#aE4X=Uo(G0MX>Xxo)2C$fSIMSmY%X6-OoRV3 zpG;Ttl0e_Ar1Y-SU7!BvE~?04{T0&ZH`;Jdp18{Rv)IJp>m8XG?z~q_c`=Bp{cIi? zW7Lo1VPf;*k#$gfRr4ylN_=&mqwSiDS4|K2K~{=f{ow-504xJ3`paq7GJ4g7U~VEN z;r)|vw71J2IC1*yH^hjx7FS`PNOrH`S^JLIBCL3?-@@E=4cUxpL$!m`DoIo453=4J zWCQUc~4Tj+QiA0VR!yVXQa$jVr1Z%U1BA^=NzrIpg$HF*S@{SR{}2hw}Xr!TSLy(vAjc z*LI7lI_;`b?5a}lB3_i8w{Cx@e;_PEfV>5CyEthb0z;BJPyjoQXSItoHMib6Vwqa?sW#XTY@Q%I!m6(jw_W|)3^KvmSVM!6Q3H4es`ZeiqI zZfiQNs$&k#+o($JTz011N~TRQbjLjPiF+!SE1UsESxl;(kY+aGQZL?pMA9T7`qUNPQM1R&Mv9MyKwRrg7Du3 zPM+}Oee8byx};_oLF}xkX?nEyy42F8l_Q}2`2RusdzHIvC4nfe{`=m)e;hxqQulnh zRyBBd?AqtJ8^I=5Hv80byzBQgBROmQ@b&=f`s4OidiMN}-tWI>(qXCJU^Cxf#2tT1Za(!5q6_E}Hsl~1XaGJ6 z+=-9n{Jk5AC2-L_P>L0*aLj?DY6OGV?gc#TIc-MSY!29LurI#sCqaHDA8xgmWfsp| z1sLcdHAmUBwZY;hj*NU=p6wsdtOTofJ9uyP>-~Z<7cR2+F0w=~?*8h;Pbxumk(z0` zE>~m_xUfEs5zUKysQbIrH!jjCuS)li->aDR1B|5r3Q_HhGsx4bpWTHoyPQ z+{>W{7zY^30Tko_igEzc!PS@p{GOU*qh>_f2V=wiq1SeR^NpIwDaFv;%jg2^lak0u zL6Z&0VS)OgS~aQNsffa|D9f^l0ywI**MKrySN=yFiXWky5E500 zw919swv)S}W=Db&+e_9D1YG%-6aj;ZfFVVIEBaz4YQ8OAk5pWJSG+8@Q;$C4rY*V~ z_kusB_{;ZsbS^)nxP){~B^|@fKGNbBDN(l-T@WS*6VBHu=G`vsE8#YWWQ(Y?p;Thc^_O3pcpwK&xB|_wH}zH@pIYMlaKf=0UL!dA!BR z;AlA15ooXhN3S=%Lo-;rbT7zAj>^Bc4kmRX#$^jmV4Nz(>sv@!Jq;b9ML@-Ah}AmE zh}Bs~jsl0ceUc>G&FB2*5;=q>p7OqCdk5!hkb?g1WMcm_%2cC8ZKD&v)+(KFx`kfv zQqZ%O`q{T>fj%+$_dz@rcDVjUiSQGUWi?$e>yBVC`@l335G;7A#aFaN`amo`@E=y} zH>b9nzZF4i!j~v-E;#+I^R4{ZgNhXI-1nbplM8`QXgQvePo8#FG9B5GFRwpr$rSN; z24KszO>5?8Dd35A=Zxmg(T^4t9qaH{i;yq6(uTy{u6S3TqfRZ z1xvZ7dZ*Sk>Am-(_|p4c_sYb@jIahE_OG@^;V^jdU`&G#le1}N1pk}2&h!QICkR&F zc!GcoPlls>xb68Lt{mP)+XnF1;lWnOe`JAkKbPS7J$5Vo+CNNLRvi?!-^9&;qs9YmL&2 zW}a~-lggneK$Og;Q8bv$47uQ2Dh1x~jCsS$lQ9XIz!s#`FTT_-vDB|eJEt(;JL!m! 
z++rGGcJo-;P#Lf!8OoG}K%nW;!E~w{P*^0+O_Hrd-DK^zrLlsrK6;`v8Bh^rnHVWj#=g!(W-eGM{5 zyS~qTSP%Zd9VUt9`et3@&OfxAt5zK1hxi>l+>#mrY2A#@wq_uU_Lz|Z__&gGvl5Ib z9((VMa93&69v9RlMcX)G53tPlr@Sg2^|QH^EXKsu=V^!696?}PXcQBSR40j02S%u) zVbJKZ`L!D;zuxZ4w{BVEvHSaVHe-I|nB727{&FG1FT(_9WhVQ)7IA7l^Jhh{QxUCY z5!kYb_UDLvh+4wf*YJ{u-?$15G#5Vc&qzp_S~LO-}hxX8CSr zIp&m8PM0H&A%&9=VqTQVaph_U=b+O6eL zF))Dk?*BkZdV^;2sGD!WpwM|K%EEbSlrc<3sHD`(73kKyKJU$>@Hz=iAA1514%;20 z_AP7Iah~L6UKgUA%}*)osflH~!E|?nfmq_I@3ZA31#G6F6TH@wh2ORe@h&{WubsXq zR~D7F#Xms2?&H1g6TR-+TvnP4Hae(7s5qSbE51_MA>~|T33_$`F3nY#Xcxokw|<7_ zObQ#Fu)PT`-G<9|)nrK)XLTW});K5fIUrjEC170!;%tb2nHc3A*pog@if~X_S zd+QUqua=Bn?2aw$C1qa>IGf?VBoa6%5;!ChX!?6q2_H`QuSsxV-ZVlYZh{o?e2b6% z({mZV$_6(v1%ob32IDeE+PZH)ib_1#w_Xl6Gtbz(o$-DIVAP8ESImm6d@$F+67~TA`#=o)05KC zQ>k8Iya5GjYr+fVSI8X*2t0QdYHtQiyS({3`BO;xx;oju2PqaX#{9e&}XCvX2Kl*be#3W4b9E z*yITcZmZv^>-dq=ApCl_e!jO3h!pAg z|D{N`C3KcEes@emc-1H#B3(>7r;+xPp0j9rY(}{uQDFZubPkelXMx6ZVYGAic3@uJiY{V2FyakG?Zw0yC~u}E)vAQ z-`6oyqC-+J;{O+DLeDU#ioVq37(TpK9n0$$)8kG*CBD`8U81%vQUw7^hYx#DEA3J5 zzB5us>d<__XkrF_dJA-Y3xrD?&H1LkvM~;M)YT2B`+&CU{XW~l?C67Nmnq5PXB=_` z;}Or4suuVdsS%tgkfHe)@aYKPdIW$oA6WvF$71k;3cFk_nQOKRtSA4e=A7PDO2(A$ zAh51T)N9qigLoq50tzkS#lIB%e<_6jBF@0e35b;Hv8(U|kxy%Q8FYOJ*z3|+25Q;G zTXy-F=gZ-DM?*%g8ZWxu@M#2!y&G6mXC@H^35$#mi%blQY)cMojmKR|APF@;ynBc> z*GCdj$FUFE((x76Z1}!Y?5{lZrLdC4&0ibvQ+Uma^7ab%oZ$9pP3e289 zf>!*Cp}VB?yDkfJJB>sa(mgPAHw+;;ApLth-}jt%t^40+&6+3fz3*#( z_O<2u_~i_1+X>A_I;dBe&UHCBi0wD-2>*25w!3RQLzRJT&_U)ZD_DaMSXZGr<%!IB zi8*A7fs4k6ES;-zG7mZFxFu3V_g5Vvk79#R6+v|EMb+3)^}cn&hfpn4V( zhhyW0;c>&Hi_V$*nuFPByIEZtPXaA5YFn3QeOluQK@7viHouIy()jYF{gd_oMW<5tseVn!JnIkZ#gB+=rJ}! zFzO*f`XS&5eW~4yBzC$4y`JE6Z| z)W2YKzhIqWBZJ5Lk5(nq^wnxfosNH>oxeS*@yOER`T96`eGKx_JQed>=KdJjWlh5M z7HBRJ#Fq#XNdyJFo9a7xNoRz9dgz|w>TOOb?b?F9MCGBIzP*(u*7q-kubO_N*NRMz zT`xD@P^!07gjp&^TPij~eUi6=NCb-`7T(=5e2(nju$8%N>2XuDwjiZeCJ^cWshT9F zx@VMGa&;5Ol|sgv633cC_F~S7=dImwYz;C7+qjc4XX&K(&rs_uQ4)A+&h}d3VVCIs zTC3ZII|qZ}+-6a~Oo`)v*Tz*C9u2q3kZE0_EAwP2wL70sR)wCCw^mMW>hZ$p>@xEB zDFQlDcbbgOS*s1fCI(LiY9RK5ZjE;}f~92BD+6q{XqZ19n-q_J6{;VYXqWOJF6fFJa}g zY@6rG?h#~{PNCjH5oVznZK24-SC0&BUuDpOl3qufJd=n+J3t*{9qr#bKnOe|*+TE8 zrXTfth%xXe)mjIwvDasSEz-e`|B{nM##=weorjRD4gcG=@@rXktw->quBauUtEOG< zb(KfF!(1ZfD@L=1h%vm&#K)&|WZEla+9zb{TxQiRZ{hFTSgSgqM7#Kf zA$*bXtQ2&uW7qB8ajNrtHIYBep*h!*Bc(Z_uimZ~X4e~S*W2vw1J-dvUX2J#cO>TV zP2}Kz8bgq`+U?w}r7XXumVm``uCPw&He zK+(I<$5nD)ij&t99)!@A-4e&n+ix0>o1b1o+r|e(j-B&gf21gd_143BVX)q4Snq1p z_FTF(>lK;5R6<%@DLtM_E3vxa$K_ogk~EvVU$yWc7C8UFdzZpBP?s85FS^W8$u!;cVOaN^Kj;J;@4byZsG<74nK}pibVaZrZ1A>^*ol=O;iCb=(e$wX>{DV(3Lb{|xNS+LDLJlp1O$ z-q^=Z(%goeET!0@cBYdOZ%taPem7dJH(KQ`%yVtamSUR_tYwqglaPQLrFOm}IhM8Z zNO>bD?K1K`@6KhzT)D2#!n@y325U}sYfj2vzS&TR?Fel#% zxjr28R5x~h(1W{t)7sgV@T{Lma@1EPS5zfeQYAMWY!^HX(wGIs`qlQda?{Pd`W(56 z1l}^^vu1N^S>BL%|-Q0PXv!o zbdOKS>sD7jN1Tmpku`BNlV7s6Dy+VH>~VbR%Z~36bU)aT<*%=7Q?RcuRU^ODm|q`M z2n%`^9rUlXT*Mb0>$m3{YE?e#n)m@ zV9bK~O97Xc0@#&Oq1KQ&x_Tm6DXN;z(}75TI&}h1JPVHIcO&)-xsjj!wrDjvzn<5o zZ+pJY2$?Ki;7;Y|P8H=&-BY|M5UK9d0g`D(;t6Zw{wgE5++Uu()xez1FiaBFJbedJ zu33(5I>_5Gtq*jU+YGf+l zXdh+26W0b5i3M5hURmuvS#3ZvbM6^Erjqx+^~_8tjN8*eYfR2W%KfPG!s&u=io|O0 z=qayg%}S_~G(&SZOmj+1b2#jUOKqQ-VWXWd-0Ht+vqx{1TA7vm#yG4YxMOjl!TbdC z4wKyh_pvhl<6NCqBji@57T{Oi`yapRrEWpg>ShPx*%Sr9`U! 
zY8SWE6IQ^KAff!Kj7JXrO%s<2f`6WW&t((5AP@e}Q~0;Htm!*#9nA=a-M;hgIDP2W z4}QNCB@uts{%wTu9s}SKl(rd%FH*{7b)>=i(qN#;NJ$zTo7+Mg*}PUb19%)oUg`TA z>!FEwN#=yU$kIDp2NAL=(N1;}y?^7(dHmzgdwzX!u%b9vNgNzo^o5}%fD5ZbW)~L` z8Tt|}BUD*ZY`xKw(U{fEQ@%r)g(<(}T4=bG1BPi#q53_)USwx3fG{?4CrA-{)C`kMA z8iG&S>d-#YQJup#leV=YXq|Zn?c^R170AfA8}0|t_~f(92nik!06c1t5HJ-Na1s|l zBph76*}ZI+*}SM;QQ?HC5l;m&{5ks=k)K)iiO@p=7hicIYtj13LHY%@uEqVI+VjuuD$NrAlb>4h*l`5eTf@1AcHHEJuuNVe1>$WXfK|n zOy#3Y6`@QGpw;MF7PKL4C@}E3x5bOquFDP;%u;hf3>A?75_YV0v0aW|nLiI`XgZlc z9o@1^|MY+m19Z--gtXIsDJz-H7RBO`p9*GfAK-GhM&mgSn}_%;J4b`=_q~LWz_OEz z1NW9aP~}5OCXZB4@y_Q@s(21Zkq&*+vGXj|CBU6QoHd17Jf8`WWwi>`3q!GwWYx3B zN0hyvik@kwcb<{nKuHmuZvmN;uhNwGY#Ot?FMm(>J;4Ido6jYE&_mTU%TDD0u6Rt^Ag5yY2*5q0vYaqORU2+ z^d^6`J$G>&r98rti8;t_nFjuzRR-|)Y+gkqj_idl+9Tb(O|Q62%Jv>f!miA}#e5kT zK8?VzUcEkhs;(QGn1|53-mv;;C zBce8#VB|eqWBw^_o77!F6nF%s(J~)16Hf1}D*)UVg|n@q#&G^bMfaqz*_8;Vk>^V+ z)%#TE_Mj-DL8f3CT$g~o7Z7h7In&N!95A9qeFb;oIE!EIdK&6#N=CusO+dl8P$fGpP@Mg(mU*2elwPrg-mmw0DGxIQm zVIw1RBZHx_+9twrwyGfmaJ&$>#UZl~{TNUY%IE>UX-nDh^e`}NZ0Qgah!sa@axHq? zUFZg=ZK~I z_zlzrkXySQg8P(l40TkwsSA@f>gb?${*?1;g5K<2Q76r(H6>XU=N zFr(dnl&bgst|R|`2D@BNq6zr0mbK6liS3czk=tH{W6le561{Q~eR2|pgT7p%96-3g zVLvBRT)mSvqu)w0BEBf2r-(h*W6P=3vd+r(jk~T7x`qXL{>x$}A3}pL{(P|ss-a#yf>3iU7Ei|damut(nCny~`sj=KD2n+gi2=y$@;h%G{O+Z?kknxO-z2NQ z#St!v>V19RZEik2<=X3%*zo^GjLeB>esuyxor6&4^r&+e%$q#+g}|2Ud0;!5Js){@ zCSs50TbzY#{!67=2e)5;PiG(hD2c0uqAuI}VBs1GzD9~&BZHhjYBPA8VXYQ&HGijY zre8`Bs$br+&<(r0g7Y?bp86TWesW<6n=?FHzGZyTQ>wJ|UM)*gZADYfShuo8_GL>H zyrG7-`cJJw{u~#?aJw)$>Eg;yQdwjR!tRlYxs~3qCL?S9`236T%6~6pTr||m!Sa0` z{TdvZ`_mNNQ2GYDoO4l+;QsM7I(N3^cz#? z>{~ORFD}i_Z_W()l9EHmh3BbC)C!r~Sh5}}eQVbjej~xZkz&7*n0DebQO<27aP_Te z*pj{E!}(V~gHC8E4~utQG~{mtd*yK|0gmLa1LNf=&^-|M0#{}#XOpvA6(yY-t#0$A6I*un`6xfS7=O2#F77>u*B zG`zAj!m>2Gz8BEq7rqGM&Jnnf*nO7!e@Znob@Md1QLMgM7`Dsc>oQ(>A%y^FKDuds z0I(`7?dBtRm}5F0SX;E^H4iZEydKc>HJ|5NO;jSE%>F&J3sH%xJn)}NK;^s(j!&|%Z=qRNlX?(8K6*Vl=9<5(-4YCnEd?kkrmD-!;xD)8y8 zfc;wmG&{kxuG@^BqY?DxlE|8O-E>vjS6DNBt#pywydz@CHI2qtc>@76O&%!XBn#oa zaFtJAAe|7%Dg=V&2Z0LAJwWxJ%!^!(N6N7ar?4-#X?8c~-QhZ{61 zqfiULv_dj?CmCFtstRzqBmedo{mK%dN%VwyGy7~1^{Ar7;1A4SF>XgNcI=fzfA0Ge z0PPk2`9B#I`#{z5gVO@jaT21Ns!;mVIO)erk0wrCo2_#(fnYkzPibe~h4f51Jdx+p z3-uu0dXR8E$lqq6KDi*M(luySUBRVMaAJ?>6x#BPaTnMeIlm*6>5Qg2wp<$8dp zGwtjkh<6YqJP6v&M3qxt;aK3^N~in{Iu^(KxwAoTkac?P$U6mpqRPX#7`Rs3na8Pa zr$BEvf#&QOA@?7>5wnjUASf5UHr$TmuS!yFT>b68V+KG};icK@%2qL{ur-`K_BEWR zP=XSmBSU%8=QD)Bq94y<7tbQB+O~(E3GZ(5JT!M>M?|{pPwzDA)b`PErf+DCyDfnt zV!h(~nUEz7mlZ{dS2gRmf|5V4WduHJ!%6bk!KiBC(UWlPF z>tDil@pR1o6Jm|R2AUr3cM9(zHx~VWEq4D}6m!mGxj1b*`3T}I88t6`s~=D@46U33 z8)~cPe73nbW#*z*$=ns)xkBD3(VkEL{1wL899r48Q#5!QTz+{A2AD z?Q$;1o@~|a+uGx*R8SueRFJwfw_Oc?>F0W!u~fMQ3AtVgxjqRw!@k$y!B!PPE4^!u z%OIIBG=t16O*QRK2L+Z|ml#pZJ*MIN=UZ~U zu6=MlanN0Ki;j+rpI$*DBPvXK@33*-t%T>R#rCpvmEWX!^68c8 z!xUj*iqT<;%?Wq>qbsJrU(O6XP$<>VRyY^+_8Ii0^*x+MWsRX{y?u})zU3^Suz$A^ z9$|$b=W@}Ev0;m%W{aX@i*gq8`VtJB%iB_xGK|O38#%*l0=-L!S$eJ#+_L=7kdhD6 zKMf9_Q7F|%D8eEXqazfh6;y{3kAQhNOD~VLmW2I{NQCMioh|Xw(VBY^D}KWyei}b9 zJTvrdeILDnUciJ-z>!X1Lg7G;5C2jY&HGFKg=(1iAIjSBQ=_O?f3N1fUUq)-P*fpV zOYqepn>z*Zoq|M8K}d#+EZj9TDtkWxtrZJ-Ny(QG@*Tvnw%w@mXL;Ul;%zS6dEe%c z5icNRC0TLMOfu8VIMYnB#@2%o63Kbd%ck8DHX=q_dZ?OE{hNE+N|w=MU8xhSr)=dK zduyVyUq7(ui?Aw+uquhLW=AV5NS^jo3dLq@C?5T~Y7DxrN02Y(UzG?CKkVn^d?6h3Do_eGhmsjq@ zN1tI9{|AZGV_ZlCffGSuiJ<-8r~wKa-0wsf36~rG2ewCjX+B2Lg~CE47Q}8s=ihJJ zuJ(r1qzmU>6y*L!pBjCSX}51NylZi=Yw@FOMz@Bu(23Q@C2*lP7qp-AWFiDj{4fi- zeSGxrq*Y5AedlY#es;lT)ch0fXn@ED;YQKpM!A3%PCayKyM5Y5`N*%UY{?8%$DFlS 
z_``4z-zf$nM>(|}(3KQIIHdRnIzO7&q(O$0AqUA2ShW>T-p}yyUp$2jie1K{n6`RZQ@SO^HX3J6SR!1RP1;(!2$m^ev^ zX;PzH3KYD&s<=9dAJ4ThH$vw6pKVu8u^TQElvu~la;Nfg(}-~c8_1%Jjh_?`-z`4L zY-L{|)QYtqmjT{_v}+aFzU+bzv<0t8eRWMI`0#>J-L5Y2)Mw9I-G)z_yO&WabCP;oaej2w^23a`H%AQ!}IbVHC`W( zSoDpR6Iooo^GhcYYW!Dpqan!?$D$lw}@4r$i6Ru<4HyxOf#dIn2+7M=8L z_2TriS!9PD>0k5NN2+OLA_N>cs#)quCjok`7fK2Vose~F;>N9X&h(M^iI3-qq;5e( zFdw3u4}q0p#eATT)_7<90PerPOuHDg5V9pRdXJqmV!vsd9?XV5}BG$~60FFd&-Cvx2~5InP80?l4}I)x zisKXyTHDl7+_5-}v64`Ke?8rE0S`Wbcb~xJF9F%Fz#BZC67p`eIJ%ukCc> zL7CAgxelRJutc4LfVsSYgFKMME@t`6(5z+9p-EB3NadS3`KMm= z3MTov4Pu9APrFHLaZ*aUx0l*&cyZ){no$D zER{A**Sr1>G4HzuyC9A#=%M?&o2oVmp8xG{E>Y(rVD2s8;w>;$2XlqV4!azyBNFT0 zeWd-lfqGbDSbMJ;zR9t&Z-KA1%(Z z0{RRJJnKCcx*ws++$e_$iNnEAE?D2V(swGb)XT5;DTMhHM*9>te{Eai!qf00ggTHW zKF)Z_Kw0ZC%=0c;L7W@6D~y(+v**0mLzRnNyzLljtztR&+H&u;Whw0}P_lh9W{a*Y z2Gu?RDTtRkmJ&R`*lKvfDmW72R)VFAhD={W2##-$W9D0C=A&XhLqmS_66Fq zTrK$d`@R?)DS+axabRaJRIm@x`gV-o&~IV&H$3_DbHUaDsJ2;umSXib8ty944{R1u zhcc>y7***RRe!apfm7cI1EV_h5hNpSlEY=%yoY~d_ie-^e_h^p=-C+gu;TYWH5mW=M^xcZR}1h% z>HUu%ieZ2I2pK5=#Q5!Eu6%Zg?5trvl}vUE>-rpC@4SnFGqGO&9qGyOQ?;A}TvsS- zp#tfB4cU9W8>!CMaW1|UU9Rzg_@^lTp$%>$X{r2q>!XoW7R6&IrQADWwyKx?efD-q8Gj%~M z2KDPmq2hp>cP$e!#jm4j92S(py~^M|WpJ#+g0mP{naUY$neZ_DP^U#+ z%;S9S;Q6SakH~nIGb}>AD)3I|Z~L{b?)WER==Q$Z0hdc-phs4PyyQoHt>)V(@+&`O zB!FM`QWid;c$DM(|eKGX=_C)`b6l$H*2QLMb!A>W}la#XN2fhrxu=< zHW6Rw`%y?SDHM;J= zN+YkgPNqZ>WuX%xetekv15<3eBIBR#_^Mb-;Itf=VRC!=;pa+e*g_Q8-B>%jsF|kQ zf*C?5GM%jti7gLwPWZupxXAV~)HUCbC&Ld$ZUsNBq3-L&yHddO6+2IW^JC?p{`4*j zT?tH@h>FrE-V-b2Q;m|py z$nyIBo+GX**6sifIen{y?W8*y>mKc$ua26HZU&gzF^AlWFY}dhJz1Sh$NN7-wfbF7 zef8isrfTZwg*AM6bVoB52Q1%=n#|TF>Lw8Qdd~nDx2fzOj4Mk#VHk<*rUEZ8#oHD} zVtOSQ%Y$|c3{{90jc2SWuDm4(^R$>0o1dICl zW&9T~68%`$M?YB7$nL-=OK52=_a#rI7k5PC%j~k3(xcfzcF_qG0>>jwf^~b3NHVL2{BTQ^ z;=b3#JFkmz?cU7YLb}^%L%TX9`DD#JKEQ9Z+rJPW{scRoND0KBD&Y4o)b|vDDt%)- z>%BZ-UY^ljp3M
8FQw81_MY4=P}IJ#Iy=ITwyy{ay|K>Ec?nku7uqj54Z9ZZZZPnn~!tBnc%op*GbvS$!ps4pf7 zQPOo{90@-1X(m@L18Z(3u(-|KOzpPkMndVc5lx#bXiO8y*lit~H=h@+$o!VrIM&&T z{9jO&mjuSO8&|!WG!>Y15|~7MJxD6}#>Hs&DJTEcNdH~SJWD-C*6I!JJf~KBB(;-9 zk8Vc=nt(a41!pOBfuGwv_Z}bjo(T6|Kub^G`{d14Pv_;i2H7LaZdV{^+>7GgT?^a6 z6TnpGBut|&OgWP^3G*+8X{n4mm``m8P}Bq{Is%k4>x~O(1SRXSsY8Ob0_%BuTh--? zleQsyoyyCokdTS@&KgZ&6S*bSXj@U%bK~K5L&anL#XJ2#v~qa9HXo{f~d1A&dF6=82!7f{(s}kD`K)l7i3DfFuQL zno^a_>Ws-E!b6zRJwrlG{rE1%+qzj>Y;3MNQ|jrMCvPtQKKh$q9jIYV@Fe+4qE=R% z)=Hd~@l3UlmLe-g2cbFFs~561e3HNVTzprYrn3 z*2i7L;;y6Pt~+0+wg0$MM&kL@c$sm|+mX4y2ot~dFovU>jx!AZY}HJ1iiXFY{!m&{ znBgoX{A)MuzM$#TtLf9H=@SdnAB$`y^~|`PI=`DeR_7v%EYI6%rp_81(;sd@g`>u zjSl731ieDZ6DS`F#D}8iLm6`v`g=*GJxN0Y%!_n+cKJrtQ5uwG-}bkiaQ*D~COVan z&T-59S^;Er7bfcql6zpuJu%6>eZ6{m?p&IH$E`8mnpE97J%kM8FPuHv58@Rz6LfR^ z^VFgx+xT+zkk&J~Pk$;1e3GAZ`NM}PF>41`pW&t=8TZ~jF9e(&>3}Iw;TM5&R>e8>!YwX*5;kBOiW8_ z8n@OfdV#RU5X@)`@GF;(Me#iaz4Px!{53~9HAm3Qrc!ai_)l_4Z}!a}cM%yyF7xbG z4c{P}#o1p3hjoauQpMhR!ESVJ~UBE3Crvw z_iokTmnwWw8acojCFG1EM)zZF1dzN(nsk+}cWz+{7tDD*$V-u%G5 zJH83!%h2^=p01o$mevYbFQ{g5dHQ~3h%JCC5uG(LfHe`_bH8cf5ywma#ngwB66&1{ zY(__V+cFUYEXK=XaS9q`0`Xm@)McYA?P`N;Kdbpv5N|3-I2E)zkNTp?@bzaVY3|OfN{pE`EEqS_V7vu(_%dY2L{23hTt^2jochBr(a!u zL|YpE5@fLATieW7nwH$FADD(Q_UFWLqPl?Akh#I zMwd;YURV?Hz4f)yXLQ`nE8t1@c_<*aART&q=qd1M`=;?3IVh(^KfA>)`#&|dA-9y! z?W`Sg^HOtwRbjAT z;%(-)PsGykYAO6>LdQ0`y^C*QjG=8i)zWRpP-w^}UXef2LacrYb$8oQ5vfab^A#Ze z3Xo_8Xm1_mYV$#9MH&vf`{;fDXmt)fbJsM>E$QN|Va`@|$Gj*yvS2PE^|5B0NL`%C zmhyu2Q|%HvTjpR62u}pFpiiaPvIde&QAg!^+D$guzJ9D zU&FW}xZW!S<`oj{6|#!G`{7)IN5=oQX;s^>9#8xrTs-r%L}aW*;ZPC-mcOKgHmP4} z;)zc4ji7`X7%6O43wEvrPh#5`84!zYmtGE&!YSgP;NIk{U^;mlJmk*-S%W7($9M)QWaX;gD# z%>%or^ra_3$J6Qrut_}FF&;dDeNd61Ho-?Jy<)iC`DiQ%%^EoAxDQ17re?gBEW>8C zGEPM+c(?75+XtMge3Z==EW2kdH)k!e4xDxwJ;(q}<1Zh$WhkynLy^edHHPWFW%sV-=B{Nv?JVONkA(7f`h*^?QPfxU59qr< zsu{C#wq&0BF2N&#Pf6sol2Sw3bG%%TTZcpk5bgfIOM{nzK1$>|dA9K|^6BC}rlXND zo`mGKxtKwzkADB5Ed0Z%Gwvx;8-edBycU2X^2Ur7(oGH7q=w|D%^EJ?dEB6R>x=*4 z_BQtjP6Y^~`~H?(y3hj6NII16_6$a}7V7&J`uZRyt) z67(SZcIAq7s=;vJm!uzpX+`i1?Cp5E!#XeG)I`EZd><5$)iBzQS{_dF7yk@ z)mF6k%EZ|nwNO?H`#BcZ`y8Q0qO<+4fo{~HSCyw7{<`hwSm_O!mZEK&S#A*}%}Oy) z`X~^66g_>E3!4}Ck?x)UP0bZIHvPFp)8jcs50RVQCJ$bxpJFeppV7MpoW$RWFmdcm zN9MAHzCJWd>8_9Mg~j$p$M!ZS`DEwbfBS}UargnxkhcL(;*YHj?an)wGJ&&As>+;S zAM{oO;wysm-<-K4XLA*o6DN}6CT8O%J~UKU7KK1x)XJ`Rp}qq-0W%DT!ypG?kRLa*l_AS$r)0P|!mxx`EA;*wTbYHI zc8o(Zk_hZBz|870yH<&0B7yNYtZB9$o?Hc=t%3s&q$R=*8r@h~luMOFLH`O71&wU= z`w>y-^X@G37v%JJpuuE^#8gLu>bpGKA;@q)hnSQ>eiuUyivOuFA>-dnj?*apZ9`7jYb_Oa zb6TsQBKvXa#dwNjc|^i?$R>L3%RAp6MSAbccVdnFURBWxe5Mm{pc6nRE=A4B9v9_$gL&|Na|!ql zRj@~XwN+J189O|Nv!C52JjQQi&zWdA_nRW}FrkI~riQFjLvp{&F2#6&c=Xz51Or#J z#U$@<$YTqQ;I1*5%z+NIP0{!(6Vc|x?l>>3+3J0t!F-=Z`##&1Jby>K7KoQU`8Pye z9qW@qhTZ%{tZ||2VssE+?Wdrp2G)Hvqv+}-Rs}RrU@V~Gf5rkXwYNXXI4=gp9!099 zE{VEVgxl=zARLkCKnG5wP^l-aX1T9&RTv7pM{qQlZ?v0lR9>jk%_1LvUA9wZ-PS>p z00XPP3wXfK7)Q2#6x8_#Y+@ViY?}w;2fF{FbDugP*B2&N6ed>^CSPi6p9*s4!Ge!S zk2nE>?q-t#{6Z2aSzZ-_U@QQ3*j&)nwQ~{X*$7>DOTF-xZsDyn(o+cLeWs6fFPZ4V zho5FoFF`0&FR^WLuOjhqq*OxANuv@i;(m;jntFh-t7}Fz9i~3oNDbdeee^n;iy>I< zCECPe?qNGPR(#@d74*z=If>HJT*AX8Ltol+pq1WFvTUk_sj&V_A?(Yu=q~{7lqY#R zumZR@@MygeMpF}GS>To$kJ>4&%^^5ChqeN%)k z`9g_B5PYR13@?kbO~FP?{1-IKIBJgLD7f>rV*3TyWX zYcDk}8_gED2Q^-H1@ZoJvDF>>baTXj8<4XJ*{5rR6#5L#8zb4E+WKH^MX%VjU(3)z;) zeiL!OBB*JBwVGhm(%9LEZSQ<>nl|bPh}%RG*}k$`BQ7Rg`60fi>29rJG5FeI@3lqg z!nD^tg;`LekQid{ZUEf?@Rt$W(w<9h7tjHxYR#3lY6${g;~M*Eyk12SVATLEgh30V zp@q$!J~j-0GlW-{#|BM;wH~2C_h}b0>oD{_kwRV2-O_w(^+fTsGh-#kp>uCRb8qSA z-nt;KwP=~=SFLLVHz0pAf9PYyAizsTr=GjRd);?dEpi(W=9vM3kEM*7YbAO 
z3sXc3Q}(7&mf*1(8GA;ik|>cFDg5>xIoFA~ZBSX4 zL-}UdW@VUq(n-Q04<&7s(5!&C`_!k7hy@KFAL9k&PV00yE1(@n@LI2EA+$s~V-e13Mh-tDK z$=Mx)q;^b5pet#M6TTl1@0%i0u_&|bEw+dBR*cL) zUWwk+wZllF&O@*}r_SHH`TDNVJ-d2I1Rmn859~A%Bzg z{u)481-JP#XeE;vbL{%o?0IE3-e6I+ST;vKNLuATb>jF7k@l!&bsR@L)wj)+sH?GW zzBu^X;+b0UP*MZ{s>{nleQp)V&OpCd;2ngLaE z^|TE~C$cX8%Sv?9mr~?)Rw3}qKNcAAPg*F(8AA(A&dAAT>*QUnmdDDXlCNQkAB2acR@y)Er|Nzb4m@72OnkGyZJ3cU%Ng#MLI} zpRuSRx%4C<&Cv(REtH}UOwYHQBig{GeD4oL&}4<=RK&z3H$^?!?Q9wVcQ4$kX9P5g znQPfQgRKhZ95IlsP|sFVvE_<@!hJuQ-63@cw}%@4n)~qn%Ms^I3B0&VRG=$c+ZSK< zK_#|4#wfN>gPp{szuHG~bqLjFHQ*;tQz{-fvVgn!cCsK4!?+HkigDy2nJTI3Hj<>3 zc#J@$snak)X*psft`+E6CByw1daFMO2JAI3Gw0FWM!<#-2ys9I|M(8SomPC^m+N}% zvq?*x4VhP?U!d4`A;ey%&y&R*nOZiwRun9#ppKM&r&MVdDO%HYsJC4F#Hrc@B z$-uj9YCIm(A%$GXyWZT2rxz=S!QVU~VpWI6deOR1hIwpRdJ6jO&5a_PMnxX7{d%u~ z=J>Q+kG#$!O|&NQAkO?=`>;PrZuSJr^rueHf8y&=Ws4gEX@uZH=fr)rK0vxYX3`XEtvrfEcOT86pY#CE& zA_1Om+P<_@kWE6N%CXPS<^`NRKAb%foV|detQai^QovGUUR#0&NkO$Xh3xQl8|FNN zQ`|LC_be(1WK;TOcFMku%>BIi6{i*3u;`s`rfv4rd!Lg3ABPp2EhS@(B|D8sK*$}! z>`25sxGNBk(8Hgt_YG|6IK4Bs>2aZh<^ACFKi{z%WkapV8E3bb=St?~wgaoeoN<@< zLs*mAy+y@@8d+?guT0B{v5vVyo|OY`L$+^qZf3Toyo=ssaXZxpHy_0Nd=!ah-%=5me9OQ(xgz zH=d~+GwoXGWv<{FwUqpV7h8k~bk(hE;RURY1wF}WG&Gf)Sx%=2DOUv5viQ_i_|zDm z!YO|rlCA<)y-V(7)3)>yMs_xr5em3i})yt z_$Z0^{PV)YwF(Iu%Q)M%E{Lo(rYVsifIa=9exW;F9>Z#97b1qz~O13&%;=} z-z6I^oE-rAjeNgymh_$?<;BhaoF)P)J|9_fkd1ypzp_ex&QW>e|Dy%_@Oy`yxJXP} zP=~*8pA5Qza!g)k*yN7lO2)z-h<6Vpya(DPzJQW~htaRH)>w3^rFXARdM~X0uNvL) zG)3jJp9>8)Qxc8So94+>#P?7PeQ8WZX-p+)%;R7u_Ajgk`9OjgdbO`*5InoTdmu-! zVA&)xL^3qayLwCs_Z9Yw3L%>co>1v8$~nwq-pC8s$OH5iy5FM~Pg`ZNHjhkKYHu+x z`>j8~+x!0&7PK$~s1*#1(RR!m^tP>9Bc_hv@M(DLI2=9>*PyBf8sBMhEI}qKl!#mC zhm!}mzPxTr$BA!Jn#aDdbCf|CHBr$^e*b*m(dAaZf!_)D@ocpT0%q|7&hY}1BL{69 zv$UB+7*c|FRpv+A4S0rwDPuOq+X}k3)ZKqLAxvEBq984=_P?(6#geo5P(~sUBN06# z(fG}nYagR4^Up@LmrV+Q3J)=s=xK@7Ps+*J#KswbH0NelYR8s8ndheA=cWP3d z)dak1LPMx#MX9Uf2ijt#v&mVPL4mx8Kgs38*3Df~=ej?`)47*8u4vIt?!(uTSjbQt-tMyuiw{@8-{hE#8Ia&~Gm89q6SnEW94D$QGMCQEM9&hLX?OaL`aC} zX0$MRNrI3FqSs-JQAe*MdJlpaF-n9GC3;H`1PNnCiQb}&PLvVdH+kRxTHpWoe(!f@ zow;+@I?tZ7&%W#4bNAi**=>zEkIdzCspJyDazySI*iG39fp_-A+`yJmd?;@wAD1`4 zm>zrJF0k?$85Z%A{MDn@s!FpxD4!MY-HJl~h(dmJA%FW8KR+{vyG(Nhvy{}In9ty{ zRb2X_y>o7C3#&MbJcfjITqgeT0Uy0|R-^wTa|vB430(yVU3<0DM+ETkXP@ZaKPP^7 z{+tG?hpZWSt@cs=Bn;WfiQ{hmD!q}nVIWsyRy$L-d}pxBa5zGLxXWNzJ)`zdGTG&m z8=r2K(&Mu~K|FCJ(*(oN_tE*E%O{f3p`1=mBq6Xg)-C2CpZN9II~Do-5&8V+eE#Z! 
zFB{Kwtuje}r@6LCl4O2ewEwOWpBO#J^Q;ykX*y+yG7)Vf=$Z5BQt|03@afv$JT(z1 zc36O&ksO^v>?eEp_A1Nqw#+pp3noN3{ zOzwuI%{?APe^vlrvPSgN47`&6R+NlJOdR&m?BrW>3a(1BSJGn}R&})(;-IQA(1lni z@5gZgySe@X-tgR=#==m*lbXBHPQgOPx|-LYA-QxTRrl6)6aA+{E*@c&aDBv7e}I9PzA+Q6otbX_kJEFaHIF0#{Bpy(nWbD&;=3g`g^4;Vh zi?Um!lRYJ5EoaM82D9vq{KUP+;YNmWHwrgPOY-MDNPgEyw>b09QRP|Th2*Cz{k^q* ziL)l$F=148DN$;1rcO<4l+m-G*QKV{1=H)Q3~W19izsOE(~#j#4`sz4U+FEi9Q>89 zaJZNn919z+(cwkxxio7Tl<8!$Hl(A^j zg}&a8Y)hoDqY6}4W|7{ot&yHi`ev6gVu*dMj~y|NG-X zODZ>izcAV|uhPH|%AOriTvLA+lve1&OyA(yZb7C?O{NPb(}hq67K`R&&GhS%BQJl7 zS`osJmAr5&iPJrf0RX*(cIBWE4@iNOAb7G@@^$lJ*7d`zfWxeUz^;<>jtEX>{!$RZ z);78Q!urt{a+7B38%|GCzFRw+->Va3J^a?h<~jy8=l~mF!3KUQa$>L#Z#2xKLTgOU z{v2!pl|P;eeGK0lqTeJZXVZw2^W$l*VrE`|@?F(mFLiwU!EwFB@v3F?69=G9y^PjU zkHK)A-8<1x+%AQldXHNpUEjRq2#N8J>j_(jT+*6Yh%MZ6mDHa#$mVi3a&MK={W)2)HLL7+Iz|bMY z^CbUNIwx*6iLYAXs^>EU-E4-u;igZA%F5Com!Z>Mon;yb~mLvzi@|>M%CaSr7%h;NXxX0)lo8^pV zw?St)qS+fS;X7t04phj~q9#XEK|u9K`0I5vOPKR{EH{K>27P`4 z`9^^IBS5kSGzK@_fOpf=BATGVafVXr)l36pP>^4dw`g%SGOVz%&pyN@hG~4L(RSQ| z1VcrFQ6Rw}zpTZ=`D)eX>0vO}I)~N=ka{KX*XDa4pIEJ%WrKP`eOrmK-mDHcv*Vn* ztP~nB=I8xX=lu%j{kUuV=i`WI<@~Z+JoIDLA*SWYh~tNCl9w#*&Qd8Iv%yjIRWI3G z#uR}aia@L)&^^o_#$OHSqf1c&;s0YaFQ#C6{H^VaPvzpdqQFnR)H4#JuB8c zw>OcrF6230TUmSpeajV5_=qSxIto93f4_?R&1-~m2z<+g*XSYcI&VirH{L=uyRzb$X{cM{i*)-#1?j<|-F&1x3&DokV|fw#x*B;F!|hQH`$? zr&DY>zh(#bp7qjNT=1}6eX*-zG1kPYtG$IE#l(*aaxRp!DQt2qY!ZgF!HQb%&R=+Ec@dVo zMAq}P`WAuZE!D%!Z5^K{HAvXYzUS3%4L4idu-l%5J{gB@Pe4U!CgdTvIwk+8BhO>M z98k{h_64lXWwOpZVr8~w3rPz1Z2E3m2E|OLc5Xd}K6wb;egvg^KEyvMlR%v>-wPwM z>9}?QlnzQu(SG39b+DG6V*ZTj_xISIqIMr@#p~+dajpUCU-{zhUCZMI`r}0mf{CGd zfWOCOohwSuxN-g(r+(aWGd^~D?D`oK9ebR8x+q=70T=6X+4~wOVt&|x*4m|sH)QFsnVGI z2-zfLM)@Dp;%@%TntM>)fCISEhwhySkFk=gQ9IM{Y%$lmaC5jVAu2k&hhDl5bs&aF zna}gvG)R2xqyq@$_zV{IZF+1ix_@CWI+zU8bC#|V4{)?|xhXup|4rS}+Wf8v)m;&V zyCTTc`};1Bi_HLg;UW+E&4)?6)+)=;+Ecm_$$x}~JxMa7vI|~VWmA^;5v&?jO&L^8 z{ZvhRRSzB5M#2JzNXO|stend>RNnwQ%|Wu)=cGY+4{t?XKGyAiD4+j|iY+O3H}6TxM9EaD z9OI_j)5d3}49`sco|z)=%#`Ffnrbta5PB`N$!be0%38$&rE*8|7k3_h)gQ8_v~0$V zHWx^fs3kNBy(ho@K2qR4IfFLF+yXEVL}n~dqcCVP2)Gzh8a~Odb+k-mv{PKKY`slh z=(6pIScMO1^N2}SG)0eO1injW}uJGwdBy z%qgV7%#nAGac@7x4n#eE5+0ajaI#o1_MZ*&;HiX?ocYx1s*jSp^9K(mo0OD@7U>>Q+`-TDyzr%dChz0INm)2#E`u-i1hS( zs-3m;%6Sm`i(Re7hcq-5InKk)&uW}gp2f$xY<5pI;A~HgX@ZXhZFSLfrLN2-w>heI z2xJz%zR3?~%2(Mkf?M1(FQE(}0pSh#hp+CM<|=9XBECs`gm1}zIF_^g^wjiY%68Fv zL#nU2JE$qM8d9L4I>RhBbkFX2X{($Us!AV}0UCx#wn*Qo@?I&d`DUBlT=hp}H2cBSvTFR6yvw7 zmWhN6RERb&;?w7qi;*{AtDpJC}`Wz9XRboHF|Ct6clH8b0{ZTfh-A z>#ZRyQ*G-jeR_;HGx|Q?JISER&+U1HnO308EZ9%y8kUs&cz>pY{91>eXI9OGr%yDS zYZw2@w|aM?lEr_OOnJbaY(K$$(JRjzfzmztkz``Rg@0lk2kz2Pd$e!HpH_S| zBmZomfXe)sHt1Nm9vwtnqj-ozoEJT<@{nJg5?WtLP+EI`T6&Y&$#?pSD)BavkD*@g z31j67KBZBP)_jGQYK2x|g|?jwT1-)0L`oX^g|>8k(rmAt^DE51sHdO|CS6XR*eQAs zX{NnM3{ZhEJSu*nF@NHqeB>~H>;R~Cnp#(9?N(V@F&KUe?V&83U@`G}zFb~Ptef)L z?Pbal@?BixZntH}L|@46=XxshIa;bYT7@}Uq!g@1H=p^|s~v&@&~b?>r&&mz`$u$) z(9lbYU++W^PR)Y#&UT!;;&WvZmprZZlQ|G4c-0P-9Fe;)wbjnU?o14|!37P2rE?>zrKgi%R4>=ij!;4|B{HQ(V4Wy5$&bEh-3TGM*XB7*L($ zt1r);uM&}$63aGwFmKIvdmc%`6I=pT^|(R$109n5wvw!nipchsK<%VvotM`p2&W<(VadQ(PXb#wClQ{WSmn}kL z^(yos6?ByvdM|#w4F$2m%U#zuV+&I>y0W@>p3a4Z%&dp$ZD^4_x=kt9Fj4nz;>7)- z!qim@l55l?*T5v#zQ*gHKlFj9O!s_e@hIAf0tB#CZpx+yPjRD+t{n!%w0Kgy+uca$ zFZcDp?kK;!q-QnS!@IsjXrP#X9>Z1dzm9Jm7)%sFNODJtM7< zcnaHay2js=j)A&5uiavKjg|ORmH70O_)Iz!Wen6BUaKu-mkiT;eO3YUnkg;XnD<}& zmaQ?)vu3cdASoe^Cz-XS zS=im5glrGU^$_a$C6|Wu^p)y#uX;t4W<-=GI!cphi&-D9s9$s`xm!%8laYX9@yb;{ zEAGO0Iq|EvT(fG#&|7KDr`1+;^&2^izj9Jma#A*}l|Gg@eQ4XHOOdfa*f=!N&T^HwNbG0P|tNd?wu?z?%fEZJHZL 
zj>Cu^Q{?AdW3Mx^JF_os3JIKdX1m9_rg*1%={TC`I2h?T{OLHp8~F@RBYsgND^0Br zoXzg$Wkzn(#c9gEwWO6$Eco3vq)qfKJvR55vd!&@{p$M##QPbv_ZgI$zPWGv4hy%y zd9QX_be65;J=^xs=&6L1Z-wt$>iS;0w5Cbm*9~|T?L2+&vtl4c+U(E47~Ej*@ZiJ4 zYMJ4o1FtEOh>KCS3`KvE!Yws>+`dtnf0E`eM>+kcW?~~p&^x~R_jrxf`Bc^U^wjw# zdJZwZ^j`a&T&i>Z#o_4HK1bx}`VTbOh%R?xm)6;w-xZj)@p!}ev+u8XeZU}o1<;HcmqVi+jHmUHp9_CKW5a3x!@fgI*dDa4MB=fADlxb5XllBI_bWbZCA0@JiZQcm(6`{a^zEXBPO=uAf z#R3`Kk44R}UMV^$6XNr9nj$&5QSh%aBF{lsN5rvB~@?^p^u;2DK%<3)g zaXgw7D{vM0#eYq@%4PUI7GbPnr>bJ7r(#Dm__V{*Zr9q0`N3ilWEs6sfYI`F?&v~e z9E-j=leSf`O( zjVhj!$0aM`X!1QTW9DX;HzvXz(}lpOOIPF`wBOKHUz(~Yp5b=4;vhqB?BR5Lb;lH{ zdyI*G$7#u|H}3Dp>&D_ts^Uy~;!Gwrhc3F#bn}Y7tUjldxPaE`m8rd1SdvK5 zw_MhjUke54QsFW-ei*jv8T0W8o%B196+4@s0GQC&j@_YPVv@8n+Ck z$_|cSz`PwZnWgcSwc73Fv8q0&ne4@xcH2K7s=bhf?+{-3(I>jWeF_YutD#nt+e!Yf z$#LsJ5NvU^k(SHon}8LpQ)sD5x*C~@FD z%@En2uEX@}1mvOOEFA6i4Kox%!&aiK--fRLa-&@QZZ9~z!${;XoE+eZ6oHCz=r87lt&(!hwk0AO#N> z;B7c?7Y+UQv?q_gMx$)$QZ(NB3&$x5DJZ_XAs_gbF}V0)w=ZUKOfdZ?i(tJd$ch3Cv6 zYZ_df4Oxx?l)TA7cdvj5S3u-DQhEWreD!Key6<2WSLu{9JJZ1%o*}i%_c`hzoE&k< za_gihOW-{?Z^2odg0~3ht}uuo3`)1yH26rtKg{w<9z0Wjt>F`LQtc;G5O<#sT8veZ z7$&5zJUQfXxE3KY#Txtgbu$O*ItMC%165!)5N8zH9<1NV>Z(h^`u02yjct4>eFk*j z)ntA94fW~Sm2TDLohQaB2C6CsdMXC)Z6#}|y7E^-;K^E|^Yfkv^TGY+q5+?ME(?5ER|Le=jUD>Nz zI|@*<<_t+9Tj>w4^EYZtsi}T8CTq|kYk-wCm}pk~B=rK&XuQ?VjG%y8SxxLe8(4`) zq5PjSyD3|Pc7G~bzne!*!0DTocJ%&bGy^?31C#byAFkmiw$V7>lZ*|q1RUNEf@S4H z!{i?I9SeP9a`zz?oNsbztANs=n}Y1%kY7$m=0dRS_wT_S=wClo&~x{ z86@?9?Cnk;OqNQ%+c=Ypw9ti}H9GsVa&~4v^I!tQ;0V(oYi_~Ixzq-F`75(JrNsMA zr(U~Jw7}7szGzx`{)eMZbK*;zWfI3*9TR0{r}spe-Bf{Ns=y9aAXXJP(RZPQsG|Qw zWC7P@`ZLbbZguK-a;D|K*~6FZ9)EEI-u~6_gN0>9WHhPVIS>H>@!5m+?LlNEHJMJg z=NPvc{D&ug{!$hbMSdX$a<24vzDjccDjU?L(&6$J4oB6@eC%1skU;2y_;f(~Iv_Gd zr`8_smMAB0vU%)!Sk|JXF@{1Rlo=@0&d|Y2*DKy;Bw2Hf@!PpAo&&1M2F0^ODRiqH zT9rtS`!0{?h*@)Or>zDKV_!K@;kN^VPu2CNLRWY>E-V^<_q}zoa@|+LC~BhDE0P@| zk{!^=4$49H8I?U(Tq^Bwx%g9AJLlgF>U>z z8rLL>Y;|t^WpLt_Q4LV5X1eA6wa)M%Rp3i}1qvU5!lP06>i5C03?}eC$&rfTEfhY< zWS&HUP`Yj$@Z(2#xtMIW$W~tD6D;(h%^sQLtZQ|U-RQCEXGx5OHilXo1J=estfsOm zWdm+fx8JODkM%~D(Fr|R&Z}f+!za2=(&(FoI%iLWn0eM(s9~tpFkm$z;IwIr9y21b z1^`@)zd8T26)6`-@_Xh;M^8ge%3Es9-yzwK=p|F2jBTl}huAD|7iXI*kn~yDVW{me zU^@)N?9>}Z(cDN1P^v`ps<{wO*98)kb_)5@ zMn)6wTqI{he78~5R0ef}6=8w%vn8|e<5fp7fMJi2yi{WzV}J4Ps{4Nj*KuQR+x>7y_53LrP~a!*HRmw;F_i9$d2z)N_e9=s=y5KpZIQbN&_ znO-)j$i0wAp?vb&`H=$ok#K97!-VvY67=Keh0o>xyzbm)4E&HgzxG_Q7V&MsOFZdQ z1wfK2RcbY5OGKbzIr3vJd46tWaW46%hcn#lmsghPP3H#by-56IO*~=R5Gy}jj3;bG zh4A?xe0TgVh5K$@$uUaxKE+HO?&T+3M-5Ij&$gVjOXLwWQ4$4JFmW1kaS zpSUyOP>2R^yT;hcinN40y%0M}$uW2GOMpL;#X( ztF{skPuI`p5)r#s;mZ=?%Yyc0na^JZNr(XeM|!&J>)akN@%fR(m$JnBj~vNB7R8t) z^P}dJj||Ke-YgN`ENE|*hZ99apBWNL=q{t#?|5wrXf)&@bm^&?c%M9*^MPH@$TYS} zAe9uCyy2M5OFEZR5yTP^#DWfDnGb_@626kp2SP5iRQc&Q8BKkSeTL@0^Rj7OmC@Im zx<5Fp-^5L|~K}R~GBR6e!MxN8_Fl!)Njp?VQKs?DaaZl|0KW4UM9t4J zd)czM?A4{bW!SN9@!s4&5fWPn{fTW0N{|heS_p=3?mT-MeUylbnuwA2*Crv793|+j zc)FrY1c7rU2B!_~!XeFSUcnC?`GeoWJ)OcN$jHSmt{w5iLNhKam8GlaP_dh`_2I;o z2aCg~+Tn$JWN`^R&>N0VE8TvhneQ$xan!5jQtw)QculZ3t)dNETx4^vJLn&iz=&Z*#^s*b`y1667?A$yd{|%07)hkXS!WM{);U93xEB^~__7&_|r*cbkc* zOyZw&`~$sBL=(sTZ=HSJJe_Ub+?@Z4_^P6S1*XG zm(_EBcL>7bzYQeO-dMPU2oxX+q5p>f5aquH^1p}hpHnz_FC2y-g5rsd?;l`3qFsqv z`~@SR5QrDze-!%HfyDn;o$GaB?K-mnKvX3;fbAaw$NnFI|5ud0HaW%L&eX)u6Qb=* Ki0m!^!2bbc3S|iZ diff --git a/src/databricks/sqlalchemy/test_local/e2e/test_basic.py b/src/databricks/sqlalchemy/test_local/e2e/test_basic.py deleted file mode 100644 index ce0b5d89..00000000 --- a/src/databricks/sqlalchemy/test_local/e2e/test_basic.py +++ /dev/null @@ -1,543 +0,0 @@ -import datetime -import decimal -from 
-from typing import Tuple, Union, List
-from unittest import skipIf
-
-import pytest
-from sqlalchemy import (
-    Column,
-    MetaData,
-    Table,
-    Text,
-    create_engine,
-    insert,
-    select,
-    text,
-)
-from sqlalchemy.engine import Engine
-from sqlalchemy.engine.reflection import Inspector
-from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column
-from sqlalchemy.schema import DropColumnComment, SetColumnComment
-from sqlalchemy.types import BOOLEAN, DECIMAL, Date, Integer, String
-
-try:
-    from sqlalchemy.orm import declarative_base
-except ImportError:
-    from sqlalchemy.ext.declarative import declarative_base
-
-
-USER_AGENT_TOKEN = "PySQL e2e Tests"
-
-
-def sqlalchemy_1_3():
-    import sqlalchemy
-
-    return sqlalchemy.__version__.startswith("1.3")
-
-
-def version_agnostic_select(object_to_select, *args, **kwargs):
-    """
-    SQLAlchemy==1.3.x requires arguments to select() to be a Python list
-
-    https://docs.sqlalchemy.org/en/20/changelog/migration_14.html#orm-query-is-internally-unified-with-select-update-delete-2-0-style-execution-available
-    """
-
-    if sqlalchemy_1_3():
-        return select([object_to_select], *args, **kwargs)
-    else:
-        return select(object_to_select, *args, **kwargs)
-
-
-def version_agnostic_connect_arguments(connection_details) -> Tuple[str, dict]:
-    HOST = connection_details["host"]
-    HTTP_PATH = connection_details["http_path"]
-    ACCESS_TOKEN = connection_details["access_token"]
-    CATALOG = connection_details["catalog"]
-    SCHEMA = connection_details["schema"]
-
-    ua_connect_args = {"_user_agent_entry": USER_AGENT_TOKEN}
-
-    if sqlalchemy_1_3():
-        conn_string = f"databricks://token:{ACCESS_TOKEN}@{HOST}"
-        connect_args = {
-            **ua_connect_args,
-            "http_path": HTTP_PATH,
-            "server_hostname": HOST,
-            "catalog": CATALOG,
-            "schema": SCHEMA,
-        }
-
-        return conn_string, connect_args
-    else:
-        return (
-            f"databricks://token:{ACCESS_TOKEN}@{HOST}?http_path={HTTP_PATH}&catalog={CATALOG}&schema={SCHEMA}",
-            ua_connect_args,
-        )
-
-
-@pytest.fixture
-def db_engine(connection_details) -> Engine:
-    conn_string, connect_args = version_agnostic_connect_arguments(connection_details)
-    return create_engine(conn_string, connect_args=connect_args)
-
-
-def run_query(db_engine: Engine, query: Union[str, Text]):
-    if not isinstance(query, Text):
-        _query = text(query)  # type: ignore
-    else:
-        _query = query  # type: ignore
-    with db_engine.begin() as conn:
-        return conn.execute(_query).fetchall()
-
-
-@pytest.fixture
-def samples_engine(connection_details) -> Engine:
-    details = connection_details.copy()
-    details["catalog"] = "samples"
-    details["schema"] = "nyctaxi"
-    conn_string, connect_args = version_agnostic_connect_arguments(details)
-    return create_engine(conn_string, connect_args=connect_args)
-
-
-@pytest.fixture()
-def base(db_engine):
-    return declarative_base()
-
-
-@pytest.fixture()
-def session(db_engine):
-    return Session(db_engine)
-
-
-@pytest.fixture()
-def metadata_obj(db_engine):
-    return MetaData()
-
-
-def test_can_connect(db_engine):
-    simple_query = "SELECT 1"
-    result = run_query(db_engine, simple_query)
-    assert len(result) == 1
-
-
-def test_connect_args(db_engine):
-    """Verify that extra connect args passed to sqlalchemy.create_engine are passed to DBAPI
-
-    This will most commonly happen when partners supply a user agent entry
-    """
-
-    conn = db_engine.connect()
-    connection_headers = conn.connection.thrift_backend._transport._headers
-    user_agent = connection_headers["User-Agent"]
-
-    expected = f"(sqlalchemy + {USER_AGENT_TOKEN})"
-    assert expected in user_agent
-
-
-@pytest.mark.skipif(sqlalchemy_1_3(), reason="Pandas requires SQLAlchemy >= 1.4")
-@pytest.mark.skip(
-    reason="DBR is currently limited to 256 parameters per call to .execute(). Test cannot pass."
-)
-def test_pandas_upload(db_engine, metadata_obj):
-    import pandas as pd
-
-    SCHEMA = "default"
-    try:
-        df = pd.read_excel(
-            "src/databricks/sqlalchemy/test_local/e2e/demo_data/MOCK_DATA.xlsx"
-        )
-        df.to_sql(
-            "mock_data",
-            db_engine,
-            schema=SCHEMA,
-            index=False,
-            method="multi",
-            if_exists="replace",
-        )
-
-        df_after = pd.read_sql_table("mock_data", db_engine, schema=SCHEMA)
-        assert len(df) == len(df_after)
-    except Exception as e:
-        raise e
-    finally:
-        db_engine.execute("DROP TABLE mock_data")
-
-
-def test_create_table_not_null(db_engine, metadata_obj: MetaData):
-    table_name = "PySQLTest_{}".format(datetime.datetime.utcnow().strftime("%s"))
-
-    SampleTable = Table(
-        table_name,
-        metadata_obj,
-        Column("name", String(255)),
-        Column("episodes", Integer),
-        Column("some_bool", BOOLEAN, nullable=False),
-    )
-
-    metadata_obj.create_all(db_engine)
-
-    columns = db_engine.dialect.get_columns(
-        connection=db_engine.connect(), table_name=table_name
-    )
-
-    name_column_description = columns[0]
-    some_bool_column_description = columns[2]
-
-    assert name_column_description.get("nullable") is True
-    assert some_bool_column_description.get("nullable") is False
-
-    metadata_obj.drop_all(db_engine)
-
-
-def test_column_comment(db_engine, metadata_obj: MetaData):
-    table_name = "PySQLTest_{}".format(datetime.datetime.utcnow().strftime("%s"))
-
-    column = Column("name", String(255), comment="some comment")
-    SampleTable = Table(table_name, metadata_obj, column)
-
-    metadata_obj.create_all(db_engine)
-    connection = db_engine.connect()
-
-    columns = db_engine.dialect.get_columns(
-        connection=connection, table_name=table_name
-    )
-
-    assert columns[0].get("comment") == "some comment"
-
-    column.comment = "other comment"
-    connection.execute(SetColumnComment(column))
-
-    columns = db_engine.dialect.get_columns(
-        connection=connection, table_name=table_name
-    )
-
-    assert columns[0].get("comment") == "other comment"
-
-    connection.execute(DropColumnComment(column))
-
-    columns = db_engine.dialect.get_columns(
-        connection=connection, table_name=table_name
-    )
-
-    assert columns[0].get("comment") is None
-
-    metadata_obj.drop_all(db_engine)
-
-
-def test_bulk_insert_with_core(db_engine, metadata_obj, session):
-    import random
-
-    # Maximum number of parameters is 256. 256/4 == 64
-    num_to_insert = 64
-
-    table_name = "PySQLTest_{}".format(datetime.datetime.utcnow().strftime("%s"))
-
-    names = ["Bim", "Miki", "Sarah", "Ira"]
-
-    SampleTable = Table(
-        table_name, metadata_obj, Column("name", String(255)), Column("number", Integer)
-    )
-
-    rows = [
-        {"name": names[i % 3], "number": random.choice(range(64))}
-        for i in range(num_to_insert)
-    ]
-
-    metadata_obj.create_all(db_engine)
-    with db_engine.begin() as conn:
-        conn.execute(insert(SampleTable).values(rows))
-
-    with db_engine.begin() as conn:
-        rows = conn.execute(version_agnostic_select(SampleTable)).fetchall()
-
-    assert len(rows) == num_to_insert
-
-
-def test_create_insert_drop_table_core(base, db_engine, metadata_obj: MetaData):
-    """ """
-
-    SampleTable = Table(
-        "PySQLTest_{}".format(datetime.datetime.utcnow().strftime("%s")),
-        metadata_obj,
-        Column("name", String(255)),
-        Column("episodes", Integer),
-        Column("some_bool", BOOLEAN),
-        Column("dollars", DECIMAL(10, 2)),
-    )
-
-    metadata_obj.create_all(db_engine)
-
-    insert_stmt = insert(SampleTable).values(
-        name="Bim Adewunmi", episodes=6, some_bool=True, dollars=decimal.Decimal(125)
-    )
-
-    with db_engine.connect() as conn:
-        conn.execute(insert_stmt)
-
-    select_stmt = version_agnostic_select(SampleTable)
-    with db_engine.begin() as conn:
-        resp = conn.execute(select_stmt)
-
-    result = resp.fetchall()
-
-    assert len(result) == 1
-
-    metadata_obj.drop_all(db_engine)
-
-
-# ORM tests follow this tutorial:
-# https://docs.sqlalchemy.org/en/14/orm/quickstart.html
-
-
-@skipIf(False, "Unity catalog must be supported")
-def test_create_insert_drop_table_orm(db_engine):
-    """ORM classes built on the declarative base class must have a primary key.
-    This is restricted to Unity Catalog.
-    """
-
-    class Base(DeclarativeBase):
-        pass
-
-    class SampleObject(Base):
-        __tablename__ = "PySQLTest_{}".format(datetime.datetime.utcnow().strftime("%s"))
-
-        name: Mapped[str] = mapped_column(String(255), primary_key=True)
-        episodes: Mapped[int] = mapped_column(Integer)
-        some_bool: Mapped[bool] = mapped_column(BOOLEAN)
-
-    Base.metadata.create_all(db_engine)
-
-    sample_object_1 = SampleObject(name="Bim Adewunmi", episodes=6, some_bool=True)
-    sample_object_2 = SampleObject(name="Miki Meek", episodes=12, some_bool=False)
-
-    session = Session(db_engine)
-    session.add(sample_object_1)
-    session.add(sample_object_2)
-    session.flush()
-
-    stmt = version_agnostic_select(SampleObject).where(
-        SampleObject.name.in_(["Bim Adewunmi", "Miki Meek"])
-    )
-
-    if sqlalchemy_1_3():
-        output = [i for i in session.execute(stmt)]
-    else:
-        output = [i for i in session.scalars(stmt)]
-
-    assert len(output) == 2
-
-    Base.metadata.drop_all(db_engine)
-
-
-def test_dialect_type_mappings(db_engine, metadata_obj: MetaData):
-    """Confirms that we get back the same type we declared in a model and inserted using Core"""
-
-    class Base(DeclarativeBase):
-        pass
-
-    SampleTable = Table(
-        "PySQLTest_{}".format(datetime.datetime.utcnow().strftime("%s")),
-        metadata_obj,
-        Column("string_example", String(255)),
-        Column("integer_example", Integer),
-        Column("boolean_example", BOOLEAN),
-        Column("decimal_example", DECIMAL(10, 2)),
-        Column("date_example", Date),
-    )
-
-    string_example = ""
-    integer_example = 100
-    boolean_example = True
-    decimal_example = decimal.Decimal(125)
-    date_example = datetime.date(2013, 1, 1)
-
-    metadata_obj.create_all(db_engine)
-
-    insert_stmt = insert(SampleTable).values(
-        string_example=string_example,
-        integer_example=integer_example,
-        boolean_example=boolean_example,
-        decimal_example=decimal_example,
-        date_example=date_example,
-    )
-
-    with db_engine.connect() as conn:
-        conn.execute(insert_stmt)
-
-    select_stmt = version_agnostic_select(SampleTable)
-    with db_engine.begin() as conn:
-        resp = conn.execute(select_stmt)
-
-    result = resp.fetchall()
-    this_row = result[0]
-
-    assert this_row.string_example == string_example
-    assert this_row.integer_example == integer_example
-    assert this_row.boolean_example == boolean_example
-    assert this_row.decimal_example == decimal_example
-    assert this_row.date_example == date_example
-
-    metadata_obj.drop_all(db_engine)
-
-
-def test_inspector_smoke_test(samples_engine: Engine):
-    """It does not appear that 3L namespace is supported here"""
-
-    schema, table = "nyctaxi", "trips"
-
-    try:
-        inspector = Inspector.from_engine(samples_engine)
-    except Exception as e:
-        assert False, f"Could not build inspector: {e}"
-
-    # Expect six columns
-    columns = inspector.get_columns(table, schema=schema)
-
-    # Expect zero views, but the method should still return
-    views = inspector.get_view_names(schema=schema)
-
-    assert (
-        len(columns) == 6
-    ), "Dialect did not find the expected number of columns in samples.nyctaxi.trips"
-    assert len(views) == 0, "Views could not be fetched"
-
-
-@pytest.mark.skip(reason="engine.table_names has been removed in sqlalchemy version 2")
-def test_get_table_names_smoke_test(samples_engine: Engine):
-    with samples_engine.connect() as conn:
-        _names = samples_engine.table_names(schema="nyctaxi", connection=conn)  # type: ignore
-        assert _names is not None, "get_table_names did not succeed"
-
-
-def test_has_table_across_schemas(
-    db_engine: Engine, samples_engine: Engine, catalog: str, schema: str
-):
-    """For this test to pass these conditions must be met:
-    - Table samples.nyctaxi.trips must exist
-    - Table samples.tpch.customer must exist
-    - The `catalog` and `schema` environment variables must be set and valid
-    """
-
-    with samples_engine.connect() as conn:
-        # 1) Check for table within schema declared at engine creation time
-        assert samples_engine.dialect.has_table(connection=conn, table_name="trips")
-
-        # 2) Check for table within another schema in the same catalog
-        assert samples_engine.dialect.has_table(
-            connection=conn, table_name="customer", schema="tpch"
-        )
-
-    # 3) Check for a table within a different catalog
-    # Create a table in a different catalog
-    with db_engine.connect() as conn:
-        conn.execute(text("CREATE TABLE test_has_table (numbers_are_cool INT);"))
-
-        try:
-            # Verify that this table is not found in the samples catalog
-            assert not samples_engine.dialect.has_table(
-                connection=conn, table_name="test_has_table"
-            )
-            # Verify that this table is found in a separate catalog
-            assert samples_engine.dialect.has_table(
-                connection=conn,
-                table_name="test_has_table",
-                schema=schema,
-                catalog=catalog,
-            )
-        finally:
-            conn.execute(text("DROP TABLE test_has_table;"))
-
-
-def test_user_agent_adjustment(db_engine):
-    # If .connect() is called multiple times on an engine, don't keep prepending the user agent
-    # https://github.com/databricks/databricks-sql-python/issues/192
-    c1 = db_engine.connect()
-    c2 = db_engine.connect()
-
-    def get_conn_user_agent(conn):
-        return conn.connection.dbapi_connection.thrift_backend._transport._headers.get(
-            "User-Agent"
-        )
-
-    ua1 = get_conn_user_agent(c1)
-    ua2 = get_conn_user_agent(c2)
-    same_ua = ua1 == ua2
-
-    c1.close()
-    c2.close()
-
-    assert same_ua, f"User agents didn't match \n {ua1} \n {ua2}"
-
-
-@pytest.fixture
-def sample_table(metadata_obj: MetaData, db_engine: Engine):
-    """This fixture creates a sample table and cleans it up after the test is complete."""
-    from databricks.sqlalchemy._parse import GET_COLUMNS_TYPE_MAP
-
-    table_name = "PySQLTest_{}".format(datetime.datetime.utcnow().strftime("%s"))
-
-    args: List[Column] = [
-        Column(colname, coltype) for colname, coltype in GET_COLUMNS_TYPE_MAP.items()
-    ]
-
-    SampleTable = Table(table_name, metadata_obj, *args)
-
-    metadata_obj.create_all(db_engine)
-
-    yield table_name
-
-    metadata_obj.drop_all(db_engine)
-
-
-def test_get_columns(db_engine, sample_table: str):
-    """Created after PECO-1297 and GitHub Issue #295 to verify that get_columns behaves like it should for all known SQLAlchemy types"""
-
-    inspector = Inspector.from_engine(db_engine)
-
-    # this raises an exception if `parse_column_info_from_tgetcolumnsresponse` fails a lookup
-    columns = inspector.get_columns(sample_table)
-
-    assert True
-
-
-class TestCommentReflection:
-    @pytest.fixture(scope="class")
-    def engine(self, connection_details: dict):
-        HOST = connection_details["host"]
-        HTTP_PATH = connection_details["http_path"]
-        ACCESS_TOKEN = connection_details["access_token"]
-        CATALOG = connection_details["catalog"]
-        SCHEMA = connection_details["schema"]
-
-        connection_string = f"databricks://token:{ACCESS_TOKEN}@{HOST}?http_path={HTTP_PATH}&catalog={CATALOG}&schema={SCHEMA}"
-        connect_args = {"_user_agent_entry": USER_AGENT_TOKEN}
-
-        engine = create_engine(connection_string, connect_args=connect_args)
-        return engine
-
-    @pytest.fixture
-    def inspector(self, engine: Engine) -> Inspector:
-        return Inspector.from_engine(engine)
-
-    @pytest.fixture(scope="class")
-    def table(self, engine):
-        md = MetaData()
-        tbl = Table(
-            "foo",
-            md,
-            Column("bar", String, comment="column comment"),
-            comment="table comment",
-        )
-        md.create_all(bind=engine)
-
-        yield tbl
-
-        md.drop_all(bind=engine)
-
-    def test_table_comment_reflection(self, inspector: Inspector, table: Table):
-        comment = inspector.get_table_comment(table.name)
-        assert comment == {"text": "table comment"}
-
-    def test_column_comment(self, inspector: Inspector, table: Table):
-        result = inspector.get_columns(table.name)[0].get("comment")
-        assert result == "column comment"
diff --git a/src/databricks/sqlalchemy/test_local/test_ddl.py b/src/databricks/sqlalchemy/test_local/test_ddl.py
deleted file mode 100644
index f596dffa..00000000
--- a/src/databricks/sqlalchemy/test_local/test_ddl.py
+++ /dev/null
@@ -1,96 +0,0 @@
-import pytest
-from sqlalchemy import Column, MetaData, String, Table, create_engine
-from sqlalchemy.schema import (
-    CreateTable,
-    DropColumnComment,
-    DropTableComment,
-    SetColumnComment,
-    SetTableComment,
-)
-
-
-class DDLTestBase:
-    engine = create_engine(
-        "databricks://token:****@****?http_path=****&catalog=****&schema=****"
-    )
-
-    def compile(self, stmt):
-        return str(stmt.compile(bind=self.engine))
-
-
-class TestColumnCommentDDL(DDLTestBase):
-    @pytest.fixture
-    def metadata(self) -> MetaData:
-        """Assemble a metadata object with one table containing one column."""
-        metadata = MetaData()
-
-        column = Column("foo", String, comment="bar")
-        table = Table("foobar", metadata, column)
-
-        return metadata
-
-    @pytest.fixture
-    def table(self, metadata) -> Table:
-        return metadata.tables.get("foobar")
-
-    @pytest.fixture
-    def column(self, table) -> Column:
-        return table.columns[0]
-
-    def test_create_table_with_column_comment(self, table):
-        stmt = CreateTable(table)
-        output = self.compile(stmt)
= self.compile(stmt) - - # output is a CREATE TABLE statement - assert "foo STRING COMMENT 'bar'" in output - - def test_alter_table_add_column_comment(self, column): - stmt = SetColumnComment(column) - output = self.compile(stmt) - assert output == "ALTER TABLE foobar ALTER COLUMN foo COMMENT 'bar'" - - def test_alter_table_drop_column_comment(self, column): - stmt = DropColumnComment(column) - output = self.compile(stmt) - assert output == "ALTER TABLE foobar ALTER COLUMN foo COMMENT ''" - - -class TestTableCommentDDL(DDLTestBase): - @pytest.fixture - def metadata(self) -> MetaData: - """Assemble a metadata object with one table containing one column.""" - metadata = MetaData() - - col1 = Column("foo", String) - col2 = Column("foo", String) - tbl_w_comment = Table("martin", metadata, col1, comment="foobar") - tbl_wo_comment = Table("prs", metadata, col2) - - return metadata - - @pytest.fixture - def table_with_comment(self, metadata) -> Table: - return metadata.tables.get("martin") - - @pytest.fixture - def table_without_comment(self, metadata) -> Table: - return metadata.tables.get("prs") - - def test_create_table_with_comment(self, table_with_comment): - stmt = CreateTable(table_with_comment) - output = self.compile(stmt) - assert "USING DELTA" in output - assert "COMMENT 'foobar'" in output - - def test_alter_table_add_comment(self, table_without_comment: Table): - table_without_comment.comment = "wireless mechanical keyboard" - stmt = SetTableComment(table_without_comment) - output = self.compile(stmt) - - assert output == "COMMENT ON TABLE prs IS 'wireless mechanical keyboard'" - - def test_alter_table_drop_comment(self, table_with_comment): - """The syntax for COMMENT ON is here: https://docs.databricks.com/en/sql/language-manual/sql-ref-syntax-ddl-comment.html""" - stmt = DropTableComment(table_with_comment) - output = self.compile(stmt) - assert output == "COMMENT ON TABLE martin IS NULL" diff --git a/src/databricks/sqlalchemy/test_local/test_parsing.py b/src/databricks/sqlalchemy/test_local/test_parsing.py deleted file mode 100644 index c8ab443d..00000000 --- a/src/databricks/sqlalchemy/test_local/test_parsing.py +++ /dev/null @@ -1,160 +0,0 @@ -import pytest -from databricks.sqlalchemy._parse import ( - extract_identifiers_from_string, - extract_identifier_groups_from_string, - extract_three_level_identifier_from_constraint_string, - build_fk_dict, - build_pk_dict, - match_dte_rows_by_value, - get_comment_from_dte_output, - DatabricksSqlAlchemyParseException, -) - - -# These are outputs from DESCRIBE TABLE EXTENDED -@pytest.mark.parametrize( - "input, expected", - [ - ("PRIMARY KEY (`pk1`, `pk2`)", ["pk1", "pk2"]), - ("PRIMARY KEY (`a`, `b`, `c`)", ["a", "b", "c"]), - ("PRIMARY KEY (`name`, `id`, `attr`)", ["name", "id", "attr"]), - ], -) -def test_extract_identifiers(input, expected): - assert ( - extract_identifiers_from_string(input) == expected - ), "Failed to extract identifiers from string" - - -@pytest.mark.parametrize( - "input, expected", - [ - ( - "FOREIGN KEY (`pname`, `pid`, `pattr`) REFERENCES `main`.`pysql_sqlalchemy`.`tb1` (`name`, `id`, `attr`)", - [ - "(`pname`, `pid`, `pattr`)", - "(`name`, `id`, `attr`)", - ], - ) - ], -) -def test_extract_identifer_batches(input, expected): - assert ( - extract_identifier_groups_from_string(input) == expected - ), "Failed to extract identifier groups from string" - - -def test_extract_3l_namespace_from_constraint_string(): - input = "FOREIGN KEY (`parent_user_id`) REFERENCES `main`.`pysql_dialect_compliance`.`users` 
(`user_id`)" - expected = { - "catalog": "main", - "schema": "pysql_dialect_compliance", - "table": "users", - } - - assert ( - extract_three_level_identifier_from_constraint_string(input) == expected - ), "Failed to extract 3L namespace from constraint string" - - -def test_extract_3l_namespace_from_bad_constraint_string(): - input = "FOREIGN KEY (`parent_user_id`) REFERENCES `pysql_dialect_compliance`.`users` (`user_id`)" - - with pytest.raises(DatabricksSqlAlchemyParseException): - extract_three_level_identifier_from_constraint_string(input) - - -@pytest.mark.parametrize("tschema", [None, "some_schema"]) -def test_build_fk_dict(tschema): - fk_constraint_string = "FOREIGN KEY (`parent_user_id`) REFERENCES `main`.`some_schema`.`users` (`user_id`)" - - result = build_fk_dict("some_fk_name", fk_constraint_string, schema_name=tschema) - - assert result == { - "name": "some_fk_name", - "constrained_columns": ["parent_user_id"], - "referred_schema": tschema, - "referred_table": "users", - "referred_columns": ["user_id"], - } - - -def test_build_pk_dict(): - pk_constraint_string = "PRIMARY KEY (`id`, `name`, `email_address`)" - pk_name = "pk1" - - result = build_pk_dict(pk_name, pk_constraint_string) - - assert result == { - "constrained_columns": ["id", "name", "email_address"], - "name": "pk1", - } - - -# This is a real example of the output from DESCRIBE TABLE EXTENDED as of 15 October 2023 -RAW_SAMPLE_DTE_OUTPUT = [ - ["id", "int"], - ["name", "string"], - ["", ""], - ["# Detailed Table Information", ""], - ["Catalog", "main"], - ["Database", "pysql_sqlalchemy"], - ["Table", "exampleexampleexample"], - ["Created Time", "Sun Oct 15 21:12:54 UTC 2023"], - ["Last Access", "UNKNOWN"], - ["Created By", "Spark "], - ["Type", "MANAGED"], - ["Location", "s3://us-west-2-****-/19a85dee-****/tables/ccb7***"], - ["Provider", "delta"], - ["Comment", "some comment"], - ["Owner", "some.user@example.com"], - ["Is_managed_location", "true"], - ["Predictive Optimization", "ENABLE (inherited from CATALOG main)"], - [ - "Table Properties", - "[delta.checkpoint.writeStatsAsJson=false,delta.checkpoint.writeStatsAsStruct=true,delta.minReaderVersion=1,delta.minWriterVersion=2]", - ], - ["", ""], - ["# Constraints", ""], - ["exampleexampleexample_pk", "PRIMARY KEY (`id`)"], - [ - "exampleexampleexample_fk", - "FOREIGN KEY (`parent_user_id`) REFERENCES `main`.`pysql_dialect_compliance`.`users` (`user_id`)", - ], -] - -FMT_SAMPLE_DT_OUTPUT = [ - {"col_name": i[0], "data_type": i[1]} for i in RAW_SAMPLE_DTE_OUTPUT -] - - -@pytest.mark.parametrize( - "match, output", - [ - ( - "PRIMARY KEY", - [ - { - "col_name": "exampleexampleexample_pk", - "data_type": "PRIMARY KEY (`id`)", - } - ], - ), - ( - "FOREIGN KEY", - [ - { - "col_name": "exampleexampleexample_fk", - "data_type": "FOREIGN KEY (`parent_user_id`) REFERENCES `main`.`pysql_dialect_compliance`.`users` (`user_id`)", - } - ], - ), - ], -) -def test_filter_dict_by_value(match, output): - result = match_dte_rows_by_value(FMT_SAMPLE_DT_OUTPUT, match) - assert result == output - - -def test_get_comment_from_dte_output(): - assert get_comment_from_dte_output(FMT_SAMPLE_DT_OUTPUT) == "some comment" diff --git a/src/databricks/sqlalchemy/test_local/test_types.py b/src/databricks/sqlalchemy/test_local/test_types.py deleted file mode 100644 index b91217ed..00000000 --- a/src/databricks/sqlalchemy/test_local/test_types.py +++ /dev/null @@ -1,161 +0,0 @@ -import enum - -import pytest -import sqlalchemy - -from databricks.sqlalchemy.base import DatabricksDialect -from 
databricks.sqlalchemy._types import TINYINT, TIMESTAMP, TIMESTAMP_NTZ - - -class DatabricksDataType(enum.Enum): - """https://docs.databricks.com/en/sql/language-manual/sql-ref-datatypes.html""" - - BIGINT = enum.auto() - BINARY = enum.auto() - BOOLEAN = enum.auto() - DATE = enum.auto() - DECIMAL = enum.auto() - DOUBLE = enum.auto() - FLOAT = enum.auto() - INT = enum.auto() - INTERVAL = enum.auto() - VOID = enum.auto() - SMALLINT = enum.auto() - STRING = enum.auto() - TIMESTAMP = enum.auto() - TIMESTAMP_NTZ = enum.auto() - TINYINT = enum.auto() - ARRAY = enum.auto() - MAP = enum.auto() - STRUCT = enum.auto() - - -# Defines the way that SQLAlchemy CamelCase types are compiled into Databricks SQL types. -# Note: I wish I could define this within the TestCamelCaseTypesCompilation class, but pytest doesn't like that. -camel_case_type_map = { - sqlalchemy.types.BigInteger: DatabricksDataType.BIGINT, - sqlalchemy.types.LargeBinary: DatabricksDataType.BINARY, - sqlalchemy.types.Boolean: DatabricksDataType.BOOLEAN, - sqlalchemy.types.Date: DatabricksDataType.DATE, - sqlalchemy.types.DateTime: DatabricksDataType.TIMESTAMP_NTZ, - sqlalchemy.types.Double: DatabricksDataType.DOUBLE, - sqlalchemy.types.Enum: DatabricksDataType.STRING, - sqlalchemy.types.Float: DatabricksDataType.FLOAT, - sqlalchemy.types.Integer: DatabricksDataType.INT, - sqlalchemy.types.Interval: DatabricksDataType.TIMESTAMP_NTZ, - sqlalchemy.types.Numeric: DatabricksDataType.DECIMAL, - sqlalchemy.types.PickleType: DatabricksDataType.BINARY, - sqlalchemy.types.SmallInteger: DatabricksDataType.SMALLINT, - sqlalchemy.types.String: DatabricksDataType.STRING, - sqlalchemy.types.Text: DatabricksDataType.STRING, - sqlalchemy.types.Time: DatabricksDataType.STRING, - sqlalchemy.types.Unicode: DatabricksDataType.STRING, - sqlalchemy.types.UnicodeText: DatabricksDataType.STRING, - sqlalchemy.types.Uuid: DatabricksDataType.STRING, -} - - -def dict_as_tuple_list(d: dict): - """Return a list of [(key, value), ...] from a dictionary.""" - return [(key, value) for key, value in d.items()] - - -class CompilationTestBase: - dialect = DatabricksDialect() - - def _assert_compiled_value( - self, type_: sqlalchemy.types.TypeEngine, expected: DatabricksDataType - ): - """Assert that when type_ is compiled for the databricks dialect, it renders the DatabricksDataType name. - - This method initialises the type_ with no arguments. - """ - compiled_result = type_().compile(dialect=self.dialect) # type: ignore - assert compiled_result == expected.name - - def _assert_compiled_value_explicit( - self, type_: sqlalchemy.types.TypeEngine, expected: str - ): - """Assert that when type_ is compiled for the databricks dialect, it renders the expected string. - - This method expects an initialised type_ so that we can test how a TypeEngine created with arguments - is compiled. - """ - compiled_result = type_.compile(dialect=self.dialect) - assert compiled_result == expected - - -class TestCamelCaseTypesCompilation(CompilationTestBase): - """Per the sqlalchemy documentation[^1] here, the camel case members of sqlalchemy.types are - are expected to work across all dialects. These tests verify that the types compile into valid - Databricks SQL type strings. For example, the sqlalchemy.types.Integer() should compile as "INT". - - Truly custom types like STRUCT (notice the uppercase) are not expected to work across all dialects. - We test these separately. - - Note that these tests have to do with type **name** compiliation. 
Which is separate from actually - mapping values between Python and Databricks. - - Note: SchemaType and MatchType are not tested because it's not used in table definitions - - [1]: https://docs.sqlalchemy.org/en/20/core/type_basics.html#generic-camelcase-types - """ - - @pytest.mark.parametrize("type_, expected", dict_as_tuple_list(camel_case_type_map)) - def test_bare_camel_case_types_compile(self, type_, expected): - self._assert_compiled_value(type_, expected) - - def test_numeric_renders_as_decimal_with_precision(self): - self._assert_compiled_value_explicit( - sqlalchemy.types.Numeric(10), "DECIMAL(10)" - ) - - def test_numeric_renders_as_decimal_with_precision_and_scale(self): - self._assert_compiled_value_explicit( - sqlalchemy.types.Numeric(10, 2), "DECIMAL(10, 2)" - ) - - -uppercase_type_map = { - sqlalchemy.types.ARRAY: DatabricksDataType.ARRAY, - sqlalchemy.types.BIGINT: DatabricksDataType.BIGINT, - sqlalchemy.types.BINARY: DatabricksDataType.BINARY, - sqlalchemy.types.BOOLEAN: DatabricksDataType.BOOLEAN, - sqlalchemy.types.DATE: DatabricksDataType.DATE, - sqlalchemy.types.DECIMAL: DatabricksDataType.DECIMAL, - sqlalchemy.types.DOUBLE: DatabricksDataType.DOUBLE, - sqlalchemy.types.FLOAT: DatabricksDataType.FLOAT, - sqlalchemy.types.INT: DatabricksDataType.INT, - sqlalchemy.types.SMALLINT: DatabricksDataType.SMALLINT, - sqlalchemy.types.TIMESTAMP: DatabricksDataType.TIMESTAMP, - TINYINT: DatabricksDataType.TINYINT, - TIMESTAMP: DatabricksDataType.TIMESTAMP, - TIMESTAMP_NTZ: DatabricksDataType.TIMESTAMP_NTZ, -} - - -class TestUppercaseTypesCompilation(CompilationTestBase): - """Per the sqlalchemy documentation[^1], uppercase types are considered to be specific to some - database backends. These tests verify that the types compile into valid Databricks SQL type strings. - - [1]: https://docs.sqlalchemy.org/en/20/core/type_basics.html#backend-specific-uppercase-datatypes - """ - - @pytest.mark.parametrize("type_, expected", dict_as_tuple_list(uppercase_type_map)) - def test_bare_uppercase_types_compile(self, type_, expected): - if isinstance(type_, type(sqlalchemy.types.ARRAY)): - # ARRAY cannot be initialised without passing an item definition so we test separately - # I preserve it in the uppercase_type_map for clarity - assert True - else: - self._assert_compiled_value(type_, expected) - - def test_array_string_renders_as_array_of_string(self): - """SQLAlchemy's ARRAY type requires an item definition. And their docs indicate that they've only tested - it with Postgres since that's the only first-class dialect with support for ARRAY. 
- - https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.ARRAY - """ - self._assert_compiled_value_explicit( - sqlalchemy.types.ARRAY(sqlalchemy.types.String), "ARRAY" - ) From a022590eca53872395b5e37b7fa8545638cef468 Mon Sep 17 00:00:00 2001 From: Jothi Prakash Date: Wed, 25 Sep 2024 22:41:41 +0530 Subject: [PATCH 03/19] Changed the folder structure such that sqlalchemy has not reference here --- .../src/databricks/sqlalchemy/__init__.py | 6 ------ .../tests/e2e/common/large_queries_mixin.py | 2 +- 2 files changed, 1 insertion(+), 7 deletions(-) delete mode 100644 databricks_sql_connector_core/src/databricks/sqlalchemy/__init__.py diff --git a/databricks_sql_connector_core/src/databricks/sqlalchemy/__init__.py b/databricks_sql_connector_core/src/databricks/sqlalchemy/__init__.py deleted file mode 100644 index f79d4c20..00000000 --- a/databricks_sql_connector_core/src/databricks/sqlalchemy/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -try: - from databricks_sqlalchemy import * -except: - import warnings - - warnings.warn("Install databricks-sqlalchemy plugin before using this") \ No newline at end of file diff --git a/databricks_sql_connector_core/tests/e2e/common/large_queries_mixin.py b/databricks_sql_connector_core/tests/e2e/common/large_queries_mixin.py index 07d02447..f337eb76 100644 --- a/databricks_sql_connector_core/tests/e2e/common/large_queries_mixin.py +++ b/databricks_sql_connector_core/tests/e2e/common/large_queries_mixin.py @@ -44,7 +44,7 @@ def fetch_rows(self, cursor, row_count, fetchmany_size): + "assuming 10K fetch size." ) - @pytest.mark.skipif(not pysql_supports_arrow(), "Without pyarrow lz4 compression is not supported") + @pytest.mark.skipif(not pysql_supports_arrow(), reason="Without pyarrow lz4 compression is not supported") def test_query_with_large_wide_result_set(self): resultSize = 300 * 1000 * 1000 # 300 MB width = 8192 # B From af473018b41d8470340ba8c43f55d5a6eb0656a5 Mon Sep 17 00:00:00 2001 From: Jothi Prakash Date: Tue, 8 Oct 2024 12:15:16 +0530 Subject: [PATCH 04/19] Fixed README.md and CONTRIBUTING.md --- CONTRIBUTING.md | 8 ++++---- README.md | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ce0968d4..473c6063 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -85,18 +85,18 @@ We use [Pytest](https://docs.pytest.org/en/7.1.x/) as our test runner. Invoke it Unit tests do not require a Databricks account. ```bash -poetry run python -m pytest tests/unit +poetry run python -m pytest databricks_sql_connector_core/tests/unit ``` #### Only a specific test file ```bash -poetry run python -m pytest tests/unit/tests.py +poetry run python -m pytest databricks_sql_connector_core/tests/unit/tests.py ``` #### Only a specific method ```bash -poetry run python -m pytest tests/unit/tests.py::ClientTestSuite::test_closing_connection_closes_commands +poetry run python -m pytest databricks_sql_connector_core/tests/unit/tests.py::ClientTestSuite::test_closing_connection_closes_commands ``` #### e2e Tests @@ -133,7 +133,7 @@ There are several e2e test suites available: To execute the core test suite: ```bash -poetry run python -m pytest tests/e2e/driver_tests.py::PySQLCoreTestSuite +poetry run python -m pytest databricks_sql_connector_core/tests/e2e/driver_tests.py::PySQLCoreTestSuite ``` The `PySQLCoreTestSuite` namespace contains tests for all of the connector's basic features and behaviours. 
This is the default namespace where tests should be written unless they require specially configured clusters or take an especially long-time to execute by design. diff --git a/README.md b/README.md index 54d4b178..db37bf51 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ [![PyPI](https://img.shields.io/pypi/v/databricks-sql-connector?style=flat-square)](https://pypi.org/project/databricks-sql-connector/) [![Downloads](https://pepy.tech/badge/databricks-sql-connector)](https://pepy.tech/project/databricks-sql-connector) -The Databricks SQL Connector for Python allows you to develop Python applications that connect to Databricks clusters and SQL warehouses. It is a Thrift-based client with no dependencies on ODBC or JDBC. It conforms to the [Python DB API 2.0 specification](https://www.python.org/dev/peps/pep-0249/) and exposes a [SQLAlchemy](https://www.sqlalchemy.org/) dialect for use with tools like `pandas` and `alembic` which use SQLAlchemy to execute DDL. Use `pip install databricks-sql-connector[sqlalchemy]` to install with SQLAlchemy's dependencies. `pip install databricks-sql-connector[alembic]` will install alembic's dependencies. +The Databricks SQL Connector for Python allows you to develop Python applications that connect to Databricks clusters and SQL warehouses. It is a Thrift-based client with no dependencies on ODBC or JDBC. It conforms to the [Python DB API 2.0 specification](https://www.python.org/dev/peps/pep-0249/) and exposes a [SQLAlchemy](https://www.sqlalchemy.org/) dialect for use with tools like `pandas` and `alembic` which use SQLAlchemy to execute DDL. Use `pip install databricks-sql-connector[databricks-sqlalchemy]` to install with SQLAlchemy's dependencies. `pip install databricks-sql-connector[alembic]` will install alembic's dependencies. This connector uses Arrow as the data-exchange format, and supports APIs to directly fetch Arrow tables. Arrow tables are wrapped in the `ArrowQueue` class to provide a natural API to get several rows at a time. 
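The README paragraph above describes a DB API 2.0 flow with an Arrow-backed fetch. A minimal sketch of that flow follows, assuming the pyarrow-enabled install; the environment variable names mirror the `examples/` scripts reformatted later in this series:

```python
import os

from databricks import sql

# Minimal sketch of the flow the README describes. Assumes pyarrow is
# installed; env var names follow the examples/ scripts in this repo.
with sql.connect(
    server_hostname=os.getenv("DATABRICKS_SERVER_HOSTNAME"),
    http_path=os.getenv("DATABRICKS_HTTP_PATH"),
    access_token=os.getenv("DATABRICKS_TOKEN"),
) as connection:
    with connection.cursor() as cursor:
        cursor.execute("SELECT id, id * id AS id_squared FROM range(10)")
        arrow_table = cursor.fetchall_arrow()  # pyarrow.Table wrapping the result
        print(arrow_table.to_pydict())
```

Plain `cursor.fetchall()` returns Row objects instead and works without pyarrow; `fetchall_arrow` is the path that exercises the `ArrowQueue` wrapper mentioned above.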
From 64b2818d2ea4bbae63bf7a25de509491f550bcda Mon Sep 17 00:00:00 2001 From: Jothi Prakash Date: Wed, 9 Oct 2024 00:32:25 +0530 Subject: [PATCH 05/19] Added manual publish --- .github/workflows/publish-manual.yml | 84 ++++++++++++++++++++ databricks_sql_connector/pyproject.toml | 6 +- databricks_sql_connector_core/pyproject.toml | 2 +- 3 files changed, 88 insertions(+), 4 deletions(-) create mode 100644 .github/workflows/publish-manual.yml diff --git a/.github/workflows/publish-manual.yml b/.github/workflows/publish-manual.yml new file mode 100644 index 00000000..832d1894 --- /dev/null +++ b/.github/workflows/publish-manual.yml @@ -0,0 +1,84 @@ +name: Publish to PyPI [Production] + +# Allow manual triggering of the workflow +on: + workflow_dispatch: # This enables manual triggering of the workflow + inputs: + version: # The version input that will be provided manually when the workflow is triggered + description: 'Specify the version to release (e.g., 4.0.0, 4.0.0.b0)' + required: true + default: '4.0.0.b0' # Set a default version value + +jobs: + publish: + name: Publish + runs-on: ubuntu-latest + + steps: + #---------------------------------------------- + # Step 1: Check out the repository code + #---------------------------------------------- + - name: Check out repository + uses: actions/checkout@v2 # Check out the repository to access the code + + #---------------------------------------------- + # Step 2: Set up Python environment + #---------------------------------------------- + - name: Set up python + id: setup-python + uses: actions/setup-python@v2 + with: + python-version: 3.9 # Specify the Python version to be used + + #---------------------------------------------- + # Step 3: Install and configure Poetry + #---------------------------------------------- + - name: Install Poetry + uses: snok/install-poetry@v1 # Install Poetry, the Python package manager + with: + virtualenvs-create: true + virtualenvs-in-project: true + installer-parallel: true + +# #---------------------------------------------- +# # Step 4: Load cached virtual environment (if available) +# #---------------------------------------------- +# - name: Load cached venv +# id: cached-poetry-dependencies +# uses: actions/cache@v2 +# with: +# path: .venv # Path to the virtual environment +# key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ github.event.repository.name }}-${{ hashFiles('**/poetry.lock') }} +# # Cache key is generated based on OS, Python version, repo name, and the `poetry.lock` file hash + +# #---------------------------------------------- +# # Step 5: Install dependencies if the cache is not found +# #---------------------------------------------- +# - name: Install dependencies +# if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' # Only run if the cache was not hit +# run: poetry install --no-interaction --no-root # Install dependencies without interaction + +# #---------------------------------------------- +# # Step 6: Update the version to the manually provided version +# #---------------------------------------------- +# - name: Update pyproject.toml with the specified version +# run: poetry version ${{ github.event.inputs.version }} # Use the version provided by the user input + + #---------------------------------------------- + # Step 7: Build and publish the first package to PyPI + #---------------------------------------------- + - name: Build and publish databricks sql connector to PyPI + working-directory: ./databricks_sql_connector + uses: 
JRubics/poetry-publish@v1.10 # Use the poetry-publish action to handle publishing + with: + pypi_token: ${{ secrets.PROD_PYPI_TOKEN }} # The PyPI token for authentication, stored in GitHub Secrets + + #---------------------------------------------- + # Step 7: Build and publish the second package to PyPI + #---------------------------------------------- + + - name: Build and publish databricks sql connector core to PyPI + working-directory: ./databricks_sql_connector_core + uses: JRubics/poetry-publish@v1.10 # Use the poetry-publish action to handle publishing + with: + pypi_token: ${{ secrets.PROD_PYPI_TOKEN }} # The PyPI token for authentication, stored in GitHub Secrets diff --git a/databricks_sql_connector/pyproject.toml b/databricks_sql_connector/pyproject.toml index 6e7297d1..37d13802 100644 --- a/databricks_sql_connector/pyproject.toml +++ b/databricks_sql_connector/pyproject.toml @@ -1,14 +1,14 @@ [tool.poetry] name = "databricks-sql-connector" -version = "3.5.0" +version = "4.0.0.b0" description = "Databricks SQL Connector for Python" authors = ["Databricks "] license = "Apache-2.0" [tool.poetry.dependencies] -databricks_sql_connector_core = { version = ">=1.0.0", extras=["all"]} -databricks_sqlalchemy = { version = ">=1.0.0", optional = true } +databricks_sql_connector_core = { version = ">=4.0.0", extras=["all"]} +databricks_sqlalchemy = { version = ">=4.0.0", optional = true } [tool.poetry.extras] databricks_sqlalchemy = ["databricks_sqlalchemy"] diff --git a/databricks_sql_connector_core/pyproject.toml b/databricks_sql_connector_core/pyproject.toml index a6e36091..c1b614f8 100644 --- a/databricks_sql_connector_core/pyproject.toml +++ b/databricks_sql_connector_core/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "databricks-sql-connector-core" -version = "1.0.0" +version = "4.0.0.b0" description = "Databricks SQL Connector core for Python" authors = ["Databricks "] packages = [{ include = "databricks", from = "src" }] From 44b52accbcbc141ab0f796cb2a3c3d8a34633675 Mon Sep 17 00:00:00 2001 From: Jothi Prakash Date: Wed, 9 Oct 2024 00:58:09 +0530 Subject: [PATCH 06/19] On push trigger added --- .github/workflows/publish-manual.yml | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/.github/workflows/publish-manual.yml b/.github/workflows/publish-manual.yml index 832d1894..2101383c 100644 --- a/.github/workflows/publish-manual.yml +++ b/.github/workflows/publish-manual.yml @@ -2,12 +2,10 @@ name: Publish to PyPI [Production] # Allow manual triggering of the workflow on: - workflow_dispatch: # This enables manual triggering of the workflow - inputs: - version: # The version input that will be provided manually when the workflow is triggered - description: 'Specify the version to release (e.g., 4.0.0, 4.0.0.b0)' - required: true - default: '4.0.0.b0' # Set a default version value + workflow_dispatch: {} + push: + branches: + - 'PECO-1803/connector-split' jobs: publish: From 8db3fd081b90ac5f9e9415a1177531802c173656 Mon Sep 17 00:00:00 2001 From: Jothi Prakash Date: Wed, 9 Oct 2024 01:04:12 +0530 Subject: [PATCH 07/19] Manually setting the publish step --- .github/workflows/publish-manual.yml | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/.github/workflows/publish-manual.yml b/.github/workflows/publish-manual.yml index 2101383c..1768b821 100644 --- a/.github/workflows/publish-manual.yml +++ b/.github/workflows/publish-manual.yml @@ -1,4 +1,4 @@ -name: Publish to PyPI [Production] +name: Publish to PyPI Manual [Production] 
# Allow manual triggering of the workflow on: @@ -67,16 +67,15 @@ jobs: #---------------------------------------------- - name: Build and publish databricks sql connector to PyPI working-directory: ./databricks_sql_connector - uses: JRubics/poetry-publish@v1.10 # Use the poetry-publish action to handle publishing - with: - pypi_token: ${{ secrets.PROD_PYPI_TOKEN }} # The PyPI token for authentication, stored in GitHub Secrets - + run: | + poetry build + poetry publish -u __token__ -p ${{ secrets.PROD_PYPI_TOKEN }} # Publish with PyPI token #---------------------------------------------- # Step 7: Build and publish the second package to PyPI #---------------------------------------------- - name: Build and publish databricks sql connector core to PyPI working-directory: ./databricks_sql_connector_core - uses: JRubics/poetry-publish@v1.10 # Use the poetry-publish action to handle publishing - with: - pypi_token: ${{ secrets.PROD_PYPI_TOKEN }} # The PyPI token for authentication, stored in GitHub Secrets + run: | + poetry build + poetry publish -u __token__ -p ${{ secrets.PROD_PYPI_TOKEN }} # Publish with PyPI token \ No newline at end of file From 3d1ef794b8131fd44c2d3c78d63d504bf2c8b2e2 Mon Sep 17 00:00:00 2001 From: Jothi Prakash Date: Thu, 17 Oct 2024 10:52:14 +0530 Subject: [PATCH 08/19] Changed versioning in pyproject.toml --- .github/workflows/publish-manual.yml | 3 --- databricks_sql_connector/pyproject.toml | 7 ++++--- databricks_sql_connector_core/pyproject.toml | 2 +- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/.github/workflows/publish-manual.yml b/.github/workflows/publish-manual.yml index 1768b821..ecad71a2 100644 --- a/.github/workflows/publish-manual.yml +++ b/.github/workflows/publish-manual.yml @@ -3,9 +3,6 @@ name: Publish to PyPI Manual [Production] # Allow manual triggering of the workflow on: workflow_dispatch: {} - push: - branches: - - 'PECO-1803/connector-split' jobs: publish: diff --git a/databricks_sql_connector/pyproject.toml b/databricks_sql_connector/pyproject.toml index 37d13802..7d2bb0d4 100644 --- a/databricks_sql_connector/pyproject.toml +++ b/databricks_sql_connector/pyproject.toml @@ -1,14 +1,15 @@ [tool.poetry] name = "databricks-sql-connector" -version = "4.0.0.b0" +version = "4.0.0.b1" description = "Databricks SQL Connector for Python" authors = ["Databricks "] license = "Apache-2.0" [tool.poetry.dependencies] -databricks_sql_connector_core = { version = ">=4.0.0", extras=["all"]} -databricks_sqlalchemy = { version = ">=4.0.0", optional = true } +python = "^3.8.0" +databricks_sql_connector_core = { version = ">=4.0.0", extras=["pyarrow"]} +databricks_sqlalchemy = { version = ">=1.0.0", optional = true } [tool.poetry.extras] databricks_sqlalchemy = ["databricks_sqlalchemy"] diff --git a/databricks_sql_connector_core/pyproject.toml b/databricks_sql_connector_core/pyproject.toml index c1b614f8..f837fd6f 100644 --- a/databricks_sql_connector_core/pyproject.toml +++ b/databricks_sql_connector_core/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "databricks-sql-connector-core" -version = "4.0.0.b0" +version = "4.0.1" description = "Databricks SQL Connector core for Python" authors = ["Databricks "] packages = [{ include = "databricks", from = "src" }] From ee7f1e3720ecca56bbba13f9b507a1ec04e79fc2 Mon Sep 17 00:00:00 2001 From: Jothi Prakash Date: Wed, 6 Nov 2024 13:34:24 +0530 Subject: [PATCH 09/19] Bumped up the version to 4.0.0.b3 and also changed the structure to have pyarrow as optional --- CHANGELOG.md | 15 + CONTRIBUTING.md | 8 +- 
README.md | 2 +- .../tests/conftest.py => conftest.py | 0 databricks_sql_connector/pyproject.toml | 24 - .../tests/unit/__init__.py | 0 examples/custom_cred_provider.py | 22 +- examples/insert_data.py | 26 +- examples/interactive_oauth.py | 8 +- examples/m2m_oauth.py | 12 +- examples/persistent_oauth.py | 47 +- examples/query_cancel.py | 69 +- examples/query_execute.py | 18 +- examples/set_user_agent.py | 20 +- examples/v3_retries_query_execute.py | 24 +- .../poetry.lock => poetry.lock | 676 +++++++------ .../pyproject.toml => pyproject.toml | 28 +- .../src => src}/databricks/__init__.py | 0 .../src => src}/databricks/sql/__init__.py | 2 +- .../databricks/sql/auth}/__init__.py | 0 .../src => src}/databricks/sql/auth/auth.py | 0 .../databricks/sql/auth/authenticators.py | 0 .../databricks/sql/auth/endpoint.py | 0 .../src => src}/databricks/sql/auth/oauth.py | 0 .../databricks/sql/auth/oauth_http_handler.py | 0 .../src => src}/databricks/sql/auth/retry.py | 0 .../databricks/sql/auth/thrift_http_client.py | 41 +- .../src => src}/databricks/sql/client.py | 113 ++- .../sql/cloudfetch/download_manager.py | 9 +- .../databricks/sql/cloudfetch/downloader.py | 13 +- .../src => src}/databricks/sql/exc.py | 0 .../databricks/sql/experimental}/__init__.py | 0 .../sql/experimental/oauth_persistence.py | 0 .../databricks/sql/parameters/__init__.py | 0 .../databricks/sql/parameters/native.py | 0 .../databricks/sql/parameters/py.typed | 0 .../src => src}/databricks/sql/py.typed | 0 .../thrift_api/TCLIService/TCLIService-remote | 0 .../sql/thrift_api/TCLIService/TCLIService.py | 0 .../sql/thrift_api/TCLIService/__init__.py | 0 .../sql/thrift_api/TCLIService/constants.py | 0 .../sql/thrift_api/TCLIService/ttypes.py | 0 .../databricks/sql/thrift_api}/__init__.py | 0 .../databricks/sql/thrift_backend.py | 75 +- .../src => src}/databricks/sql/types.py | 48 + .../src => src}/databricks/sql/utils.py | 176 +++- src/databricks/sqlalchemy/__init__.py | 6 + .../sql/thrift_api => tests}/__init__.py | 0 .../tests => tests/e2e}/__init__.py | 0 .../e2e => tests/e2e/common}/__init__.py | 0 .../tests => tests}/e2e/common/core_tests.py | 31 +- .../e2e/common/decimal_tests.py | 48 +- .../e2e/common/large_queries_mixin.py | 17 +- .../tests => tests}/e2e/common/predicates.py | 40 +- .../e2e/common/retry_test_mixins.py | 41 +- .../e2e/common/staging_ingestion_tests.py | 89 +- .../e2e/common/timestamp_tests.py | 17 +- .../e2e/common/uc_volume_tests.py | 73 +- .../tests => tests}/e2e/test_complex_types.py | 7 +- .../tests => tests}/e2e/test_driver.py | 114 ++- .../e2e/test_parameterized_queries.py | 23 +- .../e2e/common => tests/unit}/__init__.py | 0 .../tests => tests}/unit/test_arrow_queue.py | 25 +- .../tests => tests}/unit/test_auth.py | 12 +- .../tests => tests}/unit/test_client.py | 178 ++-- .../unit/test_cloud_fetch_queue.py | 102 +- tests/unit/test_column_queue.py | 26 + .../unit/test_download_manager.py | 38 +- .../tests => tests}/unit/test_downloader.py | 85 +- .../tests => tests}/unit/test_endpoint.py | 0 .../tests => tests}/unit/test_fetches.py | 100 +- .../unit/test_fetches_bench.py | 27 +- .../tests => tests}/unit/test_init_file.py | 0 .../unit/test_oauth_persistence.py | 11 +- .../unit/test_param_escaper.py | 26 +- .../tests => tests}/unit/test_parameters.py | 0 .../tests => tests}/unit/test_retry.py | 5 +- .../unit/test_thrift_backend.py | 897 ++++++++++++++---- 78 files changed, 2403 insertions(+), 1111 deletions(-) rename databricks_sql_connector_core/tests/conftest.py => conftest.py (100%) delete mode 100644 
databricks_sql_connector/pyproject.toml delete mode 100644 databricks_sql_connector_core/tests/unit/__init__.py rename databricks_sql_connector_core/poetry.lock => poetry.lock (53%) mode change 100755 => 100644 rename databricks_sql_connector_core/pyproject.toml => pyproject.toml (63%) rename {databricks_sql_connector_core/src => src}/databricks/__init__.py (100%) rename {databricks_sql_connector_core/src => src}/databricks/sql/__init__.py (99%) rename {databricks_sql_connector/databricks_sql_connector => src/databricks/sql/auth}/__init__.py (100%) rename {databricks_sql_connector_core/src => src}/databricks/sql/auth/auth.py (100%) rename {databricks_sql_connector_core/src => src}/databricks/sql/auth/authenticators.py (100%) rename {databricks_sql_connector_core/src => src}/databricks/sql/auth/endpoint.py (100%) rename {databricks_sql_connector_core/src => src}/databricks/sql/auth/oauth.py (100%) rename {databricks_sql_connector_core/src => src}/databricks/sql/auth/oauth_http_handler.py (100%) rename {databricks_sql_connector_core/src => src}/databricks/sql/auth/retry.py (100%) rename {databricks_sql_connector_core/src => src}/databricks/sql/auth/thrift_http_client.py (85%) rename {databricks_sql_connector_core/src => src}/databricks/sql/client.py (92%) rename {databricks_sql_connector_core/src => src}/databricks/sql/cloudfetch/download_manager.py (96%) rename {databricks_sql_connector_core/src => src}/databricks/sql/cloudfetch/downloader.py (95%) rename {databricks_sql_connector_core/src => src}/databricks/sql/exc.py (100%) rename {databricks_sql_connector_core/src/databricks/sql/auth => src/databricks/sql/experimental}/__init__.py (100%) rename {databricks_sql_connector_core/src => src}/databricks/sql/experimental/oauth_persistence.py (100%) rename {databricks_sql_connector_core/src => src}/databricks/sql/parameters/__init__.py (100%) rename {databricks_sql_connector_core/src => src}/databricks/sql/parameters/native.py (100%) rename {databricks_sql_connector_core/src => src}/databricks/sql/parameters/py.typed (100%) rename {databricks_sql_connector_core/src => src}/databricks/sql/py.typed (100%) rename {databricks_sql_connector_core/src => src}/databricks/sql/thrift_api/TCLIService/TCLIService-remote (100%) rename {databricks_sql_connector_core/src => src}/databricks/sql/thrift_api/TCLIService/TCLIService.py (100%) rename {databricks_sql_connector_core/src => src}/databricks/sql/thrift_api/TCLIService/__init__.py (100%) rename {databricks_sql_connector_core/src => src}/databricks/sql/thrift_api/TCLIService/constants.py (100%) rename {databricks_sql_connector_core/src => src}/databricks/sql/thrift_api/TCLIService/ttypes.py (100%) rename {databricks_sql_connector_core/src/databricks/sql/experimental => src/databricks/sql/thrift_api}/__init__.py (100%) rename {databricks_sql_connector_core/src => src}/databricks/sql/thrift_backend.py (94%) rename {databricks_sql_connector_core/src => src}/databricks/sql/types.py (79%) rename {databricks_sql_connector_core/src => src}/databricks/sql/utils.py (82%) create mode 100644 src/databricks/sqlalchemy/__init__.py rename {databricks_sql_connector_core/src/databricks/sql/thrift_api => tests}/__init__.py (100%) rename {databricks_sql_connector_core/tests => tests/e2e}/__init__.py (100%) rename {databricks_sql_connector_core/tests/e2e => tests/e2e/common}/__init__.py (100%) rename {databricks_sql_connector_core/tests => tests}/e2e/common/core_tests.py (87%) rename {databricks_sql_connector_core/tests => tests}/e2e/common/decimal_tests.py (66%) rename 
{databricks_sql_connector_core/tests => tests}/e2e/common/large_queries_mixin.py (92%) rename {databricks_sql_connector_core/tests => tests}/e2e/common/predicates.py (78%) rename {databricks_sql_connector_core/tests => tests}/e2e/common/retry_test_mixins.py (93%) rename {databricks_sql_connector_core/tests => tests}/e2e/common/staging_ingestion_tests.py (82%) rename {databricks_sql_connector_core/tests => tests}/e2e/common/timestamp_tests.py (88%) rename {databricks_sql_connector_core/tests => tests}/e2e/common/uc_volume_tests.py (82%) rename {databricks_sql_connector_core/tests => tests}/e2e/test_complex_types.py (90%) rename {databricks_sql_connector_core/tests => tests}/e2e/test_driver.py (91%) rename {databricks_sql_connector_core/tests => tests}/e2e/test_parameterized_queries.py (96%) rename {databricks_sql_connector_core/tests/e2e/common => tests/unit}/__init__.py (100%) rename {databricks_sql_connector_core/tests => tests}/unit/test_arrow_queue.py (56%) rename {databricks_sql_connector_core/tests => tests}/unit/test_auth.py (95%) rename {databricks_sql_connector_core/tests => tests}/unit/test_client.py (84%) rename {databricks_sql_connector_core/tests => tests}/unit/test_cloud_fetch_queue.py (82%) create mode 100644 tests/unit/test_column_queue.py rename {databricks_sql_connector_core/tests => tests}/unit/test_download_manager.py (65%) rename {databricks_sql_connector_core/tests => tests}/unit/test_downloader.py (54%) rename {databricks_sql_connector_core/tests => tests}/unit/test_endpoint.py (100%) rename {databricks_sql_connector_core/tests => tests}/unit/test_fetches.py (81%) rename {databricks_sql_connector_core/tests => tests}/unit/test_fetches_bench.py (76%) rename {databricks_sql_connector_core/tests => tests}/unit/test_init_file.py (100%) rename {databricks_sql_connector_core/tests => tests}/unit/test_oauth_persistence.py (82%) rename {databricks_sql_connector_core/tests => tests}/unit/test_param_escaper.py (92%) rename {databricks_sql_connector_core/tests => tests}/unit/test_parameters.py (100%) rename {databricks_sql_connector_core/tests => tests}/unit/test_retry.py (94%) rename {databricks_sql_connector_core/tests => tests}/unit/test_thrift_backend.py (73%) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4fca2046..e1a70f96 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,20 @@ # Release History +# 3.6.0 (2024-10-25) + +- Support encryption headers in the cloud fetch request (https://github.com/databricks/databricks-sql-python/pull/460 by @jackyhu-db) + +# 3.5.0 (2024-10-18) + +- Create a non pyarrow flow to handle small results for the column set (databricks/databricks-sql-python#440 by @jprakash-db) +- Fix: On non-retryable error, ensure PySQL includes useful information in error (databricks/databricks-sql-python#447 by @shivam2680) + +# 3.4.0 (2024-08-27) + +- Unpin pandas to support v2.2.2 (databricks/databricks-sql-python#416 by @kfollesdal) +- Make OAuth as the default authenticator if no authentication setting is provided (databricks/databricks-sql-python#419 by @jackyhu-db) +- Fix (regression): use SSL options with HTTPS connection pool (databricks/databricks-sql-python#425 by @kravets-levko) + # 3.3.0 (2024-07-18) - Don't retry requests that fail with HTTP code 401 (databricks/databricks-sql-python#408 by @Hodnebo) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 473c6063..ce0968d4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -85,18 +85,18 @@ We use [Pytest](https://docs.pytest.org/en/7.1.x/) as our test runner. 
Invoke it Unit tests do not require a Databricks account. ```bash -poetry run python -m pytest databricks_sql_connector_core/tests/unit +poetry run python -m pytest tests/unit ``` #### Only a specific test file ```bash -poetry run python -m pytest databricks_sql_connector_core/tests/unit/tests.py +poetry run python -m pytest tests/unit/tests.py ``` #### Only a specific method ```bash -poetry run python -m pytest databricks_sql_connector_core/tests/unit/tests.py::ClientTestSuite::test_closing_connection_closes_commands +poetry run python -m pytest tests/unit/tests.py::ClientTestSuite::test_closing_connection_closes_commands ``` #### e2e Tests @@ -133,7 +133,7 @@ There are several e2e test suites available: To execute the core test suite: ```bash -poetry run python -m pytest databricks_sql_connector_core/tests/e2e/driver_tests.py::PySQLCoreTestSuite +poetry run python -m pytest tests/e2e/driver_tests.py::PySQLCoreTestSuite ``` The `PySQLCoreTestSuite` namespace contains tests for all of the connector's basic features and behaviours. This is the default namespace where tests should be written unless they require specially configured clusters or take an especially long-time to execute by design. diff --git a/README.md b/README.md index db37bf51..54d4b178 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ [![PyPI](https://img.shields.io/pypi/v/databricks-sql-connector?style=flat-square)](https://pypi.org/project/databricks-sql-connector/) [![Downloads](https://pepy.tech/badge/databricks-sql-connector)](https://pepy.tech/project/databricks-sql-connector) -The Databricks SQL Connector for Python allows you to develop Python applications that connect to Databricks clusters and SQL warehouses. It is a Thrift-based client with no dependencies on ODBC or JDBC. It conforms to the [Python DB API 2.0 specification](https://www.python.org/dev/peps/pep-0249/) and exposes a [SQLAlchemy](https://www.sqlalchemy.org/) dialect for use with tools like `pandas` and `alembic` which use SQLAlchemy to execute DDL. Use `pip install databricks-sql-connector[databricks-sqlalchemy]` to install with SQLAlchemy's dependencies. `pip install databricks-sql-connector[alembic]` will install alembic's dependencies. +The Databricks SQL Connector for Python allows you to develop Python applications that connect to Databricks clusters and SQL warehouses. It is a Thrift-based client with no dependencies on ODBC or JDBC. It conforms to the [Python DB API 2.0 specification](https://www.python.org/dev/peps/pep-0249/) and exposes a [SQLAlchemy](https://www.sqlalchemy.org/) dialect for use with tools like `pandas` and `alembic` which use SQLAlchemy to execute DDL. Use `pip install databricks-sql-connector[sqlalchemy]` to install with SQLAlchemy's dependencies. `pip install databricks-sql-connector[alembic]` will install alembic's dependencies. This connector uses Arrow as the data-exchange format, and supports APIs to directly fetch Arrow tables. Arrow tables are wrapped in the `ArrowQueue` class to provide a natural API to get several rows at a time. 
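This patch's subject makes pyarrow optional. A minimal sketch of the guarded-import pattern such a split implies, with illustrative names only (`arrow_available` is not the connector's actual helper):

```python
# Guarded import: pyarrow is an optional extra, so its absence must not
# break module import. `arrow_available` is an illustrative name only.
try:
    import pyarrow
except ImportError:
    pyarrow = None


def arrow_available() -> bool:
    return pyarrow is not None


# Result handling can then branch: an Arrow-backed queue when the extra is
# installed, a plain column-based queue (the ColumnQueue added earlier in
# this series) otherwise.
print("ArrowQueue" if arrow_available() else "ColumnQueue")
```

Tests gate on the same condition, e.g. `pytest.mark.skipif(not pysql_supports_arrow(), reason=...)` as fixed in an earlier patch in this series.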
diff --git a/databricks_sql_connector_core/tests/conftest.py b/conftest.py similarity index 100% rename from databricks_sql_connector_core/tests/conftest.py rename to conftest.py diff --git a/databricks_sql_connector/pyproject.toml b/databricks_sql_connector/pyproject.toml deleted file mode 100644 index 7d2bb0d4..00000000 --- a/databricks_sql_connector/pyproject.toml +++ /dev/null @@ -1,24 +0,0 @@ -[tool.poetry] -name = "databricks-sql-connector" -version = "4.0.0.b1" -description = "Databricks SQL Connector for Python" -authors = ["Databricks "] -license = "Apache-2.0" - - -[tool.poetry.dependencies] -python = "^3.8.0" -databricks_sql_connector_core = { version = ">=4.0.0", extras=["pyarrow"]} -databricks_sqlalchemy = { version = ">=1.0.0", optional = true } - -[tool.poetry.extras] -databricks_sqlalchemy = ["databricks_sqlalchemy"] - -[tool.poetry.urls] -"Homepage" = "https://github.com/databricks/databricks-sql-python" -"Bug Tracker" = "https://github.com/databricks/databricks-sql-python/issues" - -[build-system] -requires = ["poetry-core>=1.0.0"] -build-backend = "poetry.core.masonry.api" - diff --git a/databricks_sql_connector_core/tests/unit/__init__.py b/databricks_sql_connector_core/tests/unit/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/examples/custom_cred_provider.py b/examples/custom_cred_provider.py index 4c43280f..67945f23 100644 --- a/examples/custom_cred_provider.py +++ b/examples/custom_cred_provider.py @@ -4,23 +4,27 @@ from databricks.sdk.oauth import OAuthClient import os -oauth_client = OAuthClient(host=os.getenv("DATABRICKS_SERVER_HOSTNAME"), - client_id=os.getenv("DATABRICKS_CLIENT_ID"), - client_secret=os.getenv("DATABRICKS_CLIENT_SECRET"), - redirect_url=os.getenv("APP_REDIRECT_URL"), - scopes=['all-apis', 'offline_access']) +oauth_client = OAuthClient( + host=os.getenv("DATABRICKS_SERVER_HOSTNAME"), + client_id=os.getenv("DATABRICKS_CLIENT_ID"), + client_secret=os.getenv("DATABRICKS_CLIENT_SECRET"), + redirect_url=os.getenv("APP_REDIRECT_URL"), + scopes=["all-apis", "offline_access"], +) consent = oauth_client.initiate_consent() creds = consent.launch_external_browser() -with sql.connect(server_hostname = os.getenv("DATABRICKS_SERVER_HOSTNAME"), - http_path = os.getenv("DATABRICKS_HTTP_PATH"), - credentials_provider=creds) as connection: +with sql.connect( + server_hostname=os.getenv("DATABRICKS_SERVER_HOSTNAME"), + http_path=os.getenv("DATABRICKS_HTTP_PATH"), + credentials_provider=creds, +) as connection: for x in range(1, 5): cursor = connection.cursor() - cursor.execute('SELECT 1+1') + cursor.execute("SELECT 1+1") result = cursor.fetchall() for row in result: print(row) diff --git a/examples/insert_data.py b/examples/insert_data.py index b304a0e9..053ed158 100644 --- a/examples/insert_data.py +++ b/examples/insert_data.py @@ -1,21 +1,23 @@ from databricks import sql import os -with sql.connect(server_hostname = os.getenv("DATABRICKS_SERVER_HOSTNAME"), - http_path = os.getenv("DATABRICKS_HTTP_PATH"), - access_token = os.getenv("DATABRICKS_TOKEN")) as connection: +with sql.connect( + server_hostname=os.getenv("DATABRICKS_SERVER_HOSTNAME"), + http_path=os.getenv("DATABRICKS_HTTP_PATH"), + access_token=os.getenv("DATABRICKS_TOKEN"), +) as connection: - with connection.cursor() as cursor: - cursor.execute("CREATE TABLE IF NOT EXISTS squares (x int, x_squared int)") + with connection.cursor() as cursor: + cursor.execute("CREATE TABLE IF NOT EXISTS squares (x int, x_squared int)") - squares = [(i, i * i) for i in range(100)] - values = 
",".join([f"({x}, {y})" for (x, y) in squares]) + squares = [(i, i * i) for i in range(100)] + values = ",".join([f"({x}, {y})" for (x, y) in squares]) - cursor.execute(f"INSERT INTO squares VALUES {values}") + cursor.execute(f"INSERT INTO squares VALUES {values}") - cursor.execute("SELECT * FROM squares LIMIT 10") + cursor.execute("SELECT * FROM squares LIMIT 10") - result = cursor.fetchall() + result = cursor.fetchall() - for row in result: - print(row) + for row in result: + print(row) diff --git a/examples/interactive_oauth.py b/examples/interactive_oauth.py index dad5cac6..8dbc8c47 100644 --- a/examples/interactive_oauth.py +++ b/examples/interactive_oauth.py @@ -13,12 +13,14 @@ token across script executions. """ -with sql.connect(server_hostname = os.getenv("DATABRICKS_SERVER_HOSTNAME"), - http_path = os.getenv("DATABRICKS_HTTP_PATH")) as connection: +with sql.connect( + server_hostname=os.getenv("DATABRICKS_SERVER_HOSTNAME"), + http_path=os.getenv("DATABRICKS_HTTP_PATH"), +) as connection: for x in range(1, 100): cursor = connection.cursor() - cursor.execute('SELECT 1+1') + cursor.execute("SELECT 1+1") result = cursor.fetchall() for row in result: print(row) diff --git a/examples/m2m_oauth.py b/examples/m2m_oauth.py index eba2095c..1c8c7278 100644 --- a/examples/m2m_oauth.py +++ b/examples/m2m_oauth.py @@ -22,17 +22,19 @@ def credential_provider(): # Service Principal UUID client_id=os.getenv("DATABRICKS_CLIENT_ID"), # Service Principal Secret - client_secret=os.getenv("DATABRICKS_CLIENT_SECRET")) + client_secret=os.getenv("DATABRICKS_CLIENT_SECRET"), + ) return oauth_service_principal(config) with sql.connect( - server_hostname=server_hostname, - http_path=os.getenv("DATABRICKS_HTTP_PATH"), - credentials_provider=credential_provider) as connection: + server_hostname=server_hostname, + http_path=os.getenv("DATABRICKS_HTTP_PATH"), + credentials_provider=credential_provider, +) as connection: for x in range(1, 100): cursor = connection.cursor() - cursor.execute('SELECT 1+1') + cursor.execute("SELECT 1+1") result = cursor.fetchall() for row in result: print(row) diff --git a/examples/persistent_oauth.py b/examples/persistent_oauth.py index 0f2ba077..1a2eded2 100644 --- a/examples/persistent_oauth.py +++ b/examples/persistent_oauth.py @@ -17,37 +17,44 @@ from typing import Optional from databricks import sql -from databricks.sql.experimental.oauth_persistence import OAuthPersistence, OAuthToken, DevOnlyFilePersistence +from databricks.sql.experimental.oauth_persistence import ( + OAuthPersistence, + OAuthToken, + DevOnlyFilePersistence, +) class SampleOAuthPersistence(OAuthPersistence): - def persist(self, hostname: str, oauth_token: OAuthToken): - """To be implemented by the end user to persist in the preferred storage medium. + def persist(self, hostname: str, oauth_token: OAuthToken): + """To be implemented by the end user to persist in the preferred storage medium. - OAuthToken has two properties: - 1. OAuthToken.access_token - 2. OAuthToken.refresh_token + OAuthToken has two properties: + 1. OAuthToken.access_token + 2. OAuthToken.refresh_token - Both should be persisted. - """ - pass + Both should be persisted. + """ + pass - def read(self, hostname: str) -> Optional[OAuthToken]: - """To be implemented by the end user to fetch token from the preferred storage + def read(self, hostname: str) -> Optional[OAuthToken]: + """To be implemented by the end user to fetch token from the preferred storage - Fetch the access_token and refresh_token for the given hostname. 
- Return OAuthToken(access_token, refresh_token) - """ - pass + Fetch the access_token and refresh_token for the given hostname. + Return OAuthToken(access_token, refresh_token) + """ + pass -with sql.connect(server_hostname = os.getenv("DATABRICKS_SERVER_HOSTNAME"), - http_path = os.getenv("DATABRICKS_HTTP_PATH"), - auth_type="databricks-oauth", - experimental_oauth_persistence=DevOnlyFilePersistence("./sample.json")) as connection: + +with sql.connect( + server_hostname=os.getenv("DATABRICKS_SERVER_HOSTNAME"), + http_path=os.getenv("DATABRICKS_HTTP_PATH"), + auth_type="databricks-oauth", + experimental_oauth_persistence=DevOnlyFilePersistence("./sample.json"), +) as connection: for x in range(1, 100): cursor = connection.cursor() - cursor.execute('SELECT 1+1') + cursor.execute("SELECT 1+1") result = cursor.fetchall() for row in result: print(row) diff --git a/examples/query_cancel.py b/examples/query_cancel.py index 4e0b74a5..b67fc085 100644 --- a/examples/query_cancel.py +++ b/examples/query_cancel.py @@ -5,47 +5,52 @@ The current operation of a cursor may be cancelled by calling its `.cancel()` method as shown in the example below. """ -with sql.connect(server_hostname = os.getenv("DATABRICKS_SERVER_HOSTNAME"), - http_path = os.getenv("DATABRICKS_HTTP_PATH"), - access_token = os.getenv("DATABRICKS_TOKEN")) as connection: +with sql.connect( + server_hostname=os.getenv("DATABRICKS_SERVER_HOSTNAME"), + http_path=os.getenv("DATABRICKS_HTTP_PATH"), + access_token=os.getenv("DATABRICKS_TOKEN"), +) as connection: - with connection.cursor() as cursor: - def execute_really_long_query(): - try: - cursor.execute("SELECT SUM(A.id - B.id) " + - "FROM range(1000000000) A CROSS JOIN range(100000000) B " + - "GROUP BY (A.id - B.id)") - except sql.exc.RequestError: - print("It looks like this query was cancelled.") + with connection.cursor() as cursor: - exec_thread = threading.Thread(target=execute_really_long_query) + def execute_really_long_query(): + try: + cursor.execute( + "SELECT SUM(A.id - B.id) " + + "FROM range(1000000000) A CROSS JOIN range(100000000) B " + + "GROUP BY (A.id - B.id)" + ) + except sql.exc.RequestError: + print("It looks like this query was cancelled.") - print("\n Beginning to execute long query") - exec_thread.start() + exec_thread = threading.Thread(target=execute_really_long_query) - # Make sure the query has started before cancelling - print("\n Waiting 15 seconds before canceling", end="", flush=True) + print("\n Beginning to execute long query") + exec_thread.start() - seconds_waited = 0 - while seconds_waited < 15: - seconds_waited += 1 - print(".", end="", flush=True) - time.sleep(1) + # Make sure the query has started before cancelling + print("\n Waiting 15 seconds before canceling", end="", flush=True) - print("\n Cancelling the cursor's operation. This can take a few seconds.") - cursor.cancel() + seconds_waited = 0 + while seconds_waited < 15: + seconds_waited += 1 + print(".", end="", flush=True) + time.sleep(1) - print("\n Now checking the cursor status:") - exec_thread.join(5) + print("\n Cancelling the cursor's operation. 
This can take a few seconds.") + cursor.cancel() - assert not exec_thread.is_alive() - print("\n The previous command was successfully canceled") + print("\n Now checking the cursor status:") + exec_thread.join(5) - print("\n Now reusing the cursor to run a separate query.") + assert not exec_thread.is_alive() + print("\n The previous command was successfully canceled") - # We can still execute a new command on the cursor - cursor.execute("SELECT * FROM range(3)") + print("\n Now reusing the cursor to run a separate query.") - print("\n Execution was successful. Results appear below:") + # We can still execute a new command on the cursor + cursor.execute("SELECT * FROM range(3)") - print(cursor.fetchall()) + print("\n Execution was successful. Results appear below:") + + print(cursor.fetchall()) diff --git a/examples/query_execute.py b/examples/query_execute.py index a851ab50..38d2f17a 100644 --- a/examples/query_execute.py +++ b/examples/query_execute.py @@ -1,13 +1,15 @@ from databricks import sql import os -with sql.connect(server_hostname = os.getenv("DATABRICKS_SERVER_HOSTNAME"), - http_path = os.getenv("DATABRICKS_HTTP_PATH"), - access_token = os.getenv("DATABRICKS_TOKEN")) as connection: +with sql.connect( + server_hostname=os.getenv("DATABRICKS_SERVER_HOSTNAME"), + http_path=os.getenv("DATABRICKS_HTTP_PATH"), + access_token=os.getenv("DATABRICKS_TOKEN"), +) as connection: - with connection.cursor() as cursor: - cursor.execute("SELECT * FROM default.diamonds LIMIT 2") - result = cursor.fetchall() + with connection.cursor() as cursor: + cursor.execute("SELECT * FROM default.diamonds LIMIT 2") + result = cursor.fetchall() - for row in result: - print(row) + for row in result: + print(row) diff --git a/examples/set_user_agent.py b/examples/set_user_agent.py index 449692cf..93eb2e0b 100644 --- a/examples/set_user_agent.py +++ b/examples/set_user_agent.py @@ -1,14 +1,16 @@ from databricks import sql import os -with sql.connect(server_hostname = os.getenv("DATABRICKS_SERVER_HOSTNAME"), - http_path = os.getenv("DATABRICKS_HTTP_PATH"), - access_token = os.getenv("DATABRICKS_TOKEN"), - _user_agent_entry="ExamplePartnerTag") as connection: +with sql.connect( + server_hostname=os.getenv("DATABRICKS_SERVER_HOSTNAME"), + http_path=os.getenv("DATABRICKS_HTTP_PATH"), + access_token=os.getenv("DATABRICKS_TOKEN"), + _user_agent_entry="ExamplePartnerTag", +) as connection: - with connection.cursor() as cursor: - cursor.execute("SELECT * FROM default.diamonds LIMIT 2") - result = cursor.fetchall() + with connection.cursor() as cursor: + cursor.execute("SELECT * FROM default.diamonds LIMIT 2") + result = cursor.fetchall() - for row in result: - print(row) + for row in result: + print(row) diff --git a/examples/v3_retries_query_execute.py b/examples/v3_retries_query_execute.py index 4b6772fe..aaab47d1 100644 --- a/examples/v3_retries_query_execute.py +++ b/examples/v3_retries_query_execute.py @@ -28,16 +28,18 @@ # # For complete information about configuring retries, see the docstring for databricks.sql.thrift_backend.ThriftBackend -with sql.connect(server_hostname = os.getenv("DATABRICKS_SERVER_HOSTNAME"), - http_path = os.getenv("DATABRICKS_HTTP_PATH"), - access_token = os.getenv("DATABRICKS_TOKEN"), - _enable_v3_retries = True, - _retry_dangerous_codes=[502,400], - _retry_max_redirects=2) as connection: +with sql.connect( + server_hostname=os.getenv("DATABRICKS_SERVER_HOSTNAME"), + http_path=os.getenv("DATABRICKS_HTTP_PATH"), + access_token=os.getenv("DATABRICKS_TOKEN"), + _enable_v3_retries=True, + 
_retry_dangerous_codes=[502, 400], + _retry_max_redirects=2, +) as connection: - with connection.cursor() as cursor: - cursor.execute("SELECT * FROM default.diamonds LIMIT 2") - result = cursor.fetchall() + with connection.cursor() as cursor: + cursor.execute("SELECT * FROM default.diamonds LIMIT 2") + result = cursor.fetchall() - for row in result: - print(row) + for row in result: + print(row) diff --git a/databricks_sql_connector_core/poetry.lock b/poetry.lock old mode 100755 new mode 100644 similarity index 53% rename from databricks_sql_connector_core/poetry.lock rename to poetry.lock index 9fe49690..2031daa8 --- a/databricks_sql_connector_core/poetry.lock +++ b/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "alembic" -version = "1.13.2" +version = "1.14.0" description = "A database migration tool for SQLAlchemy." optional = true python-versions = ">=3.8" files = [ - {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"}, - {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"}, + {file = "alembic-1.14.0-py3-none-any.whl", hash = "sha256:99bd884ca390466db5e27ffccff1d179ec5c05c965cfefc0607e69f9e411cb25"}, + {file = "alembic-1.14.0.tar.gz", hash = "sha256:b00892b53b3642d0b8dbedba234dbf1924b69be83a9a769d5a624b01094e304b"}, ] [package.dependencies] @@ -23,13 +23,13 @@ tz = ["backports.zoneinfo"] [[package]] name = "astroid" -version = "3.2.2" +version = "3.2.4" description = "An abstract syntax tree for Python with inference support." optional = false python-versions = ">=3.8.0" files = [ - {file = "astroid-3.2.2-py3-none-any.whl", hash = "sha256:e8a0083b4bb28fcffb6207a3bfc9e5d0a68be951dd7e336d5dcf639c682388c0"}, - {file = "astroid-3.2.2.tar.gz", hash = "sha256:8ead48e31b92b2e217b6c9733a21afafe479d52d6e164dd25fb1a770c7c3cf94"}, + {file = "astroid-3.2.4-py3-none-any.whl", hash = "sha256:413658a61eeca6202a59231abb473f932038fbcbf1666587f66d482083413a25"}, + {file = "astroid-3.2.4.tar.gz", hash = "sha256:0e14202810b30da1b735827f78f5157be2bbd4a7a59b7707ca0bfc2fb4c0063a"}, ] [package.dependencies] @@ -72,112 +72,127 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2024.6.2" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, - {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = 
"sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = 
"charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] [[package]] @@ -205,15 +220,29 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "databricks-sqlalchemy" +version = "2.0.1" +description = "Databricks SQLAlchemy plugin for Python" +optional = true +python-versions = "<4.0.0,>=3.8.0" +files = [ + {file = "databricks_sqlalchemy-2.0.1-py3-none-any.whl", hash = "sha256:b8e5aa7ef9add762a8ead039fe94e0f3a6e073ae4e644c88ebf29c97ec160998"}, + {file = "databricks_sqlalchemy-2.0.1.tar.gz", hash = "sha256:ce18879b4d84bd46ee3fdc864f097bdd573acc7310156d68049b0e17cfe9a6f9"}, +] + +[package.dependencies] +sqlalchemy = ">=2.0.21" + [[package]] name = "dill" -version = "0.3.8" +version = "0.3.9" description = "serialize all of Python" optional = false python-versions = ">=3.8" files = [ - {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, - {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, + {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, + {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, ] [package.extras] @@ -222,24 +251,24 @@ profile = ["gprof2dot (>=2022.7.29)"] [[package]] name = "et-xmlfile" -version = "1.1.0" +version = "2.0.0" description = "An implementation of lxml.xmlfile for the standard library" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, - {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, + {file = "et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa"}, + {file = "et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54"}, ] [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -247,69 +276,84 @@ test = ["pytest (>=6)"] [[package]] name = "greenlet" -version = "3.0.3" +version = "3.1.1" description = "Lightweight in-process concurrent programming" 
optional = true python-versions = ">=3.7" files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - 
{file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = 
"greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = 
"greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = 
"greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, ] [package.extras] @@ -318,51 +362,62 @@ test = ["objgraph", "psutil"] [[package]] name = "idna" -version = "3.7" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = 
"sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "importlib-metadata" -version = "8.0.0" +version = "8.5.0" description = "Read metadata from Python packages" optional = true python-versions = ">=3.8" files = [ - {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, - {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, ] [package.dependencies] -zipp = ">=0.5" +zipp = ">=3.20" [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] [[package]] name = "importlib-resources" -version = "6.4.0" +version = "6.4.5" description = "Read resources from Python packages" optional = true python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, - {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, + {file = "importlib_resources-6.4.5-py3-none-any.whl", hash = "sha256:ac29d5f956f01d5e4bb63102a5a19957f1b9175e45649977264a1416783bb717"}, + {file = "importlib_resources-6.4.5.tar.gz", hash = "sha256:980862a1d16c9e147a59603677fa2aa5fd82b87f223b6cb870695bcfce830065"}, ] [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["jaraco.test (>=5.4)", "pytest (>=6,!=8.1.*)", "zipp (>=3.17)"] +type = ["pytest-mypy"] [[package]] name = "iniconfig" @@ -441,13 +496,13 @@ tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] [[package]] name = "mako" -version 
= "1.3.5" +version = "1.3.6" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." optional = true python-versions = ">=3.8" files = [ - {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, - {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, + {file = "Mako-1.3.6-py3-none-any.whl", hash = "sha256:a91198468092a2f1a0de86ca92690fb0cfc43ca90ee17e15d93662b4c04b241a"}, + {file = "mako-1.3.6.tar.gz", hash = "sha256:9ec3a1583713479fae654f83ed9fa8c9a4c16b7bb0daba0e6bbebff50c0d983d"}, ] [package.dependencies] @@ -540,47 +595,53 @@ files = [ [[package]] name = "mypy" -version = "1.10.1" +version = "1.13.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e36f229acfe250dc660790840916eb49726c928e8ce10fbdf90715090fe4ae02"}, - {file = "mypy-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51a46974340baaa4145363b9e051812a2446cf583dfaeba124af966fa44593f7"}, - {file = "mypy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:901c89c2d67bba57aaaca91ccdb659aa3a312de67f23b9dfb059727cce2e2e0a"}, - {file = "mypy-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0cd62192a4a32b77ceb31272d9e74d23cd88c8060c34d1d3622db3267679a5d9"}, - {file = "mypy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a2cbc68cb9e943ac0814c13e2452d2046c2f2b23ff0278e26599224cf164e78d"}, - {file = "mypy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd6f629b67bb43dc0d9211ee98b96d8dabc97b1ad38b9b25f5e4c4d7569a0c6a"}, - {file = "mypy-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1bbb3a6f5ff319d2b9d40b4080d46cd639abe3516d5a62c070cf0114a457d84"}, - {file = "mypy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8edd4e9bbbc9d7b79502eb9592cab808585516ae1bcc1446eb9122656c6066f"}, - {file = "mypy-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6166a88b15f1759f94a46fa474c7b1b05d134b1b61fca627dd7335454cc9aa6b"}, - {file = "mypy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bb9cd11c01c8606a9d0b83ffa91d0b236a0e91bc4126d9ba9ce62906ada868e"}, - {file = "mypy-1.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d8681909f7b44d0b7b86e653ca152d6dff0eb5eb41694e163c6092124f8246d7"}, - {file = "mypy-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:378c03f53f10bbdd55ca94e46ec3ba255279706a6aacaecac52ad248f98205d3"}, - {file = "mypy-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bacf8f3a3d7d849f40ca6caea5c055122efe70e81480c8328ad29c55c69e93e"}, - {file = "mypy-1.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:701b5f71413f1e9855566a34d6e9d12624e9e0a8818a5704d74d6b0402e66c04"}, - {file = "mypy-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:3c4c2992f6ea46ff7fce0072642cfb62af7a2484efe69017ed8b095f7b39ef31"}, - {file = "mypy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:604282c886497645ffb87b8f35a57ec773a4a2721161e709a4422c1636ddde5c"}, - {file = "mypy-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37fd87cab83f09842653f08de066ee68f1182b9b5282e4634cdb4b407266bade"}, - {file = "mypy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8addf6313777dbb92e9564c5d32ec122bf2c6c39d683ea64de6a1fd98b90fe37"}, - {file = 
"mypy-1.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cc3ca0a244eb9a5249c7c583ad9a7e881aa5d7b73c35652296ddcdb33b2b9c7"}, - {file = "mypy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:1b3a2ffce52cc4dbaeee4df762f20a2905aa171ef157b82192f2e2f368eec05d"}, - {file = "mypy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe85ed6836165d52ae8b88f99527d3d1b2362e0cb90b005409b8bed90e9059b3"}, - {file = "mypy-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2ae450d60d7d020d67ab440c6e3fae375809988119817214440033f26ddf7bf"}, - {file = "mypy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be84c06e6abd72f960ba9a71561c14137a583093ffcf9bbfaf5e613d63fa531"}, - {file = "mypy-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2189ff1e39db399f08205e22a797383613ce1cb0cb3b13d8bcf0170e45b96cc3"}, - {file = "mypy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:97a131ee36ac37ce9581f4220311247ab6cba896b4395b9c87af0675a13a755f"}, - {file = "mypy-1.10.1-py3-none-any.whl", hash = "sha256:71d8ac0b906354ebda8ef1673e5fde785936ac1f29ff6987c7483cfbd5a4235a"}, - {file = "mypy-1.10.1.tar.gz", hash = "sha256:1f8f492d7db9e3593ef42d4f115f04e556130f2819ad33ab84551403e97dd4c0"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, + {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, + {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, + {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, + {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, + {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, + {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, + {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, + {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, + {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, + {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, + {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, + {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, + {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, + {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, + {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, + {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, + {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, + {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, + {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, + {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" +typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] @@ -799,19 +860,19 @@ files = [ [[package]] name = "platformdirs" -version = "4.2.2" +version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" @@ -832,7 +893,7 @@ testing = ["pytest", "pytest-benchmark"] name = "pyarrow" version = "16.1.0" description = "Python library for Apache Arrow" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "pyarrow-16.1.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:17e23b9a65a70cc733d8b738baa6ad3722298fa0c81d88f63ff94bf25eaa77b9"}, @@ -878,17 +939,17 @@ numpy = ">=1.16.6" [[package]] name = "pylint" -version = "3.2.5" +version = "3.2.7" description = "python code static checker" optional = false python-versions = ">=3.8.0" files = [ - {file = "pylint-3.2.5-py3-none-any.whl", hash = "sha256:32cd6c042b5004b8e857d727708720c54a676d1e22917cf1a2df9b4d4868abd6"}, - {file = "pylint-3.2.5.tar.gz", hash = "sha256:e9b7171e242dcc6ebd0aaa7540481d1a72860748a0a7816b8fe6cf6c80a6fe7e"}, + {file = "pylint-3.2.7-py3-none-any.whl", hash = "sha256:02f4aedeac91be69fb3b4bea997ce580a4ac68ce58b89eaefeaf06749df73f4b"}, + {file = "pylint-3.2.7.tar.gz", hash = "sha256:1b7a721b575eaeaa7d39db076b6e7743c993ea44f57979127c517c6c572c803e"}, ] [package.dependencies] -astroid = ">=3.2.2,<=3.3.0-dev0" +astroid = ">=3.2.4,<=3.3.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, @@ -973,13 +1034,13 @@ cli = ["click (>=5.0)"] [[package]] name = "pytz" -version = "2024.1" +version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] [[package]] @@ -1016,60 +1077,68 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.31" +version = "2.0.36" description = "Database Abstraction Library" optional = true python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2a213c1b699d3f5768a7272de720387ae0122f1becf0901ed6eaa1abd1baf6c"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:9fea3d0884e82d1e33226935dac990b967bef21315cbcc894605db3441347443"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ad7f221d8a69d32d197e5968d798217a4feebe30144986af71ada8c548e9fa"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2bee229715b6366f86a95d497c347c22ddffa2c7c96143b59a2aa5cc9eebbc"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cd5b94d4819c0c89280b7c6109c7b788a576084bf0a480ae17c227b0bc41e109"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:750900a471d39a7eeba57580b11983030517a1f512c2cb287d5ad0fcf3aebd58"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-win32.whl", hash = "sha256:7bd112be780928c7f493c1a192cd8c5fc2a2a7b52b790bc5a84203fb4381c6be"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-win_amd64.whl", hash = "sha256:5a48ac4d359f058474fadc2115f78a5cdac9988d4f99eae44917f36aa1476327"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f68470edd70c3ac3b6cd5c2a22a8daf18415203ca1b036aaeb9b0fb6f54e8298"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e2c38c2a4c5c634fe6c3c58a789712719fa1bf9b9d6ff5ebfce9a9e5b89c1ca"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd15026f77420eb2b324dcb93551ad9c5f22fab2c150c286ef1dc1160f110203"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2196208432deebdfe3b22185d46b08f00ac9d7b01284e168c212919891289396"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:352b2770097f41bff6029b280c0e03b217c2dcaddc40726f8f53ed58d8a85da4"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:56d51ae825d20d604583f82c9527d285e9e6d14f9a5516463d9705dab20c3740"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-win32.whl", hash = "sha256:6e2622844551945db81c26a02f27d94145b561f9d4b0c39ce7bfd2fda5776dac"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-win_amd64.whl", hash = "sha256:ccaf1b0c90435b6e430f5dd30a5aede4764942a695552eb3a4ab74ed63c5b8d3"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3b74570d99126992d4b0f91fb87c586a574a5872651185de8297c6f90055ae42"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f77c4f042ad493cb8595e2f503c7a4fe44cd7bd59c7582fd6d78d7e7b8ec52c"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd1591329333daf94467e699e11015d9c944f44c94d2091f4ac493ced0119449"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74afabeeff415e35525bf7a4ecdab015f00e06456166a2eba7590e49f8db940e"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b9c01990d9015df2c6f818aa8f4297d42ee71c9502026bb074e713d496e26b67"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66f63278db425838b3c2b1c596654b31939427016ba030e951b292e32b99553e"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-win32.whl", hash = "sha256:0b0f658414ee4e4b8cbcd4a9bb0fd743c5eeb81fc858ca517217a8013d282c96"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-win_amd64.whl", hash = "sha256:fa4b1af3e619b5b0b435e333f3967612db06351217c58bfb50cee5f003db2a5a"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:f43e93057cf52a227eda401251c72b6fbe4756f35fa6bfebb5d73b86881e59b0"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d337bf94052856d1b330d5fcad44582a30c532a2463776e1651bd3294ee7e58b"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c06fb43a51ccdff3b4006aafee9fcf15f63f23c580675f7734245ceb6b6a9e05"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:b6e22630e89f0e8c12332b2b4c282cb01cf4da0d26795b7eae16702a608e7ca1"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:79a40771363c5e9f3a77f0e28b3302801db08040928146e6808b5b7a40749c88"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-win32.whl", hash = "sha256:501ff052229cb79dd4c49c402f6cb03b5a40ae4771efc8bb2bfac9f6c3d3508f"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-win_amd64.whl", hash = "sha256:597fec37c382a5442ffd471f66ce12d07d91b281fd474289356b1a0041bdf31d"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dc6d69f8829712a4fd799d2ac8d79bdeff651c2301b081fd5d3fe697bd5b4ab9"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:23b9fbb2f5dd9e630db70fbe47d963c7779e9c81830869bd7d137c2dc1ad05fb"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a21c97efcbb9f255d5c12a96ae14da873233597dfd00a3a0c4ce5b3e5e79704"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26a6a9837589c42b16693cf7bf836f5d42218f44d198f9343dd71d3164ceeeac"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc251477eae03c20fae8db9c1c23ea2ebc47331bcd73927cdcaecd02af98d3c3"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2fd17e3bb8058359fa61248c52c7b09a97cf3c820e54207a50af529876451808"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-win32.whl", hash = "sha256:c76c81c52e1e08f12f4b6a07af2b96b9b15ea67ccdd40ae17019f1c373faa227"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-win_amd64.whl", hash = "sha256:4b600e9a212ed59355813becbcf282cfda5c93678e15c25a0ef896b354423238"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b6cf796d9fcc9b37011d3f9936189b3c8074a02a4ed0c0fbbc126772c31a6d4"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:78fe11dbe37d92667c2c6e74379f75746dc947ee505555a0197cfba9a6d4f1a4"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc47dc6185a83c8100b37acda27658fe4dbd33b7d5e7324111f6521008ab4fe"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a41514c1a779e2aa9a19f67aaadeb5cbddf0b2b508843fcd7bafdf4c6864005"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:afb6dde6c11ea4525318e279cd93c8734b795ac8bb5dda0eedd9ebaca7fa23f1"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3f9faef422cfbb8fd53716cd14ba95e2ef655400235c3dfad1b5f467ba179c8c"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-win32.whl", hash = "sha256:fc6b14e8602f59c6ba893980bea96571dd0ed83d8ebb9c4479d9ed5425d562e9"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-win_amd64.whl", hash = "sha256:3cb8a66b167b033ec72c3812ffc8441d4e9f5f78f5e31e54dcd4c90a4ca5bebc"}, - {file = "SQLAlchemy-2.0.31-py3-none-any.whl", hash = "sha256:69f3e3c08867a8e4856e92d7afb618b95cdee18e0bc1647b77599722c9a28911"}, - {file = 
"SQLAlchemy-2.0.31.tar.gz", hash = "sha256:b607489dd4a54de56984a0c7656247504bd5523d9d0ba799aef59d4add009484"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, + {file = 
"SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:be9812b766cad94a25bc63bec11f88c4ad3629a0cec1cd5d4ba48dc23860486b"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aae840ebbd6cdd41af1c14590e5741665e5272d2fee999306673a1bb1fdb4d"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07b441f7d03b9a66299ce7ccf3ef2900abc81c0db434f42a5694a37bd73870f2"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win32.whl", hash = "sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win_amd64.whl", hash = "sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3d6718667da04294d7df1670d70eeddd414f313738d20a6f1d1f379e3139a545"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:72c28b84b174ce8af8504ca28ae9347d317f9dba3999e5981a3cd441f3712e24"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b11d0cfdd2b095e7b0686cf5fabeb9c67fae5b06d265d8180715b8cfa86522e3"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6a440293d802d3011028e14e4226da1434b373cbaf4a4bbb63f845761a708346"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win32.whl", hash = 
"sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win_amd64.whl", hash = "sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, + {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, + {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, ] [package.dependencies] @@ -1082,7 +1151,7 @@ aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] mssql-pyodbc = ["pyodbc"] @@ -1121,24 +1190,24 @@ twisted = ["twisted"] [[package]] name = "tomli" -version = "2.0.1" +version = "2.0.2" description = "A lil' TOML parser" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, + {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, + {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, ] [[package]] name = "tomlkit" -version = "0.12.5" +version = "0.13.2" description = "Style preserving TOML library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"}, - {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"}, + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, ] 
[[package]] @@ -1154,24 +1223,24 @@ files = [ [[package]] name = "tzdata" -version = "2024.1" +version = "2024.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, ] [[package]] name = "urllib3" -version = "2.2.2" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] @@ -1182,24 +1251,29 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "zipp" -version = "3.19.2" +version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = true python-versions = ">=3.8" files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, + {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [extras] -alembic = ["alembic", "sqlalchemy"] -sqlalchemy = ["sqlalchemy"] +alembic = ["alembic", "databricks-sqlalchemy"] +databricks-sqlalchemy = ["databricks-sqlalchemy"] +pyarrow = ["pyarrow"] [metadata] lock-version = "2.0" python-versions = "^3.8.0" -content-hash = "31066a85f646d0009d6fe9ffc833a64fcb4b6923c2e7f2652e7aa8540acba298" +content-hash = "0d31d27041b5bcb2c26a5f4d62bf87ca32d7f6b518a1fa436d5ccfbd65fc7c07" diff --git a/databricks_sql_connector_core/pyproject.toml b/pyproject.toml similarity index 63% rename from databricks_sql_connector_core/pyproject.toml rename to pyproject.toml index f837fd6f..a0a40e85 100644 --- 
a/databricks_sql_connector_core/pyproject.toml +++ b/pyproject.toml @@ -1,9 +1,12 @@ [tool.poetry] -name = "databricks-sql-connector-core" -version = "4.0.1" -description = "Databricks SQL Connector core for Python" +name = "databricks-sql-connector" +version = "4.0.0.b3" +description = "Databricks SQL Connector for Python" authors = ["Databricks "] +license = "Apache-2.0" +readme = "README.md" packages = [{ include = "databricks", from = "src" }] +include = ["CHANGELOG.md"] [tool.poetry.dependencies] python = "^3.8.0" @@ -14,12 +17,21 @@ pandas = [ lz4 = "^4.0.2" requests = "^2.18.1" oauthlib = "^3.1.0" +numpy = [ + { version = "^1.16.6", python = ">=3.8,<3.11" }, + { version = "^1.23.4", python = ">=3.11" }, +] openpyxl = "^3.0.10" -alembic = { version = "^1.0.11", optional = true } urllib3 = ">=1.26" -pyarrow = {version = ">=14.0.1,<17", optional = true} + +databricks-sqlalchemy = { version = ">=2.0.0", optional = true } +pyarrow = { version = ">=14.0.1,<17", optional=true } +alembic = { version = "^1.0.11", optional = true } + [tool.poetry.extras] +databricks-sqlalchemy = ["databricks-sqlalchemy"] +alembic = ["databricks-sqlalchemy", "alembic"] pyarrow = ["pyarrow"] [tool.poetry.dev-dependencies] @@ -33,6 +45,8 @@ pytest-dotenv = "^0.5.2" "Homepage" = "https://github.com/databricks/databricks-sql-python" "Bug Tracker" = "https://github.com/databricks/databricks-sql-python/issues" +[tool.poetry.plugins."sqlalchemy.dialects"] +"databricks" = "databricks.sqlalchemy:DatabricksDialect" [build-system] requires = ["poetry-core>=1.0.0"] @@ -50,5 +64,5 @@ markers = {"reviewed" = "Test case has been reviewed by Databricks"} minversion = "6.0" log_cli = "false" log_cli_level = "INFO" -testpaths = ["tests", "databricks_sql_connector_core/tests"] -env_files = ["test.env"] +testpaths = ["tests", "src/databricks/sqlalchemy/test_local"] +env_files = ["test.env"] \ No newline at end of file diff --git a/databricks_sql_connector_core/src/databricks/__init__.py b/src/databricks/__init__.py similarity index 100% rename from databricks_sql_connector_core/src/databricks/__init__.py rename to src/databricks/__init__.py diff --git a/databricks_sql_connector_core/src/databricks/sql/__init__.py b/src/databricks/sql/__init__.py similarity index 99% rename from databricks_sql_connector_core/src/databricks/sql/__init__.py rename to src/databricks/sql/__init__.py index 767cbf05..42167b00 100644 --- a/databricks_sql_connector_core/src/databricks/sql/__init__.py +++ b/src/databricks/sql/__init__.py @@ -68,7 +68,7 @@ def __repr__(self): DATE = DBAPITypeObject("date") ROWID = DBAPITypeObject() -__version__ = "3.3.0" +__version__ = "3.6.0" USER_AGENT_NAME = "PyDatabricksSqlConnector" # These two functions are pyhive legacy diff --git a/databricks_sql_connector/databricks_sql_connector/__init__.py b/src/databricks/sql/auth/__init__.py similarity index 100% rename from databricks_sql_connector/databricks_sql_connector/__init__.py rename to src/databricks/sql/auth/__init__.py diff --git a/databricks_sql_connector_core/src/databricks/sql/auth/auth.py b/src/databricks/sql/auth/auth.py similarity index 100% rename from databricks_sql_connector_core/src/databricks/sql/auth/auth.py rename to src/databricks/sql/auth/auth.py diff --git a/databricks_sql_connector_core/src/databricks/sql/auth/authenticators.py b/src/databricks/sql/auth/authenticators.py similarity index 100% rename from databricks_sql_connector_core/src/databricks/sql/auth/authenticators.py rename to src/databricks/sql/auth/authenticators.py diff --git 
a/databricks_sql_connector_core/src/databricks/sql/auth/endpoint.py b/src/databricks/sql/auth/endpoint.py similarity index 100% rename from databricks_sql_connector_core/src/databricks/sql/auth/endpoint.py rename to src/databricks/sql/auth/endpoint.py diff --git a/databricks_sql_connector_core/src/databricks/sql/auth/oauth.py b/src/databricks/sql/auth/oauth.py similarity index 100% rename from databricks_sql_connector_core/src/databricks/sql/auth/oauth.py rename to src/databricks/sql/auth/oauth.py diff --git a/databricks_sql_connector_core/src/databricks/sql/auth/oauth_http_handler.py b/src/databricks/sql/auth/oauth_http_handler.py similarity index 100% rename from databricks_sql_connector_core/src/databricks/sql/auth/oauth_http_handler.py rename to src/databricks/sql/auth/oauth_http_handler.py diff --git a/databricks_sql_connector_core/src/databricks/sql/auth/retry.py b/src/databricks/sql/auth/retry.py similarity index 100% rename from databricks_sql_connector_core/src/databricks/sql/auth/retry.py rename to src/databricks/sql/auth/retry.py diff --git a/databricks_sql_connector_core/src/databricks/sql/auth/thrift_http_client.py b/src/databricks/sql/auth/thrift_http_client.py similarity index 85% rename from databricks_sql_connector_core/src/databricks/sql/auth/thrift_http_client.py rename to src/databricks/sql/auth/thrift_http_client.py index f7c22a1e..6273ab28 100644 --- a/databricks_sql_connector_core/src/databricks/sql/auth/thrift_http_client.py +++ b/src/databricks/sql/auth/thrift_http_client.py @@ -1,13 +1,11 @@ import base64 import logging import urllib.parse -from typing import Dict, Union +from typing import Dict, Union, Optional import six import thrift -logger = logging.getLogger(__name__) - import ssl import warnings from http.client import HTTPResponse @@ -16,6 +14,9 @@ from urllib3 import HTTPConnectionPool, HTTPSConnectionPool, ProxyManager from urllib3.util import make_headers from databricks.sql.auth.retry import CommandType, DatabricksRetryPolicy +from databricks.sql.types import SSLOptions + +logger = logging.getLogger(__name__) class THttpClient(thrift.transport.THttpClient.THttpClient): @@ -25,13 +26,12 @@ def __init__( uri_or_host, port=None, path=None, - cafile=None, - cert_file=None, - key_file=None, - ssl_context=None, + ssl_options: Optional[SSLOptions] = None, max_connections: int = 1, retry_policy: Union[DatabricksRetryPolicy, int] = 0, ): + self._ssl_options = ssl_options + if port is not None: warnings.warn( "Please use the THttpClient('http{s}://host:port/path') constructor", @@ -48,13 +48,11 @@ def __init__( self.scheme = parsed.scheme assert self.scheme in ("http", "https") if self.scheme == "https": - self.certfile = cert_file - self.keyfile = key_file - self.context = ( - ssl.create_default_context(cafile=cafile) - if (cafile and not ssl_context) - else ssl_context - ) + if self._ssl_options is not None: + # TODO: Not sure if those options are used anywhere - need to double-check + self.certfile = self._ssl_options.tls_client_cert_file + self.keyfile = self._ssl_options.tls_client_cert_key_file + self.context = self._ssl_options.create_ssl_context() self.port = parsed.port self.host = parsed.hostname self.path = parsed.path @@ -109,12 +107,23 @@ def startRetryTimer(self): def open(self): # self.__pool replaces the self.__http used by the original THttpClient + _pool_kwargs = {"maxsize": self.max_connections} + if self.scheme == "http": pool_class = HTTPConnectionPool elif self.scheme == "https": pool_class = HTTPSConnectionPool - - _pool_kwargs = 
{"maxsize": self.max_connections} + _pool_kwargs.update( + { + "cert_reqs": ssl.CERT_REQUIRED + if self._ssl_options.tls_verify + else ssl.CERT_NONE, + "ca_certs": self._ssl_options.tls_trusted_ca_file, + "cert_file": self._ssl_options.tls_client_cert_file, + "key_file": self._ssl_options.tls_client_cert_key_file, + "key_password": self._ssl_options.tls_client_cert_key_password, + } + ) if self.using_proxy(): proxy_manager = ProxyManager( diff --git a/databricks_sql_connector_core/src/databricks/sql/client.py b/src/databricks/sql/client.py similarity index 92% rename from databricks_sql_connector_core/src/databricks/sql/client.py rename to src/databricks/sql/client.py index 72811628..4e0ab941 100755 --- a/databricks_sql_connector_core/src/databricks/sql/client.py +++ b/src/databricks/sql/client.py @@ -1,6 +1,11 @@ from typing import Dict, Tuple, List, Optional, Any, Union, Sequence import pandas + +try: + import pyarrow +except ImportError: + pyarrow = None import requests import json import os @@ -21,6 +26,8 @@ ParamEscaper, inject_parameters, transform_paramstyle, + ColumnTable, + ColumnQueue, ) from databricks.sql.parameters.native import ( DbsqlParameterBase, @@ -34,7 +41,7 @@ ) -from databricks.sql.types import Row +from databricks.sql.types import Row, SSLOptions from databricks.sql.auth.auth import get_python_sql_connector_auth_provider from databricks.sql.experimental.oauth_persistence import OAuthPersistence @@ -42,10 +49,6 @@ TSparkParameter, ) -try: - import pyarrow -except ImportError: - pyarrow = None logger = logging.getLogger(__name__) @@ -181,8 +184,9 @@ def read(self) -> Optional[OAuthToken]: # _tls_trusted_ca_file # Set to the path of the file containing trusted CA certificates for server certificate # verification. If not provide, uses system truststore. - # _tls_client_cert_file, _tls_client_cert_key_file + # _tls_client_cert_file, _tls_client_cert_key_file, _tls_client_cert_key_password # Set client SSL certificate. 
+        # See https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_cert_chain
         # _retry_stop_after_attempts_count
         # The maximum number of attempts during a request retry sequence (defaults to 24)
         # _socket_timeout
@@ -223,12 +227,25 @@ def read(self) -> Optional[OAuthToken]:
         base_headers = [("User-Agent", useragent_header)]

+        self._ssl_options = SSLOptions(
+            # Double negation is generally a bad thing, but we have to keep backward compatibility
+            tls_verify=not kwargs.get(
+                "_tls_no_verify", False
+            ),  # by default - verify cert and host
+            tls_verify_hostname=kwargs.get("_tls_verify_hostname", True),
+            tls_trusted_ca_file=kwargs.get("_tls_trusted_ca_file"),
+            tls_client_cert_file=kwargs.get("_tls_client_cert_file"),
+            tls_client_cert_key_file=kwargs.get("_tls_client_cert_key_file"),
+            tls_client_cert_key_password=kwargs.get("_tls_client_cert_key_password"),
+        )
+
         self.thrift_backend = ThriftBackend(
             self.host,
             self.port,
             http_path,
             (http_headers or []) + base_headers,
             auth_provider,
+            ssl_options=self._ssl_options,
             _use_arrow_native_complex_types=_use_arrow_native_complex_types,
             **kwargs,
         )
@@ -1132,6 +1149,18 @@ def _fill_results_buffer(self):
         self.results = results
         self.has_more_rows = has_more_rows

+    def _convert_columnar_table(self, table):
+        column_names = [c[0] for c in self.description]
+        ResultRow = Row(*column_names)
+        result = []
+        for row_index in range(table.num_rows):
+            curr_row = []
+            for col_index in range(table.num_columns):
+                curr_row.append(table.get_item(col_index, row_index))
+            result.append(ResultRow(*curr_row))
+
+        return result
+
     def _convert_arrow_table(self, table):
         column_names = [c[0] for c in self.description]
         ResultRow = Row(*column_names)
@@ -1199,6 +1228,48 @@ def fetchmany_arrow(self, size: int) -> "pyarrow.Table":
         return results

+    def merge_columnar(self, result1, result2):
+        """
+        Function to merge / combine the columnar results into a single result
+        :param result1:
+        :param result2:
+        :return:
+        """
+
+        if result1.column_names != result2.column_names:
+            raise ValueError("The columns in the results don't match")
+
+        merged_result = [
+            result1.column_table[i] + result2.column_table[i]
+            for i in range(result1.num_columns)
+        ]
+        return ColumnTable(merged_result, result1.column_names)
+
+    def fetchmany_columnar(self, size: int):
+        """
+        Fetch the next set of rows of a query result, returning a Columnar Table.
+        An empty sequence is returned when no more rows are available.
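+
+        Example (an illustrative sketch; assumes a handle exposing this method,
+        here hypothetically named result_set, with a columnar result queue):
+
+            table = result_set.fetchmany_columnar(100)
+            table.num_rows, table.column_names  # ColumnTable fields merge_columnar relies on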
+ """ + if size < 0: + raise ValueError("size argument for fetchmany is %s but must be >= 0", size) + + results = self.results.next_n_rows(size) + n_remaining_rows = size - results.num_rows + self._next_row_index += results.num_rows + + while ( + n_remaining_rows > 0 + and not self.has_been_closed_server_side + and self.has_more_rows + ): + self._fill_results_buffer() + partial_results = self.results.next_n_rows(n_remaining_rows) + results = self.merge_columnar(results, partial_results) + n_remaining_rows -= partial_results.num_rows + self._next_row_index += partial_results.num_rows + + return results + def fetchall_arrow(self) -> "pyarrow.Table": """Fetch all (remaining) rows of a query result, returning them as a PyArrow table.""" results = self.results.remaining_rows() @@ -1212,12 +1283,30 @@ def fetchall_arrow(self) -> "pyarrow.Table": return results + def fetchall_columnar(self): + """Fetch all (remaining) rows of a query result, returning them as a Columnar table.""" + results = self.results.remaining_rows() + self._next_row_index += results.num_rows + + while not self.has_been_closed_server_side and self.has_more_rows: + self._fill_results_buffer() + partial_results = self.results.remaining_rows() + results = self.merge_columnar(results, partial_results) + self._next_row_index += partial_results.num_rows + + return results + def fetchone(self) -> Optional[Row]: """ Fetch the next row of a query result set, returning a single sequence, or None when no more data is available. """ - res = self._convert_arrow_table(self.fetchmany_arrow(1)) + + if isinstance(self.results, ColumnQueue): + res = self._convert_columnar_table(self.fetchmany_columnar(1)) + else: + res = self._convert_arrow_table(self.fetchmany_arrow(1)) + if len(res) > 0: return res[0] else: @@ -1227,7 +1316,10 @@ def fetchall(self) -> List[Row]: """ Fetch all (remaining) rows of a query result, returning them as a list of rows. """ - return self._convert_arrow_table(self.fetchall_arrow()) + if isinstance(self.results, ColumnQueue): + return self._convert_columnar_table(self.fetchall_columnar()) + else: + return self._convert_arrow_table(self.fetchall_arrow()) def fetchmany(self, size: int) -> List[Row]: """ @@ -1235,7 +1327,10 @@ def fetchmany(self, size: int) -> List[Row]: An empty sequence is returned when no more rows are available. 
""" - return self._convert_arrow_table(self.fetchmany_arrow(size)) + if isinstance(self.results, ColumnQueue): + return self._convert_columnar_table(self.fetchmany_columnar(size)) + else: + return self._convert_arrow_table(self.fetchmany_arrow(size)) def close(self) -> None: """ diff --git a/databricks_sql_connector_core/src/databricks/sql/cloudfetch/download_manager.py b/src/databricks/sql/cloudfetch/download_manager.py similarity index 96% rename from databricks_sql_connector_core/src/databricks/sql/cloudfetch/download_manager.py rename to src/databricks/sql/cloudfetch/download_manager.py index e30adcd6..7e96cd32 100644 --- a/databricks_sql_connector_core/src/databricks/sql/cloudfetch/download_manager.py +++ b/src/databricks/sql/cloudfetch/download_manager.py @@ -1,6 +1,5 @@ import logging -from ssl import SSLContext from concurrent.futures import ThreadPoolExecutor, Future from typing import List, Union @@ -9,6 +8,8 @@ DownloadableResultSettings, DownloadedFile, ) +from databricks.sql.types import SSLOptions + from databricks.sql.thrift_api.TCLIService.ttypes import TSparkArrowResultLink logger = logging.getLogger(__name__) @@ -20,7 +21,7 @@ def __init__( links: List[TSparkArrowResultLink], max_download_threads: int, lz4_compressed: bool, - ssl_context: SSLContext, + ssl_options: SSLOptions, ): self._pending_links: List[TSparkArrowResultLink] = [] for link in links: @@ -38,7 +39,7 @@ def __init__( self._thread_pool = ThreadPoolExecutor(max_workers=self._max_download_threads) self._downloadable_result_settings = DownloadableResultSettings(lz4_compressed) - self._ssl_context = ssl_context + self._ssl_options = ssl_options def get_next_downloaded_file( self, next_row_offset: int @@ -95,7 +96,7 @@ def _schedule_downloads(self): handler = ResultSetDownloadHandler( settings=self._downloadable_result_settings, link=link, - ssl_context=self._ssl_context, + ssl_options=self._ssl_options, ) task = self._thread_pool.submit(handler.run) self._download_tasks.append(task) diff --git a/databricks_sql_connector_core/src/databricks/sql/cloudfetch/downloader.py b/src/databricks/sql/cloudfetch/downloader.py similarity index 95% rename from databricks_sql_connector_core/src/databricks/sql/cloudfetch/downloader.py rename to src/databricks/sql/cloudfetch/downloader.py index 00ffecd0..228e07d6 100644 --- a/databricks_sql_connector_core/src/databricks/sql/cloudfetch/downloader.py +++ b/src/databricks/sql/cloudfetch/downloader.py @@ -3,13 +3,12 @@ import requests from requests.adapters import HTTPAdapter, Retry -from ssl import SSLContext, CERT_NONE import lz4.frame import time from databricks.sql.thrift_api.TCLIService.ttypes import TSparkArrowResultLink - from databricks.sql.exc import Error +from databricks.sql.types import SSLOptions logger = logging.getLogger(__name__) @@ -66,11 +65,11 @@ def __init__( self, settings: DownloadableResultSettings, link: TSparkArrowResultLink, - ssl_context: SSLContext, + ssl_options: SSLOptions, ): self.settings = settings self.link = link - self._ssl_context = ssl_context + self._ssl_options = ssl_options def run(self) -> DownloadedFile: """ @@ -95,14 +94,14 @@ def run(self) -> DownloadedFile: session.mount("http://", HTTPAdapter(max_retries=retryPolicy)) session.mount("https://", HTTPAdapter(max_retries=retryPolicy)) - ssl_verify = self._ssl_context.verify_mode != CERT_NONE - try: # Get the file via HTTP request response = session.get( self.link.fileLink, timeout=self.settings.download_timeout, - verify=ssl_verify, + verify=self._ssl_options.tls_verify, + 
headers=self.link.httpHeaders + # TODO: Pass cert from `self._ssl_options` ) response.raise_for_status() diff --git a/databricks_sql_connector_core/src/databricks/sql/exc.py b/src/databricks/sql/exc.py similarity index 100% rename from databricks_sql_connector_core/src/databricks/sql/exc.py rename to src/databricks/sql/exc.py diff --git a/databricks_sql_connector_core/src/databricks/sql/auth/__init__.py b/src/databricks/sql/experimental/__init__.py similarity index 100% rename from databricks_sql_connector_core/src/databricks/sql/auth/__init__.py rename to src/databricks/sql/experimental/__init__.py diff --git a/databricks_sql_connector_core/src/databricks/sql/experimental/oauth_persistence.py b/src/databricks/sql/experimental/oauth_persistence.py similarity index 100% rename from databricks_sql_connector_core/src/databricks/sql/experimental/oauth_persistence.py rename to src/databricks/sql/experimental/oauth_persistence.py diff --git a/databricks_sql_connector_core/src/databricks/sql/parameters/__init__.py b/src/databricks/sql/parameters/__init__.py similarity index 100% rename from databricks_sql_connector_core/src/databricks/sql/parameters/__init__.py rename to src/databricks/sql/parameters/__init__.py diff --git a/databricks_sql_connector_core/src/databricks/sql/parameters/native.py b/src/databricks/sql/parameters/native.py similarity index 100% rename from databricks_sql_connector_core/src/databricks/sql/parameters/native.py rename to src/databricks/sql/parameters/native.py diff --git a/databricks_sql_connector_core/src/databricks/sql/parameters/py.typed b/src/databricks/sql/parameters/py.typed similarity index 100% rename from databricks_sql_connector_core/src/databricks/sql/parameters/py.typed rename to src/databricks/sql/parameters/py.typed diff --git a/databricks_sql_connector_core/src/databricks/sql/py.typed b/src/databricks/sql/py.typed similarity index 100% rename from databricks_sql_connector_core/src/databricks/sql/py.typed rename to src/databricks/sql/py.typed diff --git a/databricks_sql_connector_core/src/databricks/sql/thrift_api/TCLIService/TCLIService-remote b/src/databricks/sql/thrift_api/TCLIService/TCLIService-remote similarity index 100% rename from databricks_sql_connector_core/src/databricks/sql/thrift_api/TCLIService/TCLIService-remote rename to src/databricks/sql/thrift_api/TCLIService/TCLIService-remote diff --git a/databricks_sql_connector_core/src/databricks/sql/thrift_api/TCLIService/TCLIService.py b/src/databricks/sql/thrift_api/TCLIService/TCLIService.py similarity index 100% rename from databricks_sql_connector_core/src/databricks/sql/thrift_api/TCLIService/TCLIService.py rename to src/databricks/sql/thrift_api/TCLIService/TCLIService.py diff --git a/databricks_sql_connector_core/src/databricks/sql/thrift_api/TCLIService/__init__.py b/src/databricks/sql/thrift_api/TCLIService/__init__.py similarity index 100% rename from databricks_sql_connector_core/src/databricks/sql/thrift_api/TCLIService/__init__.py rename to src/databricks/sql/thrift_api/TCLIService/__init__.py diff --git a/databricks_sql_connector_core/src/databricks/sql/thrift_api/TCLIService/constants.py b/src/databricks/sql/thrift_api/TCLIService/constants.py similarity index 100% rename from databricks_sql_connector_core/src/databricks/sql/thrift_api/TCLIService/constants.py rename to src/databricks/sql/thrift_api/TCLIService/constants.py diff --git a/databricks_sql_connector_core/src/databricks/sql/thrift_api/TCLIService/ttypes.py b/src/databricks/sql/thrift_api/TCLIService/ttypes.py similarity 
index 100% rename from databricks_sql_connector_core/src/databricks/sql/thrift_api/TCLIService/ttypes.py rename to src/databricks/sql/thrift_api/TCLIService/ttypes.py diff --git a/databricks_sql_connector_core/src/databricks/sql/experimental/__init__.py b/src/databricks/sql/thrift_api/__init__.py similarity index 100% rename from databricks_sql_connector_core/src/databricks/sql/experimental/__init__.py rename to src/databricks/sql/thrift_api/__init__.py diff --git a/databricks_sql_connector_core/src/databricks/sql/thrift_backend.py b/src/databricks/sql/thrift_backend.py similarity index 94% rename from databricks_sql_connector_core/src/databricks/sql/thrift_backend.py rename to src/databricks/sql/thrift_backend.py index 42daf85e..cf5cd906 100644 --- a/databricks_sql_connector_core/src/databricks/sql/thrift_backend.py +++ b/src/databricks/sql/thrift_backend.py @@ -5,9 +5,12 @@ import time import uuid import threading -from ssl import CERT_NONE, CERT_REQUIRED, create_default_context from typing import List, Union +try: + import pyarrow +except ImportError: + pyarrow = None import thrift.transport.THttpClient import thrift.protocol.TBinaryProtocol import thrift.transport.TSocket @@ -35,11 +38,7 @@ convert_decimals_in_arrow_table, convert_column_based_set_to_arrow_table, ) - -try: - import pyarrow -except ImportError: - pyarrow = None +from databricks.sql.types import SSLOptions logger = logging.getLogger(__name__) @@ -89,6 +88,7 @@ def __init__( http_path: str, http_headers, auth_provider: AuthProvider, + ssl_options: SSLOptions, staging_allowed_local_path: Union[None, str, List[str]] = None, **kwargs, ): @@ -97,16 +97,6 @@ def __init__( # Tag to add to User-Agent header. For use by partners. # _username, _password # Username and password Basic authentication (no official support) - # _tls_no_verify - # Set to True (Boolean) to completely disable SSL verification. - # _tls_verify_hostname - # Set to False (Boolean) to disable SSL hostname verification, but check certificate. - # _tls_trusted_ca_file - # Set to the path of the file containing trusted CA certificates for server certificate - # verification. If not provide, uses system truststore. - # _tls_client_cert_file, _tls_client_cert_key_file, _tls_client_cert_key_password - # Set client SSL certificate. - # See https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_cert_chain # _connection_uri # Overrides server_hostname and http_path. 
# RETRY/ATTEMPT POLICY @@ -166,29 +156,7 @@ def __init__( # Cloud fetch self.max_download_threads = kwargs.get("max_download_threads", 10) - # Configure tls context - ssl_context = create_default_context(cafile=kwargs.get("_tls_trusted_ca_file")) - if kwargs.get("_tls_no_verify") is True: - ssl_context.check_hostname = False - ssl_context.verify_mode = CERT_NONE - elif kwargs.get("_tls_verify_hostname") is False: - ssl_context.check_hostname = False - ssl_context.verify_mode = CERT_REQUIRED - else: - ssl_context.check_hostname = True - ssl_context.verify_mode = CERT_REQUIRED - - tls_client_cert_file = kwargs.get("_tls_client_cert_file") - tls_client_cert_key_file = kwargs.get("_tls_client_cert_key_file") - tls_client_cert_key_password = kwargs.get("_tls_client_cert_key_password") - if tls_client_cert_file: - ssl_context.load_cert_chain( - certfile=tls_client_cert_file, - keyfile=tls_client_cert_key_file, - password=tls_client_cert_key_password, - ) - - self._ssl_context = ssl_context + self._ssl_options = ssl_options self._auth_provider = auth_provider @@ -229,7 +197,7 @@ def __init__( self._transport = databricks.sql.auth.thrift_http_client.THttpClient( auth_provider=self._auth_provider, uri_or_host=uri, - ssl_context=self._ssl_context, + ssl_options=self._ssl_options, **additional_transport_args, # type: ignore ) @@ -656,12 +624,6 @@ def _get_metadata_resp(self, op_handle): @staticmethod def _hive_schema_to_arrow_schema(t_table_schema): - - if pyarrow is None: - raise ImportError( - "pyarrow is required to convert Hive schema to Arrow schema" - ) - def map_type(t_type_entry): if t_type_entry.primitiveEntry: return { @@ -767,12 +729,17 @@ def _results_message_to_execute_response(self, resp, operation_state): description = self._hive_schema_to_description( t_result_set_metadata_resp.schema ) - schema_bytes = ( - t_result_set_metadata_resp.arrowSchema - or self._hive_schema_to_arrow_schema(t_result_set_metadata_resp.schema) - .serialize() - .to_pybytes() - ) + + if pyarrow: + schema_bytes = ( + t_result_set_metadata_resp.arrowSchema + or self._hive_schema_to_arrow_schema(t_result_set_metadata_resp.schema) + .serialize() + .to_pybytes() + ) + else: + schema_bytes = None + lz4_compressed = t_result_set_metadata_resp.lz4Compressed is_staging_operation = t_result_set_metadata_resp.isStagingOperation if direct_results and direct_results.resultSet: @@ -786,7 +753,7 @@ def _results_message_to_execute_response(self, resp, operation_state): max_download_threads=self.max_download_threads, lz4_compressed=lz4_compressed, description=description, - ssl_context=self._ssl_context, + ssl_options=self._ssl_options, ) else: arrow_queue_opt = None @@ -1018,7 +985,7 @@ def fetch_results( max_download_threads=self.max_download_threads, lz4_compressed=lz4_compressed, description=description, - ssl_context=self._ssl_context, + ssl_options=self._ssl_options, ) return queue, resp.hasMoreRows diff --git a/databricks_sql_connector_core/src/databricks/sql/types.py b/src/databricks/sql/types.py similarity index 79% rename from databricks_sql_connector_core/src/databricks/sql/types.py rename to src/databricks/sql/types.py index aa11b954..fef22cd9 100644 --- a/databricks_sql_connector_core/src/databricks/sql/types.py +++ b/src/databricks/sql/types.py @@ -19,6 +19,54 @@ from typing import Any, Dict, List, Optional, Tuple, Union, TypeVar import datetime import decimal +from ssl import SSLContext, CERT_NONE, CERT_REQUIRED, create_default_context + + +class SSLOptions: + tls_verify: bool + tls_verify_hostname: bool + 
tls_trusted_ca_file: Optional[str] + tls_client_cert_file: Optional[str] + tls_client_cert_key_file: Optional[str] + tls_client_cert_key_password: Optional[str] + + def __init__( + self, + tls_verify: bool = True, + tls_verify_hostname: bool = True, + tls_trusted_ca_file: Optional[str] = None, + tls_client_cert_file: Optional[str] = None, + tls_client_cert_key_file: Optional[str] = None, + tls_client_cert_key_password: Optional[str] = None, + ): + self.tls_verify = tls_verify + self.tls_verify_hostname = tls_verify_hostname + self.tls_trusted_ca_file = tls_trusted_ca_file + self.tls_client_cert_file = tls_client_cert_file + self.tls_client_cert_key_file = tls_client_cert_key_file + self.tls_client_cert_key_password = tls_client_cert_key_password + + def create_ssl_context(self) -> SSLContext: + ssl_context = create_default_context(cafile=self.tls_trusted_ca_file) + + if self.tls_verify is False: + ssl_context.check_hostname = False + ssl_context.verify_mode = CERT_NONE + elif self.tls_verify_hostname is False: + ssl_context.check_hostname = False + ssl_context.verify_mode = CERT_REQUIRED + else: + ssl_context.check_hostname = True + ssl_context.verify_mode = CERT_REQUIRED + + if self.tls_client_cert_file: + ssl_context.load_cert_chain( + certfile=self.tls_client_cert_file, + keyfile=self.tls_client_cert_key_file, + password=self.tls_client_cert_key_password, + ) + + return ssl_context class Row(tuple): diff --git a/databricks_sql_connector_core/src/databricks/sql/utils.py b/src/databricks/sql/utils.py similarity index 82% rename from databricks_sql_connector_core/src/databricks/sql/utils.py rename to src/databricks/sql/utils.py index 1bcc8a88..cd655c4e 100644 --- a/databricks_sql_connector_core/src/databricks/sql/utils.py +++ b/src/databricks/sql/utils.py @@ -1,5 +1,6 @@ from __future__ import annotations +import pytz import datetime import decimal from abc import ABC, abstractmethod @@ -9,10 +10,14 @@ from enum import Enum from typing import Any, Dict, List, Optional, Union import re -from ssl import SSLContext import lz4.frame +try: + import pyarrow +except ImportError: + pyarrow = None + from databricks.sql import OperationalError, exc from databricks.sql.cloudfetch.download_manager import ResultFileDownloadManager from databricks.sql.thrift_api.TCLIService.ttypes import ( @@ -20,17 +25,14 @@ TSparkArrowResultLink, TSparkRowSetType, ) +from databricks.sql.types import SSLOptions from databricks.sql.parameters.native import ParameterStructure, TDbsqlParameter -BIT_MASKS = [1, 2, 4, 8, 16, 32, 64, 128] - import logging -try: - import pyarrow -except ImportError: - pyarrow = None +BIT_MASKS = [1, 2, 4, 8, 16, 32, 64, 128] +DEFAULT_ERROR_CONTEXT = "Unknown error" logger = logging.getLogger(__name__) @@ -52,7 +54,7 @@ def build_queue( t_row_set: TRowSet, arrow_schema_bytes: bytes, max_download_threads: int, - ssl_context: SSLContext, + ssl_options: SSLOptions, lz4_compressed: bool = True, description: Optional[List[List[Any]]] = None, ) -> ResultSetQueue: @@ -66,7 +68,7 @@ def build_queue( lz4_compressed (bool): Whether result data has been lz4 compressed. description (List[List[Any]]): Hive table schema description. max_download_threads (int): Maximum number of downloader thread pool threads. 
- ssl_context (SSLContext): SSLContext object for CloudFetchQueue + ssl_options (SSLOptions): SSLOptions object for CloudFetchQueue Returns: ResultSetQueue @@ -80,13 +82,15 @@ def build_queue( ) return ArrowQueue(converted_arrow_table, n_valid_rows) elif row_set_type == TSparkRowSetType.COLUMN_BASED_SET: - arrow_table, n_valid_rows = convert_column_based_set_to_arrow_table( + column_table, column_names = convert_column_based_set_to_column_table( t_row_set.columns, description ) - converted_arrow_table = convert_decimals_in_arrow_table( - arrow_table, description + + converted_column_table = convert_to_assigned_datatypes_in_column_table( + column_table, description ) - return ArrowQueue(converted_arrow_table, n_valid_rows) + + return ColumnQueue(ColumnTable(converted_column_table, column_names)) elif row_set_type == TSparkRowSetType.URL_BASED_SET: return CloudFetchQueue( schema_bytes=arrow_schema_bytes, @@ -95,12 +99,65 @@ def build_queue( lz4_compressed=lz4_compressed, description=description, max_download_threads=max_download_threads, - ssl_context=ssl_context, + ssl_options=ssl_options, ) else: raise AssertionError("Row set type is not valid") +class ColumnTable: + def __init__(self, column_table, column_names): + self.column_table = column_table + self.column_names = column_names + + @property + def num_rows(self): + if len(self.column_table) == 0: + return 0 + else: + return len(self.column_table[0]) + + @property + def num_columns(self): + return len(self.column_names) + + def get_item(self, col_index, row_index): + return self.column_table[col_index][row_index] + + def slice(self, curr_index, length): + sliced_column_table = [ + column[curr_index : curr_index + length] for column in self.column_table + ] + return ColumnTable(sliced_column_table, self.column_names) + + def __eq__(self, other): + return ( + self.column_table == other.column_table + and self.column_names == other.column_names + ) + + +class ColumnQueue(ResultSetQueue): + def __init__(self, column_table: ColumnTable): + self.column_table = column_table + self.cur_row_index = 0 + self.n_valid_rows = column_table.num_rows + + def next_n_rows(self, num_rows): + length = min(num_rows, self.n_valid_rows - self.cur_row_index) + + slice = self.column_table.slice(self.cur_row_index, length) + self.cur_row_index += slice.num_rows + return slice + + def remaining_rows(self): + slice = self.column_table.slice( + self.cur_row_index, self.n_valid_rows - self.cur_row_index + ) + self.cur_row_index += slice.num_rows + return slice + + class ArrowQueue(ResultSetQueue): def __init__( self, @@ -141,7 +198,7 @@ def __init__( self, schema_bytes, max_download_threads: int, - ssl_context: SSLContext, + ssl_options: SSLOptions, start_row_offset: int = 0, result_links: Optional[List[TSparkArrowResultLink]] = None, lz4_compressed: bool = True, @@ -164,7 +221,7 @@ def __init__( self.result_links = result_links self.lz4_compressed = lz4_compressed self.description = description - self._ssl_context = ssl_context + self._ssl_options = ssl_options logger.debug( "Initialize CloudFetch loader, row set start offset: {}, file list:".format( @@ -182,7 +239,7 @@ def __init__( links=result_links or [], max_download_threads=self.max_download_threads, lz4_compressed=self.lz4_compressed, - ssl_context=self._ssl_context, + ssl_options=self._ssl_options, ) self.table = self._create_next_table() @@ -361,7 +418,12 @@ def user_friendly_error_message(self, no_retry_reason, attempt, elapsed): user_friendly_error_message = "{}: {}".format( user_friendly_error_message, 
self.error_message ) - return user_friendly_error_message + try: + error_context = str(self.error) + except: + error_context = DEFAULT_ERROR_CONTEXT + + return user_friendly_error_message + ". " + error_context # Taken from PyHive @@ -519,7 +581,9 @@ def transform_paramstyle( return output -def create_arrow_table_from_arrow_file(file_bytes: bytes, description) -> "pyarrow.Table": +def create_arrow_table_from_arrow_file( + file_bytes: bytes, description +) -> "pyarrow.Table": arrow_table = convert_arrow_based_file_to_arrow_table(file_bytes) return convert_decimals_in_arrow_table(arrow_table, description) @@ -564,6 +628,37 @@ def convert_decimals_in_arrow_table(table, description) -> "pyarrow.Table": return table +def convert_to_assigned_datatypes_in_column_table(column_table, description): + + converted_column_table = [] + for i, col in enumerate(column_table): + if description[i][1] == "decimal": + converted_column_table.append( + tuple(v if v is None else Decimal(v) for v in col) + ) + elif description[i][1] == "date": + converted_column_table.append( + tuple(v if v is None else datetime.date.fromisoformat(v) for v in col) + ) + elif description[i][1] == "timestamp": + converted_column_table.append( + tuple( + ( + v + if v is None + else datetime.datetime.strptime( + v, "%Y-%m-%d %H:%M:%S.%f" + ).replace(tzinfo=pytz.UTC) + ) + for v in col + ) + ) + else: + converted_column_table.append(col) + + return converted_column_table + + def convert_column_based_set_to_arrow_table(columns, description): arrow_table = pyarrow.Table.from_arrays( [_convert_column_to_arrow_array(c) for c in columns], @@ -575,6 +670,13 @@ def convert_column_based_set_to_arrow_table(columns, description): return arrow_table, arrow_table.num_rows +def convert_column_based_set_to_column_table(columns, description): + column_names = [c[0] for c in description] + column_table = [_convert_column_to_list(c) for c in columns] + + return column_table, column_names + + def _convert_column_to_arrow_array(t_col): """ Return a pyarrow array from the values in a TColumn instance. @@ -599,6 +701,26 @@ def _convert_column_to_arrow_array(t_col): raise OperationalError("Empty TColumn instance {}".format(t_col)) +def _convert_column_to_list(t_col): + SUPPORTED_FIELD_TYPES = ( + "boolVal", + "byteVal", + "i16Val", + "i32Val", + "i64Val", + "doubleVal", + "stringVal", + "binaryVal", + ) + + for field in SUPPORTED_FIELD_TYPES: + wrapper = getattr(t_col, field) + if wrapper: + return _create_python_tuple(wrapper) + + raise OperationalError("Empty TColumn instance {}".format(t_col)) + + def _create_arrow_array(t_col_value_wrapper, arrow_type): result = t_col_value_wrapper.values nulls = t_col_value_wrapper.nulls # bitfield describing which values are null @@ -613,3 +735,19 @@ def _create_arrow_array(t_col_value_wrapper, arrow_type): result[i] = None return pyarrow.array(result, type=arrow_type) + + +def _create_python_tuple(t_col_value_wrapper): + result = t_col_value_wrapper.values + nulls = t_col_value_wrapper.nulls # bitfield describing which values are null + assert isinstance(nulls, bytes) + + # The number of bits in nulls can be both larger or smaller than the number of + # elements in result, so take the minimum of both to iterate over. 
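+    # Worked example (illustrative values): with result = [10, 20, 30, 40] and
+    # nulls = b"\x0a" (binary 0000_1010), bits 1 and 3 are set, so
+    # nulls[0] & BIT_MASKS[1] and nulls[0] & BIT_MASKS[3] are non-zero and
+    # rows 1 and 3 are nulled out, yielding the tuple (10, None, 30, None).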
+ length = min(len(result), len(nulls) * 8) + + for i in range(length): + if nulls[i >> 3] & BIT_MASKS[i & 0x7]: + result[i] = None + + return tuple(result) diff --git a/src/databricks/sqlalchemy/__init__.py b/src/databricks/sqlalchemy/__init__.py new file mode 100644 index 00000000..f79d4c20 --- /dev/null +++ b/src/databricks/sqlalchemy/__init__.py @@ -0,0 +1,6 @@ +try: + from databricks_sqlalchemy import * +except: + import warnings + + warnings.warn("Install databricks-sqlalchemy plugin before using this") \ No newline at end of file diff --git a/databricks_sql_connector_core/src/databricks/sql/thrift_api/__init__.py b/tests/__init__.py similarity index 100% rename from databricks_sql_connector_core/src/databricks/sql/thrift_api/__init__.py rename to tests/__init__.py diff --git a/databricks_sql_connector_core/tests/__init__.py b/tests/e2e/__init__.py similarity index 100% rename from databricks_sql_connector_core/tests/__init__.py rename to tests/e2e/__init__.py diff --git a/databricks_sql_connector_core/tests/e2e/__init__.py b/tests/e2e/common/__init__.py similarity index 100% rename from databricks_sql_connector_core/tests/e2e/__init__.py rename to tests/e2e/common/__init__.py diff --git a/databricks_sql_connector_core/tests/e2e/common/core_tests.py b/tests/e2e/common/core_tests.py similarity index 87% rename from databricks_sql_connector_core/tests/e2e/common/core_tests.py rename to tests/e2e/common/core_tests.py index e89289ef..3f0fdc05 100644 --- a/databricks_sql_connector_core/tests/e2e/common/core_tests.py +++ b/tests/e2e/common/core_tests.py @@ -4,15 +4,18 @@ TypeFailure = namedtuple( "TypeFailure", - "query,columnType,resultType,resultValue," "actualValue,actualType,description,conf", + "query,columnType,resultType,resultValue," + "actualValue,actualType,description,conf", ) ResultFailure = namedtuple( "ResultFailure", - "query,columnType,resultType,resultValue," "actualValue,actualType,description,conf", + "query,columnType,resultType,resultValue," + "actualValue,actualType,description,conf", ) ExecFailure = namedtuple( "ExecFailure", - "query,columnType,resultType,resultValue," "actualValue,actualType,description,conf,error", + "query,columnType,resultType,resultValue," + "actualValue,actualType,description,conf,error", ) @@ -58,7 +61,9 @@ def run_tests_on_queries(self, default_conf): for query, columnType, rowValueType, answer in self.range_queries: with self.cursor(default_conf) as cursor: failures.extend( - self.run_query(cursor, query, columnType, rowValueType, answer, default_conf) + self.run_query( + cursor, query, columnType, rowValueType, answer, default_conf + ) ) failures.extend( self.run_range_query( @@ -69,7 +74,9 @@ def run_tests_on_queries(self, default_conf): for query, columnType, rowValueType, answer in self.queries: with self.cursor(default_conf) as cursor: failures.extend( - self.run_query(cursor, query, columnType, rowValueType, answer, default_conf) + self.run_query( + cursor, query, columnType, rowValueType, answer, default_conf + ) ) if failures: @@ -84,7 +91,9 @@ def run_query(self, cursor, query, columnType, rowValueType, answer, conf): try: cursor.execute(full_query) (result,) = cursor.fetchone() - if not all(cursor.description[0][1] == type for type in expected_column_types): + if not all( + cursor.description[0][1] == type for type in expected_column_types + ): return [ TypeFailure( full_query, @@ -150,7 +159,10 @@ def run_range_query(self, cursor, query, columnType, rowValueType, expected, con if len(rows) <= 0: break for index, (result, id) in 
enumerate(rows): - if not all(cursor.description[0][1] == type for type in expected_column_types): + if not all( + cursor.description[0][1] == type + for type in expected_column_types + ): return [ TypeFailure( full_query, @@ -163,7 +175,10 @@ def run_range_query(self, cursor, query, columnType, rowValueType, expected, con conf, ) ] - if self.validate_row_value_type and type(result) is not rowValueType: + if ( + self.validate_row_value_type + and type(result) is not rowValueType + ): return [ TypeFailure( full_query, diff --git a/databricks_sql_connector_core/tests/e2e/common/decimal_tests.py b/tests/e2e/common/decimal_tests.py similarity index 66% rename from databricks_sql_connector_core/tests/e2e/common/decimal_tests.py rename to tests/e2e/common/decimal_tests.py index 47fc2070..0029f30c 100644 --- a/databricks_sql_connector_core/tests/e2e/common/decimal_tests.py +++ b/tests/e2e/common/decimal_tests.py @@ -1,23 +1,22 @@ from decimal import Decimal +import pyarrow import pytest -try: - import pyarrow -except ImportError: - pyarrow = None -from tests.e2e.common.predicates import pysql_supports_arrow - -def decimal_and_expected_results(): - - if pyarrow is None: - return [] - - return [ +class DecimalTestsMixin: + decimal_and_expected_results = [ ("100.001 AS DECIMAL(6, 3)", Decimal("100.001"), pyarrow.decimal128(6, 3)), - ("1000000.0000 AS DECIMAL(11, 4)", Decimal("1000000.0000"), pyarrow.decimal128(11, 4)), - ("-10.2343 AS DECIMAL(10, 6)", Decimal("-10.234300"), pyarrow.decimal128(10, 6)), + ( + "1000000.0000 AS DECIMAL(11, 4)", + Decimal("1000000.0000"), + pyarrow.decimal128(11, 4), + ), + ( + "-10.2343 AS DECIMAL(10, 6)", + Decimal("-10.234300"), + pyarrow.decimal128(10, 6), + ), # TODO(SC-90767): Re-enable this test after we have a way of passing `ansi_mode` = False # ("-13872347.2343 AS DECIMAL(10, 10)", None, pyarrow.decimal128(10, 10)), ("NULL AS DECIMAL(1, 1)", None, pyarrow.decimal128(1, 1)), @@ -26,12 +25,7 @@ def decimal_and_expected_results(): ("1e-3 AS DECIMAL(38, 3)", Decimal("0.001"), pyarrow.decimal128(38, 3)), ] -def multi_decimals_and_expected_results(): - - if pyarrow is None: - return [] - - return [ + multi_decimals_and_expected_results = [ ( ["1 AS DECIMAL(6, 3)", "100.001 AS DECIMAL(6, 3)", "NULL AS DECIMAL(6, 3)"], [Decimal("1.00"), Decimal("100.001"), None], @@ -44,9 +38,9 @@ def multi_decimals_and_expected_results(): ), ] -@pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") -class DecimalTestsMixin: - @pytest.mark.parametrize("decimal, expected_value, expected_type", decimal_and_expected_results()) + @pytest.mark.parametrize( + "decimal, expected_value, expected_type", decimal_and_expected_results + ) def test_decimals(self, decimal, expected_value, expected_type): with self.cursor({}) as cursor: query = "SELECT CAST ({})".format(decimal) @@ -55,10 +49,14 @@ def test_decimals(self, decimal, expected_value, expected_type): assert table.field(0).type == expected_type assert table.to_pydict().popitem()[1][0] == expected_value - @pytest.mark.parametrize("decimals, expected_values, expected_type", multi_decimals_and_expected_results()) + @pytest.mark.parametrize( + "decimals, expected_values, expected_type", multi_decimals_and_expected_results + ) def test_multi_decimals(self, decimals, expected_values, expected_type): with self.cursor({}) as cursor: - union_str = " UNION ".join(["(SELECT CAST ({}))".format(dec) for dec in decimals]) + union_str = " UNION ".join( + ["(SELECT CAST ({}))".format(dec) for dec in decimals] + 
) query = "SELECT * FROM ({}) ORDER BY 1 NULLS LAST".format(union_str) cursor.execute(query) diff --git a/databricks_sql_connector_core/tests/e2e/common/large_queries_mixin.py b/tests/e2e/common/large_queries_mixin.py similarity index 92% rename from databricks_sql_connector_core/tests/e2e/common/large_queries_mixin.py rename to tests/e2e/common/large_queries_mixin.py index f337eb76..41ef029b 100644 --- a/databricks_sql_connector_core/tests/e2e/common/large_queries_mixin.py +++ b/tests/e2e/common/large_queries_mixin.py @@ -1,10 +1,6 @@ import logging import math import time -from unittest import skipUnless - -import pytest -from tests.e2e.common.predicates import pysql_supports_arrow log = logging.getLogger(__name__) @@ -40,11 +36,12 @@ def fetch_rows(self, cursor, row_count, fetchmany_size): num_fetches = max(math.ceil(n / 10000), 1) latency_ms = int((time.time() - start_time) * 1000 / num_fetches), 1 print( - "Fetched {} rows with an avg latency of {} per fetch, ".format(n, latency_ms) + "Fetched {} rows with an avg latency of {} per fetch, ".format( + n, latency_ms + ) + "assuming 10K fetch size." ) - @pytest.mark.skipif(not pysql_supports_arrow(), reason="Without pyarrow lz4 compression is not supported") def test_query_with_large_wide_result_set(self): resultSize = 300 * 1000 * 1000 # 300 MB width = 8192 # B @@ -60,10 +57,14 @@ def test_query_with_large_wide_result_set(self): cursor.connection.lz4_compression = lz4_compression uuids = ", ".join(["uuid() uuid{}".format(i) for i in range(cols)]) cursor.execute( - "SELECT id, {uuids} FROM RANGE({rows})".format(uuids=uuids, rows=rows) + "SELECT id, {uuids} FROM RANGE({rows})".format( + uuids=uuids, rows=rows + ) ) assert lz4_compression == cursor.active_result_set.lz4_compressed - for row_id, row in enumerate(self.fetch_rows(cursor, rows, fetchmany_size)): + for row_id, row in enumerate( + self.fetch_rows(cursor, rows, fetchmany_size) + ): assert row[0] == row_id # Verify no rows are dropped in the middle. assert len(row[1]) == 36 diff --git a/databricks_sql_connector_core/tests/e2e/common/predicates.py b/tests/e2e/common/predicates.py similarity index 78% rename from databricks_sql_connector_core/tests/e2e/common/predicates.py rename to tests/e2e/common/predicates.py index 99e6f701..61de69fd 100644 --- a/databricks_sql_connector_core/tests/e2e/common/predicates.py +++ b/tests/e2e/common/predicates.py @@ -8,13 +8,10 @@ def pysql_supports_arrow(): - """Checks if the pyarrow library is installed or not""" - try: - import pyarrow + """Import databricks.sql and test whether Cursor has fetchall_arrow.""" + from databricks.sql.client import Cursor - return True - except ImportError: - return False + return hasattr(Cursor, "fetchall_arrow") def pysql_has_version(compare, version): @@ -29,6 +26,7 @@ def test_some_pyhive_v1_stuff(): ... """ from databricks import sql + return compare_module_version(sql, compare, version) @@ -42,7 +40,7 @@ def is_endpoint_test(cli_args=None): def compare_dbr_versions(cli_args, compare, major_version, minor_version): if MAJOR_DBR_V_KEY in cli_args and MINOR_DBR_V_KEY in cli_args: if cli_args[MINOR_DBR_V_KEY] == "x": - actual_minor_v = float('inf') + actual_minor_v = float("inf") else: actual_minor_v = int(cli_args[MINOR_DBR_V_KEY]) dbr_version = (int(cli_args[MAJOR_DBR_V_KEY]), actual_minor_v) @@ -51,8 +49,10 @@ def compare_dbr_versions(cli_args, compare, major_version, minor_version): if not is_endpoint_test(): raise ValueError( - "DBR version not provided for non-endpoint test. Please pass the {} and {} params". 
- format(MAJOR_DBR_V_KEY, MINOR_DBR_V_KEY)) + "DBR version not provided for non-endpoint test. Please pass the {} and {} params".format( + MAJOR_DBR_V_KEY, MINOR_DBR_V_KEY + ) + ) def is_thrift_v5_plus(cli_args): @@ -60,18 +60,18 @@ def is_thrift_v5_plus(cli_args): _compare_fns = { - '<': '__lt__', - '<=': '__le__', - '>': '__gt__', - '>=': '__ge__', - '==': '__eq__', - '!=': '__ne__', + "<": "__lt__", + "<=": "__le__", + ">": "__gt__", + ">=": "__ge__", + "==": "__eq__", + "!=": "__ne__", } def compare_versions(compare, v1_tuple, v2_tuple): compare_fn_name = _compare_fns.get(compare) - assert compare_fn_name, 'Received invalid compare string: ' + compare + assert compare_fn_name, "Received invalid compare string: " + compare return getattr(v1_tuple, compare_fn_name)(v2_tuple) @@ -91,13 +91,15 @@ def test_some_pyhive_v1_stuff(): NOTE: This comparison leverages packaging.version.parse, and compares _release_ versions, thus ignoring pre/post release tags (eg -rc1, -dev, etc). """ - assert module, 'Received invalid module: ' + module - assert getattr(module, '__version__'), 'Received module with no version: ' + module + assert module, "Received invalid module: " + module + assert getattr(module, "__version__"), "Received module with no version: " + module def validate_version(version): v = parse_version(str(version)) # assert that we get a PEP-440 Version back -- LegacyVersion doesn't have major/minor. - assert hasattr(v, 'major'), 'Module has incompatible "Legacy" version: ' + version + assert hasattr(v, "major"), ( + 'Module has incompatible "Legacy" version: ' + version + ) return (v.major, v.minor, v.micro) mod_version = validate_version(module.__version__) diff --git a/databricks_sql_connector_core/tests/e2e/common/retry_test_mixins.py b/tests/e2e/common/retry_test_mixins.py similarity index 93% rename from databricks_sql_connector_core/tests/e2e/common/retry_test_mixins.py rename to tests/e2e/common/retry_test_mixins.py index 106a8fb5..7dd5f745 100755 --- a/databricks_sql_connector_core/tests/e2e/common/retry_test_mixins.py +++ b/tests/e2e/common/retry_test_mixins.py @@ -59,7 +59,9 @@ def _test_retry_disabled_with_message(self, error_msg_substring, exception_type) @contextmanager -def mocked_server_response(status: int = 200, headers: dict = {}, redirect_location: Optional[str] = None): +def mocked_server_response( + status: int = 200, headers: dict = {}, redirect_location: Optional[str] = None +): """Context manager for patching urllib3 responses""" # When mocking mocking a BaseHTTPResponse for urllib3 the mock must include @@ -97,7 +99,9 @@ def mock_sequential_server_responses(responses: List[dict]): # Each resp should have these members: for resp in responses: - _mock = MagicMock(headers=resp["headers"], msg=resp["headers"], status=resp["status"]) + _mock = MagicMock( + headers=resp["headers"], msg=resp["headers"], status=resp["status"] + ) _mock.get_redirect_location.return_value = ( False if resp["redirect_location"] is None else resp["redirect_location"] ) @@ -176,7 +180,9 @@ def test_retry_exponential_backoff(self): retry_policy["_retry_delay_min"] = 1 time_start = time.time() - with mocked_server_response(status=429, headers={"Retry-After": "3"}) as mock_obj: + with mocked_server_response( + status=429, headers={"Retry-After": "3"} + ) as mock_obj: with pytest.raises(RequestError) as cm: with self.connection(extra_params=retry_policy) as conn: pass @@ -256,7 +262,9 @@ def test_retry_dangerous_codes(self): assert isinstance(cm.value.args[1], UnsafeToRetryError) # Prove that these 
codes are retried if forced by the user - with self.connection(extra_params={**self._retry_policy, **additional_settings}) as conn: + with self.connection( + extra_params={**self._retry_policy, **additional_settings} + ) as conn: with conn.cursor() as cursor: for dangerous_code in DANGEROUS_CODES: with mocked_server_response(status=dangerous_code): @@ -326,7 +334,9 @@ def test_retry_abort_close_operation_on_404(self, caplog): curs.execute("SELECT 1") with mock_sequential_server_responses(responses): curs.close() - assert "Operation was canceled by a prior request" in caplog.text + assert ( + "Operation was canceled by a prior request" in caplog.text + ) def test_retry_max_redirects_raises_too_many_redirects_exception(self): """GIVEN the connector is configured with a custom max_redirects @@ -337,7 +347,9 @@ def test_retry_max_redirects_raises_too_many_redirects_exception(self): max_redirects, expected_call_count = 1, 2 # Code 302 is a redirect - with mocked_server_response(status=302, redirect_location="/foo.bar") as mock_obj: + with mocked_server_response( + status=302, redirect_location="/foo.bar" + ) as mock_obj: with pytest.raises(MaxRetryError) as cm: with self.connection( extra_params={ @@ -359,7 +371,9 @@ def test_retry_max_redirects_unset_doesnt_redirect_forever(self): _stop_after_attempts_count is enforced. """ # Code 302 is a redirect - with mocked_server_response(status=302, redirect_location="/foo.bar/") as mock_obj: + with mocked_server_response( + status=302, redirect_location="/foo.bar/" + ) as mock_obj: with pytest.raises(MaxRetryError) as cm: with self.connection( extra_params={ @@ -385,7 +399,9 @@ def test_retry_max_redirects_is_bounded_by_stop_after_attempts_count(self): with pytest.raises(RequestError) as cm: with mock_sequential_server_responses(responses): - with self.connection(extra_params={**self._retry_policy, **additional_settings}): + with self.connection( + extra_params={**self._retry_policy, **additional_settings} + ): pass # The error should be the result of the 500, not because of too many requests. @@ -405,9 +421,12 @@ def test_retry_max_redirects_exceeds_max_attempts_count_warns_user(self, caplog) assert "it will have no affect!" in caplog.text def test_retry_legacy_behavior_warns_user(self, caplog): - with self.connection(extra_params={**self._retry_policy, "_enable_v3_retries": False}): - assert "Legacy retry behavior is enabled for this connection." in caplog.text - + with self.connection( + extra_params={**self._retry_policy, "_enable_v3_retries": False} + ): + assert ( + "Legacy retry behavior is enabled for this connection." 
in caplog.text + ) def test_403_not_retried(self): """GIVEN the server returns a code 403 diff --git a/databricks_sql_connector_core/tests/e2e/common/staging_ingestion_tests.py b/tests/e2e/common/staging_ingestion_tests.py similarity index 82% rename from databricks_sql_connector_core/tests/e2e/common/staging_ingestion_tests.py rename to tests/e2e/common/staging_ingestion_tests.py index d8d0429f..008055e3 100644 --- a/databricks_sql_connector_core/tests/e2e/common/staging_ingestion_tests.py +++ b/tests/e2e/common/staging_ingestion_tests.py @@ -41,7 +41,9 @@ def test_staging_ingestion_life_cycle(self, ingestion_user): with open(fh, "wb") as fp: fp.write(original_text) - with self.connection(extra_params={"staging_allowed_local_path": temp_path}) as conn: + with self.connection( + extra_params={"staging_allowed_local_path": temp_path} + ) as conn: cursor = conn.cursor() query = f"PUT '{temp_path}' INTO 'stage://tmp/{ingestion_user}/tmp/11/15/file1.csv' OVERWRITE" @@ -51,7 +53,9 @@ def test_staging_ingestion_life_cycle(self, ingestion_user): new_fh, new_temp_path = tempfile.mkstemp() - with self.connection(extra_params={"staging_allowed_local_path": new_temp_path}) as conn: + with self.connection( + extra_params={"staging_allowed_local_path": new_temp_path} + ) as conn: cursor = conn.cursor() query = f"GET 'stage://tmp/{ingestion_user}/tmp/11/15/file1.csv' TO '{new_temp_path}'" cursor.execute(query) @@ -71,17 +75,19 @@ def test_staging_ingestion_life_cycle(self, ingestion_user): # GET after REMOVE should fail - with pytest.raises(Error, match="Staging operation over HTTP was unsuccessful: 404"): + with pytest.raises( + Error, match="Staging operation over HTTP was unsuccessful: 404" + ): cursor = conn.cursor() - query = ( - f"GET 'stage://tmp/{ingestion_user}/tmp/11/15/file1.csv' TO '{new_temp_path}'" - ) + query = f"GET 'stage://tmp/{ingestion_user}/tmp/11/15/file1.csv' TO '{new_temp_path}'" cursor.execute(query) os.remove(temp_path) os.remove(new_temp_path) - def test_staging_ingestion_put_fails_without_staging_allowed_local_path(self, ingestion_user): + def test_staging_ingestion_put_fails_without_staging_allowed_local_path( + self, ingestion_user + ): """PUT operations are not supported unless the connection was built with a parameter called staging_allowed_local_path """ @@ -93,7 +99,9 @@ def test_staging_ingestion_put_fails_without_staging_allowed_local_path(self, in with open(fh, "wb") as fp: fp.write(original_text) - with pytest.raises(Error, match="You must provide at least one staging_allowed_local_path"): + with pytest.raises( + Error, match="You must provide at least one staging_allowed_local_path" + ): with self.connection() as conn: cursor = conn.cursor() query = f"PUT '{temp_path}' INTO 'stage://tmp/{ingestion_user}/tmp/11/15/file1.csv' OVERWRITE" @@ -119,12 +127,16 @@ def test_staging_ingestion_put_fails_if_localFile_not_in_staging_allowed_local_p Error, match="Local file operations are restricted to paths within the configured staging_allowed_local_path", ): - with self.connection(extra_params={"staging_allowed_local_path": base_path}) as conn: + with self.connection( + extra_params={"staging_allowed_local_path": base_path} + ) as conn: cursor = conn.cursor() query = f"PUT '{temp_path}' INTO 'stage://tmp/{ingestion_user}/tmp/11/15/file1.csv' OVERWRITE" cursor.execute(query) - def test_staging_ingestion_put_fails_if_file_exists_and_overwrite_not_set(self, ingestion_user): + def test_staging_ingestion_put_fails_if_file_exists_and_overwrite_not_set( + self, ingestion_user + ): 
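+        # Staging commands exercised by this suite, in the forms used verbatim
+        # in the queries above (<user> stands for the ingestion_user fixture):
+        #   PUT '<local path>' INTO 'stage://tmp/<user>/<remote path>' [OVERWRITE]
+        #   GET 'stage://tmp/<user>/<remote path>' TO '<local path>'
+        #   REMOVE 'stage://tmp/<user>/<remote path>'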
"""PUT a file into the staging location twice. First command should succeed. Second should fail.""" fh, temp_path = tempfile.mkstemp() @@ -135,16 +147,22 @@ def test_staging_ingestion_put_fails_if_file_exists_and_overwrite_not_set(self, fp.write(original_text) def perform_put(): - with self.connection(extra_params={"staging_allowed_local_path": temp_path}) as conn: + with self.connection( + extra_params={"staging_allowed_local_path": temp_path} + ) as conn: cursor = conn.cursor() query = f"PUT '{temp_path}' INTO 'stage://tmp/{ingestion_user}/tmp/12/15/file1.csv'" cursor.execute(query) def perform_remove(): try: - remove_query = f"REMOVE 'stage://tmp/{ingestion_user}/tmp/12/15/file1.csv'" + remove_query = ( + f"REMOVE 'stage://tmp/{ingestion_user}/tmp/12/15/file1.csv'" + ) - with self.connection(extra_params={"staging_allowed_local_path": "/"}) as conn: + with self.connection( + extra_params={"staging_allowed_local_path": "/"} + ) as conn: cursor = conn.cursor() cursor.execute(remove_query) except Exception: @@ -178,7 +196,9 @@ def test_staging_ingestion_fails_to_modify_another_staging_user(self): fp.write(original_text) def perform_put(): - with self.connection(extra_params={"staging_allowed_local_path": temp_path}) as conn: + with self.connection( + extra_params={"staging_allowed_local_path": temp_path} + ) as conn: cursor = conn.cursor() query = f"PUT '{temp_path}' INTO 'stage://tmp/{some_other_user}/tmp/12/15/file1.csv' OVERWRITE" cursor.execute(query) @@ -186,12 +206,16 @@ def perform_put(): def perform_remove(): remove_query = f"REMOVE 'stage://tmp/{some_other_user}/tmp/12/15/file1.csv'" - with self.connection(extra_params={"staging_allowed_local_path": "/"}) as conn: + with self.connection( + extra_params={"staging_allowed_local_path": "/"} + ) as conn: cursor = conn.cursor() cursor.execute(remove_query) def perform_get(): - with self.connection(extra_params={"staging_allowed_local_path": temp_path}) as conn: + with self.connection( + extra_params={"staging_allowed_local_path": temp_path} + ) as conn: cursor = conn.cursor() query = f"GET 'stage://tmp/{some_other_user}/tmp/11/15/file1.csv' TO '{temp_path}'" cursor.execute(query) @@ -232,7 +256,9 @@ def test_staging_ingestion_put_fails_if_absolute_localFile_not_in_staging_allowe query = f"PUT '{target_file}' INTO 'stage://tmp/{ingestion_user}/tmp/11/15/file1.csv' OVERWRITE" cursor.execute(query) - def test_staging_ingestion_empty_local_path_fails_to_parse_at_server(self, ingestion_user): + def test_staging_ingestion_empty_local_path_fails_to_parse_at_server( + self, ingestion_user + ): staging_allowed_local_path = "/var/www/html" target_file = "" @@ -244,7 +270,9 @@ def test_staging_ingestion_empty_local_path_fails_to_parse_at_server(self, inges query = f"PUT '{target_file}' INTO 'stage://tmp/{ingestion_user}/tmp/11/15/file1.csv' OVERWRITE" cursor.execute(query) - def test_staging_ingestion_invalid_staging_path_fails_at_server(self, ingestion_user): + def test_staging_ingestion_invalid_staging_path_fails_at_server( + self, ingestion_user + ): staging_allowed_local_path = "/var/www/html" target_file = "index.html" @@ -278,12 +306,29 @@ def generate_file_and_path_and_queries(): original_text = "hello world!".encode("utf-8") fp.write(original_text) put_query = f"PUT '{temp_path}' INTO 'stage://tmp/{ingestion_user}/tmp/11/15/{id(temp_path)}.csv' OVERWRITE" - remove_query = f"REMOVE 'stage://tmp/{ingestion_user}/tmp/11/15/{id(temp_path)}.csv'" + remove_query = ( + f"REMOVE 'stage://tmp/{ingestion_user}/tmp/11/15/{id(temp_path)}.csv'" + ) 
return fh, temp_path, put_query, remove_query - fh1, temp_path1, put_query1, remove_query1 = generate_file_and_path_and_queries() - fh2, temp_path2, put_query2, remove_query2 = generate_file_and_path_and_queries() - fh3, temp_path3, put_query3, remove_query3 = generate_file_and_path_and_queries() + ( + fh1, + temp_path1, + put_query1, + remove_query1, + ) = generate_file_and_path_and_queries() + ( + fh2, + temp_path2, + put_query2, + remove_query2, + ) = generate_file_and_path_and_queries() + ( + fh3, + temp_path3, + put_query3, + remove_query3, + ) = generate_file_and_path_and_queries() with self.connection( extra_params={"staging_allowed_local_path": [temp_path1, temp_path2]} diff --git a/databricks_sql_connector_core/tests/e2e/common/timestamp_tests.py b/tests/e2e/common/timestamp_tests.py similarity index 88% rename from databricks_sql_connector_core/tests/e2e/common/timestamp_tests.py rename to tests/e2e/common/timestamp_tests.py index f25aed7e..70ded7d0 100644 --- a/databricks_sql_connector_core/tests/e2e/common/timestamp_tests.py +++ b/tests/e2e/common/timestamp_tests.py @@ -15,7 +15,10 @@ class TimestampTestsMixin: ] timestamp_and_expected_results = [ - ("2021-09-30 11:27:35.123+04:00", datetime.datetime(2021, 9, 30, 7, 27, 35, 123000)), + ( + "2021-09-30 11:27:35.123+04:00", + datetime.datetime(2021, 9, 30, 7, 27, 35, 123000), + ), ("2021-09-30 11:27:35+04:00", datetime.datetime(2021, 9, 30, 7, 27, 35)), ("2021-09-30 11:27:35.123", datetime.datetime(2021, 9, 30, 11, 27, 35, 123000)), ("2021-09-30 11:27:35", datetime.datetime(2021, 9, 30, 11, 27, 35)), @@ -45,18 +48,24 @@ def assertTimestampsEqual(self, result, expected): def multi_query(self, n_rows=10): row_sql = "SELECT " + ", ".join( - ["TIMESTAMP('{}')".format(ts) for (ts, _) in self.timestamp_and_expected_results] + [ + "TIMESTAMP('{}')".format(ts) + for (ts, _) in self.timestamp_and_expected_results + ] ) query = " UNION ALL ".join([row_sql for _ in range(n_rows)]) expected_matrix = [ - [dt for (_, dt) in self.timestamp_and_expected_results] for _ in range(n_rows) + [dt for (_, dt) in self.timestamp_and_expected_results] + for _ in range(n_rows) ] return query, expected_matrix def test_timestamps(self): with self.cursor({"session_configuration": {"ansi_mode": False}}) as cursor: for timestamp, expected in self.timestamp_and_expected_results: - cursor.execute("SELECT TIMESTAMP('{timestamp}')".format(timestamp=timestamp)) + cursor.execute( + "SELECT TIMESTAMP('{timestamp}')".format(timestamp=timestamp) + ) result = cursor.fetchone()[0] self.assertTimestampsEqual(result, expected) diff --git a/databricks_sql_connector_core/tests/e2e/common/uc_volume_tests.py b/tests/e2e/common/uc_volume_tests.py similarity index 82% rename from databricks_sql_connector_core/tests/e2e/common/uc_volume_tests.py rename to tests/e2e/common/uc_volume_tests.py index 21e43036..72e2f502 100644 --- a/databricks_sql_connector_core/tests/e2e/common/uc_volume_tests.py +++ b/tests/e2e/common/uc_volume_tests.py @@ -40,19 +40,21 @@ def test_uc_volume_life_cycle(self, catalog, schema): with open(fh, "wb") as fp: fp.write(original_text) - with self.connection(extra_params={"staging_allowed_local_path": temp_path}) as conn: + with self.connection( + extra_params={"staging_allowed_local_path": temp_path} + ) as conn: cursor = conn.cursor() - query = ( - f"PUT '{temp_path}' INTO '/Volumes/{catalog}/{schema}/e2etests/file1.csv' OVERWRITE" - ) + query = f"PUT '{temp_path}' INTO '/Volumes/{catalog}/{schema}/e2etests/file1.csv' OVERWRITE" cursor.execute(query) # GET 
should succeed new_fh, new_temp_path = tempfile.mkstemp() - with self.connection(extra_params={"staging_allowed_local_path": new_temp_path}) as conn: + with self.connection( + extra_params={"staging_allowed_local_path": new_temp_path} + ) as conn: cursor = conn.cursor() query = f"GET '/Volumes/{catalog}/{schema}/e2etests/file1.csv' TO '{new_temp_path}'" cursor.execute(query) @@ -72,7 +74,9 @@ def test_uc_volume_life_cycle(self, catalog, schema): # GET after REMOVE should fail - with pytest.raises(Error, match="Staging operation over HTTP was unsuccessful: 404"): + with pytest.raises( + Error, match="Staging operation over HTTP was unsuccessful: 404" + ): cursor = conn.cursor() query = f"GET '/Volumes/{catalog}/{schema}/e2etests/file1.csv' TO '{new_temp_path}'" cursor.execute(query) @@ -80,7 +84,9 @@ def test_uc_volume_life_cycle(self, catalog, schema): os.remove(temp_path) os.remove(new_temp_path) - def test_uc_volume_put_fails_without_staging_allowed_local_path(self, catalog, schema): + def test_uc_volume_put_fails_without_staging_allowed_local_path( + self, catalog, schema + ): """PUT operations are not supported unless the connection was built with a parameter called staging_allowed_local_path """ @@ -92,7 +98,9 @@ def test_uc_volume_put_fails_without_staging_allowed_local_path(self, catalog, s with open(fh, "wb") as fp: fp.write(original_text) - with pytest.raises(Error, match="You must provide at least one staging_allowed_local_path"): + with pytest.raises( + Error, match="You must provide at least one staging_allowed_local_path" + ): with self.connection() as conn: cursor = conn.cursor() query = f"PUT '{temp_path}' INTO '/Volumes/{catalog}/{schema}/e2etests/file1.csv' OVERWRITE" @@ -118,12 +126,16 @@ def test_uc_volume_put_fails_if_localFile_not_in_staging_allowed_local_path( Error, match="Local file operations are restricted to paths within the configured staging_allowed_local_path", ): - with self.connection(extra_params={"staging_allowed_local_path": base_path}) as conn: + with self.connection( + extra_params={"staging_allowed_local_path": base_path} + ) as conn: cursor = conn.cursor() query = f"PUT '{temp_path}' INTO '/Volumes/{catalog}/{schema}/e2etests/file1.csv' OVERWRITE" cursor.execute(query) - def test_uc_volume_put_fails_if_file_exists_and_overwrite_not_set(self, catalog, schema): + def test_uc_volume_put_fails_if_file_exists_and_overwrite_not_set( + self, catalog, schema + ): """PUT a file into the staging location twice. First command should succeed. 
Second should fail.""" fh, temp_path = tempfile.mkstemp() @@ -134,16 +146,22 @@ def test_uc_volume_put_fails_if_file_exists_and_overwrite_not_set(self, catalog, fp.write(original_text) def perform_put(): - with self.connection(extra_params={"staging_allowed_local_path": temp_path}) as conn: + with self.connection( + extra_params={"staging_allowed_local_path": temp_path} + ) as conn: cursor = conn.cursor() query = f"PUT '{temp_path}' INTO '/Volumes/{catalog}/{schema}/e2etests/file1.csv'" cursor.execute(query) def perform_remove(): try: - remove_query = f"REMOVE '/Volumes/{catalog}/{schema}/e2etests/file1.csv'" + remove_query = ( + f"REMOVE '/Volumes/{catalog}/{schema}/e2etests/file1.csv'" + ) - with self.connection(extra_params={"staging_allowed_local_path": "/"}) as conn: + with self.connection( + extra_params={"staging_allowed_local_path": "/"} + ) as conn: cursor = conn.cursor() cursor.execute(remove_query) except Exception: @@ -212,7 +230,9 @@ def test_uc_volume_invalid_volume_path_fails_at_server(self, catalog, schema): query = f"PUT '{target_file}' INTO '/Volumes/RANDOMSTRINGOFCHARACTERS/{catalog}/{schema}/e2etests/file1.csv' OVERWRITE" cursor.execute(query) - def test_uc_volume_supports_multiple_staging_allowed_local_path_values(self, catalog, schema): + def test_uc_volume_supports_multiple_staging_allowed_local_path_values( + self, catalog, schema + ): """staging_allowed_local_path may be either a path-like object or a list of path-like objects. This test confirms that two configured base paths: @@ -232,12 +252,29 @@ def generate_file_and_path_and_queries(): original_text = "hello world!".encode("utf-8") fp.write(original_text) put_query = f"PUT '{temp_path}' INTO '/Volumes/{catalog}/{schema}/e2etests/{id(temp_path)}.csv' OVERWRITE" - remove_query = f"REMOVE '/Volumes/{catalog}/{schema}/e2etests/{id(temp_path)}.csv'" + remove_query = ( + f"REMOVE '/Volumes/{catalog}/{schema}/e2etests/{id(temp_path)}.csv'" + ) return fh, temp_path, put_query, remove_query - fh1, temp_path1, put_query1, remove_query1 = generate_file_and_path_and_queries() - fh2, temp_path2, put_query2, remove_query2 = generate_file_and_path_and_queries() - fh3, temp_path3, put_query3, remove_query3 = generate_file_and_path_and_queries() + ( + fh1, + temp_path1, + put_query1, + remove_query1, + ) = generate_file_and_path_and_queries() + ( + fh2, + temp_path2, + put_query2, + remove_query2, + ) = generate_file_and_path_and_queries() + ( + fh3, + temp_path3, + put_query3, + remove_query3, + ) = generate_file_and_path_and_queries() with self.connection( extra_params={"staging_allowed_local_path": [temp_path1, temp_path2]} diff --git a/databricks_sql_connector_core/tests/e2e/test_complex_types.py b/tests/e2e/test_complex_types.py similarity index 90% rename from databricks_sql_connector_core/tests/e2e/test_complex_types.py rename to tests/e2e/test_complex_types.py index acac4e44..446a6b50 100644 --- a/databricks_sql_connector_core/tests/e2e/test_complex_types.py +++ b/tests/e2e/test_complex_types.py @@ -2,9 +2,8 @@ from numpy import ndarray from tests.e2e.test_driver import PySQLPytestTestCase -from tests.e2e.common.predicates import pysql_supports_arrow -@pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") + class TestComplexTypes(PySQLPytestTestCase): @pytest.fixture(scope="class") def table_fixture(self, connection_details): @@ -54,7 +53,9 @@ def test_read_complex_types_as_arrow(self, field, expected_type, table_fixture): @pytest.mark.parametrize("field", [("array_col"), 
("map_col"), ("struct_col")]) def test_read_complex_types_as_string(self, field, table_fixture): """Confirms the return type of a complex type that is returned as a string""" - with self.cursor(extra_params={"_use_arrow_native_complex_types": False}) as cursor: + with self.cursor( + extra_params={"_use_arrow_native_complex_types": False} + ) as cursor: result = cursor.execute( "SELECT * FROM pysql_test_complex_types_table LIMIT 1" ).fetchone() diff --git a/databricks_sql_connector_core/tests/e2e/test_driver.py b/tests/e2e/test_driver.py similarity index 91% rename from databricks_sql_connector_core/tests/e2e/test_driver.py rename to tests/e2e/test_driver.py index 6fa686e9..cfd1e969 100644 --- a/databricks_sql_connector_core/tests/e2e/test_driver.py +++ b/tests/e2e/test_driver.py @@ -12,6 +12,7 @@ from uuid import uuid4 import numpy as np +import pyarrow import pytz import thrift import pytest @@ -34,13 +35,15 @@ pysql_supports_arrow, compare_dbr_versions, is_thrift_v5_plus, - pysql_supports_arrow ) from tests.e2e.common.core_tests import CoreTestMixin, SmokeTestMixin from tests.e2e.common.large_queries_mixin import LargeQueriesMixin from tests.e2e.common.timestamp_tests import TimestampTestsMixin from tests.e2e.common.decimal_tests import DecimalTestsMixin -from tests.e2e.common.retry_test_mixins import Client429ResponseMixin, Client503ResponseMixin +from tests.e2e.common.retry_test_mixins import ( + Client429ResponseMixin, + Client503ResponseMixin, +) from tests.e2e.common.staging_ingestion_tests import PySQLStagingIngestionTestSuiteMixin from tests.e2e.common.retry_test_mixins import PySQLRetryTestsMixin @@ -48,11 +51,6 @@ from databricks.sql.exc import SessionAlreadyClosedError -try: - import pyarrow -except: - pyarrow = None - log = logging.getLogger(__name__) unsafe_logger = logging.getLogger("databricks.sql.unsafe") @@ -62,7 +60,9 @@ # manually decorate DecimalTestsMixin to need arrow support for name in loader.getTestCaseNames(DecimalTestsMixin, "test_"): fn = getattr(DecimalTestsMixin, name) - decorated = skipUnless(pysql_supports_arrow(), "Decimal tests need arrow support")(fn) + decorated = skipUnless(pysql_supports_arrow(), "Decimal tests need arrow support")( + fn + ) setattr(DecimalTestsMixin, name, decorated) @@ -73,7 +73,9 @@ class PySQLPytestTestCase: error_type = Error conf_to_disable_rate_limit_retries = {"_retry_stop_after_attempts_count": 1} - conf_to_disable_temporarily_unavailable_retries = {"_retry_stop_after_attempts_count": 1} + conf_to_disable_temporarily_unavailable_retries = { + "_retry_stop_after_attempts_count": 1 + } arraysize = 1000 buffer_size_bytes = 104857600 @@ -110,7 +112,9 @@ def connection(self, extra_params=()): @contextmanager def cursor(self, extra_params=()): with self.connection(extra_params) as conn: - cursor = conn.cursor(arraysize=self.arraysize, buffer_size_bytes=self.buffer_size_bytes) + cursor = conn.cursor( + arraysize=self.arraysize, buffer_size_bytes=self.buffer_size_bytes + ) try: yield cursor finally: @@ -149,7 +153,9 @@ def test_cloud_fetch(self): limits, threads, [True, False] ): with self.subTest( - num_limit=num_limit, num_threads=num_threads, lz4_compression=lz4_compression + num_limit=num_limit, + num_threads=num_threads, + lz4_compression=lz4_compression, ): cf_result, noop_result = None, None query = base_query + "LIMIT " + str(num_limit) @@ -294,7 +300,15 @@ def test_get_tables(self): ("TYPE_CAT", "string", None, None, None, None, None), ("TYPE_SCHEM", "string", None, None, None, None, None), ("TYPE_NAME", "string", None, 
None, None, None, None), - ("SELF_REFERENCING_COL_NAME", "string", None, None, None, None, None), + ( + "SELF_REFERENCING_COL_NAME", + "string", + None, + None, + None, + None, + None, + ), ("REF_GENERATION", "string", None, None, None, None, None), ] assert tables_desc == expected @@ -395,15 +409,21 @@ def test_escape_single_quotes(self): table_name = "table_{uuid}".format(uuid=str(uuid4()).replace("-", "_")) # Test escape syntax directly cursor.execute( - "CREATE TABLE IF NOT EXISTS {} AS (SELECT 'you\\'re' AS col_1)".format(table_name) + "CREATE TABLE IF NOT EXISTS {} AS (SELECT 'you\\'re' AS col_1)".format( + table_name + ) + ) + cursor.execute( + "SELECT * FROM {} WHERE col_1 LIKE 'you\\'re'".format(table_name) ) - cursor.execute("SELECT * FROM {} WHERE col_1 LIKE 'you\\'re'".format(table_name)) rows = cursor.fetchall() assert rows[0]["col_1"] == "you're" # Test escape syntax in parameter cursor.execute( - "SELECT * FROM {} WHERE {}.col_1 LIKE %(var)s".format(table_name, table_name), + "SELECT * FROM {} WHERE {}.col_1 LIKE %(var)s".format( + table_name, table_name + ), parameters={"var": "you're"}, ) rows = cursor.fetchall() @@ -432,7 +452,9 @@ def test_get_catalogs(self): cursor.catalogs() cursor.fetchall() catalogs_desc = cursor.description - assert catalogs_desc == [("TABLE_CAT", "string", None, None, None, None, None)] + assert catalogs_desc == [ + ("TABLE_CAT", "string", None, None, None, None, None) + ] @skipUnless(pysql_supports_arrow(), "arrow test need arrow support") def test_get_arrow(self): @@ -569,7 +591,8 @@ def test_temp_view_fetch(self): @skipIf(pysql_has_version("<", "2"), "requires pysql v2") @skipIf( - True, "Unclear the purpose of this test since urllib3 does not complain when timeout == 0" + True, + "Unclear the purpose of this test since urllib3 does not complain when timeout == 0", ) def test_socket_timeout(self): # We expect to see a BlockingIO error when the socket is opened @@ -592,15 +615,19 @@ def test_socket_timeout_user_defined(self): def test_ssp_passthrough(self): for enable_ansi in (True, False): - with self.cursor({"session_configuration": {"ansi_mode": enable_ansi}}) as cursor: + with self.cursor( + {"session_configuration": {"ansi_mode": enable_ansi}} + ) as cursor: cursor.execute("SET ansi_mode") assert list(cursor.fetchone()) == ["ansi_mode", str(enable_ansi)] - @pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") + @skipUnless(pysql_supports_arrow(), "arrow test needs arrow support") def test_timestamps_arrow(self): with self.cursor({"session_configuration": {"ansi_mode": False}}) as cursor: for timestamp, expected in self.timestamp_and_expected_results: - cursor.execute("SELECT TIMESTAMP('{timestamp}')".format(timestamp=timestamp)) + cursor.execute( + "SELECT TIMESTAMP('{timestamp}')".format(timestamp=timestamp) + ) arrow_table = cursor.fetchmany_arrow(1) if self.should_add_timezone(): ts_type = pyarrow.timestamp("us", tz="Etc/UTC") @@ -611,28 +638,32 @@ def test_timestamps_arrow(self): # To work consistently across different local timezones, we specify the timezone # of the expected result to # be UTC (what it should be by default on the server) - aware_timestamp = expected and expected.replace(tzinfo=datetime.timezone.utc) + aware_timestamp = expected and expected.replace( + tzinfo=datetime.timezone.utc + ) assert result_value == ( aware_timestamp and aware_timestamp.timestamp() * 1000000 ), "timestamp {} did not match {}".format(timestamp, expected) - @pytest.mark.skipif(not 
pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") + @skipUnless(pysql_supports_arrow(), "arrow test needs arrow support") def test_multi_timestamps_arrow(self): with self.cursor({"session_configuration": {"ansi_mode": False}}) as cursor: query, expected = self.multi_query() expected = [ - [self.maybe_add_timezone_to_timestamp(ts) for ts in row] for row in expected + [self.maybe_add_timezone_to_timestamp(ts) for ts in row] + for row in expected ] cursor.execute(query) table = cursor.fetchall_arrow() # Transpose columnar result to list of rows list_of_cols = [c.to_pylist() for c in table] result = [ - [col[row_index] for col in list_of_cols] for row_index in range(table.num_rows) + [col[row_index] for col in list_of_cols] + for row_index in range(table.num_rows) ] assert result == expected - @pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") + @skipUnless(pysql_supports_arrow(), "arrow test needs arrow support") def test_timezone_with_timestamp(self): if self.should_add_timezone(): with self.cursor() as cursor: @@ -645,13 +676,15 @@ def test_timezone_with_timestamp(self): cursor.execute("select CAST('2022-03-02 12:54:56' as TIMESTAMP)") arrow_result_table = cursor.fetchmany_arrow(1) - arrow_result_value = arrow_result_table.column(0).combine_chunks()[0].value + arrow_result_value = ( + arrow_result_table.column(0).combine_chunks()[0].value + ) ts_type = pyarrow.timestamp("us", tz="Europe/Amsterdam") assert arrow_result_table.field(0).type == ts_type assert arrow_result_value == expected.timestamp() * 1000000 - @pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") + @skipUnless(pysql_supports_arrow(), "arrow test needs arrow support") def test_can_flip_compression(self): with self.cursor() as cursor: cursor.execute("SELECT array(1,2,3,4)") @@ -668,7 +701,7 @@ def test_can_flip_compression(self): def _should_have_native_complex_types(self): return pysql_has_version(">=", 2) and is_thrift_v5_plus(self.arguments) - @pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") + @skipUnless(pysql_supports_arrow(), "arrow test needs arrow support") def test_arrays_are_not_returned_as_strings_arrow(self): if self._should_have_native_complex_types(): with self.cursor() as cursor: @@ -679,7 +712,7 @@ def test_arrays_are_not_returned_as_strings_arrow(self): assert pyarrow.types.is_list(list_type) assert pyarrow.types.is_integer(list_type.value_type) - @pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") + @skipUnless(pysql_supports_arrow(), "arrow test needs arrow support") def test_structs_are_not_returned_as_strings_arrow(self): if self._should_have_native_complex_types(): with self.cursor() as cursor: @@ -689,7 +722,7 @@ def test_structs_are_not_returned_as_strings_arrow(self): struct_type = arrow_df.field(0).type assert pyarrow.types.is_struct(struct_type) - @pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") + @skipUnless(pysql_supports_arrow(), "arrow test needs arrow support") def test_decimal_not_returned_as_strings_arrow(self): if self._should_have_native_complex_types(): with self.cursor() as cursor: @@ -705,7 +738,9 @@ def test_close_connection_closes_cursors(self): with self.connection() as conn: cursor = conn.cursor() - cursor.execute("SELECT id, id `id2`, id `id3` FROM RANGE(1000000) order by RANDOM()") + cursor.execute( + "SELECT id, id 
`id2`, id `id3` FROM RANGE(1000000) order by RANDOM()" + ) ars = cursor.active_result_set # We must manually run this check because thrift_backend always forces `has_been_closed_server_side` to True @@ -714,14 +749,21 @@ def test_close_connection_closes_cursors(self): status_request = ttypes.TGetOperationStatusReq( operationHandle=ars.command_id, getProgressUpdate=False ) - op_status_at_server = ars.thrift_backend._client.GetOperationStatus(status_request) - assert op_status_at_server.operationState != ttypes.TOperationState.CLOSED_STATE + op_status_at_server = ars.thrift_backend._client.GetOperationStatus( + status_request + ) + assert ( + op_status_at_server.operationState + != ttypes.TOperationState.CLOSED_STATE + ) conn.close() # When connection closes, any cursor operations should no longer exist at the server with pytest.raises(SessionAlreadyClosedError) as cm: - op_status_at_server = ars.thrift_backend._client.GetOperationStatus(status_request) + op_status_at_server = ars.thrift_backend._client.GetOperationStatus( + status_request + ) def test_closing_a_closed_connection_doesnt_fail(self, caplog): caplog.set_level(logging.DEBUG) @@ -742,7 +784,9 @@ class HTTP429Suite(Client429ResponseMixin, PySQLPytestTestCase): class HTTP503Suite(Client503ResponseMixin, PySQLPytestTestCase): # 503Response suite gets custom error here vs PyODBC def test_retry_disabled(self): - self._test_retry_disabled_with_message("TEMPORARILY_UNAVAILABLE", OperationalError) + self._test_retry_disabled_with_message( + "TEMPORARILY_UNAVAILABLE", OperationalError + ) class TestPySQLUnityCatalogSuite(PySQLPytestTestCase): diff --git a/databricks_sql_connector_core/tests/e2e/test_parameterized_queries.py b/tests/e2e/test_parameterized_queries.py similarity index 96% rename from databricks_sql_connector_core/tests/e2e/test_parameterized_queries.py rename to tests/e2e/test_parameterized_queries.py index e2eac174..d346ad5c 100644 --- a/databricks_sql_connector_core/tests/e2e/test_parameterized_queries.py +++ b/tests/e2e/test_parameterized_queries.py @@ -28,7 +28,6 @@ VoidParameter, ) from tests.e2e.test_driver import PySQLPytestTestCase -from tests.e2e.common.predicates import pysql_supports_arrow class ParamStyle(Enum): @@ -168,12 +167,8 @@ def _inline_roundtrip(self, params: dict, paramstyle: ParamStyle): This is a no-op but is included to make the test-code easier to read. """ target_column = self._get_inline_table_column(params.get("p")) - INSERT_QUERY = ( - f"INSERT INTO pysql_e2e_inline_param_test_table (`{target_column}`) VALUES (%(p)s)" - ) - SELECT_QUERY = ( - f"SELECT {target_column} `col` FROM pysql_e2e_inline_param_test_table LIMIT 1" - ) + INSERT_QUERY = f"INSERT INTO pysql_e2e_inline_param_test_table (`{target_column}`) VALUES (%(p)s)" + SELECT_QUERY = f"SELECT {target_column} `col` FROM pysql_e2e_inline_param_test_table LIMIT 1" DELETE_QUERY = "DELETE FROM pysql_e2e_inline_param_test_table" with self.connection(extra_params={"use_inline_params": True}) as conn: @@ -285,8 +280,6 @@ def test_primitive_single( (PrimitiveExtra.TINYINT, TinyIntParameter), ], ) - - @pytest.mark.skipif(not pysql_supports_arrow(),reason="Without pyarrow TIMESTAMP_NTZ datatype cannot be inferred",) def test_dbsqlparameter_single( self, primitive: Primitive, @@ -311,11 +304,15 @@ def test_use_inline_off_by_default_with_warning(self, use_inline_params, caplog) If a user explicitly sets use_inline_params, don't warn them about it. 
""" - extra_args = {"use_inline_params": use_inline_params} if use_inline_params else {} + extra_args = ( + {"use_inline_params": use_inline_params} if use_inline_params else {} + ) with self.connection(extra_params=extra_args) as conn: with conn.cursor() as cursor: - with self.patch_server_supports_native_params(supports_native_params=True): + with self.patch_server_supports_native_params( + supports_native_params=True + ): cursor.execute("SELECT %(p)s", parameters={"p": 1}) if use_inline_params is True: assert ( @@ -405,7 +402,9 @@ def test_inline_ordinals_can_break_sql(self): query = "SELECT 'samsonite', %s WHERE 'samsonite' LIKE '%sonite'" params = ["luggage"] with self.cursor(extra_params={"use_inline_params": True}) as cursor: - with pytest.raises(TypeError, match="not enough arguments for format string"): + with pytest.raises( + TypeError, match="not enough arguments for format string" + ): cursor.execute(query, parameters=params) def test_inline_named_dont_break_sql(self): diff --git a/databricks_sql_connector_core/tests/e2e/common/__init__.py b/tests/unit/__init__.py similarity index 100% rename from databricks_sql_connector_core/tests/e2e/common/__init__.py rename to tests/unit/__init__.py diff --git a/databricks_sql_connector_core/tests/unit/test_arrow_queue.py b/tests/unit/test_arrow_queue.py similarity index 56% rename from databricks_sql_connector_core/tests/unit/test_arrow_queue.py rename to tests/unit/test_arrow_queue.py index ac98e137..b3dff45f 100644 --- a/databricks_sql_connector_core/tests/unit/test_arrow_queue.py +++ b/tests/unit/test_arrow_queue.py @@ -1,17 +1,10 @@ import unittest -import pytest +import pyarrow as pa from databricks.sql.utils import ArrowQueue -try: - import pyarrow as pa -except ImportError: - pa = None -from tests.e2e.common.predicates import pysql_supports_arrow - -@pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") class ArrowQueueSuite(unittest.TestCase): @staticmethod def make_arrow_table(batch): @@ -21,13 +14,21 @@ def make_arrow_table(batch): return pa.Table.from_pydict(dict(zip(schema.names, cols)), schema=schema) def test_fetchmany_respects_n_rows(self): - arrow_table = self.make_arrow_table([[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11]]) + arrow_table = self.make_arrow_table( + [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11]] + ) aq = ArrowQueue(arrow_table, 3) - self.assertEqual(aq.next_n_rows(2), self.make_arrow_table([[0, 1, 2], [3, 4, 5]])) + self.assertEqual( + aq.next_n_rows(2), self.make_arrow_table([[0, 1, 2], [3, 4, 5]]) + ) self.assertEqual(aq.next_n_rows(2), self.make_arrow_table([[6, 7, 8]])) def test_fetch_remaining_rows_respects_n_rows(self): - arrow_table = self.make_arrow_table([[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11]]) + arrow_table = self.make_arrow_table( + [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11]] + ) aq = ArrowQueue(arrow_table, 3) self.assertEqual(aq.next_n_rows(1), self.make_arrow_table([[0, 1, 2]])) - self.assertEqual(aq.remaining_rows(), self.make_arrow_table([[3, 4, 5], [6, 7, 8]])) + self.assertEqual( + aq.remaining_rows(), self.make_arrow_table([[3, 4, 5], [6, 7, 8]]) + ) diff --git a/databricks_sql_connector_core/tests/unit/test_auth.py b/tests/unit/test_auth.py similarity index 95% rename from databricks_sql_connector_core/tests/unit/test_auth.py rename to tests/unit/test_auth.py index d6541525..d5b06bbf 100644 --- a/databricks_sql_connector_core/tests/unit/test_auth.py +++ b/tests/unit/test_auth.py @@ -9,7 +9,10 @@ ExternalAuthProvider, AuthType, ) -from 
databricks.sql.auth.auth import get_python_sql_connector_auth_provider, PYSQL_OAUTH_CLIENT_ID +from databricks.sql.auth.auth import ( + get_python_sql_connector_auth_provider, + PYSQL_OAUTH_CLIENT_ID, +) from databricks.sql.auth.oauth import OAuthManager from databricks.sql.auth.authenticators import DatabricksOAuthProvider from databricks.sql.auth.endpoint import ( @@ -177,12 +180,13 @@ def test_get_python_sql_connector_basic_auth(self): } with self.assertRaises(ValueError) as e: get_python_sql_connector_auth_provider("foo.cloud.databricks.com", **kwargs) - self.assertIn("Username/password authentication is no longer supported", str(e.exception)) + self.assertIn( + "Username/password authentication is no longer supported", str(e.exception) + ) @patch.object(DatabricksOAuthProvider, "_initial_get_token") def test_get_python_sql_connector_default_auth(self, mock__initial_get_token): hostname = "foo.cloud.databricks.com" auth_provider = get_python_sql_connector_auth_provider(hostname) self.assertTrue(type(auth_provider).__name__, "DatabricksOAuthProvider") - self.assertTrue(auth_provider._client_id,PYSQL_OAUTH_CLIENT_ID) - + self.assertTrue(auth_provider._client_id, PYSQL_OAUTH_CLIENT_ID) diff --git a/databricks_sql_connector_core/tests/unit/test_client.py b/tests/unit/test_client.py similarity index 84% rename from databricks_sql_connector_core/tests/unit/test_client.py rename to tests/unit/test_client.py index c86a9f7f..0ff660d5 100644 --- a/databricks_sql_connector_core/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -13,7 +13,7 @@ TExecuteStatementResp, TOperationHandle, THandleIdentifier, - TOperationType + TOperationType, ) from databricks.sql.thrift_backend import ThriftBackend @@ -26,8 +26,8 @@ from tests.unit.test_thrift_backend import ThriftBackendTestSuite from tests.unit.test_arrow_queue import ArrowQueueSuite -class ThriftBackendMockFactory: +class ThriftBackendMockFactory: @classmethod def new(cls): ThriftBackendMock = Mock(spec=ThriftBackend) @@ -68,10 +68,6 @@ def apply_property_to_mock(self, mock_obj, **kwargs): setattr(type(mock_obj), key, prop) - - - - class ClientTestSuite(unittest.TestCase): """ Unit tests for isolated client behaviour. 
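Note: the factory's apply_property_to_mock helper above relies on the standard
unittest.mock idiom for faking read-only attributes: a property (or
PropertyMock) must be attached to the mock's *type*, not the instance, and
because every Mock instance gets its own one-off subclass the patch stays
local to that single mock. A minimal, self-contained sketch of the idiom
(the attribute name mirrors the is_staging_operation property used later in
this suite; it is illustrative, not the connector's API):

    from unittest.mock import Mock, PropertyMock

    mock_response = Mock()
    # Properties live on classes; each Mock gets its own private subclass,
    # so patching type(mock_response) affects only this one mock object.
    type(mock_response).is_staging_operation = PropertyMock(return_value=True)

    assert mock_response.is_staging_operation is True  # plain attribute access
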
@@ -89,7 +85,7 @@ def test_close_uses_the_correct_session_id(self, mock_client_class): instance = mock_client_class.return_value mock_open_session_resp = MagicMock(spec=TOpenSessionResp)() - mock_open_session_resp.sessionHandle.sessionId = b'\x22' + mock_open_session_resp.sessionHandle.sessionId = b"\x22" instance.open_session.return_value = mock_open_session_resp connection = databricks.sql.connect(**self.DUMMY_CONNECTION_ARGS) @@ -97,7 +93,7 @@ def test_close_uses_the_correct_session_id(self, mock_client_class): # Check the close session request has an id of x22 close_session_id = instance.close_session.call_args[0][0].sessionId - self.assertEqual(close_session_id, b'\x22') + self.assertEqual(close_session_id, b"\x22") @patch("%s.client.ThriftBackend" % PACKAGE_NAME) def test_auth_args(self, mock_client_class): @@ -155,13 +151,19 @@ def test_useragent_header(self, mock_client_class): databricks.sql.connect(**self.DUMMY_CONNECTION_ARGS) http_headers = mock_client_class.call_args[0][3] - user_agent_header = ("User-Agent", "{}/{}".format(databricks.sql.USER_AGENT_NAME, - databricks.sql.__version__)) + user_agent_header = ( + "User-Agent", + "{}/{}".format(databricks.sql.USER_AGENT_NAME, databricks.sql.__version__), + ) self.assertIn(user_agent_header, http_headers) databricks.sql.connect(**self.DUMMY_CONNECTION_ARGS, _user_agent_entry="foobar") - user_agent_header_with_entry = ("User-Agent", "{}/{} ({})".format( - databricks.sql.USER_AGENT_NAME, databricks.sql.__version__, "foobar")) + user_agent_header_with_entry = ( + "User-Agent", + "{}/{} ({})".format( + databricks.sql.USER_AGENT_NAME, databricks.sql.__version__, "foobar" + ), + ) http_headers = mock_client_class.call_args[0][3] self.assertIn(user_agent_header_with_entry, http_headers) @@ -177,7 +179,9 @@ def test_closing_connection_closes_commands(self, mock_result_set_class): cursor.execute("SELECT 1;") connection.close() - self.assertTrue(mock_result_set_class.return_value.has_been_closed_server_side) + self.assertTrue( + mock_result_set_class.return_value.has_been_closed_server_side + ) mock_result_set_class.return_value.close.assert_called_once_with() @patch("%s.client.ThriftBackend" % PACKAGE_NAME) @@ -192,7 +196,9 @@ def test_cant_open_cursor_on_closed_connection(self, mock_client_class): @patch("%s.client.ThriftBackend" % PACKAGE_NAME) @patch("%s.client.Cursor" % PACKAGE_NAME) - def test_arraysize_buffer_size_passthrough(self, mock_cursor_class, mock_client_class): + def test_arraysize_buffer_size_passthrough( + self, mock_cursor_class, mock_client_class + ): connection = databricks.sql.connect(**self.DUMMY_CONNECTION_ARGS) connection.cursor(arraysize=999, buffer_size_bytes=1234) kwargs = mock_cursor_class.call_args[1] @@ -204,7 +210,10 @@ def test_closing_result_set_with_closed_connection_soft_closes_commands(self): mock_connection = Mock() mock_backend = Mock() result_set = client.ResultSet( - connection=mock_connection, thrift_backend=mock_backend, execute_response=Mock()) + connection=mock_connection, + thrift_backend=mock_backend, + execute_response=Mock(), + ) mock_connection.open = False result_set.close() @@ -218,20 +227,27 @@ def test_closing_result_set_hard_closes_commands(self): mock_connection = Mock() mock_thrift_backend = Mock() mock_connection.open = True - result_set = client.ResultSet(mock_connection, mock_results_response, mock_thrift_backend) + result_set = client.ResultSet( + mock_connection, mock_results_response, mock_thrift_backend + ) result_set.close() 
mock_thrift_backend.close_command.assert_called_once_with( - mock_results_response.command_handle) + mock_results_response.command_handle + ) @patch("%s.client.ResultSet" % PACKAGE_NAME) - def test_executing_multiple_commands_uses_the_most_recent_command(self, mock_result_set_class): + def test_executing_multiple_commands_uses_the_most_recent_command( + self, mock_result_set_class + ): mock_result_sets = [Mock(), Mock()] mock_result_set_class.side_effect = mock_result_sets - cursor = client.Cursor(connection=Mock(), thrift_backend=ThriftBackendMockFactory.new()) + cursor = client.Cursor( + connection=Mock(), thrift_backend=ThriftBackendMockFactory.new() + ) cursor.execute("SELECT 1;") cursor.execute("SELECT 1;") @@ -272,7 +288,7 @@ def test_context_manager_closes_connection(self, mock_client_class): instance = mock_client_class.return_value mock_open_session_resp = MagicMock(spec=TOpenSessionResp)() - mock_open_session_resp.sessionHandle.sessionId = b'\x22' + mock_open_session_resp.sessionHandle.sessionId = b"\x22" instance.open_session.return_value = mock_open_session_resp with databricks.sql.connect(**self.DUMMY_CONNECTION_ARGS) as connection: @@ -280,7 +296,7 @@ def test_context_manager_closes_connection(self, mock_client_class): # Check the close session request has an id of x22 close_session_id = instance.close_session.call_args[0][0].sessionId - self.assertEqual(close_session_id, b'\x22') + self.assertEqual(close_session_id, b"\x22") def dict_product(self, dicts): """ @@ -299,7 +315,9 @@ def test_get_schemas_parameters_passed_to_thrift_backend(self, mock_thrift_backe req_args_combinations = self.dict_product( dict( catalog_name=["NOT_SET", None, "catalog_pattern"], - schema_name=["NOT_SET", None, "schema_pattern"])) + schema_name=["NOT_SET", None, "schema_pattern"], + ) + ) for req_args in req_args_combinations: req_args = {k: v for k, v in req_args.items() if v != "NOT_SET"} @@ -320,7 +338,9 @@ def test_get_tables_parameters_passed_to_thrift_backend(self, mock_thrift_backen catalog_name=["NOT_SET", None, "catalog_pattern"], schema_name=["NOT_SET", None, "schema_pattern"], table_name=["NOT_SET", None, "table_pattern"], - table_types=["NOT_SET", [], ["type1", "type2"]])) + table_types=["NOT_SET", [], ["type1", "type2"]], + ) + ) for req_args in req_args_combinations: req_args = {k: v for k, v in req_args.items() if v != "NOT_SET"} @@ -341,7 +361,9 @@ def test_get_columns_parameters_passed_to_thrift_backend(self, mock_thrift_backe catalog_name=["NOT_SET", None, "catalog_pattern"], schema_name=["NOT_SET", None, "schema_pattern"], table_name=["NOT_SET", None, "table_pattern"], - column_name=["NOT_SET", None, "column_pattern"])) + column_name=["NOT_SET", None, "column_pattern"], + ) + ) for req_args in req_args_combinations: req_args = {k: v for k, v in req_args.items() if v != "NOT_SET"} @@ -365,7 +387,8 @@ def test_cancel_command_calls_the_backend(self): @patch("databricks.sql.client.logger") def test_cancel_command_will_issue_warning_for_cancel_with_no_executing_command( - self, logger_instance): + self, logger_instance + ): mock_thrift_backend = Mock() cursor = client.Cursor(Mock(), mock_thrift_backend) cursor.cancel() @@ -375,9 +398,13 @@ def test_cancel_command_will_issue_warning_for_cancel_with_no_executing_command( @patch("%s.client.ThriftBackend" % PACKAGE_NAME) def test_max_number_of_retries_passthrough(self, mock_client_class): - databricks.sql.connect(_retry_stop_after_attempts_count=54, **self.DUMMY_CONNECTION_ARGS) + databricks.sql.connect( + 
_retry_stop_after_attempts_count=54, **self.DUMMY_CONNECTION_ARGS + ) - self.assertEqual(mock_client_class.call_args[1]["_retry_stop_after_attempts_count"], 54) + self.assertEqual( + mock_client_class.call_args[1]["_retry_stop_after_attempts_count"], 54 + ) @patch("%s.client.ThriftBackend" % PACKAGE_NAME) def test_socket_timeout_passthrough(self, mock_client_class): @@ -386,27 +413,38 @@ def test_socket_timeout_passthrough(self, mock_client_class): def test_version_is_canonical(self): version = databricks.sql.__version__ - canonical_version_re = r'^([1-9][0-9]*!)?(0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))*((a|b|rc)' \ - r'(0|[1-9][0-9]*))?(\.post(0|[1-9][0-9]*))?(\.dev(0|[1-9][0-9]*))?$' + canonical_version_re = ( + r"^([1-9][0-9]*!)?(0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))*((a|b|rc)" + r"(0|[1-9][0-9]*))?(\.post(0|[1-9][0-9]*))?(\.dev(0|[1-9][0-9]*))?$" + ) self.assertIsNotNone(re.match(canonical_version_re, version)) @patch("%s.client.ThriftBackend" % PACKAGE_NAME) def test_configuration_passthrough(self, mock_client_class): mock_session_config = Mock() databricks.sql.connect( - session_configuration=mock_session_config, **self.DUMMY_CONNECTION_ARGS) + session_configuration=mock_session_config, **self.DUMMY_CONNECTION_ARGS + ) - self.assertEqual(mock_client_class.return_value.open_session.call_args[0][0], - mock_session_config) + self.assertEqual( + mock_client_class.return_value.open_session.call_args[0][0], + mock_session_config, + ) @patch("%s.client.ThriftBackend" % PACKAGE_NAME) def test_initial_namespace_passthrough(self, mock_client_class): mock_cat = Mock() mock_schem = Mock() - databricks.sql.connect(**self.DUMMY_CONNECTION_ARGS, catalog=mock_cat, schema=mock_schem) - self.assertEqual(mock_client_class.return_value.open_session.call_args[0][1], mock_cat) - self.assertEqual(mock_client_class.return_value.open_session.call_args[0][2], mock_schem) + databricks.sql.connect( + **self.DUMMY_CONNECTION_ARGS, catalog=mock_cat, schema=mock_schem + ) + self.assertEqual( + mock_client_class.return_value.open_session.call_args[0][1], mock_cat + ) + self.assertEqual( + mock_client_class.return_value.open_session.call_args[0][2], mock_schem + ) def test_execute_parameter_passthrough(self): mock_thrift_backend = ThriftBackendMockFactory.new() @@ -436,7 +474,8 @@ def test_execute_parameter_passthrough(self): @patch("%s.client.ThriftBackend" % PACKAGE_NAME) @patch("%s.client.ResultSet" % PACKAGE_NAME) def test_executemany_parameter_passhthrough_and_uses_last_result_set( - self, mock_result_set_class, mock_thrift_backend): + self, mock_result_set_class, mock_thrift_backend + ): # Create a new mock result set each time the class is instantiated mock_result_set_instances = [Mock(), Mock(), Mock()] mock_result_set_class.side_effect = mock_result_set_instances @@ -449,17 +488,22 @@ def test_executemany_parameter_passhthrough_and_uses_last_result_set( cursor.executemany("SELECT %(x)s", seq_of_parameters=params) self.assertEqual( - len(mock_thrift_backend.execute_command.call_args_list), len(expected_queries), - "Expected execute_command to be called the same number of times as params were passed") + len(mock_thrift_backend.execute_command.call_args_list), + len(expected_queries), + "Expected execute_command to be called the same number of times as params were passed", + ) - for expected_query, call_args in zip(expected_queries, - mock_thrift_backend.execute_command.call_args_list): + for expected_query, call_args in zip( + expected_queries, mock_thrift_backend.execute_command.call_args_list + ): 
self.assertEqual(call_args[1]["operation"], expected_query) self.assertEqual( - cursor.active_result_set, mock_result_set_instances[2], + cursor.active_result_set, + mock_result_set_instances[2], "Expected the active result set to be the result set corresponding to the" - "last operation") + "last operation", + ) @patch("%s.client.ThriftBackend" % PACKAGE_NAME) def test_commit_a_noop(self, mock_thrift_backend_class): @@ -495,7 +539,7 @@ def make_fake_row_slice(n_rows): mock_thrift_backend.fetch_results.return_value = (mock_aq, True) cursor = client.Cursor(Mock(), mock_thrift_backend) - cursor.execute('foo') + cursor.execute("foo") self.assertEqual(cursor.rownumber, 0) cursor.fetchmany_arrow(10) @@ -516,12 +560,14 @@ def test_disable_pandas_respected(self, mock_thrift_backend_class): mock_aq = Mock() mock_aq.remaining_rows.return_value = mock_table mock_thrift_backend.execute_command.return_value.arrow_queue = mock_aq - mock_thrift_backend.execute_command.return_value.has_been_closed_server_side = True + mock_thrift_backend.execute_command.return_value.has_been_closed_server_side = ( + True + ) mock_con = Mock() mock_con.disable_pandas = True cursor = client.Cursor(mock_con, mock_thrift_backend) - cursor.execute('foo') + cursor.execute("foo") cursor.fetchall() mock_table.itercolumns.assert_called_once_with() @@ -548,18 +594,21 @@ def test_column_name_api(self): self.assertEqual(row[1], expected[1]) self.assertEqual(row[2], expected[2]) - self.assertEqual(row.asDict(), { - "first_col": expected[0], - "second_col": expected[1], - "third_col": expected[2] - }) + self.assertEqual( + row.asDict(), + { + "first_col": expected[0], + "second_col": expected[1], + "third_col": expected[2], + }, + ) @patch("%s.client.ThriftBackend" % PACKAGE_NAME) def test_finalizer_closes_abandoned_connection(self, mock_client_class): instance = mock_client_class.return_value mock_open_session_resp = MagicMock(spec=TOpenSessionResp)() - mock_open_session_resp.sessionHandle.sessionId = b'\x22' + mock_open_session_resp.sessionHandle.sessionId = b"\x22" instance.open_session.return_value = mock_open_session_resp databricks.sql.connect(**self.DUMMY_CONNECTION_ARGS) @@ -569,14 +618,14 @@ def test_finalizer_closes_abandoned_connection(self, mock_client_class): # Check the close session request has an id of x22 close_session_id = instance.close_session.call_args[0][0].sessionId - self.assertEqual(close_session_id, b'\x22') + self.assertEqual(close_session_id, b"\x22") @patch("%s.client.ThriftBackend" % PACKAGE_NAME) def test_cursor_keeps_connection_alive(self, mock_client_class): instance = mock_client_class.return_value mock_open_session_resp = MagicMock(spec=TOpenSessionResp)() - mock_open_session_resp.sessionHandle.sessionId = b'\x22' + mock_open_session_resp.sessionHandle.sessionId = b"\x22" instance.open_session.return_value = mock_open_session_resp connection = databricks.sql.connect(**self.DUMMY_CONNECTION_ARGS) @@ -591,11 +640,14 @@ def test_cursor_keeps_connection_alive(self, mock_client_class): @patch("%s.utils.ExecuteResponse" % PACKAGE_NAME, autospec=True) @patch("%s.client.Cursor._handle_staging_operation" % PACKAGE_NAME) @patch("%s.client.ThriftBackend" % PACKAGE_NAME) - def test_staging_operation_response_is_handled(self, mock_client_class, mock_handle_staging_operation, mock_execute_response): + def test_staging_operation_response_is_handled( + self, mock_client_class, mock_handle_staging_operation, mock_execute_response + ): # If server sets ExecuteResponse.is_staging_operation True then 
_handle_staging_operation should be called - - ThriftBackendMockFactory.apply_property_to_mock(mock_execute_response, is_staging_operation=True) + ThriftBackendMockFactory.apply_property_to_mock( + mock_execute_response, is_staging_operation=True + ) mock_client_class.execute_command.return_value = mock_execute_response mock_client_class.return_value = mock_client_class @@ -608,7 +660,7 @@ def test_staging_operation_response_is_handled(self, mock_client_class, mock_han @patch("%s.client.ThriftBackend" % PACKAGE_NAME, ThriftBackendMockFactory.new()) def test_access_current_query_id(self): - operation_id = 'EE6A8778-21FC-438B-92D8-96AC51EE3821' + operation_id = "EE6A8778-21FC-438B-92D8-96AC51EE3821" connection = databricks.sql.connect(**self.DUMMY_CONNECTION_ARGS) cursor = connection.cursor() @@ -617,17 +669,23 @@ def test_access_current_query_id(self): cursor.active_op_handle = TOperationHandle( operationId=THandleIdentifier(guid=UUID(operation_id).bytes, secret=0x00), - operationType=TOperationType.EXECUTE_STATEMENT) + operationType=TOperationType.EXECUTE_STATEMENT, + ) self.assertEqual(cursor.query_id.upper(), operation_id.upper()) cursor.close() self.assertIsNone(cursor.query_id) -if __name__ == '__main__': +if __name__ == "__main__": suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__]) loader = unittest.TestLoader() - test_classes = [ClientTestSuite, FetchTests, ThriftBackendTestSuite, ArrowQueueSuite] + test_classes = [ + ClientTestSuite, + FetchTests, + ThriftBackendTestSuite, + ArrowQueueSuite, + ] suites_list = [] for test_class in test_classes: suite = loader.loadTestsFromTestCase(test_class) diff --git a/databricks_sql_connector_core/tests/unit/test_cloud_fetch_queue.py b/tests/unit/test_cloud_fetch_queue.py similarity index 82% rename from databricks_sql_connector_core/tests/unit/test_cloud_fetch_queue.py rename to tests/unit/test_cloud_fetch_queue.py index def6b8aa..01d8a79b 100644 --- a/databricks_sql_connector_core/tests/unit/test_cloud_fetch_queue.py +++ b/tests/unit/test_cloud_fetch_queue.py @@ -1,34 +1,31 @@ -import pytest +import pyarrow import unittest from unittest.mock import MagicMock, patch -from ssl import create_default_context from databricks.sql.thrift_api.TCLIService.ttypes import TSparkArrowResultLink import databricks.sql.utils as utils -from tests.e2e.common.predicates import pysql_supports_arrow +from databricks.sql.types import SSLOptions -try: - import pyarrow -except ImportError: - pyarrow = None -@pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") class CloudFetchQueueSuite(unittest.TestCase): - def create_result_link( - self, - file_link: str = "fileLink", - start_row_offset: int = 0, - row_count: int = 8000, - bytes_num: int = 20971520 + self, + file_link: str = "fileLink", + start_row_offset: int = 0, + row_count: int = 8000, + bytes_num: int = 20971520, ): - return TSparkArrowResultLink(file_link, None, start_row_offset, row_count, bytes_num) + return TSparkArrowResultLink( + file_link, None, start_row_offset, row_count, bytes_num + ) def create_result_links(self, num_files: int, start_row_offset: int = 0): result_links = [] for i in range(num_files): file_link = "fileLink_" + str(i) - result_link = self.create_result_link(file_link=file_link, start_row_offset=start_row_offset) + result_link = self.create_result_link( + file_link=file_link, start_row_offset=start_row_offset + ) result_links.append(result_link) start_row_offset += result_link.rowCount return result_links @@ -49,8 
+46,10 @@ def get_schema_bytes(): writer.close() return sink.getvalue().to_pybytes() - - @patch("databricks.sql.utils.CloudFetchQueue._create_next_table", return_value=[None, None]) + @patch( + "databricks.sql.utils.CloudFetchQueue._create_next_table", + return_value=[None, None], + ) def test_initializer_adds_links(self, mock_create_next_table): schema_bytes = MagicMock() result_links = self.create_result_links(10) @@ -58,7 +57,7 @@ def test_initializer_adds_links(self, mock_create_next_table): schema_bytes, result_links=result_links, max_download_threads=10, - ssl_context=create_default_context(), + ssl_options=SSLOptions(), ) assert len(queue.download_manager._pending_links) == 10 @@ -72,29 +71,36 @@ def test_initializer_no_links_to_add(self): schema_bytes, result_links=result_links, max_download_threads=10, - ssl_context=create_default_context(), + ssl_options=SSLOptions(), ) assert len(queue.download_manager._pending_links) == 0 assert len(queue.download_manager._download_tasks) == 0 assert queue.table is None - @patch("databricks.sql.cloudfetch.download_manager.ResultFileDownloadManager.get_next_downloaded_file", return_value=None) + @patch( + "databricks.sql.cloudfetch.download_manager.ResultFileDownloadManager.get_next_downloaded_file", + return_value=None, + ) def test_create_next_table_no_download(self, mock_get_next_downloaded_file): queue = utils.CloudFetchQueue( MagicMock(), result_links=[], max_download_threads=10, - ssl_context=create_default_context(), + ssl_options=SSLOptions(), ) assert queue._create_next_table() is None mock_get_next_downloaded_file.assert_called_with(0) @patch("databricks.sql.utils.create_arrow_table_from_arrow_file") - @patch("databricks.sql.cloudfetch.download_manager.ResultFileDownloadManager.get_next_downloaded_file", - return_value=MagicMock(file_bytes=b"1234567890", row_count=4)) - def test_initializer_create_next_table_success(self, mock_get_next_downloaded_file, mock_create_arrow_table): + @patch( + "databricks.sql.cloudfetch.download_manager.ResultFileDownloadManager.get_next_downloaded_file", + return_value=MagicMock(file_bytes=b"1234567890", row_count=4), + ) + def test_initializer_create_next_table_success( + self, mock_get_next_downloaded_file, mock_create_arrow_table + ): mock_create_arrow_table.return_value = self.make_arrow_table() schema_bytes, description = MagicMock(), MagicMock() queue = utils.CloudFetchQueue( @@ -102,7 +108,7 @@ def test_initializer_create_next_table_success(self, mock_get_next_downloaded_fi result_links=[], description=description, max_download_threads=10, - ssl_context=create_default_context(), + ssl_options=SSLOptions(), ) expected_result = self.make_arrow_table() @@ -127,7 +133,7 @@ def test_next_n_rows_0_rows(self, mock_create_next_table): result_links=[], description=description, max_download_threads=10, - ssl_context=create_default_context(), + ssl_options=SSLOptions(), ) assert queue.table == self.make_arrow_table() assert queue.table.num_rows == 4 @@ -147,7 +153,7 @@ def test_next_n_rows_partial_table(self, mock_create_next_table): result_links=[], description=description, max_download_threads=10, - ssl_context=create_default_context(), + ssl_options=SSLOptions(), ) assert queue.table == self.make_arrow_table() assert queue.table.num_rows == 4 @@ -167,7 +173,7 @@ def test_next_n_rows_more_than_one_table(self, mock_create_next_table): result_links=[], description=description, max_download_threads=10, - ssl_context=create_default_context(), + ssl_options=SSLOptions(), ) assert queue.table == 
self.make_arrow_table() assert queue.table.num_rows == 4 @@ -176,7 +182,12 @@ def test_next_n_rows_more_than_one_table(self, mock_create_next_table): result = queue.next_n_rows(7) assert result.num_rows == 7 assert queue.table_row_index == 3 - assert result == pyarrow.concat_tables([self.make_arrow_table(), self.make_arrow_table()])[:7] + assert ( + result + == pyarrow.concat_tables( + [self.make_arrow_table(), self.make_arrow_table()] + )[:7] + ) @patch("databricks.sql.utils.CloudFetchQueue._create_next_table") def test_next_n_rows_only_one_table_returned(self, mock_create_next_table): @@ -187,7 +198,7 @@ def test_next_n_rows_only_one_table_returned(self, mock_create_next_table): result_links=[], description=description, max_download_threads=10, - ssl_context=create_default_context(), + ssl_options=SSLOptions(), ) assert queue.table == self.make_arrow_table() assert queue.table.num_rows == 4 @@ -206,7 +217,7 @@ def test_next_n_rows_empty_table(self, mock_create_next_table): result_links=[], description=description, max_download_threads=10, - ssl_context=create_default_context(), + ssl_options=SSLOptions(), ) assert queue.table is None @@ -223,7 +234,7 @@ def test_remaining_rows_empty_table_fully_returned(self, mock_create_next_table) result_links=[], description=description, max_download_threads=10, - ssl_context=create_default_context(), + ssl_options=SSLOptions(), ) assert queue.table == self.make_arrow_table() assert queue.table.num_rows == 4 @@ -242,7 +253,7 @@ def test_remaining_rows_partial_table_fully_returned(self, mock_create_next_tabl result_links=[], description=description, max_download_threads=10, - ssl_context=create_default_context(), + ssl_options=SSLOptions(), ) assert queue.table == self.make_arrow_table() assert queue.table.num_rows == 4 @@ -261,7 +272,7 @@ def test_remaining_rows_one_table_fully_returned(self, mock_create_next_table): result_links=[], description=description, max_download_threads=10, - ssl_context=create_default_context(), + ssl_options=SSLOptions(), ) assert queue.table == self.make_arrow_table() assert queue.table.num_rows == 4 @@ -272,15 +283,21 @@ def test_remaining_rows_one_table_fully_returned(self, mock_create_next_table): assert result == self.make_arrow_table() @patch("databricks.sql.utils.CloudFetchQueue._create_next_table") - def test_remaining_rows_multiple_tables_fully_returned(self, mock_create_next_table): - mock_create_next_table.side_effect = [self.make_arrow_table(), self.make_arrow_table(), None] + def test_remaining_rows_multiple_tables_fully_returned( + self, mock_create_next_table + ): + mock_create_next_table.side_effect = [ + self.make_arrow_table(), + self.make_arrow_table(), + None, + ] schema_bytes, description = MagicMock(), MagicMock() queue = utils.CloudFetchQueue( schema_bytes, result_links=[], description=description, max_download_threads=10, - ssl_context=create_default_context(), + ssl_options=SSLOptions(), ) assert queue.table == self.make_arrow_table() assert queue.table.num_rows == 4 @@ -289,7 +306,12 @@ def test_remaining_rows_multiple_tables_fully_returned(self, mock_create_next_ta result = queue.remaining_rows() assert mock_create_next_table.call_count == 3 assert result.num_rows == 5 - assert result == pyarrow.concat_tables([self.make_arrow_table(), self.make_arrow_table()])[3:] + assert ( + result + == pyarrow.concat_tables( + [self.make_arrow_table(), self.make_arrow_table()] + )[3:] + ) @patch("databricks.sql.utils.CloudFetchQueue._create_next_table", return_value=None) def 
test_remaining_rows_empty_table(self, mock_create_next_table): @@ -300,7 +322,7 @@ def test_remaining_rows_empty_table(self, mock_create_next_table): result_links=[], description=description, max_download_threads=10, - ssl_context=create_default_context(), + ssl_options=SSLOptions(), ) assert queue.table is None diff --git a/tests/unit/test_column_queue.py b/tests/unit/test_column_queue.py new file mode 100644 index 00000000..234af88e --- /dev/null +++ b/tests/unit/test_column_queue.py @@ -0,0 +1,26 @@ +from databricks.sql.utils import ColumnQueue, ColumnTable + + +class TestColumnQueueSuite: + @staticmethod + def make_column_table(table): + n_cols = len(table) if table else 0 + return ColumnTable(table, [f"col_{i}" for i in range(n_cols)]) + + def test_fetchmany_respects_n_rows(self): + column_table = self.make_column_table( + [[0, 3, 6, 9], [1, 4, 7, 10], [2, 5, 8, 11]] + ) + column_queue = ColumnQueue(column_table) + + assert column_queue.next_n_rows(2) == column_table.slice(0, 2) + assert column_queue.next_n_rows(2) == column_table.slice(2, 2) + + def test_fetch_remaining_rows_respects_n_rows(self): + column_table = self.make_column_table( + [[0, 3, 6, 9], [1, 4, 7, 10], [2, 5, 8, 11]] + ) + column_queue = ColumnQueue(column_table) + + assert column_queue.next_n_rows(2) == column_table.slice(0, 2) + assert column_queue.remaining_rows() == column_table.slice(2, 2) diff --git a/databricks_sql_connector_core/tests/unit/test_download_manager.py b/tests/unit/test_download_manager.py similarity index 65% rename from databricks_sql_connector_core/tests/unit/test_download_manager.py rename to tests/unit/test_download_manager.py index f17049e8..64edbdeb 100644 --- a/databricks_sql_connector_core/tests/unit/test_download_manager.py +++ b/tests/unit/test_download_manager.py @@ -1,42 +1,44 @@ import unittest from unittest.mock import patch, MagicMock -import pytest - -from ssl import create_default_context import databricks.sql.cloudfetch.download_manager as download_manager +from databricks.sql.types import SSLOptions from databricks.sql.thrift_api.TCLIService.ttypes import TSparkArrowResultLink -from tests.e2e.common.predicates import pysql_supports_arrow -@pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") class DownloadManagerTests(unittest.TestCase): """ Unit tests for checking download manager logic. 
""" - def create_download_manager(self, links, max_download_threads=10, lz4_compressed=True): + def create_download_manager( + self, links, max_download_threads=10, lz4_compressed=True + ): return download_manager.ResultFileDownloadManager( links, max_download_threads, lz4_compressed, - ssl_context=create_default_context(), + ssl_options=SSLOptions(), ) def create_result_link( - self, - file_link: str = "fileLink", - start_row_offset: int = 0, - row_count: int = 8000, - bytes_num: int = 20971520 + self, + file_link: str = "fileLink", + start_row_offset: int = 0, + row_count: int = 8000, + bytes_num: int = 20971520, ): - return TSparkArrowResultLink(file_link, None, start_row_offset, row_count, bytes_num) + return TSparkArrowResultLink( + file_link, None, start_row_offset, row_count, bytes_num + ) def create_result_links(self, num_files: int, start_row_offset: int = 0): result_links = [] for i in range(num_files): file_link = "fileLink_" + str(i) - result_link = self.create_result_link(file_link=file_link, start_row_offset=start_row_offset) + result_link = self.create_result_link( + file_link=file_link, start_row_offset=start_row_offset + ) result_links.append(result_link) start_row_offset += result_link.rowCount return result_links @@ -45,7 +47,9 @@ def test_add_file_links_zero_row_count(self): links = [self.create_result_link(row_count=0, bytes_num=0)] manager = self.create_download_manager(links) - assert len(manager._pending_links) == 0 # the only link supplied contains no data, so should be skipped + assert ( + len(manager._pending_links) == 0 + ) # the only link supplied contains no data, so should be skipped assert len(manager._download_tasks) == 0 def test_add_file_links_success(self): @@ -59,7 +63,9 @@ def test_add_file_links_success(self): def test_schedule_downloads(self, mock_submit): max_download_threads = 4 links = self.create_result_links(num_files=10) - manager = self.create_download_manager(links, max_download_threads=max_download_threads) + manager = self.create_download_manager( + links, max_download_threads=max_download_threads + ) manager._schedule_downloads() assert mock_submit.call_count == max_download_threads diff --git a/databricks_sql_connector_core/tests/unit/test_downloader.py b/tests/unit/test_downloader.py similarity index 54% rename from databricks_sql_connector_core/tests/unit/test_downloader.py rename to tests/unit/test_downloader.py index b6e473b5..2a3b715b 100644 --- a/databricks_sql_connector_core/tests/unit/test_downloader.py +++ b/tests/unit/test_downloader.py @@ -2,10 +2,10 @@ from unittest.mock import Mock, patch, MagicMock import requests -from ssl import create_default_context import databricks.sql.cloudfetch.downloader as downloader from databricks.sql.exc import Error +from databricks.sql.types import SSLOptions def create_response(**kwargs) -> requests.Response: @@ -20,36 +20,40 @@ class DownloaderTests(unittest.TestCase): Unit tests for checking downloader logic. 
""" - @patch('time.time', return_value=1000) + @patch("time.time", return_value=1000) def test_run_link_expired(self, mock_time): settings = Mock() result_link = Mock() # Already expired result_link.expiryTime = 999 - d = downloader.ResultSetDownloadHandler(settings, result_link, ssl_context=create_default_context()) + d = downloader.ResultSetDownloadHandler( + settings, result_link, ssl_options=SSLOptions() + ) with self.assertRaises(Error) as context: d.run() - self.assertTrue('link has expired' in context.exception.message) + self.assertTrue("link has expired" in context.exception.message) mock_time.assert_called_once() - @patch('time.time', return_value=1000) + @patch("time.time", return_value=1000) def test_run_link_past_expiry_buffer(self, mock_time): settings = Mock(link_expiry_buffer_secs=5) result_link = Mock() # Within the expiry buffer time result_link.expiryTime = 1004 - d = downloader.ResultSetDownloadHandler(settings, result_link, ssl_context=create_default_context()) + d = downloader.ResultSetDownloadHandler( + settings, result_link, ssl_options=SSLOptions() + ) with self.assertRaises(Error) as context: d.run() - self.assertTrue('link has expired' in context.exception.message) + self.assertTrue("link has expired" in context.exception.message) mock_time.assert_called_once() - @patch('requests.Session', return_value=MagicMock(get=MagicMock(return_value=None))) - @patch('time.time', return_value=1000) + @patch("requests.Session", return_value=MagicMock(get=MagicMock(return_value=None))) + @patch("time.time", return_value=1000) def test_run_get_response_not_ok(self, mock_time, mock_session): mock_session.return_value.get.return_value = create_response(status_code=404) @@ -58,62 +62,81 @@ def test_run_get_response_not_ok(self, mock_time, mock_session): settings.use_proxy = False result_link = Mock(expiryTime=1001) - d = downloader.ResultSetDownloadHandler(settings, result_link, ssl_context=create_default_context()) + d = downloader.ResultSetDownloadHandler( + settings, result_link, ssl_options=SSLOptions() + ) with self.assertRaises(requests.exceptions.HTTPError) as context: d.run() - self.assertTrue('404' in str(context.exception)) + self.assertTrue("404" in str(context.exception)) - @patch('requests.Session', return_value=MagicMock(get=MagicMock(return_value=None))) - @patch('time.time', return_value=1000) + @patch("requests.Session", return_value=MagicMock(get=MagicMock(return_value=None))) + @patch("time.time", return_value=1000) def test_run_uncompressed_successful(self, mock_time, mock_session): file_bytes = b"1234567890" * 10 - mock_session.return_value.get.return_value = create_response(status_code=200, _content=file_bytes) + mock_session.return_value.get.return_value = create_response( + status_code=200, _content=file_bytes + ) settings = Mock(link_expiry_buffer_secs=0, download_timeout=0, use_proxy=False) settings.is_lz4_compressed = False result_link = Mock(bytesNum=100, expiryTime=1001) - d = downloader.ResultSetDownloadHandler(settings, result_link, ssl_context=create_default_context()) + d = downloader.ResultSetDownloadHandler( + settings, result_link, ssl_options=SSLOptions() + ) file = d.run() assert file.file_bytes == b"1234567890" * 10 - @patch('requests.Session', return_value=MagicMock(get=MagicMock(return_value=MagicMock(ok=True)))) - @patch('time.time', return_value=1000) + @patch( + "requests.Session", + return_value=MagicMock(get=MagicMock(return_value=MagicMock(ok=True))), + ) + @patch("time.time", return_value=1000) def 
test_run_compressed_successful(self, mock_time, mock_session): file_bytes = b"1234567890" * 10 compressed_bytes = b'\x04"M\x18h@d\x00\x00\x00\x00\x00\x00\x00#\x14\x00\x00\x00\xaf1234567890\n\x00BP67890\x00\x00\x00\x00' - mock_session.return_value.get.return_value = create_response(status_code=200, _content=compressed_bytes) + mock_session.return_value.get.return_value = create_response( + status_code=200, _content=compressed_bytes + ) settings = Mock(link_expiry_buffer_secs=0, download_timeout=0, use_proxy=False) settings.is_lz4_compressed = True result_link = Mock(bytesNum=100, expiryTime=1001) - d = downloader.ResultSetDownloadHandler(settings, result_link, ssl_context=create_default_context()) + d = downloader.ResultSetDownloadHandler( + settings, result_link, ssl_options=SSLOptions() + ) file = d.run() assert file.file_bytes == b"1234567890" * 10 - @patch('requests.Session.get', side_effect=ConnectionError('foo')) - @patch('time.time', return_value=1000) + @patch("requests.Session.get", side_effect=ConnectionError("foo")) + @patch("time.time", return_value=1000) def test_download_connection_error(self, mock_time, mock_session): - settings = Mock(link_expiry_buffer_secs=0, use_proxy=False, is_lz4_compressed=True) + settings = Mock( + link_expiry_buffer_secs=0, use_proxy=False, is_lz4_compressed=True + ) result_link = Mock(bytesNum=100, expiryTime=1001) - mock_session.return_value.get.return_value.content = \ - b'\x04"M\x18h@d\x00\x00\x00\x00\x00\x00\x00#\x14\x00\x00\x00\xaf1234567890\n\x00BP67890\x00\x00\x00\x00' + mock_session.return_value.get.return_value.content = b'\x04"M\x18h@d\x00\x00\x00\x00\x00\x00\x00#\x14\x00\x00\x00\xaf1234567890\n\x00BP67890\x00\x00\x00\x00' - d = downloader.ResultSetDownloadHandler(settings, result_link, ssl_context=create_default_context()) + d = downloader.ResultSetDownloadHandler( + settings, result_link, ssl_options=SSLOptions() + ) with self.assertRaises(ConnectionError): d.run() - @patch('requests.Session.get', side_effect=TimeoutError('foo')) - @patch('time.time', return_value=1000) + @patch("requests.Session.get", side_effect=TimeoutError("foo")) + @patch("time.time", return_value=1000) def test_download_timeout(self, mock_time, mock_session): - settings = Mock(link_expiry_buffer_secs=0, use_proxy=False, is_lz4_compressed=True) + settings = Mock( + link_expiry_buffer_secs=0, use_proxy=False, is_lz4_compressed=True + ) result_link = Mock(bytesNum=100, expiryTime=1001) - mock_session.return_value.get.return_value.content = \ - b'\x04"M\x18h@d\x00\x00\x00\x00\x00\x00\x00#\x14\x00\x00\x00\xaf1234567890\n\x00BP67890\x00\x00\x00\x00' + mock_session.return_value.get.return_value.content = b'\x04"M\x18h@d\x00\x00\x00\x00\x00\x00\x00#\x14\x00\x00\x00\xaf1234567890\n\x00BP67890\x00\x00\x00\x00' - d = downloader.ResultSetDownloadHandler(settings, result_link, ssl_context=create_default_context()) + d = downloader.ResultSetDownloadHandler( + settings, result_link, ssl_options=SSLOptions() + ) with self.assertRaises(TimeoutError): d.run() diff --git a/databricks_sql_connector_core/tests/unit/test_endpoint.py b/tests/unit/test_endpoint.py similarity index 100% rename from databricks_sql_connector_core/tests/unit/test_endpoint.py rename to tests/unit/test_endpoint.py diff --git a/databricks_sql_connector_core/tests/unit/test_fetches.py b/tests/unit/test_fetches.py similarity index 81% rename from databricks_sql_connector_core/tests/unit/test_fetches.py rename to tests/unit/test_fetches.py index c1aeadca..e9a58acd 100644 --- 
a/databricks_sql_connector_core/tests/unit/test_fetches.py +++ b/tests/unit/test_fetches.py @@ -1,17 +1,12 @@ import unittest from unittest.mock import Mock -import pytest + +import pyarrow as pa import databricks.sql.client as client from databricks.sql.utils import ExecuteResponse, ArrowQueue -from tests.e2e.common.predicates import pysql_supports_arrow -try: - import pyarrow as pa -except ImportError: - pa = None -@pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") class FetchTests(unittest.TestCase): """ Unit tests for checking the fetch logic. @@ -22,7 +17,9 @@ def make_arrow_table(batch): n_cols = len(batch[0]) if batch else 0 schema = pa.schema({"col%s" % i: pa.uint32() for i in range(n_cols)}) cols = [[batch[row][col] for row in range(len(batch))] for col in range(n_cols)] - return schema, pa.Table.from_pydict(dict(zip(schema.names, cols)), schema=schema) + return schema, pa.Table.from_pydict( + dict(zip(schema.names, cols)), schema=schema + ) @staticmethod def make_arrow_queue(batch): @@ -47,18 +44,29 @@ def make_dummy_result_set_from_initial_results(initial_results): command_handle=None, arrow_queue=arrow_queue, arrow_schema_bytes=schema.serialize().to_pybytes(), - is_staging_operation=False)) + is_staging_operation=False, + ), + ) num_cols = len(initial_results[0]) if initial_results else 0 - rs.description = [(f'col{col_id}', 'integer', None, None, None, None, None) - for col_id in range(num_cols)] + rs.description = [ + (f"col{col_id}", "integer", None, None, None, None, None) + for col_id in range(num_cols) + ] return rs @staticmethod def make_dummy_result_set_from_batch_list(batch_list): batch_index = 0 - def fetch_results(op_handle, max_rows, max_bytes, expected_row_start_offset, lz4_compressed, - arrow_schema_bytes, description): + def fetch_results( + op_handle, + max_rows, + max_bytes, + expected_row_start_offset, + lz4_compressed, + arrow_schema_bytes, + description, + ): nonlocal batch_index results = FetchTests.make_arrow_queue(batch_list[batch_index]) batch_index += 1 @@ -76,13 +84,17 @@ def fetch_results(op_handle, max_rows, max_bytes, expected_row_start_offset, lz4 status=None, has_been_closed_server_side=False, has_more_rows=True, - description=[(f'col{col_id}', 'integer', None, None, None, None, None) - for col_id in range(num_cols)], + description=[ + (f"col{col_id}", "integer", None, None, None, None, None) + for col_id in range(num_cols) + ], lz4_compressed=Mock(), command_handle=None, arrow_queue=None, arrow_schema_bytes=None, - is_staging_operation=False)) + is_staging_operation=False, + ), + ) return rs def assertEqualRowValues(self, actual, expected): @@ -92,30 +104,44 @@ def assertEqualRowValues(self, actual, expected): def test_fetchmany_with_initial_results(self): # Fetch all in one go - initial_results_1 = [[1], [2], [3]] # This is a list of rows, each row with 1 col - dummy_result_set = self.make_dummy_result_set_from_initial_results(initial_results_1) + initial_results_1 = [ + [1], + [2], + [3], + ] # This is a list of rows, each row with 1 col + dummy_result_set = self.make_dummy_result_set_from_initial_results( + initial_results_1 + ) self.assertEqualRowValues(dummy_result_set.fetchmany(3), [[1], [2], [3]]) # Fetch in small amounts initial_results_2 = [[1], [2], [3], [4]] - dummy_result_set = self.make_dummy_result_set_from_initial_results(initial_results_2) + dummy_result_set = self.make_dummy_result_set_from_initial_results( + initial_results_2 + ) 
self.assertEqualRowValues(dummy_result_set.fetchmany(1), [[1]]) self.assertEqualRowValues(dummy_result_set.fetchmany(2), [[2], [3]]) self.assertEqualRowValues(dummy_result_set.fetchmany(1), [[4]]) # Fetch too many initial_results_3 = [[2], [3]] - dummy_result_set = self.make_dummy_result_set_from_initial_results(initial_results_3) + dummy_result_set = self.make_dummy_result_set_from_initial_results( + initial_results_3 + ) self.assertEqualRowValues(dummy_result_set.fetchmany(5), [[2], [3]]) # Empty results initial_results_4 = [[]] - dummy_result_set = self.make_dummy_result_set_from_initial_results(initial_results_4) + dummy_result_set = self.make_dummy_result_set_from_initial_results( + initial_results_4 + ) self.assertEqualRowValues(dummy_result_set.fetchmany(0), []) def test_fetch_many_without_initial_results(self): # Fetch all in one go; single batch - batch_list_1 = [[[1], [2], [3]]] # This is a list of one batch of rows, each row with 1 col + batch_list_1 = [ + [[1], [2], [3]] + ] # This is a list of one batch of rows, each row with 1 col dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_1) self.assertEqualRowValues(dummy_result_set.fetchmany(3), [[1], [2], [3]]) @@ -145,7 +171,9 @@ def test_fetch_many_without_initial_results(self): # Fetch too many; multiple batches batch_list_6 = [[[1]], [[2], [3], [4]], [[5], [6]]] dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_6) - self.assertEqualRowValues(dummy_result_set.fetchmany(100), [[1], [2], [3], [4], [5], [6]]) + self.assertEqualRowValues( + dummy_result_set.fetchmany(100), [[1], [2], [3], [4], [5], [6]] + ) # Fetch 0; 1 empty batch batch_list_7 = [[]] @@ -159,19 +187,25 @@ def test_fetch_many_without_initial_results(self): def test_fetchall_with_initial_results(self): initial_results_1 = [[1], [2], [3]] - dummy_result_set = self.make_dummy_result_set_from_initial_results(initial_results_1) + dummy_result_set = self.make_dummy_result_set_from_initial_results( + initial_results_1 + ) self.assertEqualRowValues(dummy_result_set.fetchall(), [[1], [2], [3]]) def test_fetchall_without_initial_results(self): # Fetch all, single batch - batch_list_1 = [[[1], [2], [3]]] # This is a list of one batch of rows, each row with 1 col + batch_list_1 = [ + [[1], [2], [3]] + ] # This is a list of one batch of rows, each row with 1 col dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_1) self.assertEqualRowValues(dummy_result_set.fetchall(), [[1], [2], [3]]) # Fetch all, multiple batches batch_list_2 = [[[1], [2]], [[3]], [[4], [5], [6]]] dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_2) - self.assertEqualRowValues(dummy_result_set.fetchall(), [[1], [2], [3], [4], [5], [6]]) + self.assertEqualRowValues( + dummy_result_set.fetchall(), [[1], [2], [3], [4], [5], [6]] + ) batch_list_3 = [[]] dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_3) @@ -179,12 +213,16 @@ def test_fetchall_without_initial_results(self): def test_fetchmany_fetchall_with_initial_results(self): initial_results_1 = [[1], [2], [3]] - dummy_result_set = self.make_dummy_result_set_from_initial_results(initial_results_1) + dummy_result_set = self.make_dummy_result_set_from_initial_results( + initial_results_1 + ) self.assertEqualRowValues(dummy_result_set.fetchmany(2), [[1], [2]]) self.assertEqualRowValues(dummy_result_set.fetchall(), [[3]]) def test_fetchmany_fetchall_without_initial_results(self): - batch_list_1 = [[[1], [2], [3]]] # This is a list of one batch of rows, 
each row with 1 col + batch_list_1 = [ + [[1], [2], [3]] + ] # This is a list of one batch of rows, each row with 1 col dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_1) self.assertEqualRowValues(dummy_result_set.fetchmany(2), [[1], [2]]) self.assertEqualRowValues(dummy_result_set.fetchall(), [[3]]) @@ -196,7 +234,9 @@ def test_fetchmany_fetchall_without_initial_results(self): def test_fetchone_with_initial_results(self): initial_results_1 = [[1], [2], [3]] - dummy_result_set = self.make_dummy_result_set_from_initial_results(initial_results_1) + dummy_result_set = self.make_dummy_result_set_from_initial_results( + initial_results_1 + ) self.assertSequenceEqual(dummy_result_set.fetchone(), [1]) self.assertSequenceEqual(dummy_result_set.fetchone(), [2]) self.assertSequenceEqual(dummy_result_set.fetchone(), [3]) @@ -215,5 +255,5 @@ def test_fetchone_without_initial_results(self): self.assertEqual(dummy_result_set.fetchone(), None) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/databricks_sql_connector_core/tests/unit/test_fetches_bench.py b/tests/unit/test_fetches_bench.py similarity index 76% rename from databricks_sql_connector_core/tests/unit/test_fetches_bench.py rename to tests/unit/test_fetches_bench.py index bba18247..9382c3b3 100644 --- a/databricks_sql_connector_core/tests/unit/test_fetches_bench.py +++ b/tests/unit/test_fetches_bench.py @@ -1,20 +1,15 @@ import unittest from unittest.mock import Mock +import pyarrow as pa import uuid import time import pytest import databricks.sql.client as client from databricks.sql.utils import ExecuteResponse, ArrowQueue -from tests.e2e.common.predicates import pysql_supports_arrow -try: - import pyarrow as pa -except ImportError: - pa = None -@pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") class FetchBenchmarkTests(unittest.TestCase): """ Micro benchmark test for Arrow result handling. 
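Note: the benchmark below pushes a large Arrow table through the same fetch
path the tests above assert on. The fetchmany semantics being exercised,
drain rows in order and clamp at the end of the data, can be sketched with
plain pyarrow slicing (an illustrative stand-in, not the connector's
ArrowQueue implementation; the free function is named after
ArrowQueue.next_n_rows for readability, while the real queue tracks its own
cursor internally):

    import pyarrow as pa

    table = pa.Table.from_pydict({"col0": list(range(6))})

    def next_n_rows(tbl: pa.Table, start: int, n: int):
        # Zero-copy slice; pyarrow clamps the length at the end of the table.
        chunk = tbl.slice(start, n)
        return chunk, start + chunk.num_rows

    chunk, pos = next_n_rows(table, 0, 4)
    assert chunk.num_rows == 4 and pos == 4
    chunk, pos = next_n_rows(table, pos, 4)  # only two rows remain
    assert chunk.num_rows == 2 and pos == 6
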
@@ -40,12 +35,18 @@ def make_dummy_result_set_from_initial_results(arrow_table): description=Mock(), command_handle=None, arrow_queue=arrow_queue, - arrow_schema=arrow_table.schema)) - rs.description = [(f'col{col_id}', 'string', None, None, None, None, None) - for col_id in range(arrow_table.num_columns)] + arrow_schema=arrow_table.schema, + ), + ) + rs.description = [ + (f"col{col_id}", "string", None, None, None, None, None) + for col_id in range(arrow_table.num_columns) + ] return rs - @pytest.mark.skip(reason="Test has not been updated for latest connector API (June 2022)") + @pytest.mark.skip( + reason="Test has not been updated for latest connector API (June 2022)" + ) def test_benchmark_fetchall(self): print("preparing dummy arrow table") arrow_table = FetchBenchmarkTests.make_arrow_table(10, 25000) @@ -55,7 +56,9 @@ def test_benchmark_fetchall(self): start_time = time.time() count = 0 while time.time() < start_time + benchmark_seconds: - dummy_result_set = self.make_dummy_result_set_from_initial_results(arrow_table) + dummy_result_set = self.make_dummy_result_set_from_initial_results( + arrow_table + ) res = dummy_result_set.fetchall() for _ in res: pass @@ -64,5 +67,5 @@ def test_benchmark_fetchall(self): print(f"Executed query {count} times, in {time.time() - start_time} seconds") -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/databricks_sql_connector_core/tests/unit/test_init_file.py b/tests/unit/test_init_file.py similarity index 100% rename from databricks_sql_connector_core/tests/unit/test_init_file.py rename to tests/unit/test_init_file.py diff --git a/databricks_sql_connector_core/tests/unit/test_oauth_persistence.py b/tests/unit/test_oauth_persistence.py similarity index 82% rename from databricks_sql_connector_core/tests/unit/test_oauth_persistence.py rename to tests/unit/test_oauth_persistence.py index 28b3cab3..a8ceb14e 100644 --- a/databricks_sql_connector_core/tests/unit/test_oauth_persistence.py +++ b/tests/unit/test_oauth_persistence.py @@ -1,16 +1,17 @@ - import unittest -from databricks.sql.experimental.oauth_persistence import DevOnlyFilePersistence, OAuthToken +from databricks.sql.experimental.oauth_persistence import ( + DevOnlyFilePersistence, + OAuthToken, +) import tempfile import os class OAuthPersistenceTests(unittest.TestCase): - def test_DevOnlyFilePersistence_read_my_write(self): with tempfile.TemporaryDirectory() as tempdir: - test_json_file_path = os.path.join(tempdir, 'test.json') + test_json_file_path = os.path.join(tempdir, "test.json") persistence_manager = DevOnlyFilePersistence(test_json_file_path) access_token = "abc#$%%^&^*&*()()_=-/" refresh_token = "#$%%^^&**()+)_gter243]xyz" @@ -23,7 +24,7 @@ def test_DevOnlyFilePersistence_read_my_write(self): def test_DevOnlyFilePersistence_file_does_not_exist(self): with tempfile.TemporaryDirectory() as tempdir: - test_json_file_path = os.path.join(tempdir, 'test.json') + test_json_file_path = os.path.join(tempdir, "test.json") persistence_manager = DevOnlyFilePersistence(test_json_file_path) new_token = persistence_manager.read("https://randomserver") diff --git a/databricks_sql_connector_core/tests/unit/test_param_escaper.py b/tests/unit/test_param_escaper.py similarity index 92% rename from databricks_sql_connector_core/tests/unit/test_param_escaper.py rename to tests/unit/test_param_escaper.py index 472a0843..925fcea5 100644 --- a/databricks_sql_connector_core/tests/unit/test_param_escaper.py +++ b/tests/unit/test_param_escaper.py @@ -3,7 +3,12 @@ from typing 
import Any, Dict from databricks.sql.parameters.native import dbsql_parameter_from_primitive -from databricks.sql.utils import ParamEscaper, inject_parameters, transform_paramstyle, ParameterStructure +from databricks.sql.utils import ( + ParamEscaper, + inject_parameters, + transform_paramstyle, + ParameterStructure, +) pe = ParamEscaper() @@ -200,26 +205,31 @@ class TestInlineToNativeTransformer(object): "query with like wildcard", 'select * from table where field like "%"', {}, - 'select * from table where field like "%"' + 'select * from table where field like "%"', ), ( "query with named param and like wildcard", 'select :param from table where field like "%"', {"param": None}, - 'select :param from table where field like "%"' + 'select :param from table where field like "%"', ), ( "query with doubled wildcards", - 'select 1 where '' like "%%"', + "select 1 where " ' like "%%"', {"param": None}, - 'select 1 where '' like "%%"', - ) + "select 1 where " ' like "%%"', + ), ), ) def test_transformer( self, label: str, query: str, params: Dict[str, Any], expected: str ): - _params = [dbsql_parameter_from_primitive(value=value, name=name) for name, value in params.items()] - output = transform_paramstyle(query, _params, param_structure=ParameterStructure.NAMED) + _params = [ + dbsql_parameter_from_primitive(value=value, name=name) + for name, value in params.items() + ] + output = transform_paramstyle( + query, _params, param_structure=ParameterStructure.NAMED + ) assert output == expected diff --git a/databricks_sql_connector_core/tests/unit/test_parameters.py b/tests/unit/test_parameters.py similarity index 100% rename from databricks_sql_connector_core/tests/unit/test_parameters.py rename to tests/unit/test_parameters.py diff --git a/databricks_sql_connector_core/tests/unit/test_retry.py b/tests/unit/test_retry.py similarity index 94% rename from databricks_sql_connector_core/tests/unit/test_retry.py rename to tests/unit/test_retry.py index 798bac2e..2108af4f 100644 --- a/databricks_sql_connector_core/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -8,7 +8,6 @@ class TestRetry: - @pytest.fixture() def retry_policy(self) -> DatabricksRetryPolicy: return DatabricksRetryPolicy( @@ -41,7 +40,9 @@ def test_sleep__retry_after_is_binding(self, t_mock, retry_policy, error_history t_mock.assert_called_with(3) @patch("time.sleep") - def test_sleep__retry_after_present_but_not_binding(self, t_mock, retry_policy, error_history): + def test_sleep__retry_after_present_but_not_binding( + self, t_mock, retry_policy, error_history + ): retry_policy._retry_start_time = time.time() retry_policy.history = [error_history, error_history] retry_policy.sleep(HTTPResponse(status=503, headers={"Retry-After": "1"})) diff --git a/databricks_sql_connector_core/tests/unit/test_thrift_backend.py b/tests/unit/test_thrift_backend.py similarity index 73% rename from databricks_sql_connector_core/tests/unit/test_thrift_backend.py rename to tests/unit/test_thrift_backend.py index 9b53a17e..293467af 100644 --- a/databricks_sql_connector_core/tests/unit/test_thrift_backend.py +++ b/tests/unit/test_thrift_backend.py @@ -2,22 +2,20 @@ from decimal import Decimal import itertools import unittest -import pytest from unittest.mock import patch, MagicMock, Mock from ssl import CERT_NONE, CERT_REQUIRED +from urllib3 import HTTPSConnectionPool + +import pyarrow import databricks.sql from databricks.sql import utils +from databricks.sql.types import SSLOptions from databricks.sql.thrift_api.TCLIService import ttypes from 
databricks.sql import * from databricks.sql.auth.authenticators import AuthProvider from databricks.sql.thrift_backend import ThriftBackend -from tests.e2e.common.predicates import pysql_supports_arrow -try: - import pyarrow -except ImportError: - pyarrow = None def retry_policy_factory(): return { # (type, default, min, max) @@ -28,7 +26,7 @@ def retry_policy_factory(): "_retry_delay_default": (float, 5, 1, 60), } -@pytest.mark.skipif(not pysql_supports_arrow(), reason="Skipping because pyarrow is not installed") + class ThriftBackendTestSuite(unittest.TestCase): okay_status = ttypes.TStatus(statusCode=ttypes.TStatusCode.SUCCESS_STATUS) @@ -71,7 +69,14 @@ def test_make_request_checks_thrift_status_code(self): mock_method = Mock() mock_method.__name__ = "method name" mock_method.return_value = mock_response - thrift_backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + thrift_backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) with self.assertRaises(DatabaseError): thrift_backend.make_request(mock_method, Mock()) @@ -81,7 +86,14 @@ def _make_type_desc(self, type): ) def _make_fake_thrift_backend(self): - thrift_backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + thrift_backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) thrift_backend._hive_schema_to_arrow_schema = Mock() thrift_backend._hive_schema_to_description = Mock() thrift_backend._create_arrow_table = MagicMock() @@ -91,13 +103,16 @@ def _make_fake_thrift_backend(self): def test_hive_schema_to_arrow_schema_preserves_column_names(self): columns = [ ttypes.TColumnDesc( - columnName="column 1", typeDesc=self._make_type_desc(ttypes.TTypeId.INT_TYPE) + columnName="column 1", + typeDesc=self._make_type_desc(ttypes.TTypeId.INT_TYPE), ), ttypes.TColumnDesc( - columnName="column 2", typeDesc=self._make_type_desc(ttypes.TTypeId.INT_TYPE) + columnName="column 2", + typeDesc=self._make_type_desc(ttypes.TTypeId.INT_TYPE), ), ttypes.TColumnDesc( - columnName="column 2", typeDesc=self._make_type_desc(ttypes.TTypeId.INT_TYPE) + columnName="column 2", + typeDesc=self._make_type_desc(ttypes.TTypeId.INT_TYPE), ), ttypes.TColumnDesc( columnName="", typeDesc=self._make_type_desc(ttypes.TTypeId.INT_TYPE) @@ -138,7 +153,9 @@ def test_bad_protocol_versions_are_rejected(self, tcli_service_client_cass): thrift_backend = self._make_fake_thrift_backend() thrift_backend.open_session({}, None, None) - self.assertIn("expected server to use a protocol version", str(cm.exception)) + self.assertIn( + "expected server to use a protocol version", str(cm.exception) + ) @patch("databricks.sql.thrift_backend.TCLIService.Client", autospec=True) def test_okay_protocol_versions_succeed(self, tcli_service_client_cass): @@ -159,8 +176,17 @@ def test_okay_protocol_versions_succeed(self, tcli_service_client_cass): @patch("databricks.sql.auth.thrift_http_client.THttpClient") def test_headers_are_set(self, t_http_client_class): - ThriftBackend("foo", 123, "bar", [("header", "value")], auth_provider=AuthProvider()) - t_http_client_class.return_value.setCustomHeaders.assert_called_with({"header": "value"}) + ThriftBackend( + "foo", + 123, + "bar", + [("header", "value")], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) + t_http_client_class.return_value.setCustomHeaders.assert_called_with( + {"header": "value"} + ) def test_proxy_headers_are_set(self): @@ -176,109 
+202,258 @@ def test_proxy_headers_are_set(self): assert False assert isinstance(result, type(dict())) - assert isinstance(result.get('proxy-authorization'), type(str())) + assert isinstance(result.get("proxy-authorization"), type(str())) @patch("databricks.sql.auth.thrift_http_client.THttpClient") - @patch("databricks.sql.thrift_backend.create_default_context") - def test_tls_cert_args_are_propagated(self, mock_create_default_context, t_http_client_class): + @patch("databricks.sql.types.create_default_context") + def test_tls_cert_args_are_propagated( + self, mock_create_default_context, t_http_client_class + ): mock_cert_key_file = Mock() mock_cert_key_password = Mock() mock_trusted_ca_file = Mock() mock_cert_file = Mock() + mock_ssl_options = SSLOptions( + tls_client_cert_file=mock_cert_file, + tls_client_cert_key_file=mock_cert_key_file, + tls_client_cert_key_password=mock_cert_key_password, + tls_trusted_ca_file=mock_trusted_ca_file, + ) + mock_ssl_context = mock_ssl_options.create_ssl_context() + mock_create_default_context.assert_called_once_with(cafile=mock_trusted_ca_file) + ThriftBackend( "foo", 123, "bar", [], auth_provider=AuthProvider(), - _tls_client_cert_file=mock_cert_file, - _tls_client_cert_key_file=mock_cert_key_file, - _tls_client_cert_key_password=mock_cert_key_password, - _tls_trusted_ca_file=mock_trusted_ca_file, + ssl_options=mock_ssl_options, ) - mock_create_default_context.assert_called_once_with(cafile=mock_trusted_ca_file) - mock_ssl_context = mock_create_default_context.return_value mock_ssl_context.load_cert_chain.assert_called_once_with( - certfile=mock_cert_file, keyfile=mock_cert_key_file, password=mock_cert_key_password + certfile=mock_cert_file, + keyfile=mock_cert_key_file, + password=mock_cert_key_password, ) self.assertTrue(mock_ssl_context.check_hostname) self.assertEqual(mock_ssl_context.verify_mode, CERT_REQUIRED) - self.assertEqual(t_http_client_class.call_args[1]["ssl_context"], mock_ssl_context) + self.assertEqual( + t_http_client_class.call_args[1]["ssl_options"], mock_ssl_options + ) + + @patch("databricks.sql.types.create_default_context") + def test_tls_cert_args_are_used_by_http_client(self, mock_create_default_context): + from databricks.sql.auth.thrift_http_client import THttpClient + + mock_cert_key_file = Mock() + mock_cert_key_password = Mock() + mock_trusted_ca_file = Mock() + mock_cert_file = Mock() + + mock_ssl_options = SSLOptions( + tls_verify=True, + tls_client_cert_file=mock_cert_file, + tls_client_cert_key_file=mock_cert_key_file, + tls_client_cert_key_password=mock_cert_key_password, + tls_trusted_ca_file=mock_trusted_ca_file, + ) + + http_client = THttpClient( + auth_provider=None, + uri_or_host="https://example.com", + ssl_options=mock_ssl_options, + ) + + self.assertEqual(http_client.scheme, "https") + self.assertEqual(http_client.certfile, mock_ssl_options.tls_client_cert_file) + self.assertEqual(http_client.keyfile, mock_ssl_options.tls_client_cert_key_file) + self.assertIsNotNone(http_client.certfile) + mock_create_default_context.assert_called() + + http_client.open() + + conn_pool = http_client._THttpClient__pool + self.assertIsInstance(conn_pool, HTTPSConnectionPool) + self.assertEqual(conn_pool.cert_reqs, CERT_REQUIRED) + self.assertEqual(conn_pool.ca_certs, mock_ssl_options.tls_trusted_ca_file) + self.assertEqual(conn_pool.cert_file, mock_ssl_options.tls_client_cert_file) + self.assertEqual(conn_pool.key_file, mock_ssl_options.tls_client_cert_key_file) + self.assertEqual( + conn_pool.key_password, 
mock_ssl_options.tls_client_cert_key_password + ) + + def test_tls_no_verify_is_respected_by_http_client(self): + from databricks.sql.auth.thrift_http_client import THttpClient + + http_client = THttpClient( + auth_provider=None, + uri_or_host="https://example.com", + ssl_options=SSLOptions(tls_verify=False), + ) + self.assertEqual(http_client.scheme, "https") + + http_client.open() + + conn_pool = http_client._THttpClient__pool + self.assertIsInstance(conn_pool, HTTPSConnectionPool) + self.assertEqual(conn_pool.cert_reqs, CERT_NONE) @patch("databricks.sql.auth.thrift_http_client.THttpClient") - @patch("databricks.sql.thrift_backend.create_default_context") - def test_tls_no_verify_is_respected(self, mock_create_default_context, t_http_client_class): - ThriftBackend("foo", 123, "bar", [], auth_provider=AuthProvider(), _tls_no_verify=True) + @patch("databricks.sql.types.create_default_context") + def test_tls_no_verify_is_respected( + self, mock_create_default_context, t_http_client_class + ): + mock_ssl_options = SSLOptions(tls_verify=False) + mock_ssl_context = mock_ssl_options.create_ssl_context() + mock_create_default_context.assert_called() + + ThriftBackend( + "foo", + 123, + "bar", + [], + auth_provider=AuthProvider(), + ssl_options=mock_ssl_options, + ) - mock_ssl_context = mock_create_default_context.return_value self.assertFalse(mock_ssl_context.check_hostname) self.assertEqual(mock_ssl_context.verify_mode, CERT_NONE) - self.assertEqual(t_http_client_class.call_args[1]["ssl_context"], mock_ssl_context) + self.assertEqual( + t_http_client_class.call_args[1]["ssl_options"], mock_ssl_options + ) @patch("databricks.sql.auth.thrift_http_client.THttpClient") - @patch("databricks.sql.thrift_backend.create_default_context") + @patch("databricks.sql.types.create_default_context") def test_tls_verify_hostname_is_respected( self, mock_create_default_context, t_http_client_class ): + mock_ssl_options = SSLOptions(tls_verify_hostname=False) + mock_ssl_context = mock_ssl_options.create_ssl_context() + mock_create_default_context.assert_called() + ThriftBackend( - "foo", 123, "bar", [], auth_provider=AuthProvider(), _tls_verify_hostname=False + "foo", + 123, + "bar", + [], + auth_provider=AuthProvider(), + ssl_options=mock_ssl_options, ) - mock_ssl_context = mock_create_default_context.return_value self.assertFalse(mock_ssl_context.check_hostname) self.assertEqual(mock_ssl_context.verify_mode, CERT_REQUIRED) - self.assertEqual(t_http_client_class.call_args[1]["ssl_context"], mock_ssl_context) + self.assertEqual( + t_http_client_class.call_args[1]["ssl_options"], mock_ssl_options + ) @patch("databricks.sql.auth.thrift_http_client.THttpClient") def test_port_and_host_are_respected(self, t_http_client_class): - ThriftBackend("hostname", 123, "path_value", [], auth_provider=AuthProvider()) + ThriftBackend( + "hostname", + 123, + "path_value", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) self.assertEqual( - t_http_client_class.call_args[1]["uri_or_host"], "https://hostname:123/path_value" + t_http_client_class.call_args[1]["uri_or_host"], + "https://hostname:123/path_value", ) @patch("databricks.sql.auth.thrift_http_client.THttpClient") def test_host_with_https_does_not_duplicate(self, t_http_client_class): - ThriftBackend("https://hostname", 123, "path_value", [], auth_provider=AuthProvider()) + ThriftBackend( + "https://hostname", + 123, + "path_value", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) self.assertEqual( - 
t_http_client_class.call_args[1]["uri_or_host"], "https://hostname:123/path_value" + t_http_client_class.call_args[1]["uri_or_host"], + "https://hostname:123/path_value", ) @patch("databricks.sql.auth.thrift_http_client.THttpClient") def test_host_with_trailing_backslash_does_not_duplicate(self, t_http_client_class): - ThriftBackend("https://hostname/", 123, "path_value", [], auth_provider=AuthProvider()) + ThriftBackend( + "https://hostname/", + 123, + "path_value", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) self.assertEqual( - t_http_client_class.call_args[1]["uri_or_host"], "https://hostname:123/path_value" + t_http_client_class.call_args[1]["uri_or_host"], + "https://hostname:123/path_value", ) @patch("databricks.sql.auth.thrift_http_client.THttpClient") def test_socket_timeout_is_propagated(self, t_http_client_class): ThriftBackend( - "hostname", 123, "path_value", [], auth_provider=AuthProvider(), _socket_timeout=129 + "hostname", + 123, + "path_value", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + _socket_timeout=129, + ) + self.assertEqual( + t_http_client_class.return_value.setTimeout.call_args[0][0], 129 * 1000 ) - self.assertEqual(t_http_client_class.return_value.setTimeout.call_args[0][0], 129 * 1000) ThriftBackend( - "hostname", 123, "path_value", [], auth_provider=AuthProvider(), _socket_timeout=0 + "hostname", + 123, + "path_value", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + _socket_timeout=0, ) self.assertEqual(t_http_client_class.return_value.setTimeout.call_args[0][0], 0) - ThriftBackend("hostname", 123, "path_value", [], auth_provider=AuthProvider()) - self.assertEqual(t_http_client_class.return_value.setTimeout.call_args[0][0], 900 * 1000) ThriftBackend( - "hostname", 123, "path_value", [], auth_provider=AuthProvider(), _socket_timeout=None + "hostname", + 123, + "path_value", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) + self.assertEqual( + t_http_client_class.return_value.setTimeout.call_args[0][0], 900 * 1000 + ) + ThriftBackend( + "hostname", + 123, + "path_value", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + _socket_timeout=None, + ) + self.assertEqual( + t_http_client_class.return_value.setTimeout.call_args[0][0], None ) - self.assertEqual(t_http_client_class.return_value.setTimeout.call_args[0][0], None) def test_non_primitive_types_raise_error(self): columns = [ ttypes.TColumnDesc( - columnName="column 1", typeDesc=self._make_type_desc(ttypes.TTypeId.INT_TYPE) + columnName="column 1", + typeDesc=self._make_type_desc(ttypes.TTypeId.INT_TYPE), ), ttypes.TColumnDesc( columnName="column 2", typeDesc=ttypes.TTypeDesc( types=[ - ttypes.TTypeEntry(userDefinedTypeEntry=ttypes.TUserDefinedTypeEntry("foo")) + ttypes.TTypeEntry( + userDefinedTypeEntry=ttypes.TUserDefinedTypeEntry("foo") + ) ] ), ), @@ -295,13 +470,16 @@ def test_hive_schema_to_description_preserves_column_names_and_types(self): # canary test columns = [ ttypes.TColumnDesc( - columnName="column 1", typeDesc=self._make_type_desc(ttypes.TTypeId.INT_TYPE) + columnName="column 1", + typeDesc=self._make_type_desc(ttypes.TTypeId.INT_TYPE), ), ttypes.TColumnDesc( - columnName="column 2", typeDesc=self._make_type_desc(ttypes.TTypeId.BOOLEAN_TYPE) + columnName="column 2", + typeDesc=self._make_type_desc(ttypes.TTypeId.BOOLEAN_TYPE), ), ttypes.TColumnDesc( - columnName="column 2", typeDesc=self._make_type_desc(ttypes.TTypeId.MAP_TYPE) + columnName="column 2", + 
typeDesc=self._make_type_desc(ttypes.TTypeId.MAP_TYPE), ), ttypes.TColumnDesc( columnName="", typeDesc=self._make_type_desc(ttypes.TTypeId.STRUCT_TYPE) @@ -332,8 +510,12 @@ def test_hive_schema_to_description_preserves_scale_and_precision(self): type=ttypes.TTypeId.DECIMAL_TYPE, typeQualifiers=ttypes.TTypeQualifiers( qualifiers={ - "precision": ttypes.TTypeQualifierValue(i32Value=10), - "scale": ttypes.TTypeQualifierValue(i32Value=100), + "precision": ttypes.TTypeQualifierValue( + i32Value=10 + ), + "scale": ttypes.TTypeQualifierValue( + i32Value=100 + ), } ), ) @@ -353,8 +535,18 @@ def test_hive_schema_to_description_preserves_scale_and_precision(self): ) def test_make_request_checks_status_code(self): - error_codes = [ttypes.TStatusCode.ERROR_STATUS, ttypes.TStatusCode.INVALID_HANDLE_STATUS] - thrift_backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + error_codes = [ + ttypes.TStatusCode.ERROR_STATUS, + ttypes.TStatusCode.INVALID_HANDLE_STATUS, + ] + thrift_backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) for code in error_codes: mock_error_response = Mock() @@ -392,15 +584,24 @@ def test_handle_execute_response_checks_operation_state_in_direct_results(self): ), ) thrift_backend = ThriftBackend( - "foobar", 443, "path", [], auth_provider=AuthProvider() + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), ) with self.assertRaises(DatabaseError) as cm: thrift_backend._handle_execute_response(t_execute_resp, Mock()) self.assertIn("some information about the error", str(cm.exception)) - @patch("databricks.sql.utils.ResultSetQueueFactory.build_queue", return_value=Mock()) - def test_handle_execute_response_sets_compression_in_direct_results(self, build_queue): + @patch( + "databricks.sql.utils.ResultSetQueueFactory.build_queue", return_value=Mock() + ) + def test_handle_execute_response_sets_compression_in_direct_results( + self, build_queue + ): for resp_type in self.execute_response_types: lz4Compressed = Mock() resultSet = MagicMock() @@ -421,13 +622,24 @@ def test_handle_execute_response_sets_compression_in_direct_results(self, build_ closeOperation=None, ), ) - thrift_backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + thrift_backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) - execute_response = thrift_backend._handle_execute_response(t_execute_resp, Mock()) + execute_response = thrift_backend._handle_execute_response( + t_execute_resp, Mock() + ) self.assertEqual(execute_response.lz4_compressed, lz4Compressed) @patch("databricks.sql.thrift_backend.TCLIService.Client", autospec=True) - def test_handle_execute_response_checks_operation_state_in_polls(self, tcli_service_class): + def test_handle_execute_response_checks_operation_state_in_polls( + self, tcli_service_class + ): tcli_service_instance = tcli_service_class.return_value error_resp = ttypes.TGetOperationStatusResp( @@ -443,7 +655,9 @@ def test_handle_execute_response_checks_operation_state_in_polls(self, tcli_serv for op_state_resp, exec_resp_type in itertools.product( [error_resp, closed_resp], self.execute_response_types ): - with self.subTest(op_state_resp=op_state_resp, exec_resp_type=exec_resp_type): + with self.subTest( + op_state_resp=op_state_resp, exec_resp_type=exec_resp_type + ): tcli_service_instance = tcli_service_class.return_value t_execute_resp = exec_resp_type( 
status=self.okay_status, @@ -453,7 +667,12 @@ def test_handle_execute_response_checks_operation_state_in_polls(self, tcli_serv tcli_service_instance.GetOperationStatus.return_value = op_state_resp thrift_backend = ThriftBackend( - "foobar", 443, "path", [], auth_provider=AuthProvider() + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), ) with self.assertRaises(DatabaseError) as cm: @@ -476,12 +695,23 @@ def test_get_status_uses_display_message_if_available(self, tcli_service_class): ) t_execute_resp = ttypes.TExecuteStatementResp( - status=self.okay_status, directResults=None, operationHandle=self.operation_handle + status=self.okay_status, + directResults=None, + operationHandle=self.operation_handle, + ) + tcli_service_instance.GetOperationStatus.return_value = ( + t_get_operation_status_resp ) - tcli_service_instance.GetOperationStatus.return_value = t_get_operation_status_resp tcli_service_instance.ExecuteStatement.return_value = t_execute_resp - thrift_backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + thrift_backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) with self.assertRaises(DatabaseError) as cm: thrift_backend.execute_command(Mock(), Mock(), 100, 100, Mock(), Mock()) @@ -514,7 +744,14 @@ def test_direct_results_uses_display_message_if_available(self, tcli_service_cla tcli_service_instance.ExecuteStatement.return_value = t_execute_resp - thrift_backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + thrift_backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) with self.assertRaises(DatabaseError) as cm: thrift_backend.execute_command(Mock(), Mock(), 100, 100, Mock(), Mock()) @@ -526,7 +763,9 @@ def test_handle_execute_response_checks_direct_results_for_error_statuses(self): resp_1 = resp_type( status=self.okay_status, directResults=ttypes.TSparkDirectResults( - operationStatus=ttypes.TGetOperationStatusResp(status=self.bad_status), + operationStatus=ttypes.TGetOperationStatusResp( + status=self.bad_status + ), resultSetMetadata=None, resultSet=None, closeOperation=None, @@ -537,7 +776,9 @@ def test_handle_execute_response_checks_direct_results_for_error_statuses(self): status=self.okay_status, directResults=ttypes.TSparkDirectResults( operationStatus=None, - resultSetMetadata=ttypes.TGetResultSetMetadataResp(status=self.bad_status), + resultSetMetadata=ttypes.TGetResultSetMetadataResp( + status=self.bad_status + ), resultSet=None, closeOperation=None, ), @@ -566,7 +807,12 @@ def test_handle_execute_response_checks_direct_results_for_error_statuses(self): for error_resp in [resp_1, resp_2, resp_3, resp_4]: with self.subTest(error_resp=error_resp): thrift_backend = ThriftBackend( - "foobar", 443, "path", [], auth_provider=AuthProvider() + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), ) with self.assertRaises(DatabaseError) as cm: @@ -574,7 +820,9 @@ def test_handle_execute_response_checks_direct_results_for_error_statuses(self): self.assertIn("this is a bad error", str(cm.exception)) @patch("databricks.sql.thrift_backend.TCLIService.Client", autospec=True) - def test_handle_execute_response_can_handle_without_direct_results(self, tcli_service_class): + def test_handle_execute_response_can_handle_without_direct_results( + self, tcli_service_class + ): tcli_service_instance = 
tcli_service_class.return_value for resp_type in self.execute_response_types: @@ -597,23 +845,32 @@ def test_handle_execute_response_can_handle_without_direct_results(self, tcli_se ) op_state_3 = ttypes.TGetOperationStatusResp( - status=self.okay_status, operationState=ttypes.TOperationState.FINISHED_STATE + status=self.okay_status, + operationState=ttypes.TOperationState.FINISHED_STATE, ) - tcli_service_instance.GetResultSetMetadata.return_value = self.metadata_resp + tcli_service_instance.GetResultSetMetadata.return_value = ( + self.metadata_resp + ) tcli_service_instance.GetOperationStatus.side_effect = [ op_state_1, op_state_2, op_state_3, ] thrift_backend = ThriftBackend( - "foobar", 443, "path", [], auth_provider=AuthProvider() + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), ) results_message_response = thrift_backend._handle_execute_response( execute_resp, Mock() ) self.assertEqual( - results_message_response.status, ttypes.TOperationState.FINISHED_STATE + results_message_response.status, + ttypes.TOperationState.FINISHED_STATE, ) def test_handle_execute_response_can_handle_with_direct_results(self): @@ -638,7 +895,12 @@ def test_handle_execute_response_can_handle_with_direct_results(self): ) thrift_backend = ThriftBackend( - "foobar", 443, "path", [], auth_provider=AuthProvider() + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), ) thrift_backend._results_message_to_execute_response = Mock() @@ -668,9 +930,13 @@ def test_use_arrow_schema_if_available(self, tcli_service_class): operationHandle=self.operation_handle, ) - tcli_service_instance.GetResultSetMetadata.return_value = t_get_result_set_metadata_resp + tcli_service_instance.GetResultSetMetadata.return_value = ( + t_get_result_set_metadata_resp + ) thrift_backend = self._make_fake_thrift_backend() - execute_response = thrift_backend._handle_execute_response(t_execute_resp, Mock()) + execute_response = thrift_backend._handle_execute_response( + t_execute_resp, Mock() + ) self.assertEqual(execute_response.arrow_schema_bytes, arrow_schema_mock) @@ -697,10 +963,13 @@ def test_fall_back_to_hive_schema_if_no_arrow_schema(self, tcli_service_class): thrift_backend._handle_execute_response(t_execute_resp, Mock()) self.assertEqual( - hive_schema_mock, thrift_backend._hive_schema_to_arrow_schema.call_args[0][0] + hive_schema_mock, + thrift_backend._hive_schema_to_arrow_schema.call_args[0][0], ) - @patch("databricks.sql.utils.ResultSetQueueFactory.build_queue", return_value=Mock()) + @patch( + "databricks.sql.utils.ResultSetQueueFactory.build_queue", return_value=Mock() + ) @patch("databricks.sql.thrift_backend.TCLIService.Client", autospec=True) def test_handle_execute_response_reads_has_more_rows_in_direct_results( self, tcli_service_class, build_queue @@ -731,14 +1000,20 @@ def test_handle_execute_response_reads_has_more_rows_in_direct_results( operationHandle=self.operation_handle, ) - tcli_service_instance.GetResultSetMetadata.return_value = self.metadata_resp + tcli_service_instance.GetResultSetMetadata.return_value = ( + self.metadata_resp + ) thrift_backend = self._make_fake_thrift_backend() - execute_response = thrift_backend._handle_execute_response(execute_resp, Mock()) + execute_response = thrift_backend._handle_execute_response( + execute_resp, Mock() + ) self.assertEqual(has_more_rows, execute_response.has_more_rows) - @patch("databricks.sql.utils.ResultSetQueueFactory.build_queue", return_value=Mock()) + @patch( + 
"databricks.sql.utils.ResultSetQueueFactory.build_queue", return_value=Mock() + ) @patch("databricks.sql.thrift_backend.TCLIService.Client", autospec=True) def test_handle_execute_response_reads_has_more_rows_in_result_response( self, tcli_service_class, build_queue @@ -773,8 +1048,12 @@ def test_handle_execute_response_reads_has_more_rows_in_result_response( ) tcli_service_instance.FetchResults.return_value = fetch_results_resp - tcli_service_instance.GetOperationStatus.return_value = operation_status_resp - tcli_service_instance.GetResultSetMetadata.return_value = self.metadata_resp + tcli_service_instance.GetOperationStatus.return_value = ( + operation_status_resp + ) + tcli_service_instance.GetResultSetMetadata.return_value = ( + self.metadata_resp + ) thrift_backend = self._make_fake_thrift_backend() thrift_backend._handle_execute_response(execute_resp, Mock()) @@ -801,7 +1080,8 @@ def test_arrow_batches_row_count_are_respected(self, tcli_service_class): startRowOffset=0, rows=[], arrowBatches=[ - ttypes.TSparkArrowBatch(batch=bytearray(), rowCount=15) for _ in range(10) + ttypes.TSparkArrowBatch(batch=bytearray(), rowCount=15) + for _ in range(10) ], ), resultSetMetadata=ttypes.TGetResultSetMetadataResp( @@ -822,7 +1102,14 @@ def test_arrow_batches_row_count_are_respected(self, tcli_service_class): .to_pybytes() ) - thrift_backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + thrift_backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) arrow_queue, has_more_results = thrift_backend.fetch_results( op_handle=Mock(), max_rows=1, @@ -836,11 +1123,20 @@ def test_arrow_batches_row_count_are_respected(self, tcli_service_class): self.assertEqual(arrow_queue.n_valid_rows, 15 * 10) @patch("databricks.sql.thrift_backend.TCLIService.Client", autospec=True) - def test_execute_statement_calls_client_and_handle_execute_response(self, tcli_service_class): + def test_execute_statement_calls_client_and_handle_execute_response( + self, tcli_service_class + ): tcli_service_instance = tcli_service_class.return_value response = Mock() tcli_service_instance.ExecuteStatement.return_value = response - thrift_backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + thrift_backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) thrift_backend._handle_execute_response = Mock() cursor_mock = Mock() @@ -851,14 +1147,25 @@ def test_execute_statement_calls_client_and_handle_execute_response(self, tcli_s self.assertEqual(req.getDirectResults, get_direct_results) self.assertEqual(req.statement, "foo") # Check response handling - thrift_backend._handle_execute_response.assert_called_with(response, cursor_mock) + thrift_backend._handle_execute_response.assert_called_with( + response, cursor_mock + ) @patch("databricks.sql.thrift_backend.TCLIService.Client", autospec=True) - def test_get_catalogs_calls_client_and_handle_execute_response(self, tcli_service_class): + def test_get_catalogs_calls_client_and_handle_execute_response( + self, tcli_service_class + ): tcli_service_instance = tcli_service_class.return_value response = Mock() tcli_service_instance.GetCatalogs.return_value = response - thrift_backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + thrift_backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) 
thrift_backend._handle_execute_response = Mock() cursor_mock = Mock() @@ -868,14 +1175,25 @@ def test_get_catalogs_calls_client_and_handle_execute_response(self, tcli_servic get_direct_results = ttypes.TSparkGetDirectResults(maxRows=100, maxBytes=200) self.assertEqual(req.getDirectResults, get_direct_results) # Check response handling - thrift_backend._handle_execute_response.assert_called_with(response, cursor_mock) + thrift_backend._handle_execute_response.assert_called_with( + response, cursor_mock + ) @patch("databricks.sql.thrift_backend.TCLIService.Client", autospec=True) - def test_get_schemas_calls_client_and_handle_execute_response(self, tcli_service_class): + def test_get_schemas_calls_client_and_handle_execute_response( + self, tcli_service_class + ): tcli_service_instance = tcli_service_class.return_value response = Mock() tcli_service_instance.GetSchemas.return_value = response - thrift_backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + thrift_backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) thrift_backend._handle_execute_response = Mock() cursor_mock = Mock() @@ -894,14 +1212,25 @@ def test_get_schemas_calls_client_and_handle_execute_response(self, tcli_service self.assertEqual(req.catalogName, "catalog_pattern") self.assertEqual(req.schemaName, "schema_pattern") # Check response handling - thrift_backend._handle_execute_response.assert_called_with(response, cursor_mock) + thrift_backend._handle_execute_response.assert_called_with( + response, cursor_mock + ) @patch("databricks.sql.thrift_backend.TCLIService.Client", autospec=True) - def test_get_tables_calls_client_and_handle_execute_response(self, tcli_service_class): + def test_get_tables_calls_client_and_handle_execute_response( + self, tcli_service_class + ): tcli_service_instance = tcli_service_class.return_value response = Mock() tcli_service_instance.GetTables.return_value = response - thrift_backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + thrift_backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) thrift_backend._handle_execute_response = Mock() cursor_mock = Mock() @@ -924,14 +1253,25 @@ def test_get_tables_calls_client_and_handle_execute_response(self, tcli_service_ self.assertEqual(req.tableName, "table_pattern") self.assertEqual(req.tableTypes, ["type1", "type2"]) # Check response handling - thrift_backend._handle_execute_response.assert_called_with(response, cursor_mock) + thrift_backend._handle_execute_response.assert_called_with( + response, cursor_mock + ) @patch("databricks.sql.thrift_backend.TCLIService.Client", autospec=True) - def test_get_columns_calls_client_and_handle_execute_response(self, tcli_service_class): + def test_get_columns_calls_client_and_handle_execute_response( + self, tcli_service_class + ): tcli_service_instance = tcli_service_class.return_value response = Mock() tcli_service_instance.GetColumns.return_value = response - thrift_backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + thrift_backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) thrift_backend._handle_execute_response = Mock() cursor_mock = Mock() @@ -954,21 +1294,37 @@ def test_get_columns_calls_client_and_handle_execute_response(self, tcli_service self.assertEqual(req.tableName, "table_pattern") 
self.assertEqual(req.columnName, "column_pattern") # Check response handling - thrift_backend._handle_execute_response.assert_called_with(response, cursor_mock) + thrift_backend._handle_execute_response.assert_called_with( + response, cursor_mock + ) @patch("databricks.sql.thrift_backend.TCLIService.Client", autospec=True) def test_open_session_user_provided_session_id_optional(self, tcli_service_class): tcli_service_instance = tcli_service_class.return_value tcli_service_instance.OpenSession.return_value = self.open_session_resp - thrift_backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + thrift_backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) thrift_backend.open_session({}, None, None) self.assertEqual(len(tcli_service_instance.OpenSession.call_args_list), 1) @patch("databricks.sql.thrift_backend.TCLIService.Client", autospec=True) def test_op_handle_respected_in_close_command(self, tcli_service_class): tcli_service_instance = tcli_service_class.return_value - thrift_backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + thrift_backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) thrift_backend.close_command(self.operation_handle) self.assertEqual( tcli_service_instance.CloseOperation.call_args[0][0].operationHandle, @@ -978,20 +1334,32 @@ def test_op_handle_respected_in_close_command(self, tcli_service_class): @patch("databricks.sql.thrift_backend.TCLIService.Client", autospec=True) def test_session_handle_respected_in_close_session(self, tcli_service_class): tcli_service_instance = tcli_service_class.return_value - thrift_backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + thrift_backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) thrift_backend.close_session(self.session_handle) self.assertEqual( - tcli_service_instance.CloseSession.call_args[0][0].sessionHandle, self.session_handle + tcli_service_instance.CloseSession.call_args[0][0].sessionHandle, + self.session_handle, ) @patch("databricks.sql.thrift_backend.TCLIService.Client", autospec=True) - def test_non_arrow_non_column_based_set_triggers_exception(self, tcli_service_class): + def test_non_arrow_non_column_based_set_triggers_exception( + self, tcli_service_class + ): tcli_service_instance = tcli_service_class.return_value results_mock = Mock() results_mock.startRowOffset = 0 execute_statement_resp = ttypes.TExecuteStatementResp( - status=self.okay_status, directResults=None, operationHandle=self.operation_handle + status=self.okay_status, + directResults=None, + operationHandle=self.operation_handle, ) metadata_resp = ttypes.TGetResultSetMetadataResp( @@ -1012,11 +1380,20 @@ def test_non_arrow_non_column_based_set_triggers_exception(self, tcli_service_cl with self.assertRaises(OperationalError) as cm: thrift_backend.execute_command("foo", Mock(), 100, 100, Mock(), Mock()) - self.assertIn("Expected results to be in Arrow or column based format", str(cm.exception)) + self.assertIn( + "Expected results to be in Arrow or column based format", str(cm.exception) + ) def test_create_arrow_table_raises_error_for_unsupported_type(self): t_row_set = ttypes.TRowSet() - thrift_backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + thrift_backend = ThriftBackend( + "foobar", + 443, + "path", + [], + 
auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) with self.assertRaises(OperationalError): thrift_backend._create_arrow_table(t_row_set, Mock(), None, Mock()) @@ -1025,7 +1402,14 @@ def test_create_arrow_table_raises_error_for_unsupported_type(self): def test_create_arrow_table_calls_correct_conversion_method( self, convert_col_mock, convert_arrow_mock ): - thrift_backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + thrift_backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) convert_arrow_mock.return_value = (MagicMock(), Mock()) convert_col_mock.return_value = (MagicMock(), Mock()) @@ -1036,18 +1420,31 @@ def test_create_arrow_table_calls_correct_conversion_method( description = Mock() t_col_set = ttypes.TRowSet(columns=cols) - thrift_backend._create_arrow_table(t_col_set, lz4_compressed, schema, description) + thrift_backend._create_arrow_table( + t_col_set, lz4_compressed, schema, description + ) convert_arrow_mock.assert_not_called() convert_col_mock.assert_called_once_with(cols, description) t_arrow_set = ttypes.TRowSet(arrowBatches=arrow_batches) thrift_backend._create_arrow_table(t_arrow_set, lz4_compressed, schema, Mock()) - convert_arrow_mock.assert_called_once_with(arrow_batches, lz4_compressed, schema) + convert_arrow_mock.assert_called_once_with( + arrow_batches, lz4_compressed, schema + ) @patch("lz4.frame.decompress") @patch("pyarrow.ipc.open_stream") - def test_convert_arrow_based_set_to_arrow_table(self, open_stream_mock, lz4_decompress_mock): - thrift_backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + def test_convert_arrow_based_set_to_arrow_table( + self, open_stream_mock, lz4_decompress_mock + ): + thrift_backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) lz4_decompress_mock.return_value = bytearray("Testing", "utf-8") @@ -1079,15 +1476,23 @@ def test_convert_column_based_set_to_arrow_table_without_nulls(self): t_cols = [ ttypes.TColumn(i32Val=ttypes.TI32Column(values=[1, 2, 3], nulls=bytes(1))), ttypes.TColumn( - stringVal=ttypes.TStringColumn(values=["s1", "s2", "s3"], nulls=bytes(1)) + stringVal=ttypes.TStringColumn( + values=["s1", "s2", "s3"], nulls=bytes(1) + ) ), - ttypes.TColumn(doubleVal=ttypes.TDoubleColumn(values=[1.15, 2.2, 3.3], nulls=bytes(1))), ttypes.TColumn( - binaryVal=ttypes.TBinaryColumn(values=[b"\x11", b"\x22", b"\x33"], nulls=bytes(1)) + doubleVal=ttypes.TDoubleColumn(values=[1.15, 2.2, 3.3], nulls=bytes(1)) + ), + ttypes.TColumn( + binaryVal=ttypes.TBinaryColumn( + values=[b"\x11", b"\x22", b"\x33"], nulls=bytes(1) + ) ), ] - arrow_table, n_rows = utils.convert_column_based_set_to_arrow_table(t_cols, description) + arrow_table, n_rows = utils.convert_column_based_set_to_arrow_table( + t_cols, description + ) self.assertEqual(n_rows, 3) # Check schema, column names and types @@ -1112,19 +1517,29 @@ def test_convert_column_based_set_to_arrow_table_with_nulls(self): description = [(name,) for name in field_names] t_cols = [ - ttypes.TColumn(i32Val=ttypes.TI32Column(values=[1, 2, 3], nulls=bytes([1]))), ttypes.TColumn( - stringVal=ttypes.TStringColumn(values=["s1", "s2", "s3"], nulls=bytes([2])) + i32Val=ttypes.TI32Column(values=[1, 2, 3], nulls=bytes([1])) ), ttypes.TColumn( - doubleVal=ttypes.TDoubleColumn(values=[1.15, 2.2, 3.3], nulls=bytes([4])) + stringVal=ttypes.TStringColumn( + values=["s1", "s2", "s3"], nulls=bytes([2]) + ) ), 
ttypes.TColumn( - binaryVal=ttypes.TBinaryColumn(values=[b"\x11", b"\x22", b"\x33"], nulls=bytes([3])) + doubleVal=ttypes.TDoubleColumn( + values=[1.15, 2.2, 3.3], nulls=bytes([4]) + ) + ), + ttypes.TColumn( + binaryVal=ttypes.TBinaryColumn( + values=[b"\x11", b"\x22", b"\x33"], nulls=bytes([3]) + ) ), ] - arrow_table, n_rows = utils.convert_column_based_set_to_arrow_table(t_cols, description) + arrow_table, n_rows = utils.convert_column_based_set_to_arrow_table( + t_cols, description + ) self.assertEqual(n_rows, 3) # Check data @@ -1140,15 +1555,23 @@ def test_convert_column_based_set_to_arrow_table_uses_types_from_col_set(self): t_cols = [ ttypes.TColumn(i32Val=ttypes.TI32Column(values=[1, 2, 3], nulls=bytes(1))), ttypes.TColumn( - stringVal=ttypes.TStringColumn(values=["s1", "s2", "s3"], nulls=bytes(1)) + stringVal=ttypes.TStringColumn( + values=["s1", "s2", "s3"], nulls=bytes(1) + ) + ), + ttypes.TColumn( + doubleVal=ttypes.TDoubleColumn(values=[1.15, 2.2, 3.3], nulls=bytes(1)) ), - ttypes.TColumn(doubleVal=ttypes.TDoubleColumn(values=[1.15, 2.2, 3.3], nulls=bytes(1))), ttypes.TColumn( - binaryVal=ttypes.TBinaryColumn(values=[b"\x11", b"\x22", b"\x33"], nulls=bytes(1)) + binaryVal=ttypes.TBinaryColumn( + values=[b"\x11", b"\x22", b"\x33"], nulls=bytes(1) + ) ), ] - arrow_table, n_rows = utils.convert_column_based_set_to_arrow_table(t_cols, description) + arrow_table, n_rows = utils.convert_column_based_set_to_arrow_table( + t_cols, description + ) self.assertEqual(n_rows, 3) # Check schema, column names and types @@ -1194,8 +1617,12 @@ def test_handle_execute_response_sets_active_op_handle(self): self.assertEqual(mock_resp.operationHandle, mock_cursor.active_op_handle) @patch("databricks.sql.auth.thrift_http_client.THttpClient") - @patch("databricks.sql.thrift_api.TCLIService.TCLIService.Client.GetOperationStatus") - @patch("databricks.sql.thrift_backend._retry_policy", new_callable=retry_policy_factory) + @patch( + "databricks.sql.thrift_api.TCLIService.TCLIService.Client.GetOperationStatus" + ) + @patch( + "databricks.sql.thrift_backend._retry_policy", new_callable=retry_policy_factory + ) def test_make_request_will_retry_GetOperationStatus( self, mock_retry_policy, mock_GetOperationStatus, t_transport_class ): @@ -1207,7 +1634,9 @@ def test_make_request_will_retry_GetOperationStatus( this_gos_name = "GetOperationStatus" mock_GetOperationStatus.__name__ = this_gos_name - mock_GetOperationStatus.side_effect = OSError(errno.ETIMEDOUT, "Connection timed out") + mock_GetOperationStatus.side_effect = OSError( + errno.ETIMEDOUT, "Connection timed out" + ) protocol = thrift.protocol.TBinaryProtocol.TBinaryProtocol(t_transport_class) client = Client(protocol) @@ -1225,6 +1654,7 @@ def test_make_request_will_retry_GetOperationStatus( "path", [], auth_provider=AuthProvider(), + ssl_options=SSLOptions(), _retry_stop_after_attempts_count=EXPECTED_RETRIES, _retry_delay_default=1, ) @@ -1235,12 +1665,18 @@ def test_make_request_will_retry_GetOperationStatus( self.assertEqual( NoRetryReason.OUT_OF_ATTEMPTS.value, cm.exception.context["no-retry-reason"] ) - self.assertEqual(f"{EXPECTED_RETRIES}/{EXPECTED_RETRIES}", cm.exception.context["attempt"]) + self.assertEqual( + f"{EXPECTED_RETRIES}/{EXPECTED_RETRIES}", cm.exception.context["attempt"] + ) # Unusual OSError code - mock_GetOperationStatus.side_effect = OSError(errno.EEXIST, "File does not exist") + mock_GetOperationStatus.side_effect = OSError( + errno.EEXIST, "File does not exist" + ) - with self.assertLogs("databricks.sql.thrift_backend", 
level=logging.WARNING) as cm: + with self.assertLogs( + "databricks.sql.thrift_backend", level=logging.WARNING + ) as cm: with self.assertRaises(RequestError): thrift_backend.make_request(client.GetOperationStatus, req) @@ -1256,8 +1692,12 @@ def test_make_request_will_retry_GetOperationStatus( cm.output[0], ) - @patch("databricks.sql.thrift_api.TCLIService.TCLIService.Client.GetOperationStatus") - @patch("databricks.sql.thrift_backend._retry_policy", new_callable=retry_policy_factory) + @patch( + "databricks.sql.thrift_api.TCLIService.TCLIService.Client.GetOperationStatus" + ) + @patch( + "databricks.sql.thrift_backend._retry_policy", new_callable=retry_policy_factory + ) def test_make_request_will_retry_GetOperationStatus_for_http_error( self, mock_retry_policy, mock_gos ): @@ -1291,6 +1731,7 @@ def test_make_request_will_retry_GetOperationStatus_for_http_error( "path", [], auth_provider=AuthProvider(), + ssl_options=SSLOptions(), _retry_stop_after_attempts_count=EXPECTED_RETRIES, _retry_delay_default=1, ) @@ -1301,10 +1742,14 @@ def test_make_request_will_retry_GetOperationStatus_for_http_error( self.assertEqual( NoRetryReason.OUT_OF_ATTEMPTS.value, cm.exception.context["no-retry-reason"] ) - self.assertEqual(f"{EXPECTED_RETRIES}/{EXPECTED_RETRIES}", cm.exception.context["attempt"]) + self.assertEqual( + f"{EXPECTED_RETRIES}/{EXPECTED_RETRIES}", cm.exception.context["attempt"] + ) @patch("thrift.transport.THttpClient.THttpClient") - def test_make_request_wont_retry_if_error_code_not_429_or_503(self, t_transport_class): + def test_make_request_wont_retry_if_error_code_not_429_or_503( + self, t_transport_class + ): t_transport_instance = t_transport_class.return_value t_transport_instance.code = 430 t_transport_instance.headers = {"Retry-After": "1"} @@ -1312,7 +1757,14 @@ def test_make_request_wont_retry_if_error_code_not_429_or_503(self, t_transport_ mock_method.__name__ = "method name" mock_method.side_effect = Exception("This method fails") - thrift_backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + thrift_backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) with self.assertRaises(OperationalError) as cm: thrift_backend.make_request(mock_method, Mock()) @@ -1320,7 +1772,9 @@ def test_make_request_wont_retry_if_error_code_not_429_or_503(self, t_transport_ self.assertIn("This method fails", str(cm.exception.message_with_context())) @patch("databricks.sql.auth.thrift_http_client.THttpClient") - @patch("databricks.sql.thrift_backend._retry_policy", new_callable=retry_policy_factory) + @patch( + "databricks.sql.thrift_backend._retry_policy", new_callable=retry_policy_factory + ) def test_make_request_will_retry_stop_after_attempts_count_if_retryable( self, mock_retry_policy, t_transport_class ): @@ -1337,6 +1791,7 @@ def test_make_request_will_retry_stop_after_attempts_count_if_retryable( "path", [], auth_provider=AuthProvider(), + ssl_options=SSLOptions(), _retry_stop_after_attempts_count=14, _retry_delay_max=0, _retry_delay_min=0, @@ -1351,13 +1806,22 @@ def test_make_request_will_retry_stop_after_attempts_count_if_retryable( self.assertEqual(mock_method.call_count, 14) @patch("databricks.sql.auth.thrift_http_client.THttpClient") - def test_make_request_will_read_error_message_headers_if_set(self, t_transport_class): + def test_make_request_will_read_error_message_headers_if_set( + self, t_transport_class + ): t_transport_instance = t_transport_class.return_value mock_method = Mock() 
mock_method.__name__ = "method name" mock_method.side_effect = Exception("This method fails") - thrift_backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + thrift_backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) error_headers = [ [("x-thriftserver-error-message", "thrift server error message")], @@ -1391,12 +1855,18 @@ def make_table_and_desc( ): int_col = [int_constant for _ in range(height)] decimal_col = [decimal_constant for _ in range(height)] - data = OrderedDict({"col{}".format(i): int_col for i in range(width - n_decimal_cols)}) - decimals = OrderedDict({"col_dec{}".format(i): decimal_col for i in range(n_decimal_cols)}) + data = OrderedDict( + {"col{}".format(i): int_col for i in range(width - n_decimal_cols)} + ) + decimals = OrderedDict( + {"col_dec{}".format(i): decimal_col for i in range(n_decimal_cols)} + ) data.update(decimals) int_desc = [("", "int")] * (width - n_decimal_cols) - decimal_desc = [("", "decimal", None, None, precision, scale, None)] * n_decimal_cols + decimal_desc = [ + ("", "decimal", None, None, precision, scale, None) + ] * n_decimal_cols description = int_desc + decimal_desc table = pyarrow.Table.from_pydict(data) @@ -1429,25 +1899,36 @@ def test_arrow_decimal_conversion(self): if height > 0: if i < width - n_decimal_cols: self.assertEqual( - decimal_converted_table.field(i).type, pyarrow.int64() + decimal_converted_table.field(i).type, + pyarrow.int64(), ) else: self.assertEqual( decimal_converted_table.field(i).type, - pyarrow.decimal128(precision=precision, scale=scale), + pyarrow.decimal128( + precision=precision, scale=scale + ), ) int_col = [int_constant for _ in range(height)] decimal_col = [Decimal(decimal_constant) for _ in range(height)] expected_result = OrderedDict( - {"col{}".format(i): int_col for i in range(width - n_decimal_cols)} + { + "col{}".format(i): int_col + for i in range(width - n_decimal_cols) + } ) decimals = OrderedDict( - {"col_dec{}".format(i): decimal_col for i in range(n_decimal_cols)} + { + "col_dec{}".format(i): decimal_col + for i in range(n_decimal_cols) + } ) expected_result.update(decimals) - self.assertEqual(decimal_converted_table.to_pydict(), expected_result) + self.assertEqual( + decimal_converted_table.to_pydict(), expected_result + ) @patch("thrift.transport.THttpClient.THttpClient") def test_retry_args_passthrough(self, mock_http_client): @@ -1458,7 +1939,13 @@ def test_retry_args_passthrough(self, mock_http_client): "_retry_stop_after_attempts_duration": 100, } backend = ThriftBackend( - "foobar", 443, "path", [], auth_provider=AuthProvider(), **retry_delay_args + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + **retry_delay_args, ) for arg, val in retry_delay_args.items(): self.assertEqual(getattr(backend, arg), val) @@ -1467,17 +1954,28 @@ def test_retry_args_passthrough(self, mock_http_client): def test_retry_args_bounding(self, mock_http_client): retry_delay_test_args_and_expected_values = {} for k, (_, _, min, max) in databricks.sql.thrift_backend._retry_policy.items(): - retry_delay_test_args_and_expected_values[k] = ((min - 1, min), (max + 1, max)) + retry_delay_test_args_and_expected_values[k] = ( + (min - 1, min), + (max + 1, max), + ) for i in range(2): retry_delay_args = { - k: v[i][0] for (k, v) in retry_delay_test_args_and_expected_values.items() + k: v[i][0] + for (k, v) in retry_delay_test_args_and_expected_values.items() } backend = 
ThriftBackend( - "foobar", 443, "path", [], auth_provider=AuthProvider(), **retry_delay_args + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + **retry_delay_args, ) retry_delay_expected_vals = { - k: v[i][1] for (k, v) in retry_delay_test_args_and_expected_values.items() + k: v[i][1] + for (k, v) in retry_delay_test_args_and_expected_values.items() } for arg, val in retry_delay_expected_vals.items(): self.assertEqual(getattr(backend, arg), val) @@ -1494,7 +1992,14 @@ def test_configuration_passthrough(self, tcli_client_class): "42": "42", } - backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) backend.open_session(mock_config, None, None) open_session_req = tcli_client_class.return_value.OpenSession.call_args[0][0] @@ -1505,7 +2010,14 @@ def test_cant_set_timestamp_as_string_to_true(self, tcli_client_class): tcli_service_instance = tcli_client_class.return_value tcli_service_instance.OpenSession.return_value = self.open_session_resp mock_config = {"spark.thriftserver.arrowBasedRowSet.timestampAsString": True} - backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) with self.assertRaises(databricks.sql.Error) as cm: backend.open_session(mock_config, None, None) @@ -1524,7 +2036,14 @@ def _construct_open_session_with_namespace(self, can_use_multiple_cats, cat, sch def test_initial_namespace_passthrough_to_open_session(self, tcli_client_class): tcli_service_instance = tcli_client_class.return_value - backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) initial_cat_schem_args = [("cat", None), (None, "schem"), ("cat", "schem")] for cat, schem in initial_cat_schem_args: @@ -1535,26 +2054,46 @@ def test_initial_namespace_passthrough_to_open_session(self, tcli_client_class): backend.open_session({}, cat, schem) - open_session_req = tcli_client_class.return_value.OpenSession.call_args[0][0] + open_session_req = tcli_client_class.return_value.OpenSession.call_args[ + 0 + ][0] self.assertEqual(open_session_req.initialNamespace.catalogName, cat) self.assertEqual(open_session_req.initialNamespace.schemaName, schem) @patch("databricks.sql.thrift_backend.TCLIService.Client", autospec=True) - def test_can_use_multiple_catalogs_is_set_in_open_session_req(self, tcli_client_class): + def test_can_use_multiple_catalogs_is_set_in_open_session_req( + self, tcli_client_class + ): tcli_service_instance = tcli_client_class.return_value tcli_service_instance.OpenSession.return_value = self.open_session_resp - backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) backend.open_session({}, None, None) open_session_req = tcli_client_class.return_value.OpenSession.call_args[0][0] self.assertTrue(open_session_req.canUseMultipleCatalogs) @patch("databricks.sql.thrift_backend.TCLIService.Client", autospec=True) - def test_can_use_multiple_catalogs_is_false_fails_with_initial_catalog(self, tcli_client_class): + def 
test_can_use_multiple_catalogs_is_false_fails_with_initial_catalog( + self, tcli_client_class + ): tcli_service_instance = tcli_client_class.return_value - backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) # If the initial catalog is set, but server returns canUseMultipleCatalogs=False, we # expect failure. If the initial catalog isn't set, then canUseMultipleCatalogs=False # is fine @@ -1592,13 +2131,21 @@ def test_protocol_v3_fails_if_initial_namespace_set(self, tcli_client_class): initialNamespace=ttypes.TNamespace(catalogName="cat", schemaName="schem"), ) - backend = ThriftBackend("foobar", 443, "path", [], auth_provider=AuthProvider()) + backend = ThriftBackend( + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + ) with self.assertRaises(InvalidServerResponseError) as cm: backend.open_session({}, "cat", "schem") self.assertIn( - "Setting initial namespace not supported by the DBR version", str(cm.exception) + "Setting initial namespace not supported by the DBR version", + str(cm.exception), ) @patch("databricks.sql.thrift_backend.TCLIService.Client", autospec=True) @@ -1620,10 +2167,18 @@ def test_execute_command_sets_complex_type_fields_correctly( complex_arg_types["_use_arrow_native_decimals"] = decimals thrift_backend = ThriftBackend( - "foobar", 443, "path", [], auth_provider=AuthProvider(), **complex_arg_types + "foobar", + 443, + "path", + [], + auth_provider=AuthProvider(), + ssl_options=SSLOptions(), + **complex_arg_types, ) thrift_backend.execute_command(Mock(), Mock(), 100, 100, Mock(), Mock()) - t_execute_statement_req = tcli_service_instance.ExecuteStatement.call_args[0][0] + t_execute_statement_req = tcli_service_instance.ExecuteStatement.call_args[ + 0 + ][0] # If the value is unset, the native type should default to True self.assertEqual( t_execute_statement_req.useArrowNativeTypes.timestampAsArrow, @@ -1637,7 +2192,9 @@ def test_execute_command_sets_complex_type_fields_correctly( t_execute_statement_req.useArrowNativeTypes.complexTypesAsArrow, complex_arg_types.get("_use_arrow_native_complex_types", True), ) - self.assertFalse(t_execute_statement_req.useArrowNativeTypes.intervalTypesAsArrow) + self.assertFalse( + t_execute_statement_req.useArrowNativeTypes.intervalTypesAsArrow + ) if __name__ == "__main__": From 608d2377dd2fa7af52d5455d444d0efae2836f22 Mon Sep 17 00:00:00 2001 From: Jothi Prakash Date: Mon, 11 Nov 2024 22:36:54 +0530 Subject: [PATCH 10/19] Removed the sqlalchemy tests from integration.yml file --- .github/workflows/integration.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index f28c22a8..aef7b7f2 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -55,5 +55,3 @@ jobs: #---------------------------------------------- - name: Run e2e tests run: poetry run python -m pytest tests/e2e - - name: Run SQL Alchemy tests - run: poetry run python -m pytest src/databricks/sqlalchemy/test_local From 85af9c087b0ea87ba9fd23fcd1e0bf2157f3461e Mon Sep 17 00:00:00 2001 From: Jacky Hu Date: Tue, 12 Nov 2024 20:48:41 -0800 Subject: [PATCH 11/19] [PECO-1803] Print warning message if pyarrow is not installed (#468) Print warning message if pyarrow is not installed Signed-off-by: Jacky Hu --- src/databricks/sql/client.py | 7 +++++++ 1 file changed, 7 
insertions(+) diff --git a/src/databricks/sql/client.py b/src/databricks/sql/client.py index 4e0ab941..15a09650 100755 --- a/src/databricks/sql/client.py +++ b/src/databricks/sql/client.py @@ -52,6 +52,13 @@ logger = logging.getLogger(__name__) +if pyarrow is None: + logger.warning( + "[WARN] pyarrow is not installed by default since databricks-sql-connector 4.0.0, " + "so Arrow-specific APIs (e.g. fetchmany_arrow) and cloud fetch are disabled. " + "If you need these features, run pip install pyarrow or pip install databricks-sql-connector[pyarrow]." + ) + DEFAULT_RESULT_BUFFER_SIZE_BYTES = 104857600 DEFAULT_ARRAY_SIZE = 100000 From 38ffa95734a290501324b2b6c323fdffffcb39b2 Mon Sep 17 00:00:00 2001 From: Jacky Hu Date: Tue, 12 Nov 2024 21:12:52 -0800 Subject: [PATCH 12/19] [PECO-1803] Remove sqlalchemy and update README.md (#469) Remove sqlalchemy and update README.md Signed-off-by: Jacky Hu --- CONTRIBUTING.md | 3 - README.md | 6 +- examples/sqlalchemy.py | 174 ------------ poetry.lock | 386 +------------------------- pyproject.toml | 10 +- src/databricks/sqlalchemy/__init__.py | 6 - 6 files changed, 12 insertions(+), 573 deletions(-) delete mode 100644 examples/sqlalchemy.py delete mode 100644 src/databricks/sqlalchemy/__init__.py diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ce0968d4..0cb25876 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -144,9 +144,6 @@ The `PySQLStagingIngestionTestSuite` namespace requires a cluster running DBR ve The suites marked `[not documented]` require additional configuration which will be documented at a later time. -#### SQLAlchemy dialect tests - -See README.tests.md for details. ### Code formatting diff --git a/README.md b/README.md index 54d4b178..156d914a 100644 --- a/README.md +++ b/README.md @@ -3,9 +3,9 @@ [![PyPI](https://img.shields.io/pypi/v/databricks-sql-connector?style=flat-square)](https://pypi.org/project/databricks-sql-connector/) [![Downloads](https://pepy.tech/badge/databricks-sql-connector)](https://pepy.tech/project/databricks-sql-connector) -The Databricks SQL Connector for Python allows you to develop Python applications that connect to Databricks clusters and SQL warehouses. It is a Thrift-based client with no dependencies on ODBC or JDBC. It conforms to the [Python DB API 2.0 specification](https://www.python.org/dev/peps/pep-0249/) and exposes a [SQLAlchemy](https://www.sqlalchemy.org/) dialect for use with tools like `pandas` and `alembic` which use SQLAlchemy to execute DDL. Use `pip install databricks-sql-connector[sqlalchemy]` to install with SQLAlchemy's dependencies. `pip install databricks-sql-connector[alembic]` will install alembic's dependencies. +The Databricks SQL Connector for Python allows you to develop Python applications that connect to Databricks clusters and SQL warehouses. It is a Thrift-based client with no dependencies on ODBC or JDBC. It conforms to the [Python DB API 2.0 specification](https://www.python.org/dev/peps/pep-0249/). -This connector uses Arrow as the data-exchange format, and supports APIs to directly fetch Arrow tables. Arrow tables are wrapped in the `ArrowQueue` class to provide a natural API to get several rows at a time. +This connector uses Arrow as the data-exchange format, and supports APIs (e.g. `fetchmany_arrow`) to directly fetch Arrow tables. Arrow tables are wrapped in the `ArrowQueue` class to provide a natural API to get several rows at a time. 
[PyArrow](https://arrow.apache.org/docs/python/index.html) is required to enable these APIs; you can install it via `pip install pyarrow` or `pip install databricks-sql-connector[pyarrow]`. You are welcome to file an issue here for general use cases. You can also contact Databricks Support [here](https://help.databricks.com). @@ -22,7 +22,7 @@ For the latest documentation, see ## Quickstart -Install the library with `pip install databricks-sql-connector` +Install the library with `pip install databricks-sql-connector[pyarrow]` ```bash export DATABRICKS_HOST=********.databricks.com diff --git a/examples/sqlalchemy.py b/examples/sqlalchemy.py deleted file mode 100644 index 7492dc5a..00000000 --- a/examples/sqlalchemy.py +++ /dev/null @@ -1,174 +0,0 @@ -""" -databricks-sql-connector includes a SQLAlchemy 2.0 dialect compatible with Databricks SQL. To install -its dependencies you can run `pip install databricks-sql-connector[sqlalchemy]`. - -The expected connection string format which you can pass to create_engine() is: - -databricks://token:dapi***@***.cloud.databricks.com?http_path=/sql/***&catalog=**&schema=** - -Our dialect implements the majority of SQLAlchemy 2.0's API. Because of the extent of SQLAlchemy's -capabilities it isn't feasible to provide examples of every usage in a single script, so we only -provide a basic one here. Learn more about usage in README.sqlalchemy.md in this repo. -""" - -# fmt: off - -import os -from datetime import date, datetime, time, timedelta, timezone -from decimal import Decimal -from uuid import UUID - -# By convention, backend-specific SQLA types are defined in uppercase -# This dialect exposes Databricks SQL's TIMESTAMP and TINYINT types -# as these are not covered by the generic, camelcase types shown below -from databricks.sqlalchemy import TIMESTAMP, TINYINT - -# Beside the CamelCase types shown below, line comments reflect -# the underlying Databricks SQL / Delta table type -from sqlalchemy import ( - BigInteger, # BIGINT - Boolean, # BOOLEAN - Column, - Date, # DATE - DateTime, # TIMESTAMP_NTZ - Integer, # INTEGER - Numeric, # DECIMAL - String, # STRING - Time, # STRING - Uuid, # STRING - create_engine, - select, ) -from sqlalchemy.orm import DeclarativeBase, Session - -host = os.getenv("DATABRICKS_SERVER_HOSTNAME") -http_path = os.getenv("DATABRICKS_HTTP_PATH") -access_token = os.getenv("DATABRICKS_TOKEN") -catalog = os.getenv("DATABRICKS_CATALOG") -schema = os.getenv("DATABRICKS_SCHEMA") - - -# Extra arguments are passed untouched to databricks-sql-connector -# See src/databricks/sql/thrift_backend.py for complete list -extra_connect_args = { - "_tls_verify_hostname": True, - "_user_agent_entry": "PySQL Example Script", -} - - -engine = create_engine( - f"databricks://token:{access_token}@{host}?http_path={http_path}&catalog={catalog}&schema={schema}", - connect_args=extra_connect_args, echo=True, ) - - -class Base(DeclarativeBase): - pass - - -# This object gives a usage example for each supported type -# for more details on these, see README.sqlalchemy.md -class SampleObject(Base): - __tablename__ = "pysql_sqlalchemy_example_table" - - bigint_col = Column(BigInteger, primary_key=True) - string_col = Column(String) - tinyint_col = Column(TINYINT) - int_col = Column(Integer) - numeric_col = Column(Numeric(10, 2)) - boolean_col = Column(Boolean) - date_col = Column(Date) - datetime_col = Column(TIMESTAMP) - datetime_col_ntz = Column(DateTime) - time_col = Column(Time) - uuid_col = Column(Uuid) - -# This generates a CREATE TABLE statement 
against the catalog and schema -# specified in the connection string -Base.metadata.create_all(engine) - -# Output SQL is: -# CREATE TABLE pysql_sqlalchemy_example_table ( -# bigint_col BIGINT NOT NULL, -# string_col STRING, -# tinyint_col SMALLINT, -# int_col INT, -# numeric_col DECIMAL(10, 2), -# boolean_col BOOLEAN, -# date_col DATE, -# datetime_col TIMESTAMP, -# datetime_col_ntz TIMESTAMP_NTZ, -# time_col STRING, -# uuid_col STRING, -# PRIMARY KEY (bigint_col) -# ) USING DELTA - -# The code that follows will INSERT a record using SQLAlchemy ORM containing these values -# and then SELECT it back out. The output is compared to the input to demonstrate that -# all type information is preserved. -sample_object = { - "bigint_col": 1234567890123456789, - "string_col": "foo", - "tinyint_col": -100, - "int_col": 5280, - "numeric_col": Decimal("525600.01"), - "boolean_col": True, - "date_col": date(2020, 12, 25), - "datetime_col": datetime( - 1991, 8, 3, 21, 30, 5, tzinfo=timezone(timedelta(hours=-8)) - ), - "datetime_col_ntz": datetime(1990, 12, 4, 6, 33, 41), - "time_col": time(23, 59, 59), - "uuid_col": UUID(int=255), -} -sa_obj = SampleObject(**sample_object) - -session = Session(engine) -session.add(sa_obj) -session.commit() - -# Output SQL is: -# INSERT INTO -# pysql_sqlalchemy_example_table ( -# bigint_col, -# string_col, -# tinyint_col, -# int_col, -# numeric_col, -# boolean_col, -# date_col, -# datetime_col, -# datetime_col_ntz, -# time_col, -# uuid_col -# ) -# VALUES -# ( -# :bigint_col, -# :string_col, -# :tinyint_col, -# :int_col, -# :numeric_col, -# :boolean_col, -# :date_col, -# :datetime_col, -# :datetime_col_ntz, -# :time_col, -# :uuid_col -# ) - -# Here we build a SELECT query using ORM -stmt = select(SampleObject).where(SampleObject.int_col == 5280) - -# Then fetch one result with session.scalar() -result = session.scalar(stmt) - -# Finally, we read out the input data and compare it to the output -compare = {key: getattr(result, key) for key in sample_object.keys()} -assert compare == sample_object - -# Then we drop the demonstration table -Base.metadata.drop_all(engine) - -# Output SQL is: -# DROP TABLE pysql_sqlalchemy_example_table diff --git a/poetry.lock b/poetry.lock index 2031daa8..a1763098 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,25 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. - -[[package]] -name = "alembic" -version = "1.14.0" -description = "A database migration tool for SQLAlchemy." -optional = true -python-versions = ">=3.8" -files = [ - {file = "alembic-1.14.0-py3-none-any.whl", hash = "sha256:99bd884ca390466db5e27ffccff1d179ec5c05c965cfefc0607e69f9e411cb25"}, - {file = "alembic-1.14.0.tar.gz", hash = "sha256:b00892b53b3642d0b8dbedba234dbf1924b69be83a9a769d5a624b01094e304b"}, -] - -[package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.9\""} -importlib-resources = {version = "*", markers = "python_version < \"3.9\""} -Mako = "*" -SQLAlchemy = ">=1.3.0" -typing-extensions = ">=4" - -[package.extras] -tz = ["backports.zoneinfo"] +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. 
[[package]] name = "astroid" @@ -220,20 +199,6 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -[[package]] -name = "databricks-sqlalchemy" -version = "2.0.1" -description = "Databricks SQLAlchemy plugin for Python" -optional = true -python-versions = "<4.0.0,>=3.8.0" -files = [ - {file = "databricks_sqlalchemy-2.0.1-py3-none-any.whl", hash = "sha256:b8e5aa7ef9add762a8ead039fe94e0f3a6e073ae4e644c88ebf29c97ec160998"}, - {file = "databricks_sqlalchemy-2.0.1.tar.gz", hash = "sha256:ce18879b4d84bd46ee3fdc864f097bdd573acc7310156d68049b0e17cfe9a6f9"}, -] - -[package.dependencies] -sqlalchemy = ">=2.0.21" - [[package]] name = "dill" version = "0.3.9" @@ -274,92 +239,6 @@ files = [ [package.extras] test = ["pytest (>=6)"] -[[package]] -name = "greenlet" -version = "3.1.1" -description = "Lightweight in-process concurrent programming" -optional = true -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, - {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, - {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, - {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, - {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, - {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, - {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, - {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, - {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, - {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, - {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, - {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, - {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, - {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, - {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, - {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, - {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, - {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, - {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, - {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, - {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, - {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, - {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, - {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, - {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, - {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, - {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, - {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, - {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, - {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, - {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, - {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, - {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - [[package]] name = "idna" version = "3.10" @@ -374,51 +253,6 @@ files = [ [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] -[[package]] -name = "importlib-metadata" -version = "8.5.0" -description = "Read metadata from Python packages" -optional = true -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, - {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, -] - -[package.dependencies] -zipp = ">=3.20" - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] -type = ["pytest-mypy"] - -[[package]] -name = "importlib-resources" -version = "6.4.5" -description = "Read resources from Python packages" -optional = true -python-versions = ">=3.8" -files = [ - {file = "importlib_resources-6.4.5-py3-none-any.whl", hash = "sha256:ac29d5f956f01d5e4bb63102a5a19957f1b9175e45649977264a1416783bb717"}, - {file = 
"importlib_resources-6.4.5.tar.gz", hash = "sha256:980862a1d16c9e147a59603677fa2aa5fd82b87f223b6cb870695bcfce830065"}, -] - -[package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["jaraco.test (>=5.4)", "pytest (>=6,!=8.1.*)", "zipp (>=3.17)"] -type = ["pytest-mypy"] - [[package]] name = "iniconfig" version = "2.0.0" @@ -494,94 +328,6 @@ docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"] flake8 = ["flake8"] tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] -[[package]] -name = "mako" -version = "1.3.6" -description = "A super-fast templating language that borrows the best ideas from the existing templating languages." -optional = true -python-versions = ">=3.8" -files = [ - {file = "Mako-1.3.6-py3-none-any.whl", hash = "sha256:a91198468092a2f1a0de86ca92690fb0cfc43ca90ee17e15d93662b4c04b241a"}, - {file = "mako-1.3.6.tar.gz", hash = "sha256:9ec3a1583713479fae654f83ed9fa8c9a4c16b7bb0daba0e6bbebff50c0d983d"}, -] - -[package.dependencies] -MarkupSafe = ">=0.9.2" - -[package.extras] -babel = ["Babel"] -lingua = ["lingua"] -testing = ["pytest"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." -optional = true -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = 
"sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - [[package]] name = "mccabe" version = "0.7.0" @@ -771,13 +517,13 @@ et-xmlfile = "*" [[package]] name = "packaging" -version = "24.1" +version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] @@ -1075,101 +821,6 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] -[[package]] -name = "sqlalchemy" -version = "2.0.36" -description = "Database Abstraction Library" -optional = true -python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:be9812b766cad94a25bc63bec11f88c4ad3629a0cec1cd5d4ba48dc23860486b"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aae840ebbd6cdd41af1c14590e5741665e5272d2fee999306673a1bb1fdb4d"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07b441f7d03b9a66299ce7ccf3ef2900abc81c0db434f42a5694a37bd73870f2"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = 
"sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-win32.whl", hash = "sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-win_amd64.whl", hash = "sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3d6718667da04294d7df1670d70eeddd414f313738d20a6f1d1f379e3139a545"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:72c28b84b174ce8af8504ca28ae9347d317f9dba3999e5981a3cd441f3712e24"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b11d0cfdd2b095e7b0686cf5fabeb9c67fae5b06d265d8180715b8cfa86522e3"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6a440293d802d3011028e14e4226da1434b373cbaf4a4bbb63f845761a708346"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-win32.whl", hash = "sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-win_amd64.whl", hash = "sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, - {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, - {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} -typing-extensions = ">=4.6.0" - -[package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet 
(!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)"] -mysql = ["mysqlclient (>=1.4.0)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=8)"] -oracle-oracledb = ["oracledb (>=1.0.1)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.29.1)"] -postgresql-psycopg = ["psycopg (>=3.0.7)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] -pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3_binary"] - [[package]] name = "thrift" version = "0.20.0" @@ -1190,13 +841,13 @@ twisted = ["twisted"] [[package]] name = "tomli" -version = "2.0.2" +version = "2.1.0" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" files = [ - {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, - {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, + {file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"}, + {file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"}, ] [[package]] @@ -1249,31 +900,10 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] -[[package]] -name = "zipp" -version = "3.20.2" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = true -python-versions = ">=3.8" -files = [ - {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, - {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] -type = ["pytest-mypy"] - [extras] -alembic = ["alembic", "databricks-sqlalchemy"] -databricks-sqlalchemy = ["databricks-sqlalchemy"] pyarrow = ["pyarrow"] [metadata] lock-version = "2.0" python-versions = "^3.8.0" -content-hash = "0d31d27041b5bcb2c26a5f4d62bf87ca32d7f6b518a1fa436d5ccfbd65fc7c07" +content-hash = "1210e343b538e9df0473d736f161e92ec26551236ef4cb0ad02adf59a6fb65e5" diff --git a/pyproject.toml b/pyproject.toml index a0a40e85..2c9ae89f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,14 +24,9 @@ numpy = [ openpyxl = "^3.0.10" urllib3 = ">=1.26" -databricks-sqlalchemy = { version = ">=2.0.0", optional = true } pyarrow = { version = ">=14.0.1,<17", optional=true } -alembic = { version = "^1.0.11", optional = true } - [tool.poetry.extras] -databricks-sqlalchemy = ["databricks-sqlalchemy"] -alembic = ["databricks-sqlalchemy", "alembic"] pyarrow = ["pyarrow"] [tool.poetry.dev-dependencies] @@ -45,9 +40,6 @@ pytest-dotenv = "^0.5.2" "Homepage" = 
"https://github.com/databricks/databricks-sql-python" "Bug Tracker" = "https://github.com/databricks/databricks-sql-python/issues" -[tool.poetry.plugins."sqlalchemy.dialects"] -"databricks" = "databricks.sqlalchemy:DatabricksDialect" - [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" @@ -64,5 +56,5 @@ markers = {"reviewed" = "Test case has been reviewed by Databricks"} minversion = "6.0" log_cli = "false" log_cli_level = "INFO" -testpaths = ["tests", "src/databricks/sqlalchemy/test_local"] +testpaths = ["tests"] env_files = ["test.env"] \ No newline at end of file diff --git a/src/databricks/sqlalchemy/__init__.py b/src/databricks/sqlalchemy/__init__.py deleted file mode 100644 index f79d4c20..00000000 --- a/src/databricks/sqlalchemy/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -try: - from databricks_sqlalchemy import * -except: - import warnings - - warnings.warn("Install databricks-sqlalchemy plugin before using this") \ No newline at end of file From 6ce555affe39a2453d5f8bd29ba893d2dc8f6ef2 Mon Sep 17 00:00:00 2001 From: Jothi Prakash Date: Wed, 13 Nov 2024 11:17:32 +0530 Subject: [PATCH 13/19] Removed all sqlalchemy related stuff --- CHANGELOG.md | 5 +++++ README.md | 19 ++++++++++++++++++- pyproject.toml | 3 +-- 3 files changed, 24 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e1a70f96..3eb7cacd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,10 @@ # Release History +# 4.0.0 + +- Split the connector into two separate packages: `databricks-sql-connector` and `databricks-sqlalchemy`. The `databricks-sql-connector` package contains the core functionality of the connector, while the `databricks-sqlalchemy` package contains the SQLAlchemy dialect for the connector. +- Pyarrow dependency is now optional in `databricks-sql-connector`. Users needing arrow are supposed to explicitly install pyarrow + # 3.6.0 (2024-10-25) - Support encryption headers in the cloud fetch request (https://github.com/databricks/databricks-sql-python/pull/460 by @jackyhu-db) diff --git a/README.md b/README.md index 156d914a..a4c5a130 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,12 @@ For the latest documentation, see ## Quickstart -Install the library with `pip install databricks-sql-connector[pyarrow]` +### Installing the core library +Install using `pip install databricks-sql-connector` + +### Installing the core library with PyArrow +Install using `pip install databricks-sql-connector[pyarrow]` + ```bash export DATABRICKS_HOST=********.databricks.com @@ -60,6 +65,18 @@ or to a Databricks Runtime interactive cluster (e.g. /sql/protocolv1/o/123456789 > to authenticate the target Databricks user account and needs to open the browser for authentication. So it > can only run on the user's machine. +## SQLAlchemy +Starting from `databricks-sql-connector` version 4.0.0 SQLAlchemy support has been extracted to a new library `databricks-sqlalchemy`. 
+ +- GitHub repository: [databricks-sqlalchemy](https://github.com/databricks/databricks-sqlalchemy) +- PyPI: [databricks-sqlalchemy](https://pypi.org/project/databricks-sqlalchemy/) + +### Quick SQLAlchemy guide +Users can now choose between the SQLAlchemy v1 and SQLAlchemy v2 dialects with the connector core: + +- Install the latest SQLAlchemy v1 using `pip install databricks-sqlalchemy~=1.0` +- Install SQLAlchemy v2 using `pip install databricks-sqlalchemy` + ## Contributing diff --git a/pyproject.toml b/pyproject.toml index 2c9ae89f..bfe42a2e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "databricks-sql-connector" -version = "4.0.0.b3" +version = "4.0.0.b4" description = "Databricks SQL Connector for Python" authors = ["Databricks "] license = "Apache-2.0" @@ -23,7 +23,6 @@ numpy = [ ] openpyxl = "^3.0.10" urllib3 = ">=1.26" - pyarrow = { version = ">=14.0.1,<17", optional=true } [tool.poetry.extras] From 87b12518c531128b547ee72d951c3adfaf6f8716 Mon Sep 17 00:00:00 2001 From: Jothi Prakash Date: Wed, 13 Nov 2024 11:18:58 +0530 Subject: [PATCH 14/19] generated the lock file --- poetry.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index a1763098..2cab5f14 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "astroid" From f9cafe5f006d3307c0cfba6dfdca54a1e60359a1 Mon Sep 17 00:00:00 2001 From: Jothi Prakash Date: Tue, 10 Dec 2024 14:27:17 +0530 Subject: [PATCH 15/19] Fixed failing tests --- .github/workflows/code-quality-checks.yml | 2 +- poetry.lock | 79 ++++++++++++----------- pyproject.toml | 2 +- 3 files changed, 43 insertions(+), 40 deletions(-) diff --git a/.github/workflows/code-quality-checks.yml b/.github/workflows/code-quality-checks.yml index 80ac94a7..98653a61 100644 --- a/.github/workflows/code-quality-checks.yml +++ b/.github/workflows/code-quality-checks.yml @@ -52,7 +52,7 @@ jobs: # install your root project, if required #---------------------------------------------- - name: Install library - run: poetry install --no-interaction + run: poetry install --no-interaction --all-extras #---------------------------------------------- # run test suite #---------------------------------------------- diff --git a/poetry.lock b/poetry.lock index b230ad1c..13836837 100644 --- a/poetry.lock +++ b/poetry.lock @@ -656,52 +656,55 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pyarrow" -version = "16.1.0" +version = "17.0.0" description = "Python library for Apache Arrow" optional = true python-versions = ">=3.8" files = [ - {file = "pyarrow-16.1.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:17e23b9a65a70cc733d8b738baa6ad3722298fa0c81d88f63ff94bf25eaa77b9"}, - {file = "pyarrow-16.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4740cc41e2ba5d641071d0ab5e9ef9b5e6e8c7611351a5cb7c1d175eaf43674a"}, - {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98100e0268d04e0eec47b73f20b39c45b4006f3c4233719c3848aa27a03c1aef"}, - {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f68f409e7b283c085f2da014f9ef81e885d90dcd733bd648cfba3ef265961848"}, - {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = 
"sha256:a8914cd176f448e09746037b0c6b3a9d7688cef451ec5735094055116857580c"}, - {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:48be160782c0556156d91adbdd5a4a7e719f8d407cb46ae3bb4eaee09b3111bd"}, - {file = "pyarrow-16.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9cf389d444b0f41d9fe1444b70650fea31e9d52cfcb5f818b7888b91b586efff"}, - {file = "pyarrow-16.1.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:d0ebea336b535b37eee9eee31761813086d33ed06de9ab6fc6aaa0bace7b250c"}, - {file = "pyarrow-16.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e73cfc4a99e796727919c5541c65bb88b973377501e39b9842ea71401ca6c1c"}, - {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf9251264247ecfe93e5f5a0cd43b8ae834f1e61d1abca22da55b20c788417f6"}, - {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddf5aace92d520d3d2a20031d8b0ec27b4395cab9f74e07cc95edf42a5cc0147"}, - {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:25233642583bf658f629eb230b9bb79d9af4d9f9229890b3c878699c82f7d11e"}, - {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a33a64576fddfbec0a44112eaf844c20853647ca833e9a647bfae0582b2ff94b"}, - {file = "pyarrow-16.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:185d121b50836379fe012753cf15c4ba9638bda9645183ab36246923875f8d1b"}, - {file = "pyarrow-16.1.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:2e51ca1d6ed7f2e9d5c3c83decf27b0d17bb207a7dea986e8dc3e24f80ff7d6f"}, - {file = "pyarrow-16.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06ebccb6f8cb7357de85f60d5da50e83507954af617d7b05f48af1621d331c9a"}, - {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b04707f1979815f5e49824ce52d1dceb46e2f12909a48a6a753fe7cafbc44a0c"}, - {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d32000693deff8dc5df444b032b5985a48592c0697cb6e3071a5d59888714e2"}, - {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:8785bb10d5d6fd5e15d718ee1d1f914fe768bf8b4d1e5e9bf253de8a26cb1628"}, - {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e1369af39587b794873b8a307cc6623a3b1194e69399af0efd05bb202195a5a7"}, - {file = "pyarrow-16.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:febde33305f1498f6df85e8020bca496d0e9ebf2093bab9e0f65e2b4ae2b3444"}, - {file = "pyarrow-16.1.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b5f5705ab977947a43ac83b52ade3b881eb6e95fcc02d76f501d549a210ba77f"}, - {file = "pyarrow-16.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0d27bf89dfc2576f6206e9cd6cf7a107c9c06dc13d53bbc25b0bd4556f19cf5f"}, - {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d07de3ee730647a600037bc1d7b7994067ed64d0eba797ac74b2bc77384f4c2"}, - {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbef391b63f708e103df99fbaa3acf9f671d77a183a07546ba2f2c297b361e83"}, - {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19741c4dbbbc986d38856ee7ddfdd6a00fc3b0fc2d928795b95410d38bb97d15"}, - {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:f2c5fb249caa17b94e2b9278b36a05ce03d3180e6da0c4c3b3ce5b2788f30eed"}, - {file = "pyarrow-16.1.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:e6b6d3cd35fbb93b70ade1336022cc1147b95ec6af7d36906ca7fe432eb09710"}, - {file = "pyarrow-16.1.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:18da9b76a36a954665ccca8aa6bd9f46c1145f79c0bb8f4f244f5f8e799bca55"}, - {file = "pyarrow-16.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:99f7549779b6e434467d2aa43ab2b7224dd9e41bdde486020bae198978c9e05e"}, - {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f07fdffe4fd5b15f5ec15c8b64584868d063bc22b86b46c9695624ca3505b7b4"}, - {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddfe389a08ea374972bd4065d5f25d14e36b43ebc22fc75f7b951f24378bf0b5"}, - {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:3b20bd67c94b3a2ea0a749d2a5712fc845a69cb5d52e78e6449bbd295611f3aa"}, - {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ba8ac20693c0bb0bf4b238751d4409e62852004a8cf031c73b0e0962b03e45e3"}, - {file = "pyarrow-16.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:31a1851751433d89a986616015841977e0a188662fcffd1a5677453f1df2de0a"}, - {file = "pyarrow-16.1.0.tar.gz", hash = "sha256:15fbb22ea96d11f0b5768504a3f961edab25eaf4197c341720c4a387f6c60315"}, + {file = "pyarrow-17.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a5c8b238d47e48812ee577ee20c9a2779e6a5904f1708ae240f53ecbee7c9f07"}, + {file = "pyarrow-17.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db023dc4c6cae1015de9e198d41250688383c3f9af8f565370ab2b4cb5f62655"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da1e060b3876faa11cee287839f9cc7cdc00649f475714b8680a05fd9071d545"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c06d4624c0ad6674364bb46ef38c3132768139ddec1c56582dbac54f2663e2"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:fa3c246cc58cb5a4a5cb407a18f193354ea47dd0648194e6265bd24177982fe8"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:f7ae2de664e0b158d1607699a16a488de3d008ba99b3a7aa5de1cbc13574d047"}, + {file = "pyarrow-17.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5984f416552eea15fd9cee03da53542bf4cddaef5afecefb9aa8d1010c335087"}, + {file = "pyarrow-17.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1c8856e2ef09eb87ecf937104aacfa0708f22dfeb039c363ec99735190ffb977"}, + {file = "pyarrow-17.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e19f569567efcbbd42084e87f948778eb371d308e137a0f97afe19bb860ccb3"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b244dc8e08a23b3e352899a006a26ae7b4d0da7bb636872fa8f5884e70acf15"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b72e87fe3e1db343995562f7fff8aee354b55ee83d13afba65400c178ab2597"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dc5c31c37409dfbc5d014047817cb4ccd8c1ea25d19576acf1a001fe07f5b420"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e3343cb1e88bc2ea605986d4b94948716edc7a8d14afd4e2c097232f729758b4"}, + {file = "pyarrow-17.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:a27532c38f3de9eb3e90ecab63dfda948a8ca859a66e3a47f5f42d1e403c4d03"}, + {file = "pyarrow-17.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9b8a823cea605221e61f34859dcc03207e52e409ccf6354634143e23af7c8d22"}, + {file = 
"pyarrow-17.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1e70de6cb5790a50b01d2b686d54aaf73da01266850b05e3af2a1bc89e16053"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0071ce35788c6f9077ff9ecba4858108eebe2ea5a3f7cf2cf55ebc1dbc6ee24a"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:757074882f844411fcca735e39aae74248a1531367a7c80799b4266390ae51cc"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ba11c4f16976e89146781a83833df7f82077cdab7dc6232c897789343f7891a"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b0c6ac301093b42d34410b187bba560b17c0330f64907bfa4f7f7f2444b0cf9b"}, + {file = "pyarrow-17.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:392bc9feabc647338e6c89267635e111d71edad5fcffba204425a7c8d13610d7"}, + {file = "pyarrow-17.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:af5ff82a04b2171415f1410cff7ebb79861afc5dae50be73ce06d6e870615204"}, + {file = "pyarrow-17.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:edca18eaca89cd6382dfbcff3dd2d87633433043650c07375d095cd3517561d8"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c7916bff914ac5d4a8fe25b7a25e432ff921e72f6f2b7547d1e325c1ad9d155"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f553ca691b9e94b202ff741bdd40f6ccb70cdd5fbf65c187af132f1317de6145"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0cdb0e627c86c373205a2f94a510ac4376fdc523f8bb36beab2e7f204416163c"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:d7d192305d9d8bc9082d10f361fc70a73590a4c65cf31c3e6926cd72b76bc35c"}, + {file = "pyarrow-17.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:02dae06ce212d8b3244dd3e7d12d9c4d3046945a5933d28026598e9dbbda1fca"}, + {file = "pyarrow-17.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:13d7a460b412f31e4c0efa1148e1d29bdf18ad1411eb6757d38f8fbdcc8645fb"}, + {file = "pyarrow-17.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b564a51fbccfab5a04a80453e5ac6c9954a9c5ef2890d1bcf63741909c3f8df"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32503827abbc5aadedfa235f5ece8c4f8f8b0a3cf01066bc8d29de7539532687"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a155acc7f154b9ffcc85497509bcd0d43efb80d6f733b0dc3bb14e281f131c8b"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:dec8d129254d0188a49f8a1fc99e0560dc1b85f60af729f47de4046015f9b0a5"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a48ddf5c3c6a6c505904545c25a4ae13646ae1f8ba703c4df4a1bfe4f4006bda"}, + {file = "pyarrow-17.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:42bf93249a083aca230ba7e2786c5f673507fa97bbd9725a1e2754715151a204"}, + {file = "pyarrow-17.0.0.tar.gz", hash = "sha256:4beca9521ed2c0921c1023e68d097d0299b62c362639ea315572a58f3f50fd28"}, ] [package.dependencies] numpy = ">=1.16.6" +[package.extras] +test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] + [[package]] name = "pylint" version = "3.2.7" @@ -955,4 +958,4 @@ pyarrow = ["pyarrow"] [metadata] lock-version = "2.0" python-versions = "^3.8.0" -content-hash = "b3b6e2fe0defdbf3c973df8c3f64ee8257471b752e38e99d33ee42cde3a1d72f" +content-hash = 
"cdf42dc5f4e437ae70fc167f9cb7400d947f5288ee46f132393a14a4e5502bf8" diff --git a/pyproject.toml b/pyproject.toml index b2774ae8..4f4e72a5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ numpy = [ ] openpyxl = "^3.0.10" urllib3 = ">=1.26" -pyarrow = { version = ">=14.0.1,<17", optional=true } +pyarrow = { version = ">=14.0.1", optional=true } [tool.poetry.extras] pyarrow = ["pyarrow"] From e4205cc06f54c4bb757c0141c1afae79cd64efc9 Mon Sep 17 00:00:00 2001 From: Jothi Prakash Date: Wed, 11 Dec 2024 12:07:05 +0530 Subject: [PATCH 16/19] removed poetry.lock --- poetry.lock | 961 ---------------------------------------------------- 1 file changed, 961 deletions(-) delete mode 100644 poetry.lock diff --git a/poetry.lock b/poetry.lock deleted file mode 100644 index 13836837..00000000 --- a/poetry.lock +++ /dev/null @@ -1,961 +0,0 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. - -[[package]] -name = "astroid" -version = "3.2.4" -description = "An abstract syntax tree for Python with inference support." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "astroid-3.2.4-py3-none-any.whl", hash = "sha256:413658a61eeca6202a59231abb473f932038fbcbf1666587f66d482083413a25"}, - {file = "astroid-3.2.4.tar.gz", hash = "sha256:0e14202810b30da1b735827f78f5157be2bbd4a7a59b7707ca0bfc2fb4c0063a"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} - -[[package]] -name = "black" -version = "22.12.0" -description = "The uncompromising code formatter." -optional = false -python-versions = ">=3.7" -files = [ - {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, - {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, - {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, - {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, - {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, - {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, - {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, - {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, - {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, - {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, - {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, - {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = 
">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "certifi" -version = "2024.8.30" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.4.0" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", 
hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, - {file = 
"charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, - {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, - {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, -] - -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "dill" -version = "0.3.9" -description = "serialize all of Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, - {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, -] - -[package.extras] -graph = ["objgraph (>=1.7.2)"] -profile = ["gprof2dot (>=2022.7.29)"] - -[[package]] -name = "et-xmlfile" -version = "2.0.0" -description = "An implementation of lxml.xmlfile for the standard library" -optional = false -python-versions = ">=3.8" -files = [ - {file = "et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa"}, - {file = "et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.2" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "idna" -version = "3.10" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.6" -files = [ - {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, - {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, -] - -[package.extras] -all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] - -[[package]] -name = 
"iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "isort" -version = "5.13.2" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, -] - -[package.extras] -colors = ["colorama (>=0.4.6)"] - -[[package]] -name = "lz4" -version = "4.3.3" -description = "LZ4 Bindings for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "lz4-4.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b891880c187e96339474af2a3b2bfb11a8e4732ff5034be919aa9029484cd201"}, - {file = "lz4-4.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:222a7e35137d7539c9c33bb53fcbb26510c5748779364014235afc62b0ec797f"}, - {file = "lz4-4.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f76176492ff082657ada0d0f10c794b6da5800249ef1692b35cf49b1e93e8ef7"}, - {file = "lz4-4.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1d18718f9d78182c6b60f568c9a9cec8a7204d7cb6fad4e511a2ef279e4cb05"}, - {file = "lz4-4.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cdc60e21ec70266947a48839b437d46025076eb4b12c76bd47f8e5eb8a75dcc"}, - {file = "lz4-4.3.3-cp310-cp310-win32.whl", hash = "sha256:c81703b12475da73a5d66618856d04b1307e43428a7e59d98cfe5a5d608a74c6"}, - {file = "lz4-4.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:43cf03059c0f941b772c8aeb42a0813d68d7081c009542301637e5782f8a33e2"}, - {file = "lz4-4.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:30e8c20b8857adef7be045c65f47ab1e2c4fabba86a9fa9a997d7674a31ea6b6"}, - {file = "lz4-4.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7b1839f795315e480fb87d9bc60b186a98e3e5d17203c6e757611ef7dcef61"}, - {file = "lz4-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edfd858985c23523f4e5a7526ca6ee65ff930207a7ec8a8f57a01eae506aaee7"}, - {file = "lz4-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e9c410b11a31dbdc94c05ac3c480cb4b222460faf9231f12538d0074e56c563"}, - {file = "lz4-4.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2507ee9c99dbddd191c86f0e0c8b724c76d26b0602db9ea23232304382e1f21"}, - {file = "lz4-4.3.3-cp311-cp311-win32.whl", hash = "sha256:f180904f33bdd1e92967923a43c22899e303906d19b2cf8bb547db6653ea6e7d"}, - {file = "lz4-4.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:b14d948e6dce389f9a7afc666d60dd1e35fa2138a8ec5306d30cd2e30d36b40c"}, - {file = "lz4-4.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e36cd7b9d4d920d3bfc2369840da506fa68258f7bb176b8743189793c055e43d"}, - {file = "lz4-4.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:31ea4be9d0059c00b2572d700bf2c1bc82f241f2c3282034a759c9a4d6ca4dc2"}, - {file = "lz4-4.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:33c9a6fd20767ccaf70649982f8f3eeb0884035c150c0b818ea660152cf3c809"}, - {file = "lz4-4.3.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca8fccc15e3add173da91be8f34121578dc777711ffd98d399be35487c934bf"}, - {file = "lz4-4.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d84b479ddf39fe3ea05387f10b779155fc0990125f4fb35d636114e1c63a2e"}, - {file = "lz4-4.3.3-cp312-cp312-win32.whl", hash = "sha256:337cb94488a1b060ef1685187d6ad4ba8bc61d26d631d7ba909ee984ea736be1"}, - {file = "lz4-4.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:5d35533bf2cee56f38ced91f766cd0038b6abf46f438a80d50c52750088be93f"}, - {file = "lz4-4.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:363ab65bf31338eb364062a15f302fc0fab0a49426051429866d71c793c23394"}, - {file = "lz4-4.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a136e44a16fc98b1abc404fbabf7f1fada2bdab6a7e970974fb81cf55b636d0"}, - {file = "lz4-4.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abc197e4aca8b63f5ae200af03eb95fb4b5055a8f990079b5bdf042f568469dd"}, - {file = "lz4-4.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56f4fe9c6327adb97406f27a66420b22ce02d71a5c365c48d6b656b4aaeb7775"}, - {file = "lz4-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0e822cd7644995d9ba248cb4b67859701748a93e2ab7fc9bc18c599a52e4604"}, - {file = "lz4-4.3.3-cp38-cp38-win32.whl", hash = "sha256:24b3206de56b7a537eda3a8123c644a2b7bf111f0af53bc14bed90ce5562d1aa"}, - {file = "lz4-4.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:b47839b53956e2737229d70714f1d75f33e8ac26e52c267f0197b3189ca6de24"}, - {file = "lz4-4.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6756212507405f270b66b3ff7f564618de0606395c0fe10a7ae2ffcbbe0b1fba"}, - {file = "lz4-4.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee9ff50557a942d187ec85462bb0960207e7ec5b19b3b48949263993771c6205"}, - {file = "lz4-4.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b901c7784caac9a1ded4555258207d9e9697e746cc8532129f150ffe1f6ba0d"}, - {file = "lz4-4.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d9ec061b9eca86e4dcc003d93334b95d53909afd5a32c6e4f222157b50c071"}, - {file = "lz4-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4c7bf687303ca47d69f9f0133274958fd672efaa33fb5bcde467862d6c621f0"}, - {file = "lz4-4.3.3-cp39-cp39-win32.whl", hash = "sha256:054b4631a355606e99a42396f5db4d22046a3397ffc3269a348ec41eaebd69d2"}, - {file = "lz4-4.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:eac9af361e0d98335a02ff12fb56caeb7ea1196cf1a49dbf6f17828a131da807"}, - {file = "lz4-4.3.3.tar.gz", hash = "sha256:01fe674ef2889dbb9899d8a67361e0c4a2c833af5aeb37dd505727cf5d2a131e"}, -] - -[package.extras] -docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"] -flake8 = ["flake8"] -tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] - -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - -[[package]] -name = "mypy" -version = "1.13.0" -description = "Optional 
static typing for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, - {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, - {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, - {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, - {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, - {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, - {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, - {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, - {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, - {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, - {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, - {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, - {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, - {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, - {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, - {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, - {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, - {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, - {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, - {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, - {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, - {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, - {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, - {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, - {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, - {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, - {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, - {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, - {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, - {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, - {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, - {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, -] - -[package.dependencies] -mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.6.0" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -faster-cache = ["orjson"] -install-types = ["pip"] -mypyc = ["setuptools (>=50)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." 
-optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "numpy" -version = "1.24.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, - {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, - {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, - {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, - {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, - {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, - {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, - {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, - {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, - {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, - {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, - {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, - {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, - {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, - {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, - {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, - {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, - {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, - {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, - {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, - {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, - {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, - {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, - {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, - {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, -] - -[[package]] -name = "numpy" -version = "2.2.0" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.10" -files = [ - {file = "numpy-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1e25507d85da11ff5066269d0bd25d06e0a0f2e908415534f3e603d2a78e4ffa"}, - {file = "numpy-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a62eb442011776e4036af5c8b1a00b706c5bc02dc15eb5344b0c750428c94219"}, - {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:b606b1aaf802e6468c2608c65ff7ece53eae1a6874b3765f69b8ceb20c5fa78e"}, - {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:36b2b43146f646642b425dd2027730f99bac962618ec2052932157e213a040e9"}, - {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fe8f3583e0607ad4e43a954e35c1748b553bfe9fdac8635c02058023277d1b3"}, - {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:122fd2fcfafdefc889c64ad99c228d5a1f9692c3a83f56c292618a59aa60ae83"}, - {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3f2f5cddeaa4424a0a118924b988746db6ffa8565e5829b1841a8a3bd73eb59a"}, - {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7fe4bb0695fe986a9e4deec3b6857003b4cfe5c5e4aac0b95f6a658c14635e31"}, - {file = "numpy-2.2.0-cp310-cp310-win32.whl", hash = "sha256:b30042fe92dbd79f1ba7f6898fada10bdaad1847c44f2dff9a16147e00a93661"}, - {file = "numpy-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dc1d6d66f8d37843ed281773c7174f03bf7ad826523f73435deb88ba60d2d4"}, - {file = "numpy-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9874bc2ff574c40ab7a5cbb7464bf9b045d617e36754a7bc93f933d52bd9ffc6"}, - {file = "numpy-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0da8495970f6b101ddd0c38ace92edea30e7e12b9a926b57f5fabb1ecc25bb90"}, - {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0557eebc699c1c34cccdd8c3778c9294e8196df27d713706895edc6f57d29608"}, - {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:3579eaeb5e07f3ded59298ce22b65f877a86ba8e9fe701f5576c99bb17c283da"}, - {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40deb10198bbaa531509aad0cd2f9fadb26c8b94070831e2208e7df543562b74"}, - {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c2aed8fcf8abc3020d6a9ccb31dbc9e7d7819c56a348cc88fd44be269b37427e"}, - {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a222d764352c773aa5ebde02dd84dba3279c81c6db2e482d62a3fa54e5ece69b"}, - {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4e58666988605e251d42c2818c7d3d8991555381be26399303053b58a5bbf30d"}, - {file = "numpy-2.2.0-cp311-cp311-win32.whl", hash = "sha256:4723a50e1523e1de4fccd1b9a6dcea750c2102461e9a02b2ac55ffeae09a4410"}, - {file = "numpy-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:16757cf28621e43e252c560d25b15f18a2f11da94fea344bf26c599b9cf54b73"}, - {file = "numpy-2.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cff210198bb4cae3f3c100444c5eaa573a823f05c253e7188e1362a5555235b3"}, - {file = "numpy-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58b92a5828bd4d9aa0952492b7de803135038de47343b2aa3cc23f3b71a3dc4e"}, - {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:ebe5e59545401fbb1b24da76f006ab19734ae71e703cdb4a8b347e84a0cece67"}, - {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e2b8cd48a9942ed3f85b95ca4105c45758438c7ed28fff1e4ce3e57c3b589d8e"}, - {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57fcc997ffc0bef234b8875a54d4058afa92b0b0c4223fc1f62f24b3b5e86038"}, - {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ad7d11b309bd132d74397fcf2920933c9d1dc865487128f5c03d580f2c3d03"}, - {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cb24cca1968b21355cc6f3da1a20cd1cebd8a023e3c5b09b432444617949085a"}, - {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0798b138c291d792f8ea40fe3768610f3c7dd2574389e37c3f26573757c8f7ef"}, - {file = "numpy-2.2.0-cp312-cp312-win32.whl", hash = "sha256:afe8fb968743d40435c3827632fd36c5fbde633b0423da7692e426529b1759b1"}, - {file = "numpy-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:3a4199f519e57d517ebd48cb76b36c82da0360781c6a0353e64c0cac30ecaad3"}, - {file = "numpy-2.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f8c8b141ef9699ae777c6278b52c706b653bf15d135d302754f6b2e90eb30367"}, - {file = "numpy-2.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f0986e917aca18f7a567b812ef7ca9391288e2acb7a4308aa9d265bd724bdae"}, - {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:1c92113619f7b272838b8d6702a7f8ebe5edea0df48166c47929611d0b4dea69"}, - {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5a145e956b374e72ad1dff82779177d4a3c62bc8248f41b80cb5122e68f22d13"}, - {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18142b497d70a34b01642b9feabb70156311b326fdddd875a9981f34a369b671"}, - {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7d41d1612c1a82b64697e894b75db6758d4f21c3ec069d841e60ebe54b5b571"}, - {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a98f6f20465e7618c83252c02041517bd2f7ea29be5378f09667a8f654a5918d"}, - {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e09d40edfdb4e260cb1567d8ae770ccf3b8b7e9f0d9b5c2a9992696b30ce2742"}, - {file = "numpy-2.2.0-cp313-cp313-win32.whl", hash = "sha256:3905a5fffcc23e597ee4d9fb3fcd209bd658c352657548db7316e810ca80458e"}, - {file = "numpy-2.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:a184288538e6ad699cbe6b24859206e38ce5fba28f3bcfa51c90d0502c1582b2"}, 
- {file = "numpy-2.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7832f9e8eb00be32f15fdfb9a981d6955ea9adc8574c521d48710171b6c55e95"}, - {file = "numpy-2.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0dd071b95bbca244f4cb7f70b77d2ff3aaaba7fa16dc41f58d14854a6204e6c"}, - {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:b0b227dcff8cdc3efbce66d4e50891f04d0a387cce282fe1e66199146a6a8fca"}, - {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ab153263a7c5ccaf6dfe7e53447b74f77789f28ecb278c3b5d49db7ece10d6d"}, - {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e500aba968a48e9019e42c0c199b7ec0696a97fa69037bea163b55398e390529"}, - {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:440cfb3db4c5029775803794f8638fbdbf71ec702caf32735f53b008e1eaece3"}, - {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a55dc7a7f0b6198b07ec0cd445fbb98b05234e8b00c5ac4874a63372ba98d4ab"}, - {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4bddbaa30d78c86329b26bd6aaaea06b1e47444da99eddac7bf1e2fab717bd72"}, - {file = "numpy-2.2.0-cp313-cp313t-win32.whl", hash = "sha256:30bf971c12e4365153afb31fc73f441d4da157153f3400b82db32d04de1e4066"}, - {file = "numpy-2.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d35717333b39d1b6bb8433fa758a55f1081543de527171543a2b710551d40881"}, - {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e12c6c1ce84628c52d6367863773f7c8c8241be554e8b79686e91a43f1733773"}, - {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:b6207dc8fb3c8cb5668e885cef9ec7f70189bec4e276f0ff70d5aa078d32c88e"}, - {file = "numpy-2.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a50aeff71d0f97b6450d33940c7181b08be1441c6c193e678211bff11aa725e7"}, - {file = "numpy-2.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:df12a1f99b99f569a7c2ae59aa2d31724e8d835fc7f33e14f4792e3071d11221"}, - {file = "numpy-2.2.0.tar.gz", hash = "sha256:140dd80ff8981a583a60980be1a655068f8adebf7a45a06a6858c873fcdcd4a0"}, -] - -[[package]] -name = "oauthlib" -version = "3.2.2" -description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -optional = false -python-versions = ">=3.6" -files = [ - {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, - {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, -] - -[package.extras] -rsa = ["cryptography (>=3.0.0)"] -signals = ["blinker (>=1.4.0)"] -signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] - -[[package]] -name = "openpyxl" -version = "3.1.5" -description = "A Python library to read/write Excel 2010 xlsx/xlsm files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2"}, - {file = "openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050"}, -] - -[package.dependencies] -et-xmlfile = "*" - -[[package]] -name = "packaging" -version = "24.2" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = 
"sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, -] - -[[package]] -name = "pandas" -version = "2.0.3" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, - {file = "pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f"}, - {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0c6f76a0f1ba361551f3e6dceaff06bde7514a374aa43e33b588ec10420183"}, - {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba619e410a21d8c387a1ea6e8a0e49bb42216474436245718d7f2e88a2f8d7c0"}, - {file = "pandas-2.0.3-cp310-cp310-win32.whl", hash = "sha256:3ef285093b4fe5058eefd756100a367f27029913760773c8bf1d2d8bebe5d210"}, - {file = "pandas-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:9ee1a69328d5c36c98d8e74db06f4ad518a1840e8ccb94a4ba86920986bb617e"}, - {file = "pandas-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b084b91d8d66ab19f5bb3256cbd5ea661848338301940e17f4492b2ce0801fe8"}, - {file = "pandas-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:37673e3bdf1551b95bf5d4ce372b37770f9529743d2498032439371fc7b7eb26"}, - {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9cb1e14fdb546396b7e1b923ffaeeac24e4cedd14266c3497216dd4448e4f2d"}, - {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9cd88488cceb7635aebb84809d087468eb33551097d600c6dad13602029c2df"}, - {file = "pandas-2.0.3-cp311-cp311-win32.whl", hash = "sha256:694888a81198786f0e164ee3a581df7d505024fbb1f15202fc7db88a71d84ebd"}, - {file = "pandas-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:6a21ab5c89dcbd57f78d0ae16630b090eec626360085a4148693def5452d8a6b"}, - {file = "pandas-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4da0d45e7f34c069fe4d522359df7d23badf83abc1d1cef398895822d11061"}, - {file = "pandas-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32fca2ee1b0d93dd71d979726b12b61faa06aeb93cf77468776287f41ff8fdc5"}, - {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258d3624b3ae734490e4d63c430256e716f488c4fcb7c8e9bde2d3aa46c29089"}, - {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eae3dc34fa1aa7772dd3fc60270d13ced7346fcbcfee017d3132ec625e23bb0"}, - {file = "pandas-2.0.3-cp38-cp38-win32.whl", hash = "sha256:f3421a7afb1a43f7e38e82e844e2bca9a6d793d66c1a7f9f0ff39a795bbc5e02"}, - {file = "pandas-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:69d7f3884c95da3a31ef82b7618af5710dba95bb885ffab339aad925c3e8ce78"}, - {file = "pandas-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5247fb1ba347c1261cbbf0fcfba4a3121fbb4029d95d9ef4dc45406620b25c8b"}, - {file = "pandas-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81af086f4543c9d8bb128328b5d32e9986e0c84d3ee673a2ac6fb57fd14f755e"}, - {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1994c789bf12a7c5098277fb43836ce090f1073858c10f9220998ac74f37c69b"}, - {file = 
"pandas-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec591c48e29226bcbb316e0c1e9423622bc7a4eaf1ef7c3c9fa1a3981f89641"}, - {file = "pandas-2.0.3-cp39-cp39-win32.whl", hash = "sha256:04dbdbaf2e4d46ca8da896e1805bc04eb85caa9a82e259e8eed00254d5e0c682"}, - {file = "pandas-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:1168574b036cd8b93abc746171c9b4f1b83467438a5e45909fed645cf8692dbc"}, - {file = "pandas-2.0.3.tar.gz", hash = "sha256:c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c"}, -] - -[package.dependencies] -numpy = [ - {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, - {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, -] -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.1" - -[package.extras] -all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"] -aws = ["s3fs (>=2021.08.0)"] -clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"] -compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"] -computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"] -feather = ["pyarrow (>=7.0.0)"] -fss = ["fsspec (>=2021.07.0)"] -gcp = ["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"] -hdf5 = ["tables (>=3.6.1)"] -html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"] -mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"] -parquet = ["pyarrow (>=7.0.0)"] -performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"] -plot = ["matplotlib (>=3.6.1)"] -postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"] -spss = ["pyreadstat (>=1.1.2)"] -sql-other = ["SQLAlchemy (>=1.4.16)"] -test = ["hypothesis (>=6.34.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.6.3)"] - -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern matching of file paths." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - -[[package]] -name = "platformdirs" -version = "4.3.6" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, - {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, -] - -[package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.11.2)"] - -[[package]] -name = "pluggy" -version = "1.5.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "pyarrow" -version = "17.0.0" -description = "Python library for Apache Arrow" -optional = true -python-versions = ">=3.8" -files = [ - {file = "pyarrow-17.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a5c8b238d47e48812ee577ee20c9a2779e6a5904f1708ae240f53ecbee7c9f07"}, - {file = "pyarrow-17.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db023dc4c6cae1015de9e198d41250688383c3f9af8f565370ab2b4cb5f62655"}, - {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da1e060b3876faa11cee287839f9cc7cdc00649f475714b8680a05fd9071d545"}, - {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c06d4624c0ad6674364bb46ef38c3132768139ddec1c56582dbac54f2663e2"}, - {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:fa3c246cc58cb5a4a5cb407a18f193354ea47dd0648194e6265bd24177982fe8"}, - {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:f7ae2de664e0b158d1607699a16a488de3d008ba99b3a7aa5de1cbc13574d047"}, - {file = "pyarrow-17.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5984f416552eea15fd9cee03da53542bf4cddaef5afecefb9aa8d1010c335087"}, - {file = "pyarrow-17.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1c8856e2ef09eb87ecf937104aacfa0708f22dfeb039c363ec99735190ffb977"}, - {file = "pyarrow-17.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e19f569567efcbbd42084e87f948778eb371d308e137a0f97afe19bb860ccb3"}, - {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b244dc8e08a23b3e352899a006a26ae7b4d0da7bb636872fa8f5884e70acf15"}, - {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b72e87fe3e1db343995562f7fff8aee354b55ee83d13afba65400c178ab2597"}, - {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dc5c31c37409dfbc5d014047817cb4ccd8c1ea25d19576acf1a001fe07f5b420"}, - {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e3343cb1e88bc2ea605986d4b94948716edc7a8d14afd4e2c097232f729758b4"}, - {file = "pyarrow-17.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:a27532c38f3de9eb3e90ecab63dfda948a8ca859a66e3a47f5f42d1e403c4d03"}, - {file = "pyarrow-17.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9b8a823cea605221e61f34859dcc03207e52e409ccf6354634143e23af7c8d22"}, - {file = 
"pyarrow-17.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1e70de6cb5790a50b01d2b686d54aaf73da01266850b05e3af2a1bc89e16053"}, - {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0071ce35788c6f9077ff9ecba4858108eebe2ea5a3f7cf2cf55ebc1dbc6ee24a"}, - {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:757074882f844411fcca735e39aae74248a1531367a7c80799b4266390ae51cc"}, - {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ba11c4f16976e89146781a83833df7f82077cdab7dc6232c897789343f7891a"}, - {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b0c6ac301093b42d34410b187bba560b17c0330f64907bfa4f7f7f2444b0cf9b"}, - {file = "pyarrow-17.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:392bc9feabc647338e6c89267635e111d71edad5fcffba204425a7c8d13610d7"}, - {file = "pyarrow-17.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:af5ff82a04b2171415f1410cff7ebb79861afc5dae50be73ce06d6e870615204"}, - {file = "pyarrow-17.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:edca18eaca89cd6382dfbcff3dd2d87633433043650c07375d095cd3517561d8"}, - {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c7916bff914ac5d4a8fe25b7a25e432ff921e72f6f2b7547d1e325c1ad9d155"}, - {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f553ca691b9e94b202ff741bdd40f6ccb70cdd5fbf65c187af132f1317de6145"}, - {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0cdb0e627c86c373205a2f94a510ac4376fdc523f8bb36beab2e7f204416163c"}, - {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:d7d192305d9d8bc9082d10f361fc70a73590a4c65cf31c3e6926cd72b76bc35c"}, - {file = "pyarrow-17.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:02dae06ce212d8b3244dd3e7d12d9c4d3046945a5933d28026598e9dbbda1fca"}, - {file = "pyarrow-17.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:13d7a460b412f31e4c0efa1148e1d29bdf18ad1411eb6757d38f8fbdcc8645fb"}, - {file = "pyarrow-17.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b564a51fbccfab5a04a80453e5ac6c9954a9c5ef2890d1bcf63741909c3f8df"}, - {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32503827abbc5aadedfa235f5ece8c4f8f8b0a3cf01066bc8d29de7539532687"}, - {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a155acc7f154b9ffcc85497509bcd0d43efb80d6f733b0dc3bb14e281f131c8b"}, - {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:dec8d129254d0188a49f8a1fc99e0560dc1b85f60af729f47de4046015f9b0a5"}, - {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a48ddf5c3c6a6c505904545c25a4ae13646ae1f8ba703c4df4a1bfe4f4006bda"}, - {file = "pyarrow-17.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:42bf93249a083aca230ba7e2786c5f673507fa97bbd9725a1e2754715151a204"}, - {file = "pyarrow-17.0.0.tar.gz", hash = "sha256:4beca9521ed2c0921c1023e68d097d0299b62c362639ea315572a58f3f50fd28"}, -] - -[package.dependencies] -numpy = ">=1.16.6" - -[package.extras] -test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] - -[[package]] -name = "pylint" -version = "3.2.7" -description = "python code static checker" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "pylint-3.2.7-py3-none-any.whl", hash = "sha256:02f4aedeac91be69fb3b4bea997ce580a4ac68ce58b89eaefeaf06749df73f4b"}, - 
{file = "pylint-3.2.7.tar.gz", hash = "sha256:1b7a721b575eaeaa7d39db076b6e7743c993ea44f57979127c517c6c572c803e"}, -] - -[package.dependencies] -astroid = ">=3.2.4,<=3.3.0-dev0" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -dill = [ - {version = ">=0.2", markers = "python_version < \"3.11\""}, - {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, -] -isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" -mccabe = ">=0.6,<0.8" -platformdirs = ">=2.2.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -tomlkit = ">=0.10.1" -typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} - -[package.extras] -spelling = ["pyenchant (>=3.2,<4.0)"] -testutils = ["gitpython (>3)"] - -[[package]] -name = "pytest" -version = "7.4.4" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytest-dotenv" -version = "0.5.2" -description = "A py.test plugin that parses environment files before running tests" -optional = false -python-versions = "*" -files = [ - {file = "pytest-dotenv-0.5.2.tar.gz", hash = "sha256:2dc6c3ac6d8764c71c6d2804e902d0ff810fa19692e95fe138aefc9b1aa73732"}, - {file = "pytest_dotenv-0.5.2-py3-none-any.whl", hash = "sha256:40a2cece120a213898afaa5407673f6bd924b1fa7eafce6bda0e8abffe2f710f"}, -] - -[package.dependencies] -pytest = ">=5.0.0" -python-dotenv = ">=0.9.1" - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-dotenv" -version = "1.0.1" -description = "Read key-value pairs from a .env file and set them as environment variables" -optional = false -python-versions = ">=3.8" -files = [ - {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, - {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, -] - -[package.extras] -cli = ["click (>=5.0)"] - -[[package]] -name = "pytz" -version = "2024.2" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.2-py2.py3-none-any.whl", hash = 
"sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, - {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, -] - -[[package]] -name = "requests" -version = "2.32.3" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.8" -files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "six" -version = "1.17.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, - {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, -] - -[[package]] -name = "thrift" -version = "0.20.0" -description = "Python bindings for the Apache Thrift RPC system" -optional = false -python-versions = "*" -files = [ - {file = "thrift-0.20.0.tar.gz", hash = "sha256:4dd662eadf6b8aebe8a41729527bd69adf6ceaa2a8681cbef64d1273b3e8feba"}, -] - -[package.dependencies] -six = ">=1.7.2" - -[package.extras] -all = ["tornado (>=4.0)", "twisted"] -tornado = ["tornado (>=4.0)"] -twisted = ["twisted"] - -[[package]] -name = "tomli" -version = "2.2.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, - {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, - {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, - {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, - {file = 
"tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, - {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, - {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, - {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, - {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, - {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, - {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, -] - -[[package]] -name = "tomlkit" -version = "0.13.2" -description = "Style preserving TOML library" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, - {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, -] - -[[package]] -name = 
"typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - -[[package]] -name = "tzdata" -version = "2024.2" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, - {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, -] - -[[package]] -name = "urllib3" -version = "2.2.3" -description = "HTTP library with thread-safe connection pooling, file post, and more." -optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[extras] -pyarrow = ["pyarrow"] - -[metadata] -lock-version = "2.0" -python-versions = "^3.8.0" -content-hash = "cdf42dc5f4e437ae70fc167f9cb7400d947f5288ee46f132393a14a4e5502bf8" From 3853b76f2519f930df3edf67a86d53373fbbb5a5 Mon Sep 17 00:00:00 2001 From: Jothi Prakash Date: Wed, 11 Dec 2024 12:09:14 +0530 Subject: [PATCH 17/19] Updated the lock file --- poetry.lock | 961 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 961 insertions(+) create mode 100644 poetry.lock diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 00000000..13836837 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,961 @@ +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. + +[[package]] +name = "astroid" +version = "3.2.4" +description = "An abstract syntax tree for Python with inference support." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "astroid-3.2.4-py3-none-any.whl", hash = "sha256:413658a61eeca6202a59231abb473f932038fbcbf1666587f66d482083413a25"}, + {file = "astroid-3.2.4.tar.gz", hash = "sha256:0e14202810b30da1b735827f78f5157be2bbd4a7a59b7707ca0bfc2fb4c0063a"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} + +[[package]] +name = "black" +version = "22.12.0" +description = "The uncompromising code formatter." 
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"},
+ {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"},
+ {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"},
+ {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"},
+ {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"},
+ {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"},
+ {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"},
+ {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"},
+ {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"},
+ {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"},
+ {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"},
+ {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"},
+]
+
+[package.dependencies]
+click = ">=8.0.0"
+mypy-extensions = ">=0.4.3"
+pathspec = ">=0.9.0"
+platformdirs = ">=2"
+tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
+typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+colorama = ["colorama (>=0.4.3)"]
+d = ["aiohttp (>=3.7.4)"]
+jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
+uvloop = ["uvloop (>=0.15.2)"]
+
+[[package]]
+name = "certifi"
+version = "2024.8.30"
+description = "Python package for providing Mozilla's CA Bundle."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"},
+ {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.4.0"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, 
+ {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + 
{file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = 
"sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "dill" +version = "0.3.9" +description = "serialize all of Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, + {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] + +[[package]] +name = "et-xmlfile" +version = "2.0.0" +description = "An implementation of lxml.xmlfile for the standard library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa"}, + {file = "et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "lz4" +version = "4.3.3" +description = "LZ4 Bindings for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "lz4-4.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b891880c187e96339474af2a3b2bfb11a8e4732ff5034be919aa9029484cd201"}, + {file = "lz4-4.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:222a7e35137d7539c9c33bb53fcbb26510c5748779364014235afc62b0ec797f"}, + {file = "lz4-4.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f76176492ff082657ada0d0f10c794b6da5800249ef1692b35cf49b1e93e8ef7"}, + {file = "lz4-4.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1d18718f9d78182c6b60f568c9a9cec8a7204d7cb6fad4e511a2ef279e4cb05"}, + {file = "lz4-4.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cdc60e21ec70266947a48839b437d46025076eb4b12c76bd47f8e5eb8a75dcc"}, + {file = "lz4-4.3.3-cp310-cp310-win32.whl", hash = "sha256:c81703b12475da73a5d66618856d04b1307e43428a7e59d98cfe5a5d608a74c6"}, + {file = "lz4-4.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:43cf03059c0f941b772c8aeb42a0813d68d7081c009542301637e5782f8a33e2"}, + {file = "lz4-4.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:30e8c20b8857adef7be045c65f47ab1e2c4fabba86a9fa9a997d7674a31ea6b6"}, + {file = "lz4-4.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7b1839f795315e480fb87d9bc60b186a98e3e5d17203c6e757611ef7dcef61"}, + {file = "lz4-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edfd858985c23523f4e5a7526ca6ee65ff930207a7ec8a8f57a01eae506aaee7"}, + {file = "lz4-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e9c410b11a31dbdc94c05ac3c480cb4b222460faf9231f12538d0074e56c563"}, + {file = "lz4-4.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2507ee9c99dbddd191c86f0e0c8b724c76d26b0602db9ea23232304382e1f21"}, + {file = "lz4-4.3.3-cp311-cp311-win32.whl", hash = "sha256:f180904f33bdd1e92967923a43c22899e303906d19b2cf8bb547db6653ea6e7d"}, + {file = "lz4-4.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:b14d948e6dce389f9a7afc666d60dd1e35fa2138a8ec5306d30cd2e30d36b40c"}, + {file = "lz4-4.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e36cd7b9d4d920d3bfc2369840da506fa68258f7bb176b8743189793c055e43d"}, + {file = "lz4-4.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:31ea4be9d0059c00b2572d700bf2c1bc82f241f2c3282034a759c9a4d6ca4dc2"}, + {file = "lz4-4.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33c9a6fd20767ccaf70649982f8f3eeb0884035c150c0b818ea660152cf3c809"}, + {file = "lz4-4.3.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca8fccc15e3add173da91be8f34121578dc777711ffd98d399be35487c934bf"}, + {file = "lz4-4.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d84b479ddf39fe3ea05387f10b779155fc0990125f4fb35d636114e1c63a2e"}, + {file = "lz4-4.3.3-cp312-cp312-win32.whl", hash = 
"sha256:337cb94488a1b060ef1685187d6ad4ba8bc61d26d631d7ba909ee984ea736be1"}, + {file = "lz4-4.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:5d35533bf2cee56f38ced91f766cd0038b6abf46f438a80d50c52750088be93f"}, + {file = "lz4-4.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:363ab65bf31338eb364062a15f302fc0fab0a49426051429866d71c793c23394"}, + {file = "lz4-4.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a136e44a16fc98b1abc404fbabf7f1fada2bdab6a7e970974fb81cf55b636d0"}, + {file = "lz4-4.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abc197e4aca8b63f5ae200af03eb95fb4b5055a8f990079b5bdf042f568469dd"}, + {file = "lz4-4.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56f4fe9c6327adb97406f27a66420b22ce02d71a5c365c48d6b656b4aaeb7775"}, + {file = "lz4-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0e822cd7644995d9ba248cb4b67859701748a93e2ab7fc9bc18c599a52e4604"}, + {file = "lz4-4.3.3-cp38-cp38-win32.whl", hash = "sha256:24b3206de56b7a537eda3a8123c644a2b7bf111f0af53bc14bed90ce5562d1aa"}, + {file = "lz4-4.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:b47839b53956e2737229d70714f1d75f33e8ac26e52c267f0197b3189ca6de24"}, + {file = "lz4-4.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6756212507405f270b66b3ff7f564618de0606395c0fe10a7ae2ffcbbe0b1fba"}, + {file = "lz4-4.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee9ff50557a942d187ec85462bb0960207e7ec5b19b3b48949263993771c6205"}, + {file = "lz4-4.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b901c7784caac9a1ded4555258207d9e9697e746cc8532129f150ffe1f6ba0d"}, + {file = "lz4-4.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d9ec061b9eca86e4dcc003d93334b95d53909afd5a32c6e4f222157b50c071"}, + {file = "lz4-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4c7bf687303ca47d69f9f0133274958fd672efaa33fb5bcde467862d6c621f0"}, + {file = "lz4-4.3.3-cp39-cp39-win32.whl", hash = "sha256:054b4631a355606e99a42396f5db4d22046a3397ffc3269a348ec41eaebd69d2"}, + {file = "lz4-4.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:eac9af361e0d98335a02ff12fb56caeb7ea1196cf1a49dbf6f17828a131da807"}, + {file = "lz4-4.3.3.tar.gz", hash = "sha256:01fe674ef2889dbb9899d8a67361e0c4a2c833af5aeb37dd505727cf5d2a131e"}, +] + +[package.extras] +docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"] +flake8 = ["flake8"] +tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mypy" +version = "1.13.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, + {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, + {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, + {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, + {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, + {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, + {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, + {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, + {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, + {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, + {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, + {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, + {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, + {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, + {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, + {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, + {file = 
"mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, + {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, + {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, + {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, + {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, + {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "numpy" +version = "1.24.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, + {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, + {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, + {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, + {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, + {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = 
"sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, + {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, + {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, + {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, + {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, + {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, +] + +[[package]] +name = "numpy" +version = "2.2.0" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.10" +files = [ + {file = "numpy-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1e25507d85da11ff5066269d0bd25d06e0a0f2e908415534f3e603d2a78e4ffa"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a62eb442011776e4036af5c8b1a00b706c5bc02dc15eb5344b0c750428c94219"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:b606b1aaf802e6468c2608c65ff7ece53eae1a6874b3765f69b8ceb20c5fa78e"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:36b2b43146f646642b425dd2027730f99bac962618ec2052932157e213a040e9"}, + {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fe8f3583e0607ad4e43a954e35c1748b553bfe9fdac8635c02058023277d1b3"}, + {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:122fd2fcfafdefc889c64ad99c228d5a1f9692c3a83f56c292618a59aa60ae83"}, + {file = 
"numpy-2.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3f2f5cddeaa4424a0a118924b988746db6ffa8565e5829b1841a8a3bd73eb59a"}, + {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7fe4bb0695fe986a9e4deec3b6857003b4cfe5c5e4aac0b95f6a658c14635e31"}, + {file = "numpy-2.2.0-cp310-cp310-win32.whl", hash = "sha256:b30042fe92dbd79f1ba7f6898fada10bdaad1847c44f2dff9a16147e00a93661"}, + {file = "numpy-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dc1d6d66f8d37843ed281773c7174f03bf7ad826523f73435deb88ba60d2d4"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9874bc2ff574c40ab7a5cbb7464bf9b045d617e36754a7bc93f933d52bd9ffc6"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0da8495970f6b101ddd0c38ace92edea30e7e12b9a926b57f5fabb1ecc25bb90"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0557eebc699c1c34cccdd8c3778c9294e8196df27d713706895edc6f57d29608"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:3579eaeb5e07f3ded59298ce22b65f877a86ba8e9fe701f5576c99bb17c283da"}, + {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40deb10198bbaa531509aad0cd2f9fadb26c8b94070831e2208e7df543562b74"}, + {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2aed8fcf8abc3020d6a9ccb31dbc9e7d7819c56a348cc88fd44be269b37427e"}, + {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a222d764352c773aa5ebde02dd84dba3279c81c6db2e482d62a3fa54e5ece69b"}, + {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4e58666988605e251d42c2818c7d3d8991555381be26399303053b58a5bbf30d"}, + {file = "numpy-2.2.0-cp311-cp311-win32.whl", hash = "sha256:4723a50e1523e1de4fccd1b9a6dcea750c2102461e9a02b2ac55ffeae09a4410"}, + {file = "numpy-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:16757cf28621e43e252c560d25b15f18a2f11da94fea344bf26c599b9cf54b73"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cff210198bb4cae3f3c100444c5eaa573a823f05c253e7188e1362a5555235b3"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58b92a5828bd4d9aa0952492b7de803135038de47343b2aa3cc23f3b71a3dc4e"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:ebe5e59545401fbb1b24da76f006ab19734ae71e703cdb4a8b347e84a0cece67"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e2b8cd48a9942ed3f85b95ca4105c45758438c7ed28fff1e4ce3e57c3b589d8e"}, + {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57fcc997ffc0bef234b8875a54d4058afa92b0b0c4223fc1f62f24b3b5e86038"}, + {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ad7d11b309bd132d74397fcf2920933c9d1dc865487128f5c03d580f2c3d03"}, + {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cb24cca1968b21355cc6f3da1a20cd1cebd8a023e3c5b09b432444617949085a"}, + {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0798b138c291d792f8ea40fe3768610f3c7dd2574389e37c3f26573757c8f7ef"}, + {file = "numpy-2.2.0-cp312-cp312-win32.whl", hash = "sha256:afe8fb968743d40435c3827632fd36c5fbde633b0423da7692e426529b1759b1"}, + {file = "numpy-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:3a4199f519e57d517ebd48cb76b36c82da0360781c6a0353e64c0cac30ecaad3"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:f8c8b141ef9699ae777c6278b52c706b653bf15d135d302754f6b2e90eb30367"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f0986e917aca18f7a567b812ef7ca9391288e2acb7a4308aa9d265bd724bdae"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:1c92113619f7b272838b8d6702a7f8ebe5edea0df48166c47929611d0b4dea69"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5a145e956b374e72ad1dff82779177d4a3c62bc8248f41b80cb5122e68f22d13"}, + {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18142b497d70a34b01642b9feabb70156311b326fdddd875a9981f34a369b671"}, + {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7d41d1612c1a82b64697e894b75db6758d4f21c3ec069d841e60ebe54b5b571"}, + {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a98f6f20465e7618c83252c02041517bd2f7ea29be5378f09667a8f654a5918d"}, + {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e09d40edfdb4e260cb1567d8ae770ccf3b8b7e9f0d9b5c2a9992696b30ce2742"}, + {file = "numpy-2.2.0-cp313-cp313-win32.whl", hash = "sha256:3905a5fffcc23e597ee4d9fb3fcd209bd658c352657548db7316e810ca80458e"}, + {file = "numpy-2.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:a184288538e6ad699cbe6b24859206e38ce5fba28f3bcfa51c90d0502c1582b2"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7832f9e8eb00be32f15fdfb9a981d6955ea9adc8574c521d48710171b6c55e95"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0dd071b95bbca244f4cb7f70b77d2ff3aaaba7fa16dc41f58d14854a6204e6c"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:b0b227dcff8cdc3efbce66d4e50891f04d0a387cce282fe1e66199146a6a8fca"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ab153263a7c5ccaf6dfe7e53447b74f77789f28ecb278c3b5d49db7ece10d6d"}, + {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e500aba968a48e9019e42c0c199b7ec0696a97fa69037bea163b55398e390529"}, + {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:440cfb3db4c5029775803794f8638fbdbf71ec702caf32735f53b008e1eaece3"}, + {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a55dc7a7f0b6198b07ec0cd445fbb98b05234e8b00c5ac4874a63372ba98d4ab"}, + {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4bddbaa30d78c86329b26bd6aaaea06b1e47444da99eddac7bf1e2fab717bd72"}, + {file = "numpy-2.2.0-cp313-cp313t-win32.whl", hash = "sha256:30bf971c12e4365153afb31fc73f441d4da157153f3400b82db32d04de1e4066"}, + {file = "numpy-2.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d35717333b39d1b6bb8433fa758a55f1081543de527171543a2b710551d40881"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e12c6c1ce84628c52d6367863773f7c8c8241be554e8b79686e91a43f1733773"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:b6207dc8fb3c8cb5668e885cef9ec7f70189bec4e276f0ff70d5aa078d32c88e"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a50aeff71d0f97b6450d33940c7181b08be1441c6c193e678211bff11aa725e7"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:df12a1f99b99f569a7c2ae59aa2d31724e8d835fc7f33e14f4792e3071d11221"}, + {file = "numpy-2.2.0.tar.gz", hash = 
"sha256:140dd80ff8981a583a60980be1a655068f8adebf7a45a06a6858c873fcdcd4a0"}, +] + +[[package]] +name = "oauthlib" +version = "3.2.2" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +optional = false +python-versions = ">=3.6" +files = [ + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, +] + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] +name = "openpyxl" +version = "3.1.5" +description = "A Python library to read/write Excel 2010 xlsx/xlsm files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2"}, + {file = "openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050"}, +] + +[package.dependencies] +et-xmlfile = "*" + +[[package]] +name = "packaging" +version = "24.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, +] + +[[package]] +name = "pandas" +version = "2.0.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, + {file = "pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f"}, + {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0c6f76a0f1ba361551f3e6dceaff06bde7514a374aa43e33b588ec10420183"}, + {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba619e410a21d8c387a1ea6e8a0e49bb42216474436245718d7f2e88a2f8d7c0"}, + {file = "pandas-2.0.3-cp310-cp310-win32.whl", hash = "sha256:3ef285093b4fe5058eefd756100a367f27029913760773c8bf1d2d8bebe5d210"}, + {file = "pandas-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:9ee1a69328d5c36c98d8e74db06f4ad518a1840e8ccb94a4ba86920986bb617e"}, + {file = "pandas-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b084b91d8d66ab19f5bb3256cbd5ea661848338301940e17f4492b2ce0801fe8"}, + {file = "pandas-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:37673e3bdf1551b95bf5d4ce372b37770f9529743d2498032439371fc7b7eb26"}, + {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9cb1e14fdb546396b7e1b923ffaeeac24e4cedd14266c3497216dd4448e4f2d"}, + {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9cd88488cceb7635aebb84809d087468eb33551097d600c6dad13602029c2df"}, + {file = "pandas-2.0.3-cp311-cp311-win32.whl", hash = "sha256:694888a81198786f0e164ee3a581df7d505024fbb1f15202fc7db88a71d84ebd"}, + {file = "pandas-2.0.3-cp311-cp311-win_amd64.whl", hash = 
"sha256:6a21ab5c89dcbd57f78d0ae16630b090eec626360085a4148693def5452d8a6b"}, + {file = "pandas-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4da0d45e7f34c069fe4d522359df7d23badf83abc1d1cef398895822d11061"}, + {file = "pandas-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32fca2ee1b0d93dd71d979726b12b61faa06aeb93cf77468776287f41ff8fdc5"}, + {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258d3624b3ae734490e4d63c430256e716f488c4fcb7c8e9bde2d3aa46c29089"}, + {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eae3dc34fa1aa7772dd3fc60270d13ced7346fcbcfee017d3132ec625e23bb0"}, + {file = "pandas-2.0.3-cp38-cp38-win32.whl", hash = "sha256:f3421a7afb1a43f7e38e82e844e2bca9a6d793d66c1a7f9f0ff39a795bbc5e02"}, + {file = "pandas-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:69d7f3884c95da3a31ef82b7618af5710dba95bb885ffab339aad925c3e8ce78"}, + {file = "pandas-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5247fb1ba347c1261cbbf0fcfba4a3121fbb4029d95d9ef4dc45406620b25c8b"}, + {file = "pandas-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81af086f4543c9d8bb128328b5d32e9986e0c84d3ee673a2ac6fb57fd14f755e"}, + {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1994c789bf12a7c5098277fb43836ce090f1073858c10f9220998ac74f37c69b"}, + {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec591c48e29226bcbb316e0c1e9423622bc7a4eaf1ef7c3c9fa1a3981f89641"}, + {file = "pandas-2.0.3-cp39-cp39-win32.whl", hash = "sha256:04dbdbaf2e4d46ca8da896e1805bc04eb85caa9a82e259e8eed00254d5e0c682"}, + {file = "pandas-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:1168574b036cd8b93abc746171c9b4f1b83467438a5e45909fed645cf8692dbc"}, + {file = "pandas-2.0.3.tar.gz", hash = "sha256:c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.20.3", markers = "python_version < \"3.10\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.1" + +[package.extras] +all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"] +aws = ["s3fs (>=2021.08.0)"] +clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"] +compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"] +computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"] +feather = ["pyarrow (>=7.0.0)"] +fss = ["fsspec (>=2021.07.0)"] +gcp = ["gcsfs 
(>=2021.07.0)", "pandas-gbq (>=0.15.0)"] +hdf5 = ["tables (>=3.6.1)"] +html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"] +mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"] +parquet = ["pyarrow (>=7.0.0)"] +performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"] +plot = ["matplotlib (>=3.6.1)"] +postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"] +spss = ["pyreadstat (>=1.1.2)"] +sql-other = ["SQLAlchemy (>=1.4.16)"] +test = ["hypothesis (>=6.34.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.6.3)"] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pyarrow" +version = "17.0.0" +description = "Python library for Apache Arrow" +optional = true +python-versions = ">=3.8" +files = [ + {file = "pyarrow-17.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a5c8b238d47e48812ee577ee20c9a2779e6a5904f1708ae240f53ecbee7c9f07"}, + {file = "pyarrow-17.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db023dc4c6cae1015de9e198d41250688383c3f9af8f565370ab2b4cb5f62655"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da1e060b3876faa11cee287839f9cc7cdc00649f475714b8680a05fd9071d545"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c06d4624c0ad6674364bb46ef38c3132768139ddec1c56582dbac54f2663e2"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:fa3c246cc58cb5a4a5cb407a18f193354ea47dd0648194e6265bd24177982fe8"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:f7ae2de664e0b158d1607699a16a488de3d008ba99b3a7aa5de1cbc13574d047"}, + {file = "pyarrow-17.0.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:5984f416552eea15fd9cee03da53542bf4cddaef5afecefb9aa8d1010c335087"}, + {file = "pyarrow-17.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1c8856e2ef09eb87ecf937104aacfa0708f22dfeb039c363ec99735190ffb977"}, + {file = "pyarrow-17.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e19f569567efcbbd42084e87f948778eb371d308e137a0f97afe19bb860ccb3"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b244dc8e08a23b3e352899a006a26ae7b4d0da7bb636872fa8f5884e70acf15"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b72e87fe3e1db343995562f7fff8aee354b55ee83d13afba65400c178ab2597"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dc5c31c37409dfbc5d014047817cb4ccd8c1ea25d19576acf1a001fe07f5b420"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e3343cb1e88bc2ea605986d4b94948716edc7a8d14afd4e2c097232f729758b4"}, + {file = "pyarrow-17.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:a27532c38f3de9eb3e90ecab63dfda948a8ca859a66e3a47f5f42d1e403c4d03"}, + {file = "pyarrow-17.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9b8a823cea605221e61f34859dcc03207e52e409ccf6354634143e23af7c8d22"}, + {file = "pyarrow-17.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1e70de6cb5790a50b01d2b686d54aaf73da01266850b05e3af2a1bc89e16053"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0071ce35788c6f9077ff9ecba4858108eebe2ea5a3f7cf2cf55ebc1dbc6ee24a"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:757074882f844411fcca735e39aae74248a1531367a7c80799b4266390ae51cc"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ba11c4f16976e89146781a83833df7f82077cdab7dc6232c897789343f7891a"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b0c6ac301093b42d34410b187bba560b17c0330f64907bfa4f7f7f2444b0cf9b"}, + {file = "pyarrow-17.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:392bc9feabc647338e6c89267635e111d71edad5fcffba204425a7c8d13610d7"}, + {file = "pyarrow-17.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:af5ff82a04b2171415f1410cff7ebb79861afc5dae50be73ce06d6e870615204"}, + {file = "pyarrow-17.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:edca18eaca89cd6382dfbcff3dd2d87633433043650c07375d095cd3517561d8"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c7916bff914ac5d4a8fe25b7a25e432ff921e72f6f2b7547d1e325c1ad9d155"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f553ca691b9e94b202ff741bdd40f6ccb70cdd5fbf65c187af132f1317de6145"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0cdb0e627c86c373205a2f94a510ac4376fdc523f8bb36beab2e7f204416163c"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:d7d192305d9d8bc9082d10f361fc70a73590a4c65cf31c3e6926cd72b76bc35c"}, + {file = "pyarrow-17.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:02dae06ce212d8b3244dd3e7d12d9c4d3046945a5933d28026598e9dbbda1fca"}, + {file = "pyarrow-17.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:13d7a460b412f31e4c0efa1148e1d29bdf18ad1411eb6757d38f8fbdcc8645fb"}, + {file = "pyarrow-17.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:9b564a51fbccfab5a04a80453e5ac6c9954a9c5ef2890d1bcf63741909c3f8df"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32503827abbc5aadedfa235f5ece8c4f8f8b0a3cf01066bc8d29de7539532687"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a155acc7f154b9ffcc85497509bcd0d43efb80d6f733b0dc3bb14e281f131c8b"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:dec8d129254d0188a49f8a1fc99e0560dc1b85f60af729f47de4046015f9b0a5"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a48ddf5c3c6a6c505904545c25a4ae13646ae1f8ba703c4df4a1bfe4f4006bda"}, + {file = "pyarrow-17.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:42bf93249a083aca230ba7e2786c5f673507fa97bbd9725a1e2754715151a204"}, + {file = "pyarrow-17.0.0.tar.gz", hash = "sha256:4beca9521ed2c0921c1023e68d097d0299b62c362639ea315572a58f3f50fd28"}, +] + +[package.dependencies] +numpy = ">=1.16.6" + +[package.extras] +test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] + +[[package]] +name = "pylint" +version = "3.2.7" +description = "python code static checker" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "pylint-3.2.7-py3-none-any.whl", hash = "sha256:02f4aedeac91be69fb3b4bea997ce580a4ac68ce58b89eaefeaf06749df73f4b"}, + {file = "pylint-3.2.7.tar.gz", hash = "sha256:1b7a721b575eaeaa7d39db076b6e7743c993ea44f57979127c517c6c572c803e"}, +] + +[package.dependencies] +astroid = ">=3.2.4,<=3.3.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = [ + {version = ">=0.2", markers = "python_version < \"3.11\""}, + {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, +] +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +tomlkit = ">=0.10.1" +typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-dotenv" +version = "0.5.2" +description = "A py.test plugin that parses environment files before running tests" +optional = false +python-versions = "*" +files = [ + {file = "pytest-dotenv-0.5.2.tar.gz", hash = "sha256:2dc6c3ac6d8764c71c6d2804e902d0ff810fa19692e95fe138aefc9b1aa73732"}, + {file = "pytest_dotenv-0.5.2-py3-none-any.whl", hash = "sha256:40a2cece120a213898afaa5407673f6bd924b1fa7eafce6bda0e8abffe2f710f"}, +] + 
+[package.dependencies] +pytest = ">=5.0.0" +python-dotenv = ">=0.9.1" + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pytz" +version = "2024.2" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "thrift" +version = "0.20.0" +description = "Python bindings for the Apache Thrift RPC system" +optional = false +python-versions = "*" +files = [ + {file = "thrift-0.20.0.tar.gz", hash = "sha256:4dd662eadf6b8aebe8a41729527bd69adf6ceaa2a8681cbef64d1273b3e8feba"}, +] + +[package.dependencies] +six = ">=1.7.2" + +[package.extras] +all = ["tornado (>=4.0)", "twisted"] +tornado = ["tornado (>=4.0)"] +twisted = ["twisted"] + +[[package]] +name = "tomli" +version = "2.2.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, +] + +[[package]] +name = "tomlkit" +version = "0.13.2" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "tzdata" +version = "2024.2" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, +] + +[[package]] +name = "urllib3" +version = "2.2.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[extras] +pyarrow = ["pyarrow"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.8.0" +content-hash = "cdf42dc5f4e437ae70fc167f9cb7400d947f5288ee46f132393a14a4e5502bf8" From 8f70b5b7c89f1a8fbe092d4c83a6297eec788c6a Mon Sep 17 00:00:00 2001 From: Jothi Prakash Date: Wed, 11 Dec 2024 12:25:21 +0530 Subject: [PATCH 18/19] Fixed poetry numpy 2.2.2 issue --- poetry.lock | 99 ++++++++++++++++++++------------------------------ pyproject.toml | 4 +- 2 files changed, 42 insertions(+), 61 deletions(-) diff --git a/poetry.lock b/poetry.lock index 13836837..2b63f135 100644 --- a/poetry.lock +++ b/poetry.lock @@ -442,66 +442,47 @@ files = [ [[package]] name = "numpy" -version = "2.2.0" +version = "1.26.4" description = "Fundamental package for array computing in Python" optional = false -python-versions = ">=3.10" -files = [ - {file = "numpy-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1e25507d85da11ff5066269d0bd25d06e0a0f2e908415534f3e603d2a78e4ffa"}, - {file = "numpy-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a62eb442011776e4036af5c8b1a00b706c5bc02dc15eb5344b0c750428c94219"}, - {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:b606b1aaf802e6468c2608c65ff7ece53eae1a6874b3765f69b8ceb20c5fa78e"}, - {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:36b2b43146f646642b425dd2027730f99bac962618ec2052932157e213a040e9"}, - {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fe8f3583e0607ad4e43a954e35c1748b553bfe9fdac8635c02058023277d1b3"}, - {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:122fd2fcfafdefc889c64ad99c228d5a1f9692c3a83f56c292618a59aa60ae83"}, - {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3f2f5cddeaa4424a0a118924b988746db6ffa8565e5829b1841a8a3bd73eb59a"}, - {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7fe4bb0695fe986a9e4deec3b6857003b4cfe5c5e4aac0b95f6a658c14635e31"}, - {file = "numpy-2.2.0-cp310-cp310-win32.whl", hash = "sha256:b30042fe92dbd79f1ba7f6898fada10bdaad1847c44f2dff9a16147e00a93661"}, - {file = "numpy-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dc1d6d66f8d37843ed281773c7174f03bf7ad826523f73435deb88ba60d2d4"}, - {file = "numpy-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9874bc2ff574c40ab7a5cbb7464bf9b045d617e36754a7bc93f933d52bd9ffc6"}, - {file = "numpy-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0da8495970f6b101ddd0c38ace92edea30e7e12b9a926b57f5fabb1ecc25bb90"}, - {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0557eebc699c1c34cccdd8c3778c9294e8196df27d713706895edc6f57d29608"}, - {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:3579eaeb5e07f3ded59298ce22b65f877a86ba8e9fe701f5576c99bb17c283da"}, - {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40deb10198bbaa531509aad0cd2f9fadb26c8b94070831e2208e7df543562b74"}, - {file = 
"numpy-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2aed8fcf8abc3020d6a9ccb31dbc9e7d7819c56a348cc88fd44be269b37427e"}, - {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a222d764352c773aa5ebde02dd84dba3279c81c6db2e482d62a3fa54e5ece69b"}, - {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4e58666988605e251d42c2818c7d3d8991555381be26399303053b58a5bbf30d"}, - {file = "numpy-2.2.0-cp311-cp311-win32.whl", hash = "sha256:4723a50e1523e1de4fccd1b9a6dcea750c2102461e9a02b2ac55ffeae09a4410"}, - {file = "numpy-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:16757cf28621e43e252c560d25b15f18a2f11da94fea344bf26c599b9cf54b73"}, - {file = "numpy-2.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cff210198bb4cae3f3c100444c5eaa573a823f05c253e7188e1362a5555235b3"}, - {file = "numpy-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58b92a5828bd4d9aa0952492b7de803135038de47343b2aa3cc23f3b71a3dc4e"}, - {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:ebe5e59545401fbb1b24da76f006ab19734ae71e703cdb4a8b347e84a0cece67"}, - {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e2b8cd48a9942ed3f85b95ca4105c45758438c7ed28fff1e4ce3e57c3b589d8e"}, - {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57fcc997ffc0bef234b8875a54d4058afa92b0b0c4223fc1f62f24b3b5e86038"}, - {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ad7d11b309bd132d74397fcf2920933c9d1dc865487128f5c03d580f2c3d03"}, - {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cb24cca1968b21355cc6f3da1a20cd1cebd8a023e3c5b09b432444617949085a"}, - {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0798b138c291d792f8ea40fe3768610f3c7dd2574389e37c3f26573757c8f7ef"}, - {file = "numpy-2.2.0-cp312-cp312-win32.whl", hash = "sha256:afe8fb968743d40435c3827632fd36c5fbde633b0423da7692e426529b1759b1"}, - {file = "numpy-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:3a4199f519e57d517ebd48cb76b36c82da0360781c6a0353e64c0cac30ecaad3"}, - {file = "numpy-2.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f8c8b141ef9699ae777c6278b52c706b653bf15d135d302754f6b2e90eb30367"}, - {file = "numpy-2.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f0986e917aca18f7a567b812ef7ca9391288e2acb7a4308aa9d265bd724bdae"}, - {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:1c92113619f7b272838b8d6702a7f8ebe5edea0df48166c47929611d0b4dea69"}, - {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5a145e956b374e72ad1dff82779177d4a3c62bc8248f41b80cb5122e68f22d13"}, - {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18142b497d70a34b01642b9feabb70156311b326fdddd875a9981f34a369b671"}, - {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7d41d1612c1a82b64697e894b75db6758d4f21c3ec069d841e60ebe54b5b571"}, - {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a98f6f20465e7618c83252c02041517bd2f7ea29be5378f09667a8f654a5918d"}, - {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e09d40edfdb4e260cb1567d8ae770ccf3b8b7e9f0d9b5c2a9992696b30ce2742"}, - {file = "numpy-2.2.0-cp313-cp313-win32.whl", hash = "sha256:3905a5fffcc23e597ee4d9fb3fcd209bd658c352657548db7316e810ca80458e"}, - {file = "numpy-2.2.0-cp313-cp313-win_amd64.whl", 
hash = "sha256:a184288538e6ad699cbe6b24859206e38ce5fba28f3bcfa51c90d0502c1582b2"}, - {file = "numpy-2.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7832f9e8eb00be32f15fdfb9a981d6955ea9adc8574c521d48710171b6c55e95"}, - {file = "numpy-2.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0dd071b95bbca244f4cb7f70b77d2ff3aaaba7fa16dc41f58d14854a6204e6c"}, - {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:b0b227dcff8cdc3efbce66d4e50891f04d0a387cce282fe1e66199146a6a8fca"}, - {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ab153263a7c5ccaf6dfe7e53447b74f77789f28ecb278c3b5d49db7ece10d6d"}, - {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e500aba968a48e9019e42c0c199b7ec0696a97fa69037bea163b55398e390529"}, - {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:440cfb3db4c5029775803794f8638fbdbf71ec702caf32735f53b008e1eaece3"}, - {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a55dc7a7f0b6198b07ec0cd445fbb98b05234e8b00c5ac4874a63372ba98d4ab"}, - {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4bddbaa30d78c86329b26bd6aaaea06b1e47444da99eddac7bf1e2fab717bd72"}, - {file = "numpy-2.2.0-cp313-cp313t-win32.whl", hash = "sha256:30bf971c12e4365153afb31fc73f441d4da157153f3400b82db32d04de1e4066"}, - {file = "numpy-2.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d35717333b39d1b6bb8433fa758a55f1081543de527171543a2b710551d40881"}, - {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e12c6c1ce84628c52d6367863773f7c8c8241be554e8b79686e91a43f1733773"}, - {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:b6207dc8fb3c8cb5668e885cef9ec7f70189bec4e276f0ff70d5aa078d32c88e"}, - {file = "numpy-2.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a50aeff71d0f97b6450d33940c7181b08be1441c6c193e678211bff11aa725e7"}, - {file = "numpy-2.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:df12a1f99b99f569a7c2ae59aa2d31724e8d835fc7f33e14f4792e3071d11221"}, - {file = "numpy-2.2.0.tar.gz", hash = "sha256:140dd80ff8981a583a60980be1a655068f8adebf7a45a06a6858c873fcdcd4a0"}, +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = 
"numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = 
"numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] [[package]] @@ -958,4 +939,4 @@ pyarrow = ["pyarrow"] [metadata] lock-version = "2.0" python-versions = "^3.8.0" -content-hash = "cdf42dc5f4e437ae70fc167f9cb7400d947f5288ee46f132393a14a4e5502bf8" +content-hash = "43ea4a4ca7c8403d2b2033b783fe57743e100354986c723ef1f202cde2ac8881" diff --git a/pyproject.toml b/pyproject.toml index 4f4e72a5..f3748cba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,8 +18,8 @@ lz4 = "^4.0.2" requests = "^2.18.1" oauthlib = "^3.1.0" numpy = [ - { version = ">=1.16.6", python = ">=3.8,<3.11" }, - { version = ">=1.23.4", python = ">=3.11" }, + { version = "^1.16.6", python = ">=3.8,<3.11" }, + { version = "^1.23.4", python = ">=3.11" }, ] openpyxl = "^3.0.10" urllib3 = ">=1.26" From 3fc4e01976e10bada32082fa2ede6b7d34b69917 Mon Sep 17 00:00:00 2001 From: Jothi Prakash Date: Thu, 26 Dec 2024 12:38:01 +0530 Subject: [PATCH 19/19] Workflow fixes --- .github/workflows/code-quality-checks.yml | 53 ++++++++++++++++++++++- tests/unit/test_arrow_queue.py | 10 +++-- tests/unit/test_cloud_fetch_queue.py | 8 +++- tests/unit/test_fetches.py | 8 +++- tests/unit/test_fetches_bench.py | 8 ++-- tests/unit/test_thrift_backend.py | 10 +++-- 6 files changed, 81 insertions(+), 16 deletions(-) diff --git a/.github/workflows/code-quality-checks.yml b/.github/workflows/code-quality-checks.yml index 98653a61..6a349233 100644 --- a/.github/workflows/code-quality-checks.yml +++ b/.github/workflows/code-quality-checks.yml @@ -52,12 +52,63 @@ jobs: # install your root project, if required #---------------------------------------------- - name: Install library - run: poetry install --no-interaction --all-extras + run: poetry install --no-interaction #---------------------------------------------- # run test suite #---------------------------------------------- - name: Run tests run: poetry run python -m pytest tests/unit + run-unit-tests-with-arrow: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [ 3.8, 3.9, "3.10", "3.11" ] + steps: + #---------------------------------------------- + # check-out repo and set-up python + #---------------------------------------------- + - name: Check out repository + uses: actions/checkout@v2 + - name: Set up python ${{ matrix.python-version }} + id: setup-python + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + #---------------------------------------------- + # ----- install & configure poetry ----- + #---------------------------------------------- + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + virtualenvs-create: true + virtualenvs-in-project: true + installer-parallel: true + + #---------------------------------------------- + # load cached venv if cache exists + #---------------------------------------------- + - name: Load cached venv + id: cached-poetry-dependencies + uses: actions/cache@v2 + with: + path: .venv-pyarrow + key: venv-pyarrow-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ 
github.event.repository.name }}-${{ hashFiles('**/poetry.lock') }}
+      #----------------------------------------------
+      # install dependencies if cache does not exist
+      #----------------------------------------------
+      - name: Install dependencies
+        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
+        run: poetry install --no-interaction --no-root
+      #----------------------------------------------
+      # install your root project, if required
+      #----------------------------------------------
+      - name: Install library
+        run: poetry install --no-interaction --all-extras
+      #----------------------------------------------
+      # run test suite
+      #----------------------------------------------
+      - name: Run tests
+        run: poetry run python -m pytest tests/unit
   check-linting:
     runs-on: ubuntu-latest
     strategy:
diff --git a/tests/unit/test_arrow_queue.py b/tests/unit/test_arrow_queue.py
index b3dff45f..c6aef195 100644
--- a/tests/unit/test_arrow_queue.py
+++ b/tests/unit/test_arrow_queue.py
@@ -1,10 +1,12 @@
 import unittest
-
-import pyarrow as pa
-
+import pytest
+try:
+    import pyarrow as pa
+except ImportError:
+    pa = None
 from databricks.sql.utils import ArrowQueue
 
-
+@pytest.mark.skipif(pa is None, reason="PyArrow is not installed")
 class ArrowQueueSuite(unittest.TestCase):
     @staticmethod
     def make_arrow_table(batch):
diff --git a/tests/unit/test_cloud_fetch_queue.py b/tests/unit/test_cloud_fetch_queue.py
index 01d8a79b..59b6ce5c 100644
--- a/tests/unit/test_cloud_fetch_queue.py
+++ b/tests/unit/test_cloud_fetch_queue.py
@@ -1,12 +1,16 @@
-import pyarrow
+try:
+    import pyarrow
+except ImportError:
+    pyarrow = None
 import unittest
+import pytest
 from unittest.mock import MagicMock, patch
 
 from databricks.sql.thrift_api.TCLIService.ttypes import TSparkArrowResultLink
 import databricks.sql.utils as utils
 from databricks.sql.types import SSLOptions
 
-
+@pytest.mark.skipif(pyarrow is None, reason="PyArrow is not installed")
 class CloudFetchQueueSuite(unittest.TestCase):
     def create_result_link(
         self,
diff --git a/tests/unit/test_fetches.py b/tests/unit/test_fetches.py
index e9a58acd..8ff7f742 100644
--- a/tests/unit/test_fetches.py
+++ b/tests/unit/test_fetches.py
@@ -1,12 +1,16 @@
 import unittest
+import pytest
 from unittest.mock import Mock
 
-import pyarrow as pa
+try:
+    import pyarrow as pa
+except ImportError:
+    pa = None
 
 import databricks.sql.client as client
 from databricks.sql.utils import ExecuteResponse, ArrowQueue
 
-
+@pytest.mark.skipif(pa is None, reason="PyArrow is not installed")
 class FetchTests(unittest.TestCase):
     """
     Unit tests for checking the fetch logic.
diff --git a/tests/unit/test_fetches_bench.py b/tests/unit/test_fetches_bench.py
index 9382c3b3..6c5698b3 100644
--- a/tests/unit/test_fetches_bench.py
+++ b/tests/unit/test_fetches_bench.py
@@ -1,7 +1,9 @@
 import unittest
 from unittest.mock import Mock
-
-import pyarrow as pa
+try:
+    import pyarrow as pa
+except ImportError:
+    pa = None
 import uuid
 import time
 import pytest
@@ -9,7 +11,7 @@
 import databricks.sql.client as client
 from databricks.sql.utils import ExecuteResponse, ArrowQueue
 
-
+@pytest.mark.skipif(pa is None, reason="PyArrow is not installed")
 class FetchBenchmarkTests(unittest.TestCase):
     """
     Micro benchmark test for Arrow result handling.
diff --git a/tests/unit/test_thrift_backend.py b/tests/unit/test_thrift_backend.py
index 293467af..135f4229 100644
--- a/tests/unit/test_thrift_backend.py
+++ b/tests/unit/test_thrift_backend.py
@@ -2,12 +2,14 @@
 from decimal import Decimal
 import itertools
 import unittest
+import pytest
 from unittest.mock import patch, MagicMock, Mock
 from ssl import CERT_NONE, CERT_REQUIRED
 from urllib3 import HTTPSConnectionPool
-
-import pyarrow
-
+try:
+    import pyarrow
+except ImportError:
+    pyarrow = None
 import databricks.sql
 from databricks.sql import utils
 from databricks.sql.types import SSLOptions
@@ -26,7 +28,7 @@ def retry_policy_factory():
         "_retry_delay_default": (float, 5, 1, 60),
     }
 
-
+@pytest.mark.skipif(pyarrow is None, reason="PyArrow is not installed")
 class ThriftBackendTestSuite(unittest.TestCase):
     okay_status = ttypes.TStatus(statusCode=ttypes.TStatusCode.SUCCESS_STATUS)
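
Every test module touched by this patch follows the same guard pattern: import pyarrow inside a try/except, bind the name to None when the import fails, and gate the Arrow-dependent test class with pytest.mark.skipif. Combined with the workflow change above, the base CI job (installed without --all-extras) skips these classes, while the run-unit-tests-with-arrow job runs them. Below is a minimal standalone sketch of that pattern; the module and test names are illustrative only and are not part of the connector's test suite.

import unittest

import pytest

try:
    import pyarrow as pa
except ImportError:
    # pyarrow is an optional extra; when it is absent the classes
    # marked below are skipped rather than failing at import time.
    pa = None


@pytest.mark.skipif(pa is None, reason="PyArrow is not installed")
class OptionalArrowTests(unittest.TestCase):
    def test_table_round_trip(self):
        # Touches pyarrow only when the guard above succeeded.
        table = pa.Table.from_pydict({"n": [1, 2, 3]})
        self.assertEqual(table.num_rows, 3)

Unlike a module-level pytest.importorskip("pyarrow"), this form lets the module import cleanly everywhere and applies the skip per class, so non-Arrow tests in the same file still run.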