diff --git a/api/fastapi/dataapi/main.py b/api/fastapi/dataapi/main.py
index ed4b4314..19c9d2fb 100644
--- a/api/fastapi/dataapi/main.py
+++ b/api/fastapi/dataapi/main.py
@@ -1,6 +1,7 @@
 from fastapi import FastAPI
 
 from .routers import measurements
+from .routers import aggregation
 
 from .config import settings
 
@@ -10,6 +11,7 @@ app = FastAPI()
 
 app.include_router(measurements.router, prefix="/api")
+app.include_router(aggregation.router, prefix="/api")
 
 
 @app.get("/")
diff --git a/api/fastapi/dataapi/routers/aggregation.py b/api/fastapi/dataapi/routers/aggregation.py
new file mode 100644
index 00000000..ce908995
--- /dev/null
+++ b/api/fastapi/dataapi/routers/aggregation.py
@@ -0,0 +1,407 @@
+"""
+Aggregation API
+The routes are mounted under /api
+"""
+
+from datetime import datetime, timedelta, date
+from typing import List, Any, Dict, Optional, Union
+import logging
+
+from fastapi import APIRouter, Depends, Query, Request
+from fastapi.responses import Response
+from pydantic import BaseModel
+from typing_extensions import Annotated
+
+# debdeps: python3-sqlalchemy
+from sqlalchemy.sql.expression import and_, select, column
+from sqlalchemy.sql.expression import table as sql_table
+from sqlalchemy.sql.expression import text as sql_text
+
+from ..config import settings, metrics
+from ..utils import (
+    jerror,
+    convert_to_csv,
+    commasplit,
+    query_click,
+    query_click_one_row,
+)
+from ..dependencies import ClickhouseClient, get_clickhouse_client
+
+router = APIRouter()
+
+log = logging.getLogger()
+
+
+def set_dload(resp, fname: str):
+    """Add a Content-Disposition header to make the response downloadable"""
+    resp.headers["Content-Disposition"] = f"attachment; filename={fname}"
+
+
+def group_by_date(since, until, time_grain, cols, colnames, group_by):
+    if since and until:
+        delta = until - since
+    else:
+        delta = None
+
+    # When time_grain is "auto" or empty, the smallest allowed granularity
+    # for the given time range is used
+    ranges = (
+        (7, ("hour", "day", "auto")),
+        (30, ("day", "week", "auto")),
+        (365, ("day", "week", "month", "auto")),
+        (9999999, ("day", "week", "month", "year", "auto")),
+    )
+    if delta is None or delta <= timedelta():
+        raise Exception("Invalid since and until values")
+
+    for thresh, allowed in ranges:
+        if delta > timedelta(days=thresh):
+            continue
+        if time_grain not in allowed:
+            a = ", ".join(allowed)
+            msg = f"Choose time_grain among {a} for the given time range"
+            raise Exception(msg)
+        if time_grain == "auto":
+            time_grain = allowed[0]
+        break
+
+    # TODO: check around query weight / response size.
+    # Also add support in CSV format.
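+    # Map each time grain to the ClickHouse function that truncates
+    # measurement_start_time down to the start of that period.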
+    gmap = dict(
+        hour="toStartOfHour",
+        day="toDate",
+        week="toStartOfWeek",
+        month="toStartOfMonth",
+        year="toStartOfYear",
+    )
+    fun = gmap[time_grain]
+    tcol = "measurement_start_day"  # TODO: support dynamic axis names
+    cols.append(sql_text(f"{fun}(measurement_start_time) AS {tcol}"))
+    colnames.append(tcol)
+    group_by.append(column(tcol))
+    return time_grain
+
+
+def validate_axis_name(axis):
+    valid = (
+        "blocking_type",
+        "category_code",
+        "domain",
+        "input",
+        "measurement_start_day",
+        "probe_asn",
+        "probe_cc",
+        "test_name",
+    )
+    if axis not in valid:
+        raise ValueError("Invalid axis name")
+
+
+def add_axis(axis, cols, colnames, group_by):
+    if axis == "blocking_type":
+        # TODO: use blocking_type column
+        t = "JSONExtractString(scores, 'analysis', 'blocking_type') AS blocking_type"
+        cols.append(sql_text(t))
+    else:
+        validate_axis_name(axis)
+        cols.append(sql_text(axis))
+    colnames.append(axis)
+    group_by.append(column(axis))
+
+
+class DBStats(BaseModel):
+    row_count: int
+    bytes: int
+    total_row_count: int
+    elapsed_seconds: float
+
+
+class AggregationResult(BaseModel):
+    anomaly_count: int
+    confirmed_count: int
+    failure_count: int
+    ok_count: int
+    measurement_count: int
+    measurement_start_day: Optional[date] = None
+    blocking_type: Optional[str] = None
+    category_code: Optional[str] = None
+    domain: Optional[str] = None
+    input: Optional[str] = None
+    probe_cc: Optional[str] = None
+    probe_asn: Optional[str] = None
+    test_name: Optional[str] = None
+
+
+class MeasurementAggregation(BaseModel):
+    v: int
+    dimension_count: int
+    db_stats: DBStats
+    result: List[AggregationResult]
+
+
+@router.get("/v1/aggregation")
+@metrics.timer("get_aggregated")
+async def get_measurements(
+    db: Annotated[ClickhouseClient, Depends(get_clickhouse_client)],
+    response: Response,
+    request: Request,
+    input: Annotated[
+        Optional[str],
+        Query(
+            min_length=3,
+            description="The input (for example a URL or IP address) to search measurements for",
+        ),
+    ] = None,
+    domain: Annotated[
+        Optional[str],
+        Query(
+            min_length=3,
+            description="Domains to search measurements for, comma separated",
+        ),
+    ] = None,
+    category_code: Annotated[
+        Optional[str],
+        Query(
+            description="The category code from the citizenlab list",
+            regex=r"^[A-Z]+$",
+        ),
+    ] = None,
+    probe_cc: Annotated[
+        Optional[str],
+        Query(
+            min_length=2,
+            description="Two-letter capitalized country codes, comma separated",
+        ),
+    ] = None,
+    probe_asn: Annotated[
+        Optional[str],
+        Query(
+            description="Autonomous system numbers in the format ASxxx, comma separated"
+        ),
+    ] = None,
+    test_name: Annotated[
+        Optional[str], Query(description="Names of the tests, comma separated")
+    ] = None,
+    ooni_run_link_id: Annotated[
+        Optional[str], Query(description="OONI Run link IDs, comma separated")
+    ] = None,
+    since: Annotated[
+        Optional[date],
+        Query(
+            description="""The start date of when measurements were run (ex. "2016-10-20")"""
+        ),
+    ] = None,
+    until: Annotated[
+        Optional[date],
+        Query(
+            description="""The end date of when measurements were run (ex. "2016-10-20")"""
+        ),
+    ] = None,
+    time_grain: Annotated[
+        Optional[str],
+        Query(
+            description="Time granularity. Used only when the X or Y axis represents time.",
+            enum=["hour", "day", "week", "month", "year", "auto"],
+        ),
+    ] = "auto",
+    axis_x: Annotated[
+        Optional[str],
+        Query(
+            description="The dimension on the x axis e.g. 
measurement_start_day", + regex=r"^[a-z_]+$", + ), + ] = None, + axis_y: Annotated[ + Optional[str], + Query( + description="The dimension on the y axis e.g. probe_cc", + regex=r"^[a-z_]+$", + ), + ] = None, + format: Annotated[ + str, + Query( + description="Output format, JSON (default) or CSV", + enum=["JSON", "CSV", "json", "csv"], + ), + ] = "json", + download: Annotated[ + Optional[bool], Query(description="If we should be triggering a file download") + ] = False, +): # TODO(art): figure out how to define either CSV or JSON data format in the response + """Aggregate counters data""" + # TODO: + # better split of large dimensions in output? + # add limit and warn user + test_name_s = [] + if test_name: + test_name_s = commasplit(test_name) + domain_s = [] + if domain: + domain_s = set(commasplit(domain)) + probe_asn_s = [] + if probe_asn: + probe_asn_s = commasplit(probe_asn) + probe_cc_s = [] + if probe_cc: + probe_cc_s = commasplit(probe_cc) + + if since: + since = datetime.combine(since, datetime.min.time()) + if until: + until = datetime.combine(until, datetime.min.time()) + + inp = input or "" + try: + ooni_run_link_id_raw = ooni_run_link_id + resp_format = format.upper() + assert resp_format in ("JSON", "CSV") + + if axis_x is not None: + assert axis_x != axis_y, "Axis X and Y cannot be the same" + + except Exception as e: + return jerror(str(e), v=0, code=200) + + dimension_cnt = int(bool(axis_x)) + int(bool(axis_y)) + cacheable = until and until < datetime.now() - timedelta(hours=72) + cacheable = False # FIXME + + # Assemble query + colnames = [ + "anomaly_count", + "confirmed_count", + "failure_count", + "ok_count", + "measurement_count", + ] + cols = [ + sql_text( + "countIf(anomaly = 't' AND confirmed = 'f' AND msm_failure = 'f') AS anomaly_count" + ), + sql_text("countIf(confirmed = 't' AND msm_failure = 'f') AS confirmed_count"), + sql_text("countIf(msm_failure = 't') AS failure_count"), + sql_text( + "countIf(anomaly = 'f' AND confirmed = 'f' AND msm_failure = 'f') AS ok_count" + ), + sql_text("COUNT(*) AS measurement_count"), + ] + table = sql_table("fastpath") + where = [] + query_params: Dict[str, Any] = {} + + if domain_s: + where.append(sql_text("domain IN :domains")) + query_params["domains"] = domain_s + + if inp: + where.append(sql_text("input = :input")) + query_params["input"] = inp + + if category_code: + where.append(sql_text("citizenlab.category_code = :category_code")) + query_params["category_code"] = category_code + if probe_cc_s: + where.append(sql_text("(citizenlab.cc IN :lccs OR citizenlab.cc = 'ZZ')")) + query_params["lccs"] = [cc.lower() for cc in probe_cc_s] + else: + where.append(sql_text("citizenlab.cc = 'ZZ'")) + + if probe_cc_s: + where.append(sql_text("probe_cc IN :probe_cc_s")) + query_params["probe_cc_s"] = probe_cc_s + + if probe_asn_s: + where.append(sql_text("probe_asn IN :probe_asn_s")) + query_params["probe_asn_s"] = probe_asn_s + + if ooni_run_link_id_raw: + ooni_run_link_id_s = commasplit(ooni_run_link_id_raw) + where.append(sql_text("ooni_run_link_id IN :ooni_run_link_id_s")) + query_params["ooni_run_link_id_s"] = ooni_run_link_id_s + + if since: + where.append(sql_text("measurement_start_time >= :since")) + query_params["since"] = since + + if until: + where.append(sql_text("measurement_start_time < :until")) + query_params["until"] = until + + if test_name_s: + where.append(sql_text("test_name IN :test_name_s")) + query_params["test_name_s"] = test_name_s + + group_by: List = [] + try: + if axis_x == "measurement_start_day": + 
group_by_date(since, until, time_grain, cols, colnames, group_by)
+        elif axis_x:
+            add_axis(axis_x, cols, colnames, group_by)
+
+        if axis_y == "measurement_start_day":
+            group_by_date(since, until, time_grain, cols, colnames, group_by)
+        elif axis_y:
+            add_axis(axis_y, cols, colnames, group_by)
+
+    except Exception as e:
+        return jerror(str(e), v=0)
+
+    # Assemble query
+    if category_code or axis_x == "category_code" or axis_y == "category_code":
+        # Join in the citizenlab table if we need to filter on category_code
+        # or perform group-by on it
+        table = table.join(
+            sql_table("citizenlab"),
+            sql_text("citizenlab.url = fastpath.input"),
+        )
+
+    where_expr = and_(*where)
+    query = select(cols).where(where_expr).select_from(table)  # type: ignore
+
+    # Add group-by
+    for g in group_by:
+        query = query.group_by(g).order_by(g)
+
+    try:
+        if dimension_cnt > 0:
+            r: Any = list(query_click(db, query, query_params, query_prio=4))
+        else:
+            r = query_click_one_row(db, query, query_params, query_prio=4)
+
+        pq = db.last_query
+        assert pq
+        msg = f"Stats: {pq.progress.rows} {pq.progress.bytes} {pq.progress.total_rows} {pq.elapsed}"
+        log.info(msg)
+
+        if cacheable:
+            response.headers["Cache-Control"] = f"max-age={3600 * 24}"
+
+        headers = {}
+        if resp_format == "CSV":
+            csv_data = convert_to_csv(r)
+            if download:
+                headers[
+                    "Content-Disposition"
+                ] = "attachment; filename=ooni-aggregate-data.csv"
+
+            return Response(content=csv_data, media_type="text/csv", headers=headers)
+
+        else:
+            if download:
+                set_dload(response, "ooni-aggregate-data.json")
+            return MeasurementAggregation(
+                v=0,
+                dimension_count=dimension_cnt,
+                db_stats=DBStats(
+                    row_count=pq.progress.rows,
+                    bytes=pq.progress.bytes,
+                    total_row_count=pq.progress.total_rows,
+                    elapsed_seconds=pq.elapsed,
+                ),
+                result=r,
+            )
+
+    except Exception as e:
+        return jerror(str(e), v=0)
diff --git a/api/fastapi/dataapi/routers/measurements.py b/api/fastapi/dataapi/routers/measurements.py
index f31ebf63..20965667 100644
--- a/api/fastapi/dataapi/routers/measurements.py
+++ b/api/fastapi/dataapi/routers/measurements.py
@@ -18,11 +18,12 @@ from fastapi import APIRouter, Depends, Query, HTTPException, Header, Request
 from fastapi.responses import Response, JSONResponse
 
 from pydantic import BaseModel
-from pydantic.functional_validators import AfterValidator
 from typing_extensions import Annotated
 
 # debdeps: python3-sqlalchemy
-from sqlalchemy import and_, text, select, sql, column
+from sqlalchemy.sql.expression import and_, text, select, column
+from sqlalchemy.sql.expression import text as sql_text
+from sqlalchemy.sql.expression import table as sql_table
 from sqlalchemy.exc import OperationalError
 from psycopg2.extensions import QueryCanceledError  # debdeps: python3-psycopg2
 
@@ -58,6 +59,7 @@ class MsmtNotFound(Exception):
     """
 
 
+TODO(art): do we care to have this redirect in place?
@api_msm_blueprint.route("/") def show_apidocs(): Route to https://api.ooni.io/api/ to /apidocs/ @@ -79,7 +81,7 @@ def measurement_uid_to_s3path_linenum(db: ClickhouseClient, measurement_uid: str ) LIMIT 1""" query_params = dict(uid=measurement_uid) - lookup = query_click_one_row(db, sql.text(query), query_params, query_prio=3) + lookup = query_click_one_row(db, sql_text(query), query_params, query_prio=3) if lookup is None: raise HTTPException(status_code=500, detail="Measurement not found") @@ -145,7 +147,7 @@ def report_id_input_to_s3path_linenum(db: ClickhouseClient, report_id: str, inpu PREWHERE report_id = :report_id AND input = :inp LIMIT 1""" query_params = dict(inp=input, report_id=report_id) - lookup = query_click_one_row(db, sql.text(query), query_params, query_prio=3) + lookup = query_click_one_row(db, sql_text(query), query_params, query_prio=3) if lookup is None: m = f"Missing row in jsonl table: {report_id} {input}" @@ -420,7 +422,7 @@ def _get_measurement_meta_clickhouse( """ query_params = dict(input=input_, report_id=report_id) query += "LIMIT 1" - msmt_meta = query_click_one_row(db, sql.text(query), query_params, query_prio=3) + msmt_meta = query_click_one_row(db, sql_text(query), query_params, query_prio=3) if not msmt_meta: return {} # measurement not found if msmt_meta["probe_asn"] == 0: @@ -439,7 +441,7 @@ def _get_measurement_meta_by_uid(db: ClickhouseClient, measurement_uid: str) -> LIMIT 1 """ query_params = dict(uid=measurement_uid) - msmt_meta = query_click_one_row(db, sql.text(query), query_params, query_prio=3) + msmt_meta = query_click_one_row(db, sql_text(query), query_params, query_prio=3) if not msmt_meta: return {} # measurement not found if msmt_meta["probe_asn"] == 0: @@ -759,15 +761,15 @@ async def list_measurements( if since is not None: query_params["since"] = since - fpwhere.append(sql.text("measurement_start_time > :since")) + fpwhere.append(sql_text("measurement_start_time > :since")) if until is not None: query_params["until"] = until - fpwhere.append(sql.text("measurement_start_time <= :until")) + fpwhere.append(sql_text("measurement_start_time <= :until")) if report_id: query_params["report_id"] = report_id - fpwhere.append(sql.text("report_id = :report_id")) + fpwhere.append(sql_text("report_id = :report_id")) if probe_cc: if probe_cc == "ZZ": @@ -777,9 +779,9 @@ async def list_measurements( detail="Refusing list_measurements with probe_cc set to ZZ", ) query_params["probe_cc"] = probe_cc - fpwhere.append(sql.text("probe_cc = :probe_cc")) + fpwhere.append(sql_text("probe_cc = :probe_cc")) else: - fpwhere.append(sql.text("probe_cc != 'ZZ'")) + fpwhere.append(sql_text("probe_cc != 'ZZ'")) if probe_asn is not None: if probe_asn == 0: @@ -789,31 +791,31 @@ async def list_measurements( detail="Refusing list_measurements with probe_asn set to 0", ) query_params["probe_asn"] = probe_asn - fpwhere.append(sql.text("probe_asn = :probe_asn")) + fpwhere.append(sql_text("probe_asn = :probe_asn")) else: # https://ooni.org/post/2020-ooni-probe-asn-incident-report/ # https://github.com/ooni/explorer/issues/495 - fpwhere.append(sql.text("probe_asn != 0")) + fpwhere.append(sql_text("probe_asn != 0")) if test_name is not None: query_params["test_name"] = test_name - fpwhere.append(sql.text("test_name = :test_name")) + fpwhere.append(sql_text("test_name = :test_name")) if software_versions is not None: query_params["software_versions"] = software_versions - fpwhere.append(sql.text("software_version IN :software_versions")) + 
fpwhere.append(sql_text("software_version IN :software_versions")) if test_versions is not None: query_params["test_versions"] = test_versions - fpwhere.append(sql.text("test_version IN :test_versions")) + fpwhere.append(sql_text("test_version IN :test_versions")) if engine_versions is not None: query_params["engine_versions"] = engine_versions - fpwhere.append(sql.text("engine_version IN :engine_versions")) + fpwhere.append(sql_text("engine_version IN :engine_versions")) if ooni_run_link_id is not None: query_params["ooni_run_link_id"] = ooni_run_link_id - fpwhere.append(sql.text("ooni_run_link_id = :ooni_run_link_id")) + fpwhere.append(sql_text("ooni_run_link_id = :ooni_run_link_id")) # Filter on anomaly, confirmed and failure: # The database stores anomaly and confirmed as boolean + NULL and stores @@ -823,43 +825,43 @@ async def list_measurements( # See test_list_measurements_filter_flags_fastpath if anomaly is True: - fpwhere.append(sql.text("fastpath.anomaly = 't'")) + fpwhere.append(sql_text("fastpath.anomaly = 't'")) elif anomaly is False: - fpwhere.append(sql.text("fastpath.anomaly = 'f'")) + fpwhere.append(sql_text("fastpath.anomaly = 'f'")) if confirmed is True: - fpwhere.append(sql.text("fastpath.confirmed = 't'")) + fpwhere.append(sql_text("fastpath.confirmed = 't'")) elif confirmed is False: - fpwhere.append(sql.text("fastpath.confirmed = 'f'")) + fpwhere.append(sql_text("fastpath.confirmed = 'f'")) if failure is True: - fpwhere.append(sql.text("fastpath.msm_failure = 't'")) + fpwhere.append(sql_text("fastpath.msm_failure = 't'")) elif failure is False: - fpwhere.append(sql.text("fastpath.msm_failure = 'f'")) + fpwhere.append(sql_text("fastpath.msm_failure = 'f'")) - fpq_table = sql.table("fastpath") + fpq_table = sql_table("fastpath") if input: # input_ overrides domain and category_code query_params["input"] = input - fpwhere.append(sql.text("input = :input")) + fpwhere.append(sql_text("input = :input")) elif domain or category_code: # both domain and category_code can be set at the same time if domain: query_params["domain"] = domain - fpwhere.append(sql.text("domain = :domain")) + fpwhere.append(sql_text("domain = :domain")) if category_code: query_params["category_code"] = category_code fpq_table = fpq_table.join( - sql.table("citizenlab"), - sql.text("citizenlab.url = fastpath.input"), + sql_table("citizenlab"), + sql_text("citizenlab.url = fastpath.input"), ) - fpwhere.append(sql.text("citizenlab.category_code = :category_code")) + fpwhere.append(sql_text("citizenlab.category_code = :category_code")) fp_query = select("*").where(and_(*fpwhere)).select_from(fpq_table) @@ -980,26 +982,26 @@ async def get_torsf_stats( cacheable = False cols = [ - sql.text("toDate(measurement_start_time) AS measurement_start_day"), + sql_text("toDate(measurement_start_time) AS measurement_start_day"), column("probe_cc"), - sql.text("countIf(anomaly = 't') AS anomaly_count"), - sql.text("countIf(confirmed = 't') AS confirmed_count"), - sql.text("countIf(msm_failure = 't') AS failure_count"), + sql_text("countIf(anomaly = 't') AS anomaly_count"), + sql_text("countIf(confirmed = 't') AS confirmed_count"), + sql_text("countIf(msm_failure = 't') AS failure_count"), ] - table = sql.table("fastpath") - where = [sql.text("test_name = 'torsf'")] + table = sql_table("fastpath") + where = [sql_text("test_name = 'torsf'")] query_params: Dict[str, Any] = {} if probe_cc: - where.append(sql.text("probe_cc = :probe_cc")) + where.append(sql_text("probe_cc = :probe_cc")) query_params["probe_cc"] = probe_cc 
     if since:
-        where.append(sql.text("measurement_start_time > :since"))
+        where.append(sql_text("measurement_start_time > :since"))
         query_params["since"] = since
 
     if until:
-        where.append(sql.text("measurement_start_time <= :until"))
+        where.append(sql_text("measurement_start_time <= :until"))
         query_params["until"] = until
         cacheable = until < datetime.now() - timedelta(hours=72)
diff --git a/api/fastapi/dataapi/utils.py b/api/fastapi/dataapi/utils.py
index 4f10564b..6738302d 100644
--- a/api/fastapi/dataapi/utils.py
+++ b/api/fastapi/dataapi/utils.py
@@ -1,3 +1,5 @@
+from csv import DictWriter
+from io import StringIO
 import logging
 from typing import Dict, List, Optional, Union
 from fastapi.responses import JSONResponse
@@ -5,8 +7,10 @@
 import clickhouse_driver
 import clickhouse_driver.errors
 
-from sqlalchemy import Select, TextClause
 from sqlalchemy.dialects import postgresql
+from sqlalchemy.sql.elements import TextClause
+from sqlalchemy.sql.selectable import Select
+
 from .config import settings
 
@@ -31,7 +35,7 @@ def nocachejson(*a, **kw) -> JSONResponse:
 def jerror(msg, code=400, **kw) -> JSONResponse:
     headers = {"Cache-Control": "no-cache"}
-    return JSONResponse(content=dict(**kw), status_code=code, headers=headers)
+    return JSONResponse(content=dict(msg=msg, **kw), status_code=code, headers=headers)
 
 
 def commasplit(p: str) -> List[str]:
@@ -41,6 +45,28 @@
     return sorted(out)
 
 
+def convert_to_csv(r) -> str:
+    """Convert an aggregation result dict/list to CSV"""
+    csvf = StringIO()
+    if isinstance(r, dict):
+        # 0-dimensional data
+        fieldnames = sorted(r.keys())
+        writer = DictWriter(csvf, fieldnames=fieldnames)
+        writer.writeheader()
+        writer.writerow(r)
+
+    else:
+        # 1- or 2-dimensional data: a list of rows sharing the same keys.
+        # Guard against an empty result set before indexing into it.
+        if not r:
+            return ""
+        fieldnames = sorted(r[0].keys())
+        writer = DictWriter(csvf, fieldnames=fieldnames)
+        writer.writeheader()
+        for row in r:
+            writer.writerow(row)
+
+    result = csvf.getvalue()
+    csvf.close()
+    return result
+
+
 Query = Union[str, TextClause, Select]
diff --git a/api/fastapi/poetry.lock b/api/fastapi/poetry.lock
index 7d274c4b..75b84427 100644
--- a/api/fastapi/poetry.lock
+++ b/api/fastapi/poetry.lock
@@ -616,89 +616,60 @@ files = [
 
 [[package]]
 name = "sqlalchemy"
-version = "2.0.25"
+version = "1.4.51"
 description = "Database Abstraction Library"
 optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "SQLAlchemy-2.0.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4344d059265cc8b1b1be351bfb88749294b87a8b2bbe21dfbe066c4199541ebd"},
-    {file = "SQLAlchemy-2.0.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6f9e2e59cbcc6ba1488404aad43de005d05ca56e069477b33ff74e91b6319735"},
-    {file = "SQLAlchemy-2.0.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84daa0a2055df9ca0f148a64fdde12ac635e30edbca80e87df9b3aaf419e144a"},
-    {file = "SQLAlchemy-2.0.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc8b7dabe8e67c4832891a5d322cec6d44ef02f432b4588390017f5cec186a84"},
-    {file = "SQLAlchemy-2.0.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f5693145220517b5f42393e07a6898acdfe820e136c98663b971906120549da5"},
-    {file = "SQLAlchemy-2.0.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db854730a25db7c956423bb9fb4bdd1216c839a689bf9cc15fada0a7fb2f4570"},
-    {file = "SQLAlchemy-2.0.25-cp310-cp310-win32.whl", hash = "sha256:14a6f68e8fc96e5e8f5647ef6cda6250c780612a573d99e4d881581432ef1669"},
-    {file = "SQLAlchemy-2.0.25-cp310-cp310-win_amd64.whl", hash = 
"sha256:87f6e732bccd7dcf1741c00f1ecf33797383128bd1c90144ac8adc02cbb98643"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:342d365988ba88ada8af320d43df4e0b13a694dbd75951f537b2d5e4cb5cd002"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f37c0caf14b9e9b9e8f6dbc81bc56db06acb4363eba5a633167781a48ef036ed"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa9373708763ef46782d10e950b49d0235bfe58facebd76917d3f5cbf5971aed"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d24f571990c05f6b36a396218f251f3e0dda916e0c687ef6fdca5072743208f5"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75432b5b14dc2fff43c50435e248b45c7cdadef73388e5610852b95280ffd0e9"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:884272dcd3ad97f47702965a0e902b540541890f468d24bd1d98bcfe41c3f018"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-win32.whl", hash = "sha256:e607cdd99cbf9bb80391f54446b86e16eea6ad309361942bf88318bcd452363c"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d505815ac340568fd03f719446a589162d55c52f08abd77ba8964fbb7eb5b5f"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0dacf67aee53b16f365c589ce72e766efaabd2b145f9de7c917777b575e3659d"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b801154027107461ee992ff4b5c09aa7cc6ec91ddfe50d02bca344918c3265c6"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59a21853f5daeb50412d459cfb13cb82c089ad4c04ec208cd14dddd99fc23b39"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29049e2c299b5ace92cbed0c1610a7a236f3baf4c6b66eb9547c01179f638ec5"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b64b183d610b424a160b0d4d880995e935208fc043d0302dd29fee32d1ee3f95"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4f7a7d7fcc675d3d85fbf3b3828ecd5990b8d61bd6de3f1b260080b3beccf215"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-win32.whl", hash = "sha256:cf18ff7fc9941b8fc23437cc3e68ed4ebeff3599eec6ef5eebf305f3d2e9a7c2"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-win_amd64.whl", hash = "sha256:91f7d9d1c4dd1f4f6e092874c128c11165eafcf7c963128f79e28f8445de82d5"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bb209a73b8307f8fe4fe46f6ad5979649be01607f11af1eb94aa9e8a3aaf77f0"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:798f717ae7c806d67145f6ae94dc7c342d3222d3b9a311a784f371a4333212c7"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fdd402169aa00df3142149940b3bf9ce7dde075928c1886d9a1df63d4b8de62"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0d3cab3076af2e4aa5693f89622bef7fa770c6fec967143e4da7508b3dceb9b9"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:74b080c897563f81062b74e44f5a72fa44c2b373741a9ade701d5f789a10ba23"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-win32.whl", hash = "sha256:87d91043ea0dc65ee583026cb18e1b458d8ec5fc0a93637126b5fc0bc3ea68c4"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-win_amd64.whl", hash = 
"sha256:75f99202324383d613ddd1f7455ac908dca9c2dd729ec8584c9541dd41822a2c"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:420362338681eec03f53467804541a854617faed7272fe71a1bfdb07336a381e"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c88f0c7dcc5f99bdb34b4fd9b69b93c89f893f454f40219fe923a3a2fd11625"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3be4987e3ee9d9a380b66393b77a4cd6d742480c951a1c56a23c335caca4ce3"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a159111a0f58fb034c93eeba211b4141137ec4b0a6e75789ab7a3ef3c7e7e3"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8b8cb63d3ea63b29074dcd29da4dc6a97ad1349151f2d2949495418fd6e48db9"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:736ea78cd06de6c21ecba7416499e7236a22374561493b456a1f7ffbe3f6cdb4"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-win32.whl", hash = "sha256:10331f129982a19df4284ceac6fe87353ca3ca6b4ca77ff7d697209ae0a5915e"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-win_amd64.whl", hash = "sha256:c55731c116806836a5d678a70c84cb13f2cedba920212ba7dcad53260997666d"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:605b6b059f4b57b277f75ace81cc5bc6335efcbcc4ccb9066695e515dbdb3900"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:665f0a3954635b5b777a55111ababf44b4fc12b1f3ba0a435b602b6387ffd7cf"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecf6d4cda1f9f6cb0b45803a01ea7f034e2f1aed9475e883410812d9f9e3cfcf"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c51db269513917394faec5e5c00d6f83829742ba62e2ac4fa5c98d58be91662f"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:790f533fa5c8901a62b6fef5811d48980adeb2f51f1290ade8b5e7ba990ba3de"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1b1180cda6df7af84fe72e4530f192231b1f29a7496951db4ff38dac1687202d"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-win32.whl", hash = "sha256:555651adbb503ac7f4cb35834c5e4ae0819aab2cd24857a123370764dc7d7e24"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-win_amd64.whl", hash = "sha256:dc55990143cbd853a5d038c05e79284baedf3e299661389654551bd02a6a68d7"}, - {file = "SQLAlchemy-2.0.25-py3-none-any.whl", hash = "sha256:a86b4240e67d4753dc3092d9511886795b3c2852abe599cffe108952f7af7ac3"}, - {file = "SQLAlchemy-2.0.25.tar.gz", hash = "sha256:a2c69a7664fb2d54b8682dd774c3b54f67f84fa123cf84dda2a5f40dcaa04e08"}, +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2be4e6294c53f2ec8ea36486b56390e3bcaa052bf3a9a47005687ccf376745d1"}, + {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca484ca11c65e05639ffe80f20d45e6be81fbec7683d6c9a15cd421e6e8b340"}, + {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0535d5b57d014d06ceeaeffd816bb3a6e2dddeb670222570b8c4953e2d2ea678"}, + {file = 
"SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af55cc207865d641a57f7044e98b08b09220da3d1b13a46f26487cc2f898a072"}, + {file = "SQLAlchemy-1.4.51-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7deeae5071930abb3669b5185abb6c33ddfd2398f87660fafdb9e6a5fb0f3f2f"}, + {file = "SQLAlchemy-1.4.51-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0892e7ac8bc76da499ad3ee8de8da4d7905a3110b952e2a35a940dab1ffa550e"}, + {file = "SQLAlchemy-1.4.51-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cacc0b2dd7d22a918a9642fc89840a5d3cee18a0e1fe41080b1141b23b10916"}, + {file = "SQLAlchemy-1.4.51-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:245c67c88e63f1523e9216cad6ba3107dea2d3ee19adc359597a628afcabfbcb"}, + {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ec7a0ed9b32afdf337172678a4a0e6419775ba4e649b66f49415615fa47efbd"}, + {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352df882088a55293f621328ec33b6ffca936ad7f23013b22520542e1ab6ad1b"}, + {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:86a22143a4001f53bf58027b044da1fb10d67b62a785fc1390b5c7f089d9838c"}, + {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c37bc677690fd33932182b85d37433845de612962ed080c3e4d92f758d1bd894"}, + {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c55040d8ea65414de7c47f1a23823cd9f3fad0dc93e6b6b728fee81230f817b"}, + {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ef80328e3fee2be0a1abe3fe9445d3a2e52a1282ba342d0dab6edf1fef4707"}, + {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f8cafa6f885a0ff5e39efa9325195217bb47d5929ab0051636610d24aef45ade"}, + {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8f2df79a46e130235bc5e1bbef4de0583fb19d481eaa0bffa76e8347ea45ec6"}, + {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb18549b770351b54e1ab5da37d22bc530b8bfe2ee31e22b9ebe650640d2ef12"}, + {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55e699466106d09f028ab78d3c2e1f621b5ef2c8694598242259e4515715da7c"}, + {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2ad16880ccd971ac8e570550fbdef1385e094b022d6fc85ef3ce7df400dddad3"}, + {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b97fd5bb6b7c1a64b7ac0632f7ce389b8ab362e7bd5f60654c2a418496be5d7f"}, + {file = 
"SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e646b19f47d655261b22df9976e572f588185279970efba3d45c377127d35349"}, + {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3cf56cc36d42908495760b223ca9c2c0f9f0002b4eddc994b24db5fcb86a9e4"}, + {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0d661cff58c91726c601cc0ee626bf167b20cc4d7941c93c5f3ac28dc34ddbea"}, + {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3823dda635988e6744d4417e13f2e2b5fe76c4bf29dd67e95f98717e1b094cad"}, + {file = "SQLAlchemy-1.4.51.tar.gz", hash = "sha256:e7908c2025eb18394e32d65dd02d2e37e17d733cdbe7d78231c2b6d7eb20cdb9"}, ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} -typing-extensions = ">=4.6.0" +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} [package.extras] aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)"] -mysql = ["mysqlclient (>=1.4.0)"] +mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] +mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=8)"] -oracle-oracledb = ["oracledb (>=1.0.1)"] +oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] postgresql = ["psycopg2 (>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.29.1)"] -postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] -postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] -pymysql = ["pymysql"] +pymysql = ["pymysql", "pymysql (<1)"] sqlcipher = ["sqlcipher3_binary"] [[package]] @@ -879,4 +850,4 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "97ed1b2d1a823e1a0283800dfbb94ec4fa6102ed00b8fe75e4a26040519cb8df" +content-hash = "3507739aab8f384dee5853f7bd6b43d11615977b6f6a8b60d3328f24c40f358b" diff --git a/api/fastapi/pyproject.toml b/api/fastapi/pyproject.toml index 668eec47..be6ce1e1 100644 --- a/api/fastapi/pyproject.toml +++ b/api/fastapi/pyproject.toml @@ -9,7 +9,7 @@ readme = "Readme.md" python = "^3.11" fastapi = 
"^0.108.0" clickhouse-driver = "^0.2.6" -sqlalchemy = "^2.0.25" +sqlalchemy = "1.4.51" ujson = "^5.9.0" urllib3 = "^2.1.0" python-dateutil = "^2.8.2"