diff --git a/PKG-INFO b/PKG-INFO index 0831095..fbc9687 100644 --- a/PKG-INFO +++ b/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: tuneinsight -Version: 0.9.0 +Version: 0.9.2 Summary: Diapason is the official Python SDK for the Tune Insight API. Version 0.6.2 targets the API v0.8.0. License: Apache-2.0 Author: Tune Insight SA @@ -15,7 +15,6 @@ Requires-Dist: PyYAML (>=6.0,<7.0) Requires-Dist: attrs (>=21.3.0) Requires-Dist: black (==24.2.0) Requires-Dist: certifi (>=2023.7.22,<2024.0.0) -Requires-Dist: handsdown (>=2.1.0,<3.0.0) Requires-Dist: httpx (>=0.15.4,<0.24.0) Requires-Dist: matplotlib (>=3.5.0,<4.0.0) Requires-Dist: notebook (>=6.4.11,<7.0.0) diff --git a/pyproject.toml b/pyproject.toml index 4f761d5..144e21c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "tuneinsight" -version = "0.9.0" +version = "0.9.2" description = "Diapason is the official Python SDK for the Tune Insight API. Version 0.6.2 targets the API v0.8.0." authors = ["Tune Insight SA"] license = "Apache-2.0" @@ -28,7 +28,6 @@ httpx = ">=0.15.4,<0.24.0" attrs = ">=21.3.0" certifi = "^2023.7.22" black = "24.2.0" -handsdown = "^2.1.0" [tool.poetry.group.dev.dependencies] selenium = "^4.9.1" @@ -36,6 +35,7 @@ wheel = "^0.38.1" docker = "^6.0.1" pylint = "^2.13.2" pyvcf3 = "^1.0.3" # For GWAS .vcf file parsing +pytest = "^8.1.1" [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/src/tuneinsight/api/api-checksum b/src/tuneinsight/api/api-checksum index 1b736f4..e8cd163 100644 --- a/src/tuneinsight/api/api-checksum +++ b/src/tuneinsight/api/api-checksum @@ -1 +1 @@ -b71c951f2eb600cc2a1f073151c9a0c003bd48f1210262ab7df58c9d06e05aba +1543c5968e0e568095cb5eeb44a3c7ab3179d6491f270932e268ab579c8d5948 diff --git a/src/tuneinsight/api/sdk/api/api_admin/get_settings.py b/src/tuneinsight/api/sdk/api/api_admin/get_settings.py new file mode 100644 index 0000000..83cb3c2 --- /dev/null +++ b/src/tuneinsight/api/sdk/api/api_admin/get_settings.py @@ -0,0 
+1,154 @@ +from http import HTTPStatus +from typing import Any, Dict, Optional, Union + +import httpx + +from ... import errors +from ...client import Client +from ...models.error import Error +from ...models.settings import Settings +from ...types import Response + + +def _get_kwargs( + *, + client: Client, +) -> Dict[str, Any]: + url = "{}/settings".format(client.base_url) + + headers: Dict[str, str] = client.get_headers() + cookies: Dict[str, Any] = client.get_cookies() + + return { + "method": "get", + "url": url, + "headers": headers, + "cookies": cookies, + "timeout": client.get_timeout(), + } + + +def _parse_response(*, client: Client, response: httpx.Response) -> Optional[Union[Error, Settings]]: + if response.status_code == HTTPStatus.OK: + response_200 = Settings.from_dict(response.json()) + + return response_200 + if response.status_code == HTTPStatus.BAD_REQUEST: + response_400 = Error.from_dict(response.json()) + + return response_400 + if response.status_code == HTTPStatus.UNAUTHORIZED: + response_401 = Error.from_dict(response.json()) + + return response_401 + if response.status_code == HTTPStatus.FORBIDDEN: + response_403 = Error.from_dict(response.json()) + + return response_403 + if response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR: + response_500 = Error.from_dict(response.json()) + + return response_500 + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(f"Unexpected status code: {response.status_code}") + else: + return None + + +def _build_response(*, client: Client, response: httpx.Response) -> Response[Union[Error, Settings]]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: Client, +) -> Response[Union[Error, Settings]]: + """retrieve the instance settings + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status 
code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Error, Settings]] + """ + + kwargs = _get_kwargs( + client=client, + ) + + response = httpx.request( + verify=client.verify_ssl, + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + *, + client: Client, +) -> Optional[Union[Error, Settings]]: + """retrieve the instance settings + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Error, Settings]] + """ + + return sync_detailed( + client=client, + ).parsed + + +async def asyncio_detailed( + *, + client: Client, +) -> Response[Union[Error, Settings]]: + """retrieve the instance settings + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Error, Settings]] + """ + + kwargs = _get_kwargs( + client=client, + ) + + async with httpx.AsyncClient(verify=client.verify_ssl) as _client: + response = await _client.request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + *, + client: Client, +) -> Optional[Union[Error, Settings]]: + """retrieve the instance settings + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[Union[Error, Settings]] + """ + + return ( + await asyncio_detailed( + client=client, + ) + ).parsed diff --git a/src/tuneinsight/api/sdk/api/api_admin/patch_settings.py b/src/tuneinsight/api/sdk/api/api_admin/patch_settings.py new file mode 100644 index 0000000..e1568c4 --- /dev/null +++ b/src/tuneinsight/api/sdk/api/api_admin/patch_settings.py @@ -0,0 +1,182 @@ +from http import HTTPStatus +from typing import Any, Dict, Optional, Union + +import httpx + +from ... import errors +from ...client import Client +from ...models.error import Error +from ...models.settings import Settings +from ...types import Response + + +def _get_kwargs( + *, + client: Client, + json_body: Settings, +) -> Dict[str, Any]: + url = "{}/settings".format(client.base_url) + + headers: Dict[str, str] = client.get_headers() + cookies: Dict[str, Any] = client.get_cookies() + + json_json_body = json_body.to_dict() + + return { + "method": "patch", + "url": url, + "headers": headers, + "cookies": cookies, + "timeout": client.get_timeout(), + "json": json_json_body, + } + + +def _parse_response(*, client: Client, response: httpx.Response) -> Optional[Union[Error, Settings]]: + if response.status_code == HTTPStatus.OK: + response_200 = Settings.from_dict(response.json()) + + return response_200 + if response.status_code == HTTPStatus.BAD_REQUEST: + response_400 = Error.from_dict(response.json()) + + return response_400 + if response.status_code == HTTPStatus.UNAUTHORIZED: + response_401 = Error.from_dict(response.json()) + + return response_401 + if response.status_code == HTTPStatus.FORBIDDEN: + response_403 = Error.from_dict(response.json()) + + return response_403 + if response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY: + response_422 = Error.from_dict(response.json()) + + return response_422 + if response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR: + response_500 = Error.from_dict(response.json()) + + return response_500 + if client.raise_on_unexpected_status: 
+ raise errors.UnexpectedStatus(f"Unexpected status code: {response.status_code}") + else: + return None + + +def _build_response(*, client: Client, response: httpx.Response) -> Response[Union[Error, Settings]]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: Client, + json_body: Settings, +) -> Response[Union[Error, Settings]]: + """modify the settings + + Args: + json_body (Settings): instance settings that is configurable by the administrator. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Error, Settings]] + """ + + kwargs = _get_kwargs( + client=client, + json_body=json_body, + ) + + response = httpx.request( + verify=client.verify_ssl, + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + *, + client: Client, + json_body: Settings, +) -> Optional[Union[Error, Settings]]: + """modify the settings + + Args: + json_body (Settings): instance settings that is configurable by the administrator. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Error, Settings]] + """ + + return sync_detailed( + client=client, + json_body=json_body, + ).parsed + + +async def asyncio_detailed( + *, + client: Client, + json_body: Settings, +) -> Response[Union[Error, Settings]]: + """modify the settings + + Args: + json_body (Settings): instance settings that is configurable by the administrator. 
+ + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Error, Settings]] + """ + + kwargs = _get_kwargs( + client=client, + json_body=json_body, + ) + + async with httpx.AsyncClient(verify=client.verify_ssl) as _client: + response = await _client.request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + *, + client: Client, + json_body: Settings, +) -> Optional[Union[Error, Settings]]: + """modify the settings + + Args: + json_body (Settings): instance settings that is configurable by the administrator. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Error, Settings]] + """ + + return ( + await asyncio_detailed( + client=client, + json_body=json_body, + ) + ).parsed diff --git a/src/tuneinsight/api/sdk/api/api_computations/get_computation_list.py b/src/tuneinsight/api/sdk/api/api_computations/get_computation_list.py index 9db8d9d..479e9de 100644 --- a/src/tuneinsight/api/sdk/api/api_computations/get_computation_list.py +++ b/src/tuneinsight/api/sdk/api/api_computations/get_computation_list.py @@ -16,7 +16,7 @@ def _get_kwargs( *, client: Client, page: Union[Unset, None, int] = 1, - per_page: Union[Unset, None, int] = 30, + per_page: Union[Unset, None, int] = 50, with_total: Union[Unset, None, bool] = True, order: Union[Unset, None, GetComputationListOrder] = UNSET, sort_by: Union[Unset, None, GetComputationListSortBy] = UNSET, @@ -98,7 +98,7 @@ def sync_detailed( *, client: Client, page: Union[Unset, None, int] = 1, - per_page: Union[Unset, None, int] = 30, + per_page: Union[Unset, None, int] = 50, with_total: Union[Unset, None, bool] = True, 
order: Union[Unset, None, GetComputationListOrder] = UNSET, sort_by: Union[Unset, None, GetComputationListSortBy] = UNSET, @@ -110,7 +110,7 @@ def sync_detailed( Args: page (Union[Unset, None, int]): Default: 1. - per_page (Union[Unset, None, int]): Default: 30. + per_page (Union[Unset, None, int]): Default: 50. with_total (Union[Unset, None, bool]): Default: True. order (Union[Unset, None, GetComputationListOrder]): sort_by (Union[Unset, None, GetComputationListSortBy]): @@ -150,7 +150,7 @@ def sync( *, client: Client, page: Union[Unset, None, int] = 1, - per_page: Union[Unset, None, int] = 30, + per_page: Union[Unset, None, int] = 50, with_total: Union[Unset, None, bool] = True, order: Union[Unset, None, GetComputationListOrder] = UNSET, sort_by: Union[Unset, None, GetComputationListSortBy] = UNSET, @@ -162,7 +162,7 @@ def sync( Args: page (Union[Unset, None, int]): Default: 1. - per_page (Union[Unset, None, int]): Default: 30. + per_page (Union[Unset, None, int]): Default: 50. with_total (Union[Unset, None, bool]): Default: True. order (Union[Unset, None, GetComputationListOrder]): sort_by (Union[Unset, None, GetComputationListSortBy]): @@ -195,7 +195,7 @@ async def asyncio_detailed( *, client: Client, page: Union[Unset, None, int] = 1, - per_page: Union[Unset, None, int] = 30, + per_page: Union[Unset, None, int] = 50, with_total: Union[Unset, None, bool] = True, order: Union[Unset, None, GetComputationListOrder] = UNSET, sort_by: Union[Unset, None, GetComputationListSortBy] = UNSET, @@ -207,7 +207,7 @@ async def asyncio_detailed( Args: page (Union[Unset, None, int]): Default: 1. - per_page (Union[Unset, None, int]): Default: 30. + per_page (Union[Unset, None, int]): Default: 50. with_total (Union[Unset, None, bool]): Default: True. 
order (Union[Unset, None, GetComputationListOrder]): sort_by (Union[Unset, None, GetComputationListSortBy]): @@ -245,7 +245,7 @@ async def asyncio( *, client: Client, page: Union[Unset, None, int] = 1, - per_page: Union[Unset, None, int] = 30, + per_page: Union[Unset, None, int] = 50, with_total: Union[Unset, None, bool] = True, order: Union[Unset, None, GetComputationListOrder] = UNSET, sort_by: Union[Unset, None, GetComputationListSortBy] = UNSET, @@ -257,7 +257,7 @@ async def asyncio( Args: page (Union[Unset, None, int]): Default: 1. - per_page (Union[Unset, None, int]): Default: 30. + per_page (Union[Unset, None, int]): Default: 50. with_total (Union[Unset, None, bool]): Default: True. order (Union[Unset, None, GetComputationListOrder]): sort_by (Union[Unset, None, GetComputationListSortBy]): diff --git a/src/tuneinsight/api/sdk/api/api_computations/get_result_list.py b/src/tuneinsight/api/sdk/api/api_computations/get_result_list.py index 8cf5f9e..3cbb6fb 100644 --- a/src/tuneinsight/api/sdk/api/api_computations/get_result_list.py +++ b/src/tuneinsight/api/sdk/api/api_computations/get_result_list.py @@ -16,12 +16,11 @@ def _get_kwargs( *, client: Client, project_id: Union[Unset, None, str] = UNSET, - session_id: Union[Unset, None, str] = UNSET, owned: Union[Unset, None, bool] = UNSET, tagged: Union[Unset, None, bool] = UNSET, tags: Union[Unset, None, List[str]] = UNSET, page: Union[Unset, None, int] = 1, - per_page: Union[Unset, None, int] = 30, + per_page: Union[Unset, None, int] = 50, with_total: Union[Unset, None, bool] = True, sort_by: Union[Unset, None, GetResultListSortBy] = UNSET, order: Union[Unset, None, GetResultListOrder] = UNSET, @@ -34,8 +33,6 @@ def _get_kwargs( params: Dict[str, Any] = {} params["projectId"] = project_id - params["sessionId"] = session_id - params["owned"] = owned params["tagged"] = tagged @@ -124,12 +121,11 @@ def sync_detailed( *, client: Client, project_id: Union[Unset, None, str] = UNSET, - session_id: Union[Unset, None, str] 
= UNSET, owned: Union[Unset, None, bool] = UNSET, tagged: Union[Unset, None, bool] = UNSET, tags: Union[Unset, None, List[str]] = UNSET, page: Union[Unset, None, int] = 1, - per_page: Union[Unset, None, int] = 30, + per_page: Union[Unset, None, int] = 50, with_total: Union[Unset, None, bool] = True, sort_by: Union[Unset, None, GetResultListSortBy] = UNSET, order: Union[Unset, None, GetResultListOrder] = UNSET, @@ -138,12 +134,11 @@ def sync_detailed( Args: project_id (Union[Unset, None, str]): - session_id (Union[Unset, None, str]): owned (Union[Unset, None, bool]): tagged (Union[Unset, None, bool]): tags (Union[Unset, None, List[str]]): page (Union[Unset, None, int]): Default: 1. - per_page (Union[Unset, None, int]): Default: 30. + per_page (Union[Unset, None, int]): Default: 50. with_total (Union[Unset, None, bool]): Default: True. sort_by (Union[Unset, None, GetResultListSortBy]): order (Union[Unset, None, GetResultListOrder]): @@ -159,7 +154,6 @@ def sync_detailed( kwargs = _get_kwargs( client=client, project_id=project_id, - session_id=session_id, owned=owned, tagged=tagged, tags=tags, @@ -182,12 +176,11 @@ def sync( *, client: Client, project_id: Union[Unset, None, str] = UNSET, - session_id: Union[Unset, None, str] = UNSET, owned: Union[Unset, None, bool] = UNSET, tagged: Union[Unset, None, bool] = UNSET, tags: Union[Unset, None, List[str]] = UNSET, page: Union[Unset, None, int] = 1, - per_page: Union[Unset, None, int] = 30, + per_page: Union[Unset, None, int] = 50, with_total: Union[Unset, None, bool] = True, sort_by: Union[Unset, None, GetResultListSortBy] = UNSET, order: Union[Unset, None, GetResultListOrder] = UNSET, @@ -196,12 +189,11 @@ def sync( Args: project_id (Union[Unset, None, str]): - session_id (Union[Unset, None, str]): owned (Union[Unset, None, bool]): tagged (Union[Unset, None, bool]): tags (Union[Unset, None, List[str]]): page (Union[Unset, None, int]): Default: 1. - per_page (Union[Unset, None, int]): Default: 30. 
+ per_page (Union[Unset, None, int]): Default: 50. with_total (Union[Unset, None, bool]): Default: True. sort_by (Union[Unset, None, GetResultListSortBy]): order (Union[Unset, None, GetResultListOrder]): @@ -217,7 +209,6 @@ def sync( return sync_detailed( client=client, project_id=project_id, - session_id=session_id, owned=owned, tagged=tagged, tags=tags, @@ -233,12 +224,11 @@ async def asyncio_detailed( *, client: Client, project_id: Union[Unset, None, str] = UNSET, - session_id: Union[Unset, None, str] = UNSET, owned: Union[Unset, None, bool] = UNSET, tagged: Union[Unset, None, bool] = UNSET, tags: Union[Unset, None, List[str]] = UNSET, page: Union[Unset, None, int] = 1, - per_page: Union[Unset, None, int] = 30, + per_page: Union[Unset, None, int] = 50, with_total: Union[Unset, None, bool] = True, sort_by: Union[Unset, None, GetResultListSortBy] = UNSET, order: Union[Unset, None, GetResultListOrder] = UNSET, @@ -247,12 +237,11 @@ async def asyncio_detailed( Args: project_id (Union[Unset, None, str]): - session_id (Union[Unset, None, str]): owned (Union[Unset, None, bool]): tagged (Union[Unset, None, bool]): tags (Union[Unset, None, List[str]]): page (Union[Unset, None, int]): Default: 1. - per_page (Union[Unset, None, int]): Default: 30. + per_page (Union[Unset, None, int]): Default: 50. with_total (Union[Unset, None, bool]): Default: True. 
sort_by (Union[Unset, None, GetResultListSortBy]): order (Union[Unset, None, GetResultListOrder]): @@ -268,7 +257,6 @@ async def asyncio_detailed( kwargs = _get_kwargs( client=client, project_id=project_id, - session_id=session_id, owned=owned, tagged=tagged, tags=tags, @@ -289,12 +277,11 @@ async def asyncio( *, client: Client, project_id: Union[Unset, None, str] = UNSET, - session_id: Union[Unset, None, str] = UNSET, owned: Union[Unset, None, bool] = UNSET, tagged: Union[Unset, None, bool] = UNSET, tags: Union[Unset, None, List[str]] = UNSET, page: Union[Unset, None, int] = 1, - per_page: Union[Unset, None, int] = 30, + per_page: Union[Unset, None, int] = 50, with_total: Union[Unset, None, bool] = True, sort_by: Union[Unset, None, GetResultListSortBy] = UNSET, order: Union[Unset, None, GetResultListOrder] = UNSET, @@ -303,12 +290,11 @@ async def asyncio( Args: project_id (Union[Unset, None, str]): - session_id (Union[Unset, None, str]): owned (Union[Unset, None, bool]): tagged (Union[Unset, None, bool]): tags (Union[Unset, None, List[str]]): page (Union[Unset, None, int]): Default: 1. - per_page (Union[Unset, None, int]): Default: 30. + per_page (Union[Unset, None, int]): Default: 50. with_total (Union[Unset, None, bool]): Default: True. sort_by (Union[Unset, None, GetResultListSortBy]): order (Union[Unset, None, GetResultListOrder]): @@ -325,7 +311,6 @@ async def asyncio( await asyncio_detailed( client=client, project_id=project_id, - session_id=session_id, owned=owned, tagged=tagged, tags=tags, diff --git a/src/tuneinsight/api/sdk/api/api_computations/release_result.py b/src/tuneinsight/api/sdk/api/api_computations/release_result.py new file mode 100644 index 0000000..20dca10 --- /dev/null +++ b/src/tuneinsight/api/sdk/api/api_computations/release_result.py @@ -0,0 +1,196 @@ +from http import HTTPStatus +from typing import Any, Dict, Optional, Union + +import httpx + +from ... 
import errors +from ...client import Client +from ...models.error import Error +from ...models.result_content import ResultContent +from ...models.result_release import ResultRelease +from ...types import Response + + +def _get_kwargs( + result_id: str, + *, + client: Client, + json_body: ResultRelease, +) -> Dict[str, Any]: + url = "{}/results/{resultId}/release".format(client.base_url, resultId=result_id) + + headers: Dict[str, str] = client.get_headers() + cookies: Dict[str, Any] = client.get_cookies() + + json_json_body = json_body.to_dict() + + return { + "method": "post", + "url": url, + "headers": headers, + "cookies": cookies, + "timeout": client.get_timeout(), + "json": json_json_body, + } + + +def _parse_response(*, client: Client, response: httpx.Response) -> Optional[Union[Error, ResultContent]]: + if response.status_code == HTTPStatus.OK: + response_200 = ResultContent.from_dict(response.json()) + + return response_200 + if response.status_code == HTTPStatus.BAD_REQUEST: + response_400 = Error.from_dict(response.json()) + + return response_400 + if response.status_code == HTTPStatus.FORBIDDEN: + response_403 = Error.from_dict(response.json()) + + return response_403 + if response.status_code == HTTPStatus.NOT_FOUND: + response_404 = Error.from_dict(response.json()) + + return response_404 + if response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY: + response_422 = Error.from_dict(response.json()) + + return response_422 + if response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR: + response_500 = Error.from_dict(response.json()) + + return response_500 + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(f"Unexpected status code: {response.status_code}") + else: + return None + + +def _build_response(*, client: Client, response: httpx.Response) -> Response[Union[Error, ResultContent]]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + 
parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + result_id: str, + *, + client: Client, + json_body: ResultRelease, +) -> Response[Union[Error, ResultContent]]: + """Re-encrypt a result with for a fresh public key + + Args: + result_id (str): + json_body (ResultRelease): required data to re-encrypt a result + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Error, ResultContent]] + """ + + kwargs = _get_kwargs( + result_id=result_id, + client=client, + json_body=json_body, + ) + + response = httpx.request( + verify=client.verify_ssl, + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + result_id: str, + *, + client: Client, + json_body: ResultRelease, +) -> Optional[Union[Error, ResultContent]]: + """Re-encrypt a result with for a fresh public key + + Args: + result_id (str): + json_body (ResultRelease): required data to re-encrypt a result + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Error, ResultContent]] + """ + + return sync_detailed( + result_id=result_id, + client=client, + json_body=json_body, + ).parsed + + +async def asyncio_detailed( + result_id: str, + *, + client: Client, + json_body: ResultRelease, +) -> Response[Union[Error, ResultContent]]: + """Re-encrypt a result with for a fresh public key + + Args: + result_id (str): + json_body (ResultRelease): required data to re-encrypt a result + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[Union[Error, ResultContent]] + """ + + kwargs = _get_kwargs( + result_id=result_id, + client=client, + json_body=json_body, + ) + + async with httpx.AsyncClient(verify=client.verify_ssl) as _client: + response = await _client.request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + result_id: str, + *, + client: Client, + json_body: ResultRelease, +) -> Optional[Union[Error, ResultContent]]: + """Re-encrypt a result with for a fresh public key + + Args: + result_id (str): + json_body (ResultRelease): required data to re-encrypt a result + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Error, ResultContent]] + """ + + return ( + await asyncio_detailed( + result_id=result_id, + client=client, + json_body=json_body, + ) + ).parsed diff --git a/src/tuneinsight/api/sdk/api/api_datagen/post_synthetic_dataset.py b/src/tuneinsight/api/sdk/api/api_datagen/post_synthetic_dataset.py new file mode 100644 index 0000000..4c411e4 --- /dev/null +++ b/src/tuneinsight/api/sdk/api/api_datagen/post_synthetic_dataset.py @@ -0,0 +1,237 @@ +from http import HTTPStatus +from typing import Any, Dict, Optional, Union + +import httpx + +from ... 
import errors +from ...client import Client +from ...models.data_source import DataSource +from ...models.error import Error +from ...types import UNSET, Response, Unset + + +def _get_kwargs( + *, + client: Client, + data_source_id: str, + table: Union[Unset, None, str] = UNSET, + query: Union[Unset, None, str] = UNSET, + table_name: Union[Unset, None, str] = UNSET, + num_rows: Union[Unset, None, int] = UNSET, +) -> Dict[str, Any]: + url = "{}/synthetic/dataset".format(client.base_url) + + headers: Dict[str, str] = client.get_headers() + cookies: Dict[str, Any] = client.get_cookies() + + params: Dict[str, Any] = {} + params["dataSourceId"] = data_source_id + + params["table"] = table + + params["query"] = query + + params["tableName"] = table_name + + params["numRows"] = num_rows + + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} + + return { + "method": "post", + "url": url, + "headers": headers, + "cookies": cookies, + "timeout": client.get_timeout(), + "params": params, + } + + +def _parse_response(*, client: Client, response: httpx.Response) -> Optional[Union[DataSource, Error]]: + if response.status_code == HTTPStatus.CREATED: + response_201 = DataSource.from_dict(response.json()) + + return response_201 + if response.status_code == HTTPStatus.BAD_REQUEST: + response_400 = Error.from_dict(response.json()) + + return response_400 + if response.status_code == HTTPStatus.FORBIDDEN: + response_403 = Error.from_dict(response.json()) + + return response_403 + if response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR: + response_500 = Error.from_dict(response.json()) + + return response_500 + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(f"Unexpected status code: {response.status_code}") + else: + return None + + +def _build_response(*, client: Client, response: httpx.Response) -> Response[Union[DataSource, Error]]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, 
+ headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: Client, + data_source_id: str, + table: Union[Unset, None, str] = UNSET, + query: Union[Unset, None, str] = UNSET, + table_name: Union[Unset, None, str] = UNSET, + num_rows: Union[Unset, None, int] = UNSET, +) -> Response[Union[DataSource, Error]]: + """Request the creation of a synthetic dataset from a real dataset. + + Args: + data_source_id (str): + table (Union[Unset, None, str]): + query (Union[Unset, None, str]): + table_name (Union[Unset, None, str]): + num_rows (Union[Unset, None, int]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[DataSource, Error]] + """ + + kwargs = _get_kwargs( + client=client, + data_source_id=data_source_id, + table=table, + query=query, + table_name=table_name, + num_rows=num_rows, + ) + + response = httpx.request( + verify=client.verify_ssl, + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + *, + client: Client, + data_source_id: str, + table: Union[Unset, None, str] = UNSET, + query: Union[Unset, None, str] = UNSET, + table_name: Union[Unset, None, str] = UNSET, + num_rows: Union[Unset, None, int] = UNSET, +) -> Optional[Union[DataSource, Error]]: + """Request the creation of a synthetic dataset from a real dataset. + + Args: + data_source_id (str): + table (Union[Unset, None, str]): + query (Union[Unset, None, str]): + table_name (Union[Unset, None, str]): + num_rows (Union[Unset, None, int]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[Union[DataSource, Error]] + """ + + return sync_detailed( + client=client, + data_source_id=data_source_id, + table=table, + query=query, + table_name=table_name, + num_rows=num_rows, + ).parsed + + +async def asyncio_detailed( + *, + client: Client, + data_source_id: str, + table: Union[Unset, None, str] = UNSET, + query: Union[Unset, None, str] = UNSET, + table_name: Union[Unset, None, str] = UNSET, + num_rows: Union[Unset, None, int] = UNSET, +) -> Response[Union[DataSource, Error]]: + """Request the creation of a synthetic dataset from a real dataset. + + Args: + data_source_id (str): + table (Union[Unset, None, str]): + query (Union[Unset, None, str]): + table_name (Union[Unset, None, str]): + num_rows (Union[Unset, None, int]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[DataSource, Error]] + """ + + kwargs = _get_kwargs( + client=client, + data_source_id=data_source_id, + table=table, + query=query, + table_name=table_name, + num_rows=num_rows, + ) + + async with httpx.AsyncClient(verify=client.verify_ssl) as _client: + response = await _client.request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + *, + client: Client, + data_source_id: str, + table: Union[Unset, None, str] = UNSET, + query: Union[Unset, None, str] = UNSET, + table_name: Union[Unset, None, str] = UNSET, + num_rows: Union[Unset, None, int] = UNSET, +) -> Optional[Union[DataSource, Error]]: + """Request the creation of a synthetic dataset from a real dataset. 
+ + Args: + data_source_id (str): + table (Union[Unset, None, str]): + query (Union[Unset, None, str]): + table_name (Union[Unset, None, str]): + num_rows (Union[Unset, None, int]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[DataSource, Error]] + """ + + return ( + await asyncio_detailed( + client=client, + data_source_id=data_source_id, + table=table, + query=query, + table_name=table_name, + num_rows=num_rows, + ) + ).parsed diff --git a/src/tuneinsight/api/sdk/api/api_datasource/post_data_source.py b/src/tuneinsight/api/sdk/api/api_datasource/post_data_source.py index beb73f7..d797ad7 100644 --- a/src/tuneinsight/api/sdk/api/api_datasource/post_data_source.py +++ b/src/tuneinsight/api/sdk/api/api_datasource/post_data_source.py @@ -81,7 +81,7 @@ def sync_detailed( """Add a new datasource. Args: - json_body (DataSourceDefinition): + json_body (DataSourceDefinition): parameters used to create and modify a data source Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. @@ -112,7 +112,7 @@ def sync( """Add a new datasource. Args: - json_body (DataSourceDefinition): + json_body (DataSourceDefinition): parameters used to create and modify a data source Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. @@ -136,7 +136,7 @@ async def asyncio_detailed( """Add a new datasource. Args: - json_body (DataSourceDefinition): + json_body (DataSourceDefinition): parameters used to create and modify a data source Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. @@ -165,7 +165,7 @@ async def asyncio( """Add a new datasource. 
Args: - json_body (DataSourceDefinition): + json_body (DataSourceDefinition): parameters used to create and modify a data source Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. diff --git a/src/tuneinsight/api/sdk/api/api_log/get_log_list.py b/src/tuneinsight/api/sdk/api/api_log/get_log_list.py index d006b9f..c813dea 100644 --- a/src/tuneinsight/api/sdk/api/api_log/get_log_list.py +++ b/src/tuneinsight/api/sdk/api/api_log/get_log_list.py @@ -15,7 +15,7 @@ def _get_kwargs( *, client: Client, page: Union[Unset, None, int] = 1, - per_page: Union[Unset, None, int] = 30, + per_page: Union[Unset, None, int] = 50, with_total: Union[Unset, None, bool] = True, order: Union[Unset, None, GetLogListOrder] = UNSET, ) -> Dict[str, Any]: @@ -81,7 +81,7 @@ def sync_detailed( *, client: Client, page: Union[Unset, None, int] = 1, - per_page: Union[Unset, None, int] = 30, + per_page: Union[Unset, None, int] = 50, with_total: Union[Unset, None, bool] = True, order: Union[Unset, None, GetLogListOrder] = UNSET, ) -> Response[Union[Error, GetLogListResponse200]]: @@ -89,7 +89,7 @@ def sync_detailed( Args: page (Union[Unset, None, int]): Default: 1. - per_page (Union[Unset, None, int]): Default: 30. + per_page (Union[Unset, None, int]): Default: 50. with_total (Union[Unset, None, bool]): Default: True. order (Union[Unset, None, GetLogListOrder]): @@ -121,7 +121,7 @@ def sync( *, client: Client, page: Union[Unset, None, int] = 1, - per_page: Union[Unset, None, int] = 30, + per_page: Union[Unset, None, int] = 50, with_total: Union[Unset, None, bool] = True, order: Union[Unset, None, GetLogListOrder] = UNSET, ) -> Optional[Union[Error, GetLogListResponse200]]: @@ -129,7 +129,7 @@ def sync( Args: page (Union[Unset, None, int]): Default: 1. - per_page (Union[Unset, None, int]): Default: 30. + per_page (Union[Unset, None, int]): Default: 50. with_total (Union[Unset, None, bool]): Default: True. 
order (Union[Unset, None, GetLogListOrder]): @@ -154,7 +154,7 @@ async def asyncio_detailed( *, client: Client, page: Union[Unset, None, int] = 1, - per_page: Union[Unset, None, int] = 30, + per_page: Union[Unset, None, int] = 50, with_total: Union[Unset, None, bool] = True, order: Union[Unset, None, GetLogListOrder] = UNSET, ) -> Response[Union[Error, GetLogListResponse200]]: @@ -162,7 +162,7 @@ async def asyncio_detailed( Args: page (Union[Unset, None, int]): Default: 1. - per_page (Union[Unset, None, int]): Default: 30. + per_page (Union[Unset, None, int]): Default: 50. with_total (Union[Unset, None, bool]): Default: True. order (Union[Unset, None, GetLogListOrder]): @@ -192,7 +192,7 @@ async def asyncio( *, client: Client, page: Union[Unset, None, int] = 1, - per_page: Union[Unset, None, int] = 30, + per_page: Union[Unset, None, int] = 50, with_total: Union[Unset, None, bool] = True, order: Union[Unset, None, GetLogListOrder] = UNSET, ) -> Optional[Union[Error, GetLogListResponse200]]: @@ -200,7 +200,7 @@ async def asyncio( Args: page (Union[Unset, None, int]): Default: 1. - per_page (Union[Unset, None, int]): Default: 30. + per_page (Union[Unset, None, int]): Default: 50. with_total (Union[Unset, None, bool]): Default: True. 
order (Union[Unset, None, GetLogListOrder]): diff --git a/src/tuneinsight/api/sdk/api/api_project/get_project_list.py b/src/tuneinsight/api/sdk/api/api_project/get_project_list.py index 41d8fdf..c810a54 100644 --- a/src/tuneinsight/api/sdk/api/api_project/get_project_list.py +++ b/src/tuneinsight/api/sdk/api/api_project/get_project_list.py @@ -16,7 +16,7 @@ def _get_kwargs( *, client: Client, page: Union[Unset, None, int] = 1, - per_page: Union[Unset, None, int] = 30, + per_page: Union[Unset, None, int] = 50, with_total: Union[Unset, None, bool] = True, sort_by: Union[Unset, None, GetProjectListSortBy] = UNSET, order: Union[Unset, None, GetProjectListOrder] = UNSET, @@ -97,7 +97,7 @@ def sync_detailed( *, client: Client, page: Union[Unset, None, int] = 1, - per_page: Union[Unset, None, int] = 30, + per_page: Union[Unset, None, int] = 50, with_total: Union[Unset, None, bool] = True, sort_by: Union[Unset, None, GetProjectListSortBy] = UNSET, order: Union[Unset, None, GetProjectListOrder] = UNSET, @@ -107,7 +107,7 @@ def sync_detailed( Args: page (Union[Unset, None, int]): Default: 1. - per_page (Union[Unset, None, int]): Default: 30. + per_page (Union[Unset, None, int]): Default: 50. with_total (Union[Unset, None, bool]): Default: True. sort_by (Union[Unset, None, GetProjectListSortBy]): order (Union[Unset, None, GetProjectListOrder]): @@ -143,7 +143,7 @@ def sync( *, client: Client, page: Union[Unset, None, int] = 1, - per_page: Union[Unset, None, int] = 30, + per_page: Union[Unset, None, int] = 50, with_total: Union[Unset, None, bool] = True, sort_by: Union[Unset, None, GetProjectListSortBy] = UNSET, order: Union[Unset, None, GetProjectListOrder] = UNSET, @@ -153,7 +153,7 @@ def sync( Args: page (Union[Unset, None, int]): Default: 1. - per_page (Union[Unset, None, int]): Default: 30. + per_page (Union[Unset, None, int]): Default: 50. with_total (Union[Unset, None, bool]): Default: True. 
sort_by (Union[Unset, None, GetProjectListSortBy]): order (Union[Unset, None, GetProjectListOrder]): @@ -182,7 +182,7 @@ async def asyncio_detailed( *, client: Client, page: Union[Unset, None, int] = 1, - per_page: Union[Unset, None, int] = 30, + per_page: Union[Unset, None, int] = 50, with_total: Union[Unset, None, bool] = True, sort_by: Union[Unset, None, GetProjectListSortBy] = UNSET, order: Union[Unset, None, GetProjectListOrder] = UNSET, @@ -192,7 +192,7 @@ async def asyncio_detailed( Args: page (Union[Unset, None, int]): Default: 1. - per_page (Union[Unset, None, int]): Default: 30. + per_page (Union[Unset, None, int]): Default: 50. with_total (Union[Unset, None, bool]): Default: True. sort_by (Union[Unset, None, GetProjectListSortBy]): order (Union[Unset, None, GetProjectListOrder]): @@ -226,7 +226,7 @@ async def asyncio( *, client: Client, page: Union[Unset, None, int] = 1, - per_page: Union[Unset, None, int] = 30, + per_page: Union[Unset, None, int] = 50, with_total: Union[Unset, None, bool] = True, sort_by: Union[Unset, None, GetProjectListSortBy] = UNSET, order: Union[Unset, None, GetProjectListOrder] = UNSET, @@ -236,7 +236,7 @@ async def asyncio( Args: page (Union[Unset, None, int]): Default: 1. - per_page (Union[Unset, None, int]): Default: 30. + per_page (Union[Unset, None, int]): Default: 50. with_total (Union[Unset, None, bool]): Default: True. 
sort_by (Union[Unset, None, GetProjectListSortBy]): order (Union[Unset, None, GetProjectListOrder]): diff --git a/src/tuneinsight/api/sdk/models/__init__.py b/src/tuneinsight/api/sdk/models/__init__.py index b2592a4..51c2b7d 100644 --- a/src/tuneinsight/api/sdk/models/__init__.py +++ b/src/tuneinsight/api/sdk/models/__init__.py @@ -4,9 +4,7 @@ from .add_columns import AddColumns from .aggregated_dataset_length import AggregatedDatasetLength from .aggregation_strategy import AggregationStrategy -from .api_connection_info import APIConnectionInfo -from .api_connection_info_type import APIConnectionInfoType -from .api_data_source_config import ApiDataSourceConfig +from .api_type import APIType from .apply_mapping import ApplyMapping from .apply_reg_ex import ApplyRegEx from .apply_reg_ex_regex_type import ApplyRegExRegexType @@ -14,8 +12,6 @@ from .as_type import AsType from .as_type_type_map import AsTypeTypeMap from .authorization_status import AuthorizationStatus -from .azure_key_vault_credentials_provider import AzureKeyVaultCredentialsProvider -from .azure_key_vault_credentials_provider_mappings_item import AzureKeyVaultCredentialsProviderMappingsItem from .backup_definition import BackupDefinition from .backup_type import BackupType from .binning_operation import BinningOperation @@ -53,8 +49,7 @@ from .content_type import ContentType from .counts import Counts from .credentials import Credentials -from .credentials_provider import CredentialsProvider -from .credentials_provider_type import CredentialsProviderType +from .credentials_type import CredentialsType from .custom import Custom from .cut import Cut from .data_object import DataObject @@ -63,20 +58,17 @@ from .data_object_visibility_status import DataObjectVisibilityStatus from .data_selection_type import DataSelectionType from .data_source import DataSource -from .data_source_base import DataSourceBase from .data_source_column import DataSourceColumn from .data_source_compound_query import 
DataSourceCompoundQuery from .data_source_config import DataSourceConfig -from .data_source_config_type import DataSourceConfigType from .data_source_consent_type import DataSourceConsentType from .data_source_definition import DataSourceDefinition from .data_source_metadata import DataSourceMetadata from .data_source_query import DataSourceQuery from .data_source_query_preview import DataSourceQueryPreview from .data_source_table import DataSourceTable +from .data_source_type import DataSourceType from .data_source_types_info import DataSourceTypesInfo -from .database_connection_info import DatabaseConnectionInfo -from .database_data_source_config import DatabaseDataSourceConfig from .database_type import DatabaseType from .dataset_schema import DatasetSchema from .dataset_schema_columns import DatasetSchemaColumns @@ -137,10 +129,9 @@ from .hybrid_fl_learning_params import HybridFLLearningParams from .key_info import KeyInfo from .key_switched_computation import KeySwitchedComputation -from .local_credentials_provider import LocalCredentialsProvider from .local_data_selection import LocalDataSelection from .local_data_selection_definition import LocalDataSelectionDefinition -from .local_data_source_config import LocalDataSourceConfig +from .local_data_source_type import LocalDataSourceType from .local_input import LocalInput from .locus_range import LocusRange from .log import Log @@ -159,7 +150,7 @@ from .network_visibility_type import NetworkVisibilityType from .node import Node from .node_status import NodeStatus -from .noise_parameters import NoiseParameters +from .noise_distributions import NoiseDistributions from .one_hot_encoding import OneHotEncoding from .organization import Organization from .organization_coordinates import OrganizationCoordinates @@ -168,21 +159,10 @@ from .participation_status import ParticipationStatus from .phonetic_encoding import PhoneticEncoding from .post_data_object_json_body import PostDataObjectJsonBody -from 
.post_data_source_query_json_body import PostDataSourceQueryJsonBody -from .post_data_source_query_json_body_output_data_objects_shared_i_ds import ( - PostDataSourceQueryJsonBodyOutputDataObjectsSharedIDs, -) -from .post_data_source_query_json_body_parameters import PostDataSourceQueryJsonBodyParameters from .post_llm_request_json_body import PostLlmRequestJsonBody from .post_llm_request_json_body_prompt_args import PostLlmRequestJsonBodyPromptArgs from .post_mock_dataset_method import PostMockDatasetMethod from .post_project_data_json_body import PostProjectDataJsonBody -from .post_project_data_source_query_json_body import PostProjectDataSourceQueryJsonBody -from .post_project_data_source_query_json_body_aggregation_type import PostProjectDataSourceQueryJsonBodyAggregationType -from .post_project_data_source_query_json_body_output_data_objects_shared_i_ds import ( - PostProjectDataSourceQueryJsonBodyOutputDataObjectsSharedIDs, -) -from .post_project_data_source_query_json_body_parameters import PostProjectDataSourceQueryJsonBodyParameters from .post_protocol_message_multipart_data import PostProtocolMessageMultipartData from .post_user_response_201 import PostUserResponse201 from .prediction import Prediction @@ -198,7 +178,6 @@ from .private_search_setup import PrivateSearchSetup from .project import Project from .project_base import ProjectBase -from .project_base_workflow_type import ProjectBaseWorkflowType from .project_computation import ProjectComputation from .project_definition import ProjectDefinition from .project_status import ProjectStatus @@ -222,6 +201,7 @@ from .result_contextual_info import ResultContextualInfo from .result_definition import ResultDefinition from .result_metadata import ResultMetadata +from .result_release import ResultRelease from .rot_key_gen import RotKeyGen from .rot_key_gen_rotations_item import RotKeyGenRotationsItem from .run_mode import RunMode @@ -234,6 +214,7 @@ from .set_index import SetIndex from .set_intersection 
import SetIntersection from .set_intersection_output_format import SetIntersectionOutputFormat +from .settings import Settings from .setup_session import SetupSession from .statistic_base import StatisticBase from .statistic_definition import StatisticDefinition @@ -266,15 +247,14 @@ from .workflow_item import WorkflowItem from .workflow_item_data import WorkflowItemData from .workflow_item_position import WorkflowItemPosition +from .workflow_type import WorkflowType __all__ = ( "AccessScope", "AddColumns", "AggregatedDatasetLength", "AggregationStrategy", - "APIConnectionInfo", - "APIConnectionInfoType", - "ApiDataSourceConfig", + "APIType", "ApplyMapping", "ApplyRegEx", "ApplyRegExRegexType", @@ -282,8 +262,6 @@ "AsType", "AsTypeTypeMap", "AuthorizationStatus", - "AzureKeyVaultCredentialsProvider", - "AzureKeyVaultCredentialsProviderMappingsItem", "BackupDefinition", "BackupType", "BinningOperation", @@ -319,12 +297,9 @@ "ContentType", "Counts", "Credentials", - "CredentialsProvider", - "CredentialsProviderType", + "CredentialsType", "Custom", "Cut", - "DatabaseConnectionInfo", - "DatabaseDataSourceConfig", "DatabaseType", "DataObject", "DataObjectCreationMethod", @@ -336,17 +311,16 @@ "DatasetStatistics", "DatasetValidation", "DataSource", - "DataSourceBase", "DataSourceColumn", "DataSourceCompoundQuery", "DataSourceConfig", - "DataSourceConfigType", "DataSourceConsentType", "DataSourceDefinition", "DataSourceMetadata", "DataSourceQuery", "DataSourceQueryPreview", "DataSourceTable", + "DataSourceType", "DataSourceTypesInfo", "DeviationSquares", "DistributedJoin", @@ -403,10 +377,9 @@ "HybridFLLearningParams", "KeyInfo", "KeySwitchedComputation", - "LocalCredentialsProvider", "LocalDataSelection", "LocalDataSelectionDefinition", - "LocalDataSourceConfig", + "LocalDataSourceType", "LocalInput", "LocusRange", "Log", @@ -425,7 +398,7 @@ "NetworkVisibilityType", "Node", "NodeStatus", - "NoiseParameters", + "NoiseDistributions", "OneHotEncoding", "Organization", 
"OrganizationCoordinates", @@ -434,17 +407,10 @@ "ParticipationStatus", "PhoneticEncoding", "PostDataObjectJsonBody", - "PostDataSourceQueryJsonBody", - "PostDataSourceQueryJsonBodyOutputDataObjectsSharedIDs", - "PostDataSourceQueryJsonBodyParameters", "PostLlmRequestJsonBody", "PostLlmRequestJsonBodyPromptArgs", "PostMockDatasetMethod", "PostProjectDataJsonBody", - "PostProjectDataSourceQueryJsonBody", - "PostProjectDataSourceQueryJsonBodyAggregationType", - "PostProjectDataSourceQueryJsonBodyOutputDataObjectsSharedIDs", - "PostProjectDataSourceQueryJsonBodyParameters", "PostProtocolMessageMultipartData", "PostUserResponse201", "Prediction", @@ -460,7 +426,6 @@ "PrivateSearchSetup", "Project", "ProjectBase", - "ProjectBaseWorkflowType", "ProjectComputation", "ProjectDefinition", "ProjectStatus", @@ -484,6 +449,7 @@ "ResultContextualInfo", "ResultDefinition", "ResultMetadata", + "ResultRelease", "RotKeyGen", "RotKeyGenRotationsItem", "RunMode", @@ -496,6 +462,7 @@ "SetIndex", "SetIntersection", "SetIntersectionOutputFormat", + "Settings", "SetupSession", "StatisticalAggregation", "StatisticalQuantity", @@ -528,4 +495,5 @@ "WorkflowItem", "WorkflowItemData", "WorkflowItemPosition", + "WorkflowType", ) diff --git a/src/tuneinsight/api/sdk/models/add_columns.py b/src/tuneinsight/api/sdk/models/add_columns.py index d6c0ec4..331ce6a 100644 --- a/src/tuneinsight/api/sdk/models/add_columns.py +++ b/src/tuneinsight/api/sdk/models/add_columns.py @@ -13,29 +13,29 @@ class AddColumns: """ Attributes: type (PreprocessingOperationType): type of preprocessing operation - sep (Union[Unset, str]): separator when the added columns are not numerical input_columns (Union[Unset, List[str]]): the columns to add together numerical (Union[Unset, bool]): whether or not the output columns are numerical output (Union[Unset, str]): column to use as output + sep (Union[Unset, str]): separator when the added columns are not numerical """ type: PreprocessingOperationType - sep: Union[Unset, 
str] = UNSET input_columns: Union[Unset, List[str]] = UNSET numerical: Union[Unset, bool] = UNSET output: Union[Unset, str] = UNSET + sep: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value - sep = self.sep input_columns: Union[Unset, List[str]] = UNSET if not isinstance(self.input_columns, Unset): input_columns = self.input_columns numerical = self.numerical output = self.output + sep = self.sep field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) @@ -44,14 +44,14 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if sep is not UNSET: - field_dict["sep"] = sep if input_columns is not UNSET: field_dict["inputColumns"] = input_columns if numerical is not UNSET: field_dict["numerical"] = numerical if output is not UNSET: field_dict["output"] = output + if sep is not UNSET: + field_dict["sep"] = sep return field_dict @@ -60,20 +60,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = PreprocessingOperationType(d.pop("type")) - sep = d.pop("sep", UNSET) - input_columns = cast(List[str], d.pop("inputColumns", UNSET)) numerical = d.pop("numerical", UNSET) output = d.pop("output", UNSET) + sep = d.pop("sep", UNSET) + add_columns = cls( type=type, - sep=sep, input_columns=input_columns, numerical=numerical, output=output, + sep=sep, ) add_columns.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/aggregated_dataset_length.py b/src/tuneinsight/api/sdk/models/aggregated_dataset_length.py index 133b350..e312ec9 100644 --- a/src/tuneinsight/api/sdk/models/aggregated_dataset_length.py +++ b/src/tuneinsight/api/sdk/models/aggregated_dataset_length.py @@ -22,6 +22,8 @@ class AggregatedDatasetLength: """ Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. 
Default: -1.0. input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -32,14 +34,7 @@ class AggregatedDatasetLength: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -48,99 +43,104 @@ class AggregatedDatasetLength: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. 
+ preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. 
- end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. features (Union[Unset, str]): Shared identifier of a data object. """ type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET 
project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET features: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = 
self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() - + wait = self.wait features = self.features field_dict: Dict[str, Any] = {} @@ -150,46 +150,46 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + 
field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait if features is not UNSET: field_dict["features"] = features @@ -205,6 +205,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -212,6 +214,17 @@ def from_dict(cls: Type[T], src_dict: 
Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -219,24 +232,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -249,56 +256,49 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, 
Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) - - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) + wait = d.pop("wait", UNSET) features = d.pop("features", UNSET) aggregated_dataset_length = cls( type=type, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, + 
wait=wait, features=features, ) diff --git a/src/tuneinsight/api/sdk/models/api_type.py b/src/tuneinsight/api/sdk/models/api_type.py new file mode 100644 index 0000000..c8cbf4d --- /dev/null +++ b/src/tuneinsight/api/sdk/models/api_type.py @@ -0,0 +1,12 @@ +from enum import Enum + + +class APIType(str, Enum): + ELASTIC = "elastic" + MISP = "misp" + HUBSPOT = "hubspot" + VIRTUOSO = "virtuoso" + GENERIC = "generic" + + def __str__(self) -> str: + return str(self.value) diff --git a/src/tuneinsight/api/sdk/models/backup_definition.py b/src/tuneinsight/api/sdk/models/backup_definition.py index 2af8592..a5b0b6f 100644 --- a/src/tuneinsight/api/sdk/models/backup_definition.py +++ b/src/tuneinsight/api/sdk/models/backup_definition.py @@ -17,22 +17,21 @@ class BackupDefinition: """backup parameters Attributes: - encrypt (Union[Unset, bool]): whether or not to encrypt the backup encryption_key (Union[Unset, str]): b64 encoded encryption in case the backup needs to be encrypted path (Union[Unset, str]): path to the local backup directory s_3_parameters (Union[Unset, S3Parameters]): parameters for the remote s3-compatible storage type (Union[Unset, BackupType]): enumeration of backup types + encrypt (Union[Unset, bool]): whether or not to encrypt the backup """ - encrypt: Union[Unset, bool] = UNSET encryption_key: Union[Unset, str] = UNSET path: Union[Unset, str] = UNSET s_3_parameters: Union[Unset, "S3Parameters"] = UNSET type: Union[Unset, BackupType] = UNSET + encrypt: Union[Unset, bool] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - encrypt = self.encrypt encryption_key = self.encryption_key path = self.path s_3_parameters: Union[Unset, Dict[str, Any]] = UNSET @@ -43,11 +42,11 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.type, Unset): type = self.type.value + encrypt = self.encrypt + field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) 
- if encrypt is not UNSET: - field_dict["encrypt"] = encrypt if encryption_key is not UNSET: field_dict["encryptionKey"] = encryption_key if path is not UNSET: @@ -56,6 +55,8 @@ def to_dict(self) -> Dict[str, Any]: field_dict["s3Parameters"] = s_3_parameters if type is not UNSET: field_dict["type"] = type + if encrypt is not UNSET: + field_dict["encrypt"] = encrypt return field_dict @@ -64,8 +65,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.s3_parameters import S3Parameters d = src_dict.copy() - encrypt = d.pop("encrypt", UNSET) - encryption_key = d.pop("encryptionKey", UNSET) path = d.pop("path", UNSET) @@ -84,12 +83,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: type = BackupType(_type) + encrypt = d.pop("encrypt", UNSET) + backup_definition = cls( - encrypt=encrypt, encryption_key=encryption_key, path=path, s_3_parameters=s_3_parameters, type=type, + encrypt=encrypt, ) backup_definition.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/binning_operation.py b/src/tuneinsight/api/sdk/models/binning_operation.py index 8ac0f52..06951e8 100644 --- a/src/tuneinsight/api/sdk/models/binning_operation.py +++ b/src/tuneinsight/api/sdk/models/binning_operation.py @@ -17,8 +17,6 @@ class BinningOperation: """Dataset binning operation definition Attributes: - aggregated_columns (Union[Unset, List[str]]): list of numerical columns to aggregate per bin when binning is - done, if unspecified binning only counts the number of rows categories (Union[Unset, List[str]]): list of categories when groupByType is 'category' count_columns (Union[Unset, List['CategoricalColumn']]): list of categorical on which to count the number of records per bin per matching value @@ -28,22 +26,20 @@ class BinningOperation: range_values (Union[Unset, List[float]]): list of cuts to use when groupByType is 'range' ([x,y] => creating 3 bins [v < x, x <= v < y, y <= v]) target_column (Union[Unset, str]): column targeted by the 
binning operation + aggregated_columns (Union[Unset, List[str]]): list of numerical columns to aggregate per bin when binning is + done, if unspecified binning only counts the number of rows """ - aggregated_columns: Union[Unset, List[str]] = UNSET categories: Union[Unset, List[str]] = UNSET count_columns: Union[Unset, List["CategoricalColumn"]] = UNSET group_by_type: Union[Unset, GroupByType] = UNSET keep_non_categorized_items: Union[Unset, bool] = True range_values: Union[Unset, List[float]] = UNSET target_column: Union[Unset, str] = UNSET + aggregated_columns: Union[Unset, List[str]] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - aggregated_columns: Union[Unset, List[str]] = UNSET - if not isinstance(self.aggregated_columns, Unset): - aggregated_columns = self.aggregated_columns - categories: Union[Unset, List[str]] = UNSET if not isinstance(self.categories, Unset): categories = self.categories @@ -66,12 +62,13 @@ def to_dict(self) -> Dict[str, Any]: range_values = self.range_values target_column = self.target_column + aggregated_columns: Union[Unset, List[str]] = UNSET + if not isinstance(self.aggregated_columns, Unset): + aggregated_columns = self.aggregated_columns field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if aggregated_columns is not UNSET: - field_dict["aggregatedColumns"] = aggregated_columns if categories is not UNSET: field_dict["categories"] = categories if count_columns is not UNSET: @@ -84,6 +81,8 @@ def to_dict(self) -> Dict[str, Any]: field_dict["rangeValues"] = range_values if target_column is not UNSET: field_dict["targetColumn"] = target_column + if aggregated_columns is not UNSET: + field_dict["aggregatedColumns"] = aggregated_columns return field_dict @@ -92,8 +91,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.categorical_column import CategoricalColumn d = src_dict.copy() - 
aggregated_columns = cast(List[str], d.pop("aggregatedColumns", UNSET)) - categories = cast(List[str], d.pop("categories", UNSET)) count_columns = [] @@ -116,14 +113,16 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: target_column = d.pop("targetColumn", UNSET) + aggregated_columns = cast(List[str], d.pop("aggregatedColumns", UNSET)) + binning_operation = cls( - aggregated_columns=aggregated_columns, categories=categories, count_columns=count_columns, group_by_type=group_by_type, keep_non_categorized_items=keep_non_categorized_items, range_values=range_values, target_column=target_column, + aggregated_columns=aggregated_columns, ) binning_operation.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/binning_parameters.py b/src/tuneinsight/api/sdk/models/binning_parameters.py index 2ef5ebe..c43c231 100644 --- a/src/tuneinsight/api/sdk/models/binning_parameters.py +++ b/src/tuneinsight/api/sdk/models/binning_parameters.py @@ -13,9 +13,6 @@ class BinningParameters: """parameters used to bin data Attributes: - bin_size (Union[Unset, float]): size of bins - bin_size_precision (Union[Unset, None, int]): number of decimals for generated range categories - categories (Union[Unset, List[str]]): specified categories when method is specifiedCategories default_category (Union[Unset, str]): category to assign when an item does not fall in a specific category Default: 'other'. 
method (Union[Unset, BinningParametersMethod]): describes whether binning is done automatically or by specified @@ -23,52 +20,49 @@ class BinningParameters: found, a category is created - "rangeBins" binning is done according to provided range parameters - "specifiedCategories" binning is done on specified categories, values that do not fall in provided categories are tagged as 'other' + bin_size (Union[Unset, float]): size of bins + bin_size_precision (Union[Unset, None, int]): number of decimals for generated range categories + categories (Union[Unset, List[str]]): specified categories when method is specifiedCategories """ + default_category: Union[Unset, str] = "other" + method: Union[Unset, BinningParametersMethod] = UNSET bin_size: Union[Unset, float] = UNSET bin_size_precision: Union[Unset, None, int] = 0 categories: Union[Unset, List[str]] = UNSET - default_category: Union[Unset, str] = "other" - method: Union[Unset, BinningParametersMethod] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: + default_category = self.default_category + method: Union[Unset, str] = UNSET + if not isinstance(self.method, Unset): + method = self.method.value + bin_size = self.bin_size bin_size_precision = self.bin_size_precision categories: Union[Unset, List[str]] = UNSET if not isinstance(self.categories, Unset): categories = self.categories - default_category = self.default_category - method: Union[Unset, str] = UNSET - if not isinstance(self.method, Unset): - method = self.method.value - field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) + if default_category is not UNSET: + field_dict["defaultCategory"] = default_category + if method is not UNSET: + field_dict["method"] = method if bin_size is not UNSET: field_dict["binSize"] = bin_size if bin_size_precision is not UNSET: field_dict["binSizePrecision"] = bin_size_precision if categories is not UNSET: 
field_dict["categories"] = categories - if default_category is not UNSET: - field_dict["defaultCategory"] = default_category - if method is not UNSET: - field_dict["method"] = method return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - bin_size = d.pop("binSize", UNSET) - - bin_size_precision = d.pop("binSizePrecision", UNSET) - - categories = cast(List[str], d.pop("categories", UNSET)) - default_category = d.pop("defaultCategory", UNSET) _method = d.pop("method", UNSET) @@ -78,12 +72,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: method = BinningParametersMethod(_method) + bin_size = d.pop("binSize", UNSET) + + bin_size_precision = d.pop("binSizePrecision", UNSET) + + categories = cast(List[str], d.pop("categories", UNSET)) + binning_parameters = cls( + default_category=default_category, + method=method, bin_size=bin_size, bin_size_precision=bin_size_precision, categories=categories, - default_category=default_category, - method=method, ) binning_parameters.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/bootstrap.py b/src/tuneinsight/api/sdk/models/bootstrap.py index 6f8e744..f2f22e8 100644 --- a/src/tuneinsight/api/sdk/models/bootstrap.py +++ b/src/tuneinsight/api/sdk/models/bootstrap.py @@ -23,6 +23,8 @@ class Bootstrap: Attributes: type (ComputationType): Type of the computation. value (str): Unique identifier of a data object. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -33,14 +35,7 @@ class Bootstrap: (default) - error: if some values are out of bounds, then the computation is aborted. 
Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -49,98 +44,104 @@ class Bootstrap: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. + preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. 
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. - end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. 
- data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. """ type: ComputationType value: str + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = 
self.type.value value = self.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - 
data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() + wait = self.wait field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) @@ -150,46 +151,46 @@ def to_dict(self) -> Dict[str, Any]: "value": value, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if 
dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait return field_dict @@ -205,6 +206,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: value = d.pop("value") + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -212,6 +215,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -219,24 +233,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = 
d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -249,55 +257,48 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) - - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, 
Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) + wait = d.pop("wait", UNSET) bootstrap = cls( type=type, value=value, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, + wait=wait, ) bootstrap.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/collective_key_gen.py b/src/tuneinsight/api/sdk/models/collective_key_gen.py index fe7b3c0..34092e9 100644 --- a/src/tuneinsight/api/sdk/models/collective_key_gen.py +++ b/src/tuneinsight/api/sdk/models/collective_key_gen.py @@ -22,6 +22,8 @@ class CollectiveKeyGen: """ Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. 
@@ -32,14 +34,7 @@ class CollectiveKeyGen: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -48,96 +43,102 @@ class CollectiveKeyGen: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. 
+ preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. 
- end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. """ type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, 
RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, 
Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() + wait = self.wait field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) @@ -146,46 +147,46 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not 
UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait return field_dict @@ -199,6 +200,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -206,6 +209,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = 
d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -213,24 +227,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -243,54 +251,47 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, 
Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) - - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) + wait = d.pop("wait", UNSET) collective_key_gen = cls( type=type, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, + wait=wait, ) collective_key_gen.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/collective_key_switch.py 
b/src/tuneinsight/api/sdk/models/collective_key_switch.py index 2786eed..4bf1da7 100644 --- a/src/tuneinsight/api/sdk/models/collective_key_switch.py +++ b/src/tuneinsight/api/sdk/models/collective_key_switch.py @@ -23,6 +23,8 @@ class CollectiveKeySwitch: Attributes: type (ComputationType): Type of the computation. cipher_vector (str): Unique identifier of a data object. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -33,14 +35,7 @@ class CollectiveKeySwitch: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. 
If provided, the computation will automatically deduce @@ -49,102 +44,110 @@ class CollectiveKeySwitch: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. + preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. 
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. - end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. target_public_key (Union[Unset, str]): Unique identifier of a data object. + target_public_key_base_64 (Union[Unset, str]): the full target public key in base-64 format. 
""" type: ComputationType cipher_vector: str + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET target_public_key: Union[Unset, str] = UNSET + target_public_key_base_64: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value cipher_vector = self.cipher_vector + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not 
isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - 
data_source_parameters = self.data_source_parameters.to_dict() - + wait = self.wait target_public_key = self.target_public_key + target_public_key_base_64 = self.target_public_key_base_64 field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) @@ -154,48 +157,50 @@ def to_dict(self) -> Dict[str, Any]: "cipherVector": cipher_vector, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if 
dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait if target_public_key is not UNSET: field_dict["targetPublicKey"] = target_public_key + if target_public_key_base_64 is not UNSET: + field_dict["targetPublicKeyBase64"] = target_public_key_base_64 return field_dict @@ -211,6 +216,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: cipher_vector = d.pop("cipherVector") + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -218,6 +225,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, 
Unset): @@ -225,24 +243,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -255,58 +267,54 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = 
d.pop("timeout", UNSET) - - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) + wait = d.pop("wait", UNSET) target_public_key = d.pop("targetPublicKey", UNSET) + target_public_key_base_64 = d.pop("targetPublicKeyBase64", UNSET) + collective_key_switch = cls( type=type, cipher_vector=cipher_vector, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, + wait=wait, target_public_key=target_public_key, + target_public_key_base_64=target_public_key_base_64, ) collective_key_switch.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/column_info.py b/src/tuneinsight/api/sdk/models/column_info.py index 02fc470..001e6e8 100644 --- a/src/tuneinsight/api/sdk/models/column_info.py +++ b/src/tuneinsight/api/sdk/models/column_info.py @@ -18,28 +18,24 @@ class ColumnInfo: """contextual information about a column of the resulting 
matrix Attributes: - value_type (Union[Unset, ColumnInfoValueType]): type of value stored in the column, can either be a count of - rows or a sum of values group_info (Union[Unset, GroupInfo]): information about a column representing a subset of rows in the final result origin_column (Union[Unset, str]): names of the column from which the value is computed origin_value (Union[Unset, str]): when originColumn is a categorical column, original value for the count scope (Union[Unset, ColumnInfoScope]): row set involved in the result, all for all rows, subgroup for a subset depending on a group + value_type (Union[Unset, ColumnInfoValueType]): type of value stored in the column, can either be a count of + rows or a sum of values """ - value_type: Union[Unset, ColumnInfoValueType] = UNSET group_info: Union[Unset, "GroupInfo"] = UNSET origin_column: Union[Unset, str] = UNSET origin_value: Union[Unset, str] = UNSET scope: Union[Unset, ColumnInfoScope] = UNSET + value_type: Union[Unset, ColumnInfoValueType] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - value_type: Union[Unset, str] = UNSET - if not isinstance(self.value_type, Unset): - value_type = self.value_type.value - group_info: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.group_info, Unset): group_info = self.group_info.to_dict() @@ -50,11 +46,13 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.scope, Unset): scope = self.scope.value + value_type: Union[Unset, str] = UNSET + if not isinstance(self.value_type, Unset): + value_type = self.value_type.value + field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if value_type is not UNSET: - field_dict["valueType"] = value_type if group_info is not UNSET: field_dict["groupInfo"] = group_info if origin_column is not UNSET: @@ -63,6 +61,8 @@ def to_dict(self) -> Dict[str, Any]: field_dict["originValue"] = origin_value if 
scope is not UNSET: field_dict["scope"] = scope + if value_type is not UNSET: + field_dict["valueType"] = value_type return field_dict @@ -71,13 +71,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.group_info import GroupInfo d = src_dict.copy() - _value_type = d.pop("valueType", UNSET) - value_type: Union[Unset, ColumnInfoValueType] - if isinstance(_value_type, Unset): - value_type = UNSET - else: - value_type = ColumnInfoValueType(_value_type) - _group_info = d.pop("groupInfo", UNSET) group_info: Union[Unset, GroupInfo] if isinstance(_group_info, Unset): @@ -96,12 +89,19 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: scope = ColumnInfoScope(_scope) + _value_type = d.pop("valueType", UNSET) + value_type: Union[Unset, ColumnInfoValueType] + if isinstance(_value_type, Unset): + value_type = UNSET + else: + value_type = ColumnInfoValueType(_value_type) + column_info = cls( - value_type=value_type, group_info=group_info, origin_column=origin_column, origin_value=origin_value, scope=scope, + value_type=value_type, ) column_info.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/column_schema.py b/src/tuneinsight/api/sdk/models/column_schema.py index d97d80b..e87f5e3 100644 --- a/src/tuneinsight/api/sdk/models/column_schema.py +++ b/src/tuneinsight/api/sdk/models/column_schema.py @@ -15,8 +15,6 @@ class ColumnSchema: """ Attributes: - required (Union[Unset, None, bool]): if set to false, the column will be considered as optional in the dataset. 
- title (Union[Unset, str]): name given to the column for informative purposes checks (Union[Unset, ColumnSchemaChecks]): optional additional checks coerce (Union[Unset, bool]): if set to true, the validation will first coerce the column into the corresponding dtype @@ -26,20 +24,20 @@ class ColumnSchema: supported types: https://pandera.readthedocs.io/en/stable/dtype_validation.html#supported-pandas-datatypes nullable (Union[Unset, bool]): whether the column is allowed to contain null values. + required (Union[Unset, None, bool]): if set to false, the column will be considered as optional in the dataset. + title (Union[Unset, str]): name given to the column for informative purposes """ - required: Union[Unset, None, bool] = UNSET - title: Union[Unset, str] = UNSET checks: Union[Unset, "ColumnSchemaChecks"] = UNSET coerce: Union[Unset, bool] = UNSET description: Union[Unset, str] = UNSET dtype: Union[Unset, str] = UNSET nullable: Union[Unset, bool] = UNSET + required: Union[Unset, None, bool] = UNSET + title: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - required = self.required - title = self.title checks: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.checks, Unset): checks = self.checks.to_dict() @@ -48,14 +46,12 @@ def to_dict(self) -> Dict[str, Any]: description = self.description dtype = self.dtype nullable = self.nullable + required = self.required + title = self.title field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if required is not UNSET: - field_dict["required"] = required - if title is not UNSET: - field_dict["title"] = title if checks is not UNSET: field_dict["checks"] = checks if coerce is not UNSET: @@ -66,6 +62,10 @@ def to_dict(self) -> Dict[str, Any]: field_dict["dtype"] = dtype if nullable is not UNSET: field_dict["nullable"] = nullable + if required is not UNSET: + field_dict["required"] 
= required + if title is not UNSET: + field_dict["title"] = title return field_dict @@ -74,10 +74,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.column_schema_checks import ColumnSchemaChecks d = src_dict.copy() - required = d.pop("required", UNSET) - - title = d.pop("title", UNSET) - _checks = d.pop("checks", UNSET) checks: Union[Unset, ColumnSchemaChecks] if isinstance(_checks, Unset): @@ -93,14 +89,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: nullable = d.pop("nullable", UNSET) + required = d.pop("required", UNSET) + + title = d.pop("title", UNSET) + column_schema = cls( - required=required, - title=title, checks=checks, coerce=coerce, description=description, dtype=dtype, nullable=nullable, + required=required, + title=title, ) column_schema.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/column_schema_checks.py b/src/tuneinsight/api/sdk/models/column_schema_checks.py index 4e9c3c8..d93fe9b 100644 --- a/src/tuneinsight/api/sdk/models/column_schema_checks.py +++ b/src/tuneinsight/api/sdk/models/column_schema_checks.py @@ -16,43 +16,43 @@ class ColumnSchemaChecks: """optional additional checks Attributes: + eq (Union[Unset, Any]): verifies that all values are equal to this value. + gt (Union[Unset, Any]): verifies that all values are greater than this value. in_range (Union[Unset, ColumnSchemaChecksInRange]): - isin (Union[Unset, List[Any]]): + lt (Union[Unset, Any]): verifies that all values are less than this value. str_startswith (Union[Unset, str]): - eq (Union[Unset, Any]): verifies that all values are equal to this value. ge (Union[Unset, Any]): verifies that all values are greater than or equal to this value. - gt (Union[Unset, Any]): verifies that all values are greater than this value. + isin (Union[Unset, List[Any]]): le (Union[Unset, Any]): verifies that all values are less than or equal to this value. - lt (Union[Unset, Any]): verifies that all values are less than this value. 
notin (Union[Unset, List[Any]]): """ + eq: Union[Unset, Any] = UNSET + gt: Union[Unset, Any] = UNSET in_range: Union[Unset, "ColumnSchemaChecksInRange"] = UNSET - isin: Union[Unset, List[Any]] = UNSET + lt: Union[Unset, Any] = UNSET str_startswith: Union[Unset, str] = UNSET - eq: Union[Unset, Any] = UNSET ge: Union[Unset, Any] = UNSET - gt: Union[Unset, Any] = UNSET + isin: Union[Unset, List[Any]] = UNSET le: Union[Unset, Any] = UNSET - lt: Union[Unset, Any] = UNSET notin: Union[Unset, List[Any]] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: + eq = self.eq + gt = self.gt in_range: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.in_range, Unset): in_range = self.in_range.to_dict() + lt = self.lt + str_startswith = self.str_startswith + ge = self.ge isin: Union[Unset, List[Any]] = UNSET if not isinstance(self.isin, Unset): isin = self.isin - str_startswith = self.str_startswith - eq = self.eq - ge = self.ge - gt = self.gt le = self.le - lt = self.lt notin: Union[Unset, List[Any]] = UNSET if not isinstance(self.notin, Unset): notin = self.notin @@ -60,22 +60,22 @@ def to_dict(self) -> Dict[str, Any]: field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) + if eq is not UNSET: + field_dict["eq"] = eq + if gt is not UNSET: + field_dict["gt"] = gt if in_range is not UNSET: field_dict["in_range"] = in_range - if isin is not UNSET: - field_dict["isin"] = isin + if lt is not UNSET: + field_dict["lt"] = lt if str_startswith is not UNSET: field_dict["str_startswith"] = str_startswith - if eq is not UNSET: - field_dict["eq"] = eq if ge is not UNSET: field_dict["ge"] = ge - if gt is not UNSET: - field_dict["gt"] = gt + if isin is not UNSET: + field_dict["isin"] = isin if le is not UNSET: field_dict["le"] = le - if lt is not UNSET: - field_dict["lt"] = lt if notin is not UNSET: field_dict["notin"] = notin @@ -86,6 +86,10 @@ def from_dict(cls: Type[T], 
src_dict: Dict[str, Any]) -> T: from ..models.column_schema_checks_in_range import ColumnSchemaChecksInRange d = src_dict.copy() + eq = d.pop("eq", UNSET) + + gt = d.pop("gt", UNSET) + _in_range = d.pop("in_range", UNSET) in_range: Union[Unset, ColumnSchemaChecksInRange] if isinstance(_in_range, Unset): @@ -93,31 +97,27 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: in_range = ColumnSchemaChecksInRange.from_dict(_in_range) - isin = cast(List[Any], d.pop("isin", UNSET)) + lt = d.pop("lt", UNSET) str_startswith = d.pop("str_startswith", UNSET) - eq = d.pop("eq", UNSET) - ge = d.pop("ge", UNSET) - gt = d.pop("gt", UNSET) + isin = cast(List[Any], d.pop("isin", UNSET)) le = d.pop("le", UNSET) - lt = d.pop("lt", UNSET) - notin = cast(List[Any], d.pop("notin", UNSET)) column_schema_checks = cls( + eq=eq, + gt=gt, in_range=in_range, - isin=isin, + lt=lt, str_startswith=str_startswith, - eq=eq, ge=ge, - gt=gt, + isin=isin, le=le, - lt=lt, notin=notin, ) diff --git a/src/tuneinsight/api/sdk/models/computation.py b/src/tuneinsight/api/sdk/models/computation.py index d1bf41a..34b8130 100644 --- a/src/tuneinsight/api/sdk/models/computation.py +++ b/src/tuneinsight/api/sdk/models/computation.py @@ -19,67 +19,59 @@ class Computation: """Metadata of a computation. Attributes: - id (str): Identifier of a computation, unique across all computing nodes. definition (ComputationDefinition): Generic computation. + id (str): Identifier of a computation, unique across all computing nodes. status (ComputationStatus): Status of the computation. + local (Union[Unset, bool]): + measurements (Union[Unset, List['Measurement']]): list of benchmarking measurements done on the computation + owner (Union[Unset, str]): identifier of the end user that has requested the computation updated_at (Union[Unset, str]): - execution_cost (Union[Unset, float]): the cost of the computation when an execution quota has been setup. 
- started_at (Union[Unset, str]): visible (Union[Unset, bool]): False if the computation is internal and should not be displayed to the user by default - warnings (Union[Unset, List[str]]): list of warnings that occurred during the computation description (Union[Unset, str]): - measurements (Union[Unset, List['Measurement']]): list of benchmarking measurements done on the computation - owner (Union[Unset, str]): identifier of the end user that has requested the computation + egress (Union[Unset, int]): keeps track of the number of bytes sent during a computation to serve as a bandwidth + measure + ended_at (Union[Unset, str]): + created_at (Union[Unset, str]): + errors (Union[Unset, List['ComputationError']]): list of errors that occurred during the computation + execution_cost (Union[Unset, float]): the cost of the computation when an execution quota has been setup. progress (Union[Unset, int]): + started_at (Union[Unset, str]): + warnings (Union[Unset, List[str]]): list of warnings that occurred during the computation ingress (Union[Unset, int]): keeps track of the number of bytes received during a computation to serve as a bandwidth measure - local (Union[Unset, bool]): results (Union[Unset, List[str]]): Identifier(s) of the resulting data object(s). Available only when the status is completed. 
- created_at (Union[Unset, str]): - egress (Union[Unset, int]): keeps track of the number of bytes sent during a computation to serve as a bandwidth - measure - ended_at (Union[Unset, str]): - errors (Union[Unset, List['ComputationError']]): list of errors that occurred during the computation """ - id: str definition: "ComputationDefinition" + id: str status: ComputationStatus + local: Union[Unset, bool] = UNSET + measurements: Union[Unset, List["Measurement"]] = UNSET + owner: Union[Unset, str] = UNSET updated_at: Union[Unset, str] = UNSET - execution_cost: Union[Unset, float] = UNSET - started_at: Union[Unset, str] = UNSET visible: Union[Unset, bool] = UNSET - warnings: Union[Unset, List[str]] = UNSET description: Union[Unset, str] = UNSET - measurements: Union[Unset, List["Measurement"]] = UNSET - owner: Union[Unset, str] = UNSET - progress: Union[Unset, int] = UNSET - ingress: Union[Unset, int] = UNSET - local: Union[Unset, bool] = UNSET - results: Union[Unset, List[str]] = UNSET - created_at: Union[Unset, str] = UNSET egress: Union[Unset, int] = UNSET ended_at: Union[Unset, str] = UNSET + created_at: Union[Unset, str] = UNSET errors: Union[Unset, List["ComputationError"]] = UNSET + execution_cost: Union[Unset, float] = UNSET + progress: Union[Unset, int] = UNSET + started_at: Union[Unset, str] = UNSET + warnings: Union[Unset, List[str]] = UNSET + ingress: Union[Unset, int] = UNSET + results: Union[Unset, List[str]] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - id = self.id definition = self.definition.to_dict() + id = self.id status = self.status.value - updated_at = self.updated_at - execution_cost = self.execution_cost - started_at = self.started_at - visible = self.visible - warnings: Union[Unset, List[str]] = UNSET - if not isinstance(self.warnings, Unset): - warnings = self.warnings - - description = self.description + local = self.local measurements: Union[Unset, List[Dict[str, 
Any]]] = UNSET if not isinstance(self.measurements, Unset): measurements = [] @@ -89,16 +81,12 @@ def to_dict(self) -> Dict[str, Any]: measurements.append(measurements_item) owner = self.owner - progress = self.progress - ingress = self.ingress - local = self.local - results: Union[Unset, List[str]] = UNSET - if not isinstance(self.results, Unset): - results = self.results - - created_at = self.created_at + updated_at = self.updated_at + visible = self.visible + description = self.description egress = self.egress ended_at = self.ended_at + created_at = self.created_at errors: Union[Unset, List[Dict[str, Any]]] = UNSET if not isinstance(self.errors, Unset): errors = [] @@ -107,47 +95,59 @@ def to_dict(self) -> Dict[str, Any]: errors.append(errors_item) + execution_cost = self.execution_cost + progress = self.progress + started_at = self.started_at + warnings: Union[Unset, List[str]] = UNSET + if not isinstance(self.warnings, Unset): + warnings = self.warnings + + ingress = self.ingress + results: Union[Unset, List[str]] = UNSET + if not isinstance(self.results, Unset): + results = self.results + field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( { - "id": id, "definition": definition, + "id": id, "status": status, } ) + if local is not UNSET: + field_dict["local"] = local + if measurements is not UNSET: + field_dict["measurements"] = measurements + if owner is not UNSET: + field_dict["owner"] = owner if updated_at is not UNSET: field_dict["updatedAt"] = updated_at - if execution_cost is not UNSET: - field_dict["executionCost"] = execution_cost - if started_at is not UNSET: - field_dict["startedAt"] = started_at if visible is not UNSET: field_dict["visible"] = visible - if warnings is not UNSET: - field_dict["warnings"] = warnings if description is not UNSET: field_dict["description"] = description - if measurements is not UNSET: - field_dict["measurements"] = measurements - if owner is not UNSET: - field_dict["owner"] = 
owner - if progress is not UNSET: - field_dict["progress"] = progress - if ingress is not UNSET: - field_dict["ingress"] = ingress - if local is not UNSET: - field_dict["local"] = local - if results is not UNSET: - field_dict["results"] = results - if created_at is not UNSET: - field_dict["createdAt"] = created_at if egress is not UNSET: field_dict["egress"] = egress if ended_at is not UNSET: field_dict["endedAt"] = ended_at + if created_at is not UNSET: + field_dict["createdAt"] = created_at if errors is not UNSET: field_dict["errors"] = errors + if execution_cost is not UNSET: + field_dict["executionCost"] = execution_cost + if progress is not UNSET: + field_dict["progress"] = progress + if started_at is not UNSET: + field_dict["startedAt"] = started_at + if warnings is not UNSET: + field_dict["warnings"] = warnings + if ingress is not UNSET: + field_dict["ingress"] = ingress + if results is not UNSET: + field_dict["results"] = results return field_dict @@ -158,23 +158,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.measurement import Measurement d = src_dict.copy() - id = d.pop("id") - definition = ComputationDefinition.from_dict(d.pop("definition")) - status = ComputationStatus(d.pop("status")) - - updated_at = d.pop("updatedAt", UNSET) - - execution_cost = d.pop("executionCost", UNSET) - - started_at = d.pop("startedAt", UNSET) - - visible = d.pop("visible", UNSET) + id = d.pop("id") - warnings = cast(List[str], d.pop("warnings", UNSET)) + status = ComputationStatus(d.pop("status")) - description = d.pop("description", UNSET) + local = d.pop("local", UNSET) measurements = [] _measurements = d.pop("measurements", UNSET) @@ -185,20 +175,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: owner = d.pop("owner", UNSET) - progress = d.pop("progress", UNSET) - - ingress = d.pop("ingress", UNSET) - - local = d.pop("local", UNSET) + updated_at = d.pop("updatedAt", UNSET) - results = cast(List[str], d.pop("results", UNSET)) 
+ visible = d.pop("visible", UNSET) - created_at = d.pop("createdAt", UNSET) + description = d.pop("description", UNSET) egress = d.pop("egress", UNSET) ended_at = d.pop("endedAt", UNSET) + created_at = d.pop("createdAt", UNSET) + errors = [] _errors = d.pop("errors", UNSET) for errors_item_data in _errors or []: @@ -206,26 +194,38 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: errors.append(errors_item) + execution_cost = d.pop("executionCost", UNSET) + + progress = d.pop("progress", UNSET) + + started_at = d.pop("startedAt", UNSET) + + warnings = cast(List[str], d.pop("warnings", UNSET)) + + ingress = d.pop("ingress", UNSET) + + results = cast(List[str], d.pop("results", UNSET)) + computation = cls( - id=id, definition=definition, + id=id, status=status, + local=local, + measurements=measurements, + owner=owner, updated_at=updated_at, - execution_cost=execution_cost, - started_at=started_at, visible=visible, - warnings=warnings, description=description, - measurements=measurements, - owner=owner, - progress=progress, - ingress=ingress, - local=local, - results=results, - created_at=created_at, egress=egress, ended_at=ended_at, + created_at=created_at, errors=errors, + execution_cost=execution_cost, + progress=progress, + started_at=started_at, + warnings=warnings, + ingress=ingress, + results=results, ) computation.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/computation_data_source_parameters.py b/src/tuneinsight/api/sdk/models/computation_data_source_parameters.py index d170b7d..54bc6b5 100644 --- a/src/tuneinsight/api/sdk/models/computation_data_source_parameters.py +++ b/src/tuneinsight/api/sdk/models/computation_data_source_parameters.py @@ -17,24 +17,26 @@ class ComputationDataSourceParameters: """Parameters used to query the datasource from each node before the computation Attributes: - compound_query (Union[Unset, DataSourceCompoundQuery]): definition of datasource queries for each node in the - computation - 
data_source_id (Union[Unset, None, str]): Unique identifier of a data source. - data_source_query (Union[Unset, DataSourceQuery]): schema used for the query only_root_query (Union[Unset, bool]): Whether or not the query should only be executed at the root node of the computation compound_disabled (Union[Unset, bool]): when true, then even if the compound query is specified, it is not taken into account (enables keeping previously defined queries) + compound_query (Union[Unset, DataSourceCompoundQuery]): definition of datasource queries for each node in the + computation + data_source_id (Union[Unset, None, str]): Unique identifier of a data source. + data_source_query (Union[Unset, DataSourceQuery]): schema used for the query """ + only_root_query: Union[Unset, bool] = UNSET + compound_disabled: Union[Unset, bool] = UNSET compound_query: Union[Unset, "DataSourceCompoundQuery"] = UNSET data_source_id: Union[Unset, None, str] = UNSET data_source_query: Union[Unset, "DataSourceQuery"] = UNSET - only_root_query: Union[Unset, bool] = UNSET - compound_disabled: Union[Unset, bool] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: + only_root_query = self.only_root_query + compound_disabled = self.compound_disabled compound_query: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.compound_query, Unset): compound_query = self.compound_query.to_dict() @@ -44,22 +46,19 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.data_source_query, Unset): data_source_query = self.data_source_query.to_dict() - only_root_query = self.only_root_query - compound_disabled = self.compound_disabled - field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) + if only_root_query is not UNSET: + field_dict["onlyRootQuery"] = only_root_query + if compound_disabled is not UNSET: + field_dict["compoundDisabled"] = compound_disabled if compound_query is not UNSET: 
field_dict["compoundQuery"] = compound_query if data_source_id is not UNSET: field_dict["dataSourceId"] = data_source_id if data_source_query is not UNSET: field_dict["dataSourceQuery"] = data_source_query - if only_root_query is not UNSET: - field_dict["onlyRootQuery"] = only_root_query - if compound_disabled is not UNSET: - field_dict["compoundDisabled"] = compound_disabled return field_dict @@ -69,6 +68,10 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.data_source_query import DataSourceQuery d = src_dict.copy() + only_root_query = d.pop("onlyRootQuery", UNSET) + + compound_disabled = d.pop("compoundDisabled", UNSET) + _compound_query = d.pop("compoundQuery", UNSET) compound_query: Union[Unset, DataSourceCompoundQuery] if isinstance(_compound_query, Unset): @@ -85,16 +88,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: data_source_query = DataSourceQuery.from_dict(_data_source_query) - only_root_query = d.pop("onlyRootQuery", UNSET) - - compound_disabled = d.pop("compoundDisabled", UNSET) - computation_data_source_parameters = cls( + only_root_query=only_root_query, + compound_disabled=compound_disabled, compound_query=compound_query, data_source_id=data_source_id, data_source_query=data_source_query, - only_root_query=only_root_query, - compound_disabled=compound_disabled, ) computation_data_source_parameters.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/computation_definition.py b/src/tuneinsight/api/sdk/models/computation_definition.py index b40e7a4..2e94a19 100644 --- a/src/tuneinsight/api/sdk/models/computation_definition.py +++ b/src/tuneinsight/api/sdk/models/computation_definition.py @@ -23,6 +23,8 @@ class ComputationDefinition: Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. 
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -33,14 +35,7 @@ class ComputationDefinition: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -49,96 +44,102 @@ class ComputationDefinition: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. 
+ preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. 
- end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. """ type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, 
RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, 
Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() + wait = self.wait field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) @@ -147,46 +148,46 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not 
UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait return field_dict @@ -200,6 +201,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -207,6 +210,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = 
d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -214,24 +228,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -244,54 +252,47 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, 
Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) - - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) + wait = d.pop("wait", UNSET) computation_definition = cls( type=type, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, + wait=wait, ) computation_definition.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/computation_error.py 
b/src/tuneinsight/api/sdk/models/computation_error.py index f469d12..d4c41ba 100644 --- a/src/tuneinsight/api/sdk/models/computation_error.py +++ b/src/tuneinsight/api/sdk/models/computation_error.py @@ -13,48 +13,44 @@ class ComputationError: """error that occurred when running a computation Attributes: - origin (Union[Unset, str]): node instance id that caused the error - timestamp (Union[Unset, str]): time at which the error ocurred type (Union[Unset, ComputationErrorType]): error type identifier message (Union[Unset, str]): the error message + origin (Union[Unset, str]): node instance id that caused the error + timestamp (Union[Unset, str]): time at which the error ocurred """ - origin: Union[Unset, str] = UNSET - timestamp: Union[Unset, str] = UNSET type: Union[Unset, ComputationErrorType] = UNSET message: Union[Unset, str] = UNSET + origin: Union[Unset, str] = UNSET + timestamp: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - origin = self.origin - timestamp = self.timestamp type: Union[Unset, str] = UNSET if not isinstance(self.type, Unset): type = self.type.value message = self.message + origin = self.origin + timestamp = self.timestamp field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if origin is not UNSET: - field_dict["origin"] = origin - if timestamp is not UNSET: - field_dict["timestamp"] = timestamp if type is not UNSET: field_dict["type"] = type if message is not UNSET: field_dict["message"] = message + if origin is not UNSET: + field_dict["origin"] = origin + if timestamp is not UNSET: + field_dict["timestamp"] = timestamp return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - origin = d.pop("origin", UNSET) - - timestamp = d.pop("timestamp", UNSET) - _type = d.pop("type", UNSET) type: Union[Unset, ComputationErrorType] if isinstance(_type, Unset): @@ 
-64,11 +60,15 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: message = d.pop("message", UNSET) + origin = d.pop("origin", UNSET) + + timestamp = d.pop("timestamp", UNSET) + computation_error = cls( - origin=origin, - timestamp=timestamp, type=type, message=message, + origin=origin, + timestamp=timestamp, ) computation_error.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/computation_list_response.py b/src/tuneinsight/api/sdk/models/computation_list_response.py index ee51665..c2cb577 100644 --- a/src/tuneinsight/api/sdk/models/computation_list_response.py +++ b/src/tuneinsight/api/sdk/models/computation_list_response.py @@ -16,16 +16,15 @@ class ComputationListResponse: """List of available computations. Attributes: - total (Union[Unset, int]): items (Union[Unset, List['Computation']]): + total (Union[Unset, int]): """ - total: Union[Unset, int] = UNSET items: Union[Unset, List["Computation"]] = UNSET + total: Union[Unset, int] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - total = self.total items: Union[Unset, List[Dict[str, Any]]] = UNSET if not isinstance(self.items, Unset): items = [] @@ -34,13 +33,15 @@ def to_dict(self) -> Dict[str, Any]: items.append(items_item) + total = self.total + field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if total is not UNSET: - field_dict["total"] = total if items is not UNSET: field_dict["items"] = items + if total is not UNSET: + field_dict["total"] = total return field_dict @@ -49,8 +50,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.computation import Computation d = src_dict.copy() - total = d.pop("total", UNSET) - items = [] _items = d.pop("items", UNSET) for items_item_data in _items or []: @@ -58,9 +57,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: items.append(items_item) + total = d.pop("total", UNSET) + 
computation_list_response = cls( - total=total, items=items, + total=total, ) computation_list_response.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/computation_policy.py b/src/tuneinsight/api/sdk/models/computation_policy.py index 918510b..57d4386 100644 --- a/src/tuneinsight/api/sdk/models/computation_policy.py +++ b/src/tuneinsight/api/sdk/models/computation_policy.py @@ -19,37 +19,52 @@ class ComputationPolicy: """policy to validate a specific computation Attributes: - authorized_computation_types (Union[Unset, List[ComputationType]]): list of authorized computation types + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms authorized_data_source_queries (Union[Unset, List[str]]): list of authorized datasource queries when restrictDataSourceQueries is set to true - authorized_preprocessing_operations (Union[Unset, List[PreprocessingOperationType]]): list of authorized - preprocessing operations types when restrictPreprocessingOperations is set to true flexible_parameters (Union[Unset, List[str]]): when validateParameters is enabled, specifies the set of parameters for which to ignore validation restrict_data_source_queries (Union[Unset, bool]): whether or not datasource queries should be restricted restrict_preprocessing_operations (Union[Unset, bool]): whether or not datasource queries should be restricted validate_parameters (Union[Unset, bool]): whether or not to validate the parameters with the ones from the template - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms + authorized_computation_types (Union[Unset, List[ComputationType]]): list of authorized computation types + authorized_preprocessing_operations (Union[Unset, List[PreprocessingOperationType]]): list of authorized + preprocessing operations types when restrictPreprocessingOperations 
is set to true fixed_parameters (Union[Unset, List[str]]): when validateParameters is enabled, specifies the set of parameters that cannot be changed if empty, then all parameters are validated template (Union[Unset, ComputationDefinition]): Generic computation. """ - authorized_computation_types: Union[Unset, List[ComputationType]] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET authorized_data_source_queries: Union[Unset, List[str]] = UNSET - authorized_preprocessing_operations: Union[Unset, List[PreprocessingOperationType]] = UNSET flexible_parameters: Union[Unset, List[str]] = UNSET restrict_data_source_queries: Union[Unset, bool] = UNSET restrict_preprocessing_operations: Union[Unset, bool] = UNSET validate_parameters: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET + authorized_computation_types: Union[Unset, List[ComputationType]] = UNSET + authorized_preprocessing_operations: Union[Unset, List[PreprocessingOperationType]] = UNSET fixed_parameters: Union[Unset, List[str]] = UNSET template: Union[Unset, "ComputationDefinition"] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() + + authorized_data_source_queries: Union[Unset, List[str]] = UNSET + if not isinstance(self.authorized_data_source_queries, Unset): + authorized_data_source_queries = self.authorized_data_source_queries + + flexible_parameters: Union[Unset, List[str]] = UNSET + if not isinstance(self.flexible_parameters, Unset): + flexible_parameters = self.flexible_parameters + + restrict_data_source_queries = self.restrict_data_source_queries + restrict_preprocessing_operations = self.restrict_preprocessing_operations + validate_parameters = self.validate_parameters authorized_computation_types: Union[Unset, List[str]] = UNSET if not 
isinstance(self.authorized_computation_types, Unset): authorized_computation_types = [] @@ -58,10 +73,6 @@ def to_dict(self) -> Dict[str, Any]: authorized_computation_types.append(authorized_computation_types_item) - authorized_data_source_queries: Union[Unset, List[str]] = UNSET - if not isinstance(self.authorized_data_source_queries, Unset): - authorized_data_source_queries = self.authorized_data_source_queries - authorized_preprocessing_operations: Union[Unset, List[str]] = UNSET if not isinstance(self.authorized_preprocessing_operations, Unset): authorized_preprocessing_operations = [] @@ -70,17 +81,6 @@ def to_dict(self) -> Dict[str, Any]: authorized_preprocessing_operations.append(authorized_preprocessing_operations_item) - flexible_parameters: Union[Unset, List[str]] = UNSET - if not isinstance(self.flexible_parameters, Unset): - flexible_parameters = self.flexible_parameters - - restrict_data_source_queries = self.restrict_data_source_queries - restrict_preprocessing_operations = self.restrict_preprocessing_operations - validate_parameters = self.validate_parameters - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() - fixed_parameters: Union[Unset, List[str]] = UNSET if not isinstance(self.fixed_parameters, Unset): fixed_parameters = self.fixed_parameters @@ -92,12 +92,10 @@ def to_dict(self) -> Dict[str, Any]: field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if authorized_computation_types is not UNSET: - field_dict["authorizedComputationTypes"] = authorized_computation_types + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy if authorized_data_source_queries is not UNSET: field_dict["authorizedDataSourceQueries"] = authorized_data_source_queries - if authorized_preprocessing_operations is not UNSET: - field_dict["authorizedPreprocessingOperations"] = authorized_preprocessing_operations if flexible_parameters 
is not UNSET: field_dict["flexibleParameters"] = flexible_parameters if restrict_data_source_queries is not UNSET: @@ -106,8 +104,10 @@ def to_dict(self) -> Dict[str, Any]: field_dict["restrictPreprocessingOperations"] = restrict_preprocessing_operations if validate_parameters is not UNSET: field_dict["validateParameters"] = validate_parameters - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy + if authorized_computation_types is not UNSET: + field_dict["authorizedComputationTypes"] = authorized_computation_types + if authorized_preprocessing_operations is not UNSET: + field_dict["authorizedPreprocessingOperations"] = authorized_preprocessing_operations if fixed_parameters is not UNSET: field_dict["fixedParameters"] = fixed_parameters if template is not UNSET: @@ -121,6 +121,23 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.dp_policy import DPPolicy d = src_dict.copy() + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET + else: + dp_policy = DPPolicy.from_dict(_dp_policy) + + authorized_data_source_queries = cast(List[str], d.pop("authorizedDataSourceQueries", UNSET)) + + flexible_parameters = cast(List[str], d.pop("flexibleParameters", UNSET)) + + restrict_data_source_queries = d.pop("restrictDataSourceQueries", UNSET) + + restrict_preprocessing_operations = d.pop("restrictPreprocessingOperations", UNSET) + + validate_parameters = d.pop("validateParameters", UNSET) + authorized_computation_types = [] _authorized_computation_types = d.pop("authorizedComputationTypes", UNSET) for authorized_computation_types_item_data in _authorized_computation_types or []: @@ -128,8 +145,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: authorized_computation_types.append(authorized_computation_types_item) - authorized_data_source_queries = cast(List[str], d.pop("authorizedDataSourceQueries", UNSET)) - authorized_preprocessing_operations = [] 
_authorized_preprocessing_operations = d.pop("authorizedPreprocessingOperations", UNSET) for authorized_preprocessing_operations_item_data in _authorized_preprocessing_operations or []: @@ -139,21 +154,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: authorized_preprocessing_operations.append(authorized_preprocessing_operations_item) - flexible_parameters = cast(List[str], d.pop("flexibleParameters", UNSET)) - - restrict_data_source_queries = d.pop("restrictDataSourceQueries", UNSET) - - restrict_preprocessing_operations = d.pop("restrictPreprocessingOperations", UNSET) - - validate_parameters = d.pop("validateParameters", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET - else: - dp_policy = DPPolicy.from_dict(_dp_policy) - fixed_parameters = cast(List[str], d.pop("fixedParameters", UNSET)) _template = d.pop("template", UNSET) @@ -164,14 +164,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: template = ComputationDefinition.from_dict(_template) computation_policy = cls( - authorized_computation_types=authorized_computation_types, + dp_policy=dp_policy, authorized_data_source_queries=authorized_data_source_queries, - authorized_preprocessing_operations=authorized_preprocessing_operations, flexible_parameters=flexible_parameters, restrict_data_source_queries=restrict_data_source_queries, restrict_preprocessing_operations=restrict_preprocessing_operations, validate_parameters=validate_parameters, - dp_policy=dp_policy, + authorized_computation_types=authorized_computation_types, + authorized_preprocessing_operations=authorized_preprocessing_operations, fixed_parameters=fixed_parameters, template=template, ) diff --git a/src/tuneinsight/api/sdk/models/computation_preprocessing_parameters.py b/src/tuneinsight/api/sdk/models/computation_preprocessing_parameters.py index 17cdc67..16ad382 100644 --- 
a/src/tuneinsight/api/sdk/models/computation_preprocessing_parameters.py +++ b/src/tuneinsight/api/sdk/models/computation_preprocessing_parameters.py @@ -22,7 +22,6 @@ class ComputationPreprocessingParameters: """dataframe pre-processing parameters applied to the input retrieved from the datasource, if applicable Attributes: - select (Union[Unset, Select]): compound_preprocessing (Union[Unset, ComputationPreprocessingParametersCompoundPreprocessing]): preprocessing to be applied for each node dataset_schema (Union[Unset, DatasetSchema]): dataset schema definition used to validate input datasets. @@ -30,20 +29,17 @@ class ComputationPreprocessingParameters: the preprocessing is run) global_preprocessing (Union[Unset, PreprocessingChain]): Chain of preprocessing operations applied to the input dataframe + select (Union[Unset, Select]): """ - select: Union[Unset, "Select"] = UNSET compound_preprocessing: Union[Unset, "ComputationPreprocessingParametersCompoundPreprocessing"] = UNSET dataset_schema: Union[Unset, "DatasetSchema"] = UNSET filters: Union[Unset, List["LogicalFormula"]] = UNSET global_preprocessing: Union[Unset, "PreprocessingChain"] = UNSET + select: Union[Unset, "Select"] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - select: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.select, Unset): - select = self.select.to_dict() - compound_preprocessing: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.compound_preprocessing, Unset): compound_preprocessing = self.compound_preprocessing.to_dict() @@ -64,11 +60,13 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.global_preprocessing, Unset): global_preprocessing = self.global_preprocessing.to_dict() + select: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.select, Unset): + select = self.select.to_dict() + field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) 
field_dict.update({}) - if select is not UNSET: - field_dict["select"] = select if compound_preprocessing is not UNSET: field_dict["compoundPreprocessing"] = compound_preprocessing if dataset_schema is not UNSET: @@ -77,6 +75,8 @@ def to_dict(self) -> Dict[str, Any]: field_dict["filters"] = filters if global_preprocessing is not UNSET: field_dict["globalPreprocessing"] = global_preprocessing + if select is not UNSET: + field_dict["select"] = select return field_dict @@ -91,13 +91,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.select import Select d = src_dict.copy() - _select = d.pop("select", UNSET) - select: Union[Unset, Select] - if isinstance(_select, Unset): - select = UNSET - else: - select = Select.from_dict(_select) - _compound_preprocessing = d.pop("compoundPreprocessing", UNSET) compound_preprocessing: Union[Unset, ComputationPreprocessingParametersCompoundPreprocessing] if isinstance(_compound_preprocessing, Unset): @@ -128,12 +121,19 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: global_preprocessing = PreprocessingChain.from_dict(_global_preprocessing) + _select = d.pop("select", UNSET) + select: Union[Unset, Select] + if isinstance(_select, Unset): + select = UNSET + else: + select = Select.from_dict(_select) + computation_preprocessing_parameters = cls( - select=select, compound_preprocessing=compound_preprocessing, dataset_schema=dataset_schema, filters=filters, global_preprocessing=global_preprocessing, + select=select, ) computation_preprocessing_parameters.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/credentials.py b/src/tuneinsight/api/sdk/models/credentials.py index edba931..8b2b71e 100644 --- a/src/tuneinsight/api/sdk/models/credentials.py +++ b/src/tuneinsight/api/sdk/models/credentials.py @@ -2,6 +2,7 @@ import attr +from ..models.credentials_type import CredentialsType from ..types import UNSET, Unset T = TypeVar("T", bound="Credentials") @@ -12,54 +13,78 @@ 
class Credentials: """The credentials needed to access the data source. Attributes: - id (Union[Unset, str]): - password (Union[Unset, str]): - username (Union[Unset, str]): - connection_string (Union[Unset, str]): + api_token (Union[Unset, str]): Token to connect to the API + connection_string (Union[Unset, str]): connection string for a database + credentials_id (Union[Unset, str]): the id of the credentials stored in the key vault + password (Union[Unset, str]): generic password field. + type (Union[Unset, CredentialsType]): + username (Union[Unset, str]): generic username field. """ - id: Union[Unset, str] = UNSET + api_token: Union[Unset, str] = UNSET + connection_string: Union[Unset, str] = UNSET + credentials_id: Union[Unset, str] = UNSET password: Union[Unset, str] = UNSET + type: Union[Unset, CredentialsType] = UNSET username: Union[Unset, str] = UNSET - connection_string: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - id = self.id + api_token = self.api_token + connection_string = self.connection_string + credentials_id = self.credentials_id password = self.password + type: Union[Unset, str] = UNSET + if not isinstance(self.type, Unset): + type = self.type.value + username = self.username - connection_string = self.connection_string field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if id is not UNSET: - field_dict["id"] = id + if api_token is not UNSET: + field_dict["api-token"] = api_token + if connection_string is not UNSET: + field_dict["connectionString"] = connection_string + if credentials_id is not UNSET: + field_dict["credentialsId"] = credentials_id if password is not UNSET: field_dict["password"] = password + if type is not UNSET: + field_dict["type"] = type if username is not UNSET: field_dict["username"] = username - if connection_string is not UNSET: - field_dict["connectionString"] = 
connection_string return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - id = d.pop("id", UNSET) + api_token = d.pop("api-token", UNSET) + + connection_string = d.pop("connectionString", UNSET) + + credentials_id = d.pop("credentialsId", UNSET) password = d.pop("password", UNSET) - username = d.pop("username", UNSET) + _type = d.pop("type", UNSET) + type: Union[Unset, CredentialsType] + if isinstance(_type, Unset): + type = UNSET + else: + type = CredentialsType(_type) - connection_string = d.pop("connectionString", UNSET) + username = d.pop("username", UNSET) credentials = cls( - id=id, + api_token=api_token, + connection_string=connection_string, + credentials_id=credentials_id, password=password, + type=type, username=username, - connection_string=connection_string, ) credentials.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/credentials_type.py b/src/tuneinsight/api/sdk/models/credentials_type.py new file mode 100644 index 0000000..34ad364 --- /dev/null +++ b/src/tuneinsight/api/sdk/models/credentials_type.py @@ -0,0 +1,9 @@ +from enum import Enum + + +class CredentialsType(str, Enum): + LOCAL = "local" + AZUREKEYVAULT = "azureKeyVault" + + def __str__(self) -> str: + return str(self.value) diff --git a/src/tuneinsight/api/sdk/models/custom.py b/src/tuneinsight/api/sdk/models/custom.py index 92758d4..35f8af8 100644 --- a/src/tuneinsight/api/sdk/models/custom.py +++ b/src/tuneinsight/api/sdk/models/custom.py @@ -13,27 +13,27 @@ class Custom: """ Attributes: type (PreprocessingOperationType): type of preprocessing operation + name (Union[Unset, str]): name given to the operation. The name has no impact on the operation + and the name given to the function description (Union[Unset, str]): description given to the operation, for documentation purposes. 
function (Union[Unset, str]): function definition which must respect the following format: `def (df: pd.DataFrame) -> pd.DataFrame return df` - name (Union[Unset, str]): name given to the operation. The name has no impact on the operation - and the name given to the function """ type: PreprocessingOperationType + name: Union[Unset, str] = UNSET description: Union[Unset, str] = UNSET function: Union[Unset, str] = UNSET - name: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value + name = self.name description = self.description function = self.function - name = self.name field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) @@ -42,12 +42,12 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) + if name is not UNSET: + field_dict["name"] = name if description is not UNSET: field_dict["description"] = description if function is not UNSET: field_dict["function"] = function - if name is not UNSET: - field_dict["name"] = name return field_dict @@ -56,17 +56,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = PreprocessingOperationType(d.pop("type")) + name = d.pop("name", UNSET) + description = d.pop("description", UNSET) function = d.pop("function", UNSET) - name = d.pop("name", UNSET) - custom = cls( type=type, + name=name, description=description, function=function, - name=name, ) custom.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/data_object.py b/src/tuneinsight/api/sdk/models/data_object.py index 7c362a6..401d1bb 100644 --- a/src/tuneinsight/api/sdk/models/data_object.py +++ b/src/tuneinsight/api/sdk/models/data_object.py @@ -14,26 +14,25 @@ class DataObject: """A data object definition. Attributes: - shared_id (Union[Unset, str]): Shared identifier of a data object. 
type (Union[Unset, DataObjectType]): type of the dataobject unique_id (Union[Unset, str]): Unique identifier of a data object. visibility_status (Union[Unset, DataObjectVisibilityStatus]): type of visibility set to the dataobject encrypted (Union[Unset, bool]): has_data (Union[Unset, bool]): whether the dataobject's data has been set session_id (Union[Unset, str]): Unique identifier of a session + shared_id (Union[Unset, str]): Shared identifier of a data object. """ - shared_id: Union[Unset, str] = UNSET type: Union[Unset, DataObjectType] = UNSET unique_id: Union[Unset, str] = UNSET visibility_status: Union[Unset, DataObjectVisibilityStatus] = UNSET encrypted: Union[Unset, bool] = UNSET has_data: Union[Unset, bool] = UNSET session_id: Union[Unset, str] = UNSET + shared_id: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - shared_id = self.shared_id type: Union[Unset, str] = UNSET if not isinstance(self.type, Unset): type = self.type.value @@ -46,12 +45,11 @@ def to_dict(self) -> Dict[str, Any]: encrypted = self.encrypted has_data = self.has_data session_id = self.session_id + shared_id = self.shared_id field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if shared_id is not UNSET: - field_dict["sharedId"] = shared_id if type is not UNSET: field_dict["type"] = type if unique_id is not UNSET: @@ -64,14 +62,14 @@ def to_dict(self) -> Dict[str, Any]: field_dict["hasData"] = has_data if session_id is not UNSET: field_dict["sessionId"] = session_id + if shared_id is not UNSET: + field_dict["sharedId"] = shared_id return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - shared_id = d.pop("sharedId", UNSET) - _type = d.pop("type", UNSET) type: Union[Unset, DataObjectType] if isinstance(_type, Unset): @@ -94,14 +92,16 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 
session_id = d.pop("sessionId", UNSET) + shared_id = d.pop("sharedId", UNSET) + data_object = cls( - shared_id=shared_id, type=type, unique_id=unique_id, visibility_status=visibility_status, encrypted=encrypted, has_data=has_data, session_id=session_id, + shared_id=shared_id, ) data_object.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/data_source.py b/src/tuneinsight/api/sdk/models/data_source.py index fa4e36e..0c0d530 100644 --- a/src/tuneinsight/api/sdk/models/data_source.py +++ b/src/tuneinsight/api/sdk/models/data_source.py @@ -4,9 +4,12 @@ from ..models.access_scope import AccessScope from ..models.data_source_consent_type import DataSourceConsentType +from ..models.data_source_type import DataSourceType from ..types import UNSET, Unset if TYPE_CHECKING: + from ..models.credentials import Credentials + from ..models.data_source_config import DataSourceConfig from ..models.data_source_metadata import DataSourceMetadata from ..models.local_data_selection import LocalDataSelection from ..models.project import Project @@ -19,40 +22,48 @@ class DataSource: """ Attributes: - unique_id (Union[Unset, None, str]): Unique identifier of a data source. - access_scope (Union[Unset, AccessScope]): defines the scope of access given to a resource - attributes (Union[Unset, List[str]]): + name (Union[Unset, str]): + type (Union[Unset, DataSourceType]): + attributes (Union[Unset, List[str]]): optional list of attributes. authorized_users (Union[Unset, List[str]]): + credentials (Union[Unset, Credentials]): The credentials needed to access the data source. + id (Union[Unset, None, str]): Unique identifier of a data source. + access_scope (Union[Unset, AccessScope]): defines the scope of access given to a resource + clear_if_exists (Union[Unset, bool]): If true and a data source with the same name already exists, delete it. 
+ configuration (Union[Unset, DataSourceConfig]): data source configuration consent_type (Union[Unset, DataSourceConsentType]): Consent type given to the data source. - name (Union[Unset, str]): - type (Union[Unset, str]): + created_at (Union[Unset, str]): metadata (Union[Unset, DataSourceMetadata]): metadata about a datasource + owner (Union[Unset, str]): projects (Union[Unset, List['Project']]): response for successfully retrieved projects selections (Union[Unset, List['LocalDataSelection']]): list of local data selections associated with the data source updated_at (Union[Unset, str]): - created_at (Union[Unset, str]): """ - unique_id: Union[Unset, None, str] = UNSET - access_scope: Union[Unset, AccessScope] = UNSET + name: Union[Unset, str] = UNSET + type: Union[Unset, DataSourceType] = UNSET attributes: Union[Unset, List[str]] = UNSET authorized_users: Union[Unset, List[str]] = UNSET + credentials: Union[Unset, "Credentials"] = UNSET + id: Union[Unset, None, str] = UNSET + access_scope: Union[Unset, AccessScope] = UNSET + clear_if_exists: Union[Unset, bool] = False + configuration: Union[Unset, "DataSourceConfig"] = UNSET consent_type: Union[Unset, DataSourceConsentType] = UNSET - name: Union[Unset, str] = UNSET - type: Union[Unset, str] = UNSET + created_at: Union[Unset, str] = UNSET metadata: Union[Unset, "DataSourceMetadata"] = UNSET + owner: Union[Unset, str] = UNSET projects: Union[Unset, List["Project"]] = UNSET selections: Union[Unset, List["LocalDataSelection"]] = UNSET updated_at: Union[Unset, str] = UNSET - created_at: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - unique_id = self.unique_id - access_scope: Union[Unset, str] = UNSET - if not isinstance(self.access_scope, Unset): - access_scope = self.access_scope.value + name = self.name + type: Union[Unset, str] = UNSET + if not isinstance(self.type, Unset): + type = self.type.value attributes: Union[Unset, 
List[str]] = UNSET if not isinstance(self.attributes, Unset): @@ -62,16 +73,30 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.authorized_users, Unset): authorized_users = self.authorized_users + credentials: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.credentials, Unset): + credentials = self.credentials.to_dict() + + id = self.id + access_scope: Union[Unset, str] = UNSET + if not isinstance(self.access_scope, Unset): + access_scope = self.access_scope.value + + clear_if_exists = self.clear_if_exists + configuration: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.configuration, Unset): + configuration = self.configuration.to_dict() + consent_type: Union[Unset, str] = UNSET if not isinstance(self.consent_type, Unset): consent_type = self.consent_type.value - name = self.name - type = self.type + created_at = self.created_at metadata: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.metadata, Unset): metadata = self.metadata.to_dict() + owner = self.owner projects: Union[Unset, List[Dict[str, Any]]] = UNSET if not isinstance(self.projects, Unset): projects = [] @@ -89,46 +114,75 @@ def to_dict(self) -> Dict[str, Any]: selections.append(selections_item) updated_at = self.updated_at - created_at = self.created_at field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if unique_id is not UNSET: - field_dict["uniqueId"] = unique_id - if access_scope is not UNSET: - field_dict["accessScope"] = access_scope + if name is not UNSET: + field_dict["name"] = name + if type is not UNSET: + field_dict["type"] = type if attributes is not UNSET: field_dict["attributes"] = attributes if authorized_users is not UNSET: field_dict["authorizedUsers"] = authorized_users + if credentials is not UNSET: + field_dict["credentials"] = credentials + if id is not UNSET: + field_dict["id"] = id + if access_scope is not UNSET: + field_dict["accessScope"] = access_scope + if clear_if_exists 
is not UNSET: + field_dict["clearIfExists"] = clear_if_exists + if configuration is not UNSET: + field_dict["configuration"] = configuration if consent_type is not UNSET: field_dict["consentType"] = consent_type - if name is not UNSET: - field_dict["name"] = name - if type is not UNSET: - field_dict["type"] = type + if created_at is not UNSET: + field_dict["createdAt"] = created_at if metadata is not UNSET: field_dict["metadata"] = metadata + if owner is not UNSET: + field_dict["owner"] = owner if projects is not UNSET: field_dict["projects"] = projects if selections is not UNSET: field_dict["selections"] = selections if updated_at is not UNSET: field_dict["updatedAt"] = updated_at - if created_at is not UNSET: - field_dict["createdAt"] = created_at return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + from ..models.credentials import Credentials + from ..models.data_source_config import DataSourceConfig from ..models.data_source_metadata import DataSourceMetadata from ..models.local_data_selection import LocalDataSelection from ..models.project import Project d = src_dict.copy() - unique_id = d.pop("uniqueId", UNSET) + name = d.pop("name", UNSET) + + _type = d.pop("type", UNSET) + type: Union[Unset, DataSourceType] + if isinstance(_type, Unset): + type = UNSET + else: + type = DataSourceType(_type) + + attributes = cast(List[str], d.pop("attributes", UNSET)) + + authorized_users = cast(List[str], d.pop("authorizedUsers", UNSET)) + + _credentials = d.pop("credentials", UNSET) + credentials: Union[Unset, Credentials] + if isinstance(_credentials, Unset): + credentials = UNSET + else: + credentials = Credentials.from_dict(_credentials) + + id = d.pop("id", UNSET) _access_scope = d.pop("accessScope", UNSET) access_scope: Union[Unset, AccessScope] @@ -137,9 +191,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: access_scope = AccessScope(_access_scope) - attributes = cast(List[str], d.pop("attributes", 
UNSET)) + clear_if_exists = d.pop("clearIfExists", UNSET) - authorized_users = cast(List[str], d.pop("authorizedUsers", UNSET)) + _configuration = d.pop("configuration", UNSET) + configuration: Union[Unset, DataSourceConfig] + if isinstance(_configuration, Unset): + configuration = UNSET + else: + configuration = DataSourceConfig.from_dict(_configuration) _consent_type = d.pop("consentType", UNSET) consent_type: Union[Unset, DataSourceConsentType] @@ -148,9 +207,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: consent_type = DataSourceConsentType(_consent_type) - name = d.pop("name", UNSET) - - type = d.pop("type", UNSET) + created_at = d.pop("createdAt", UNSET) _metadata = d.pop("metadata", UNSET) metadata: Union[Unset, DataSourceMetadata] @@ -159,6 +216,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: metadata = DataSourceMetadata.from_dict(_metadata) + owner = d.pop("owner", UNSET) + projects = [] _projects = d.pop("projects", UNSET) for projects_item_data in _projects or []: @@ -175,21 +234,23 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: updated_at = d.pop("updatedAt", UNSET) - created_at = d.pop("createdAt", UNSET) - data_source = cls( - unique_id=unique_id, - access_scope=access_scope, + name=name, + type=type, attributes=attributes, authorized_users=authorized_users, + credentials=credentials, + id=id, + access_scope=access_scope, + clear_if_exists=clear_if_exists, + configuration=configuration, consent_type=consent_type, - name=name, - type=type, + created_at=created_at, metadata=metadata, + owner=owner, projects=projects, selections=selections, updated_at=updated_at, - created_at=created_at, ) data_source.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/data_source_config.py b/src/tuneinsight/api/sdk/models/data_source_config.py index eb04106..487e8ba 100644 --- a/src/tuneinsight/api/sdk/models/data_source_config.py +++ b/src/tuneinsight/api/sdk/models/data_source_config.py 
@@ -1,43 +1,158 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, Dict, List, Type, TypeVar, Union import attr -from ..models.data_source_config_type import DataSourceConfigType +from ..models.api_type import APIType +from ..models.database_type import DatabaseType +from ..models.local_data_source_type import LocalDataSourceType +from ..types import UNSET, Unset T = TypeVar("T", bound="DataSourceConfig") @attr.s(auto_attribs=True) class DataSourceConfig: - """Configuration of data source that depends on the type. + """data source configuration Attributes: - type (DataSourceConfigType): + csv_path (Union[Unset, str]): the path to the CSV file. + cert (Union[Unset, str]): If applicable, name of the certificate to access the datasource. Certificate should be + in '/usr/local/share/datasource-certificates/.{crt/key}' + host (Union[Unset, str]): Hostname of the database + suricata_path (Union[Unset, str]): the path to the suricata JSON file. + local_type (Union[Unset, LocalDataSourceType]): + port (Union[Unset, str]): Port number of the database + with_auth (Union[Unset, bool]): Whether the API requires authentication + api_type (Union[Unset, APIType]): + api_url (Union[Unset, str]): URL of the API + database (Union[Unset, str]): Name of the database + database_type (Union[Unset, DatabaseType]): Type of the database + insecure_skip_verify_tls (Union[Unset, bool]): This flag enables skipping TLS verification when connecting to + the remote API data source. 
WARNING: this should not be used in production """ - type: DataSourceConfigType + csv_path: Union[Unset, str] = UNSET + cert: Union[Unset, str] = UNSET + host: Union[Unset, str] = UNSET + suricata_path: Union[Unset, str] = UNSET + local_type: Union[Unset, LocalDataSourceType] = UNSET + port: Union[Unset, str] = UNSET + with_auth: Union[Unset, bool] = UNSET + api_type: Union[Unset, APIType] = UNSET + api_url: Union[Unset, str] = UNSET + database: Union[Unset, str] = UNSET + database_type: Union[Unset, DatabaseType] = UNSET + insecure_skip_verify_tls: Union[Unset, bool] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - type = self.type.value + csv_path = self.csv_path + cert = self.cert + host = self.host + suricata_path = self.suricata_path + local_type: Union[Unset, str] = UNSET + if not isinstance(self.local_type, Unset): + local_type = self.local_type.value + + port = self.port + with_auth = self.with_auth + api_type: Union[Unset, str] = UNSET + if not isinstance(self.api_type, Unset): + api_type = self.api_type.value + + api_url = self.api_url + database = self.database + database_type: Union[Unset, str] = UNSET + if not isinstance(self.database_type, Unset): + database_type = self.database_type.value + + insecure_skip_verify_tls = self.insecure_skip_verify_tls field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) - field_dict.update( - { - "type": type, - } - ) + field_dict.update({}) + if csv_path is not UNSET: + field_dict["CSVPath"] = csv_path + if cert is not UNSET: + field_dict["cert"] = cert + if host is not UNSET: + field_dict["host"] = host + if suricata_path is not UNSET: + field_dict["suricataPath"] = suricata_path + if local_type is not UNSET: + field_dict["localType"] = local_type + if port is not UNSET: + field_dict["port"] = port + if with_auth is not UNSET: + field_dict["withAuth"] = with_auth + if api_type is not UNSET: + field_dict["APIType"] = 
api_type + if api_url is not UNSET: + field_dict["api-url"] = api_url + if database is not UNSET: + field_dict["database"] = database + if database_type is not UNSET: + field_dict["databaseType"] = database_type + if insecure_skip_verify_tls is not UNSET: + field_dict["insecureSkipVerifyTLS"] = insecure_skip_verify_tls return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - type = DataSourceConfigType(d.pop("type")) + csv_path = d.pop("CSVPath", UNSET) + + cert = d.pop("cert", UNSET) + + host = d.pop("host", UNSET) + + suricata_path = d.pop("suricataPath", UNSET) + + _local_type = d.pop("localType", UNSET) + local_type: Union[Unset, LocalDataSourceType] + if isinstance(_local_type, Unset): + local_type = UNSET + else: + local_type = LocalDataSourceType(_local_type) + + port = d.pop("port", UNSET) + + with_auth = d.pop("withAuth", UNSET) + + _api_type = d.pop("APIType", UNSET) + api_type: Union[Unset, APIType] + if isinstance(_api_type, Unset): + api_type = UNSET + else: + api_type = APIType(_api_type) + + api_url = d.pop("api-url", UNSET) + + database = d.pop("database", UNSET) + + _database_type = d.pop("databaseType", UNSET) + database_type: Union[Unset, DatabaseType] + if isinstance(_database_type, Unset): + database_type = UNSET + else: + database_type = DatabaseType(_database_type) + + insecure_skip_verify_tls = d.pop("insecureSkipVerifyTLS", UNSET) data_source_config = cls( - type=type, + csv_path=csv_path, + cert=cert, + host=host, + suricata_path=suricata_path, + local_type=local_type, + port=port, + with_auth=with_auth, + api_type=api_type, + api_url=api_url, + database=database, + database_type=database_type, + insecure_skip_verify_tls=insecure_skip_verify_tls, ) data_source_config.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/data_source_definition.py b/src/tuneinsight/api/sdk/models/data_source_definition.py index bd37014..5c9331d 100644 --- 
a/src/tuneinsight/api/sdk/models/data_source_definition.py +++ b/src/tuneinsight/api/sdk/models/data_source_definition.py @@ -4,10 +4,11 @@ from ..models.access_scope import AccessScope from ..models.data_source_consent_type import DataSourceConsentType +from ..models.data_source_type import DataSourceType from ..types import UNSET, Unset if TYPE_CHECKING: - from ..models.credentials_provider import CredentialsProvider + from ..models.credentials import Credentials from ..models.data_source_config import DataSourceConfig @@ -16,38 +17,38 @@ @attr.s(auto_attribs=True) class DataSourceDefinition: - """ + """parameters used to create and modify a data source + Attributes: - unique_id (Union[Unset, None, str]): Unique identifier of a data source. - access_scope (Union[Unset, AccessScope]): defines the scope of access given to a resource - attributes (Union[Unset, List[str]]): - authorized_users (Union[Unset, List[str]]): - consent_type (Union[Unset, DataSourceConsentType]): Consent type given to the data source. name (Union[Unset, str]): - type (Union[Unset, str]): + type (Union[Unset, DataSourceType]): + attributes (Union[Unset, List[str]]): optional list of attributes. + authorized_users (Union[Unset, List[str]]): + credentials (Union[Unset, Credentials]): The credentials needed to access the data source. + id (Union[Unset, None, str]): Unique identifier of a data source. + access_scope (Union[Unset, AccessScope]): defines the scope of access given to a resource clear_if_exists (Union[Unset, bool]): If true and a data source with the same name already exists, delete it. - config (Union[Unset, DataSourceConfig]): Configuration of data source that depends on the type. - credentials_provider (Union[Unset, CredentialsProvider]): The provider of the credentials needed to access the - data source. + configuration (Union[Unset, DataSourceConfig]): data source configuration + consent_type (Union[Unset, DataSourceConsentType]): Consent type given to the data source. 
""" - unique_id: Union[Unset, None, str] = UNSET - access_scope: Union[Unset, AccessScope] = UNSET + name: Union[Unset, str] = UNSET + type: Union[Unset, DataSourceType] = UNSET attributes: Union[Unset, List[str]] = UNSET authorized_users: Union[Unset, List[str]] = UNSET - consent_type: Union[Unset, DataSourceConsentType] = UNSET - name: Union[Unset, str] = UNSET - type: Union[Unset, str] = UNSET + credentials: Union[Unset, "Credentials"] = UNSET + id: Union[Unset, None, str] = UNSET + access_scope: Union[Unset, AccessScope] = UNSET clear_if_exists: Union[Unset, bool] = False - config: Union[Unset, "DataSourceConfig"] = UNSET - credentials_provider: Union[Unset, "CredentialsProvider"] = UNSET + configuration: Union[Unset, "DataSourceConfig"] = UNSET + consent_type: Union[Unset, DataSourceConsentType] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - unique_id = self.unique_id - access_scope: Union[Unset, str] = UNSET - if not isinstance(self.access_scope, Unset): - access_scope = self.access_scope.value + name = self.name + type: Union[Unset, str] = UNSET + if not isinstance(self.type, Unset): + type = self.type.value attributes: Union[Unset, List[str]] = UNSET if not isinstance(self.attributes, Unset): @@ -57,104 +58,112 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.authorized_users, Unset): authorized_users = self.authorized_users - consent_type: Union[Unset, str] = UNSET - if not isinstance(self.consent_type, Unset): - consent_type = self.consent_type.value + credentials: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.credentials, Unset): + credentials = self.credentials.to_dict() + + id = self.id + access_scope: Union[Unset, str] = UNSET + if not isinstance(self.access_scope, Unset): + access_scope = self.access_scope.value - name = self.name - type = self.type clear_if_exists = self.clear_if_exists - config: Union[Unset, Dict[str, Any]] = UNSET - if not 
isinstance(self.config, Unset): - config = self.config.to_dict() + configuration: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.configuration, Unset): + configuration = self.configuration.to_dict() - credentials_provider: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.credentials_provider, Unset): - credentials_provider = self.credentials_provider.to_dict() + consent_type: Union[Unset, str] = UNSET + if not isinstance(self.consent_type, Unset): + consent_type = self.consent_type.value field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if unique_id is not UNSET: - field_dict["uniqueId"] = unique_id - if access_scope is not UNSET: - field_dict["accessScope"] = access_scope - if attributes is not UNSET: - field_dict["attributes"] = attributes - if authorized_users is not UNSET: - field_dict["authorizedUsers"] = authorized_users - if consent_type is not UNSET: - field_dict["consentType"] = consent_type if name is not UNSET: field_dict["name"] = name if type is not UNSET: field_dict["type"] = type + if attributes is not UNSET: + field_dict["attributes"] = attributes + if authorized_users is not UNSET: + field_dict["authorizedUsers"] = authorized_users + if credentials is not UNSET: + field_dict["credentials"] = credentials + if id is not UNSET: + field_dict["id"] = id + if access_scope is not UNSET: + field_dict["accessScope"] = access_scope if clear_if_exists is not UNSET: field_dict["clearIfExists"] = clear_if_exists - if config is not UNSET: - field_dict["config"] = config - if credentials_provider is not UNSET: - field_dict["credentialsProvider"] = credentials_provider + if configuration is not UNSET: + field_dict["configuration"] = configuration + if consent_type is not UNSET: + field_dict["consentType"] = consent_type return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - from ..models.credentials_provider import CredentialsProvider + from 
..models.credentials import Credentials from ..models.data_source_config import DataSourceConfig d = src_dict.copy() - unique_id = d.pop("uniqueId", UNSET) + name = d.pop("name", UNSET) - _access_scope = d.pop("accessScope", UNSET) - access_scope: Union[Unset, AccessScope] - if isinstance(_access_scope, Unset): - access_scope = UNSET + _type = d.pop("type", UNSET) + type: Union[Unset, DataSourceType] + if isinstance(_type, Unset): + type = UNSET else: - access_scope = AccessScope(_access_scope) + type = DataSourceType(_type) attributes = cast(List[str], d.pop("attributes", UNSET)) authorized_users = cast(List[str], d.pop("authorizedUsers", UNSET)) - _consent_type = d.pop("consentType", UNSET) - consent_type: Union[Unset, DataSourceConsentType] - if isinstance(_consent_type, Unset): - consent_type = UNSET + _credentials = d.pop("credentials", UNSET) + credentials: Union[Unset, Credentials] + if isinstance(_credentials, Unset): + credentials = UNSET else: - consent_type = DataSourceConsentType(_consent_type) + credentials = Credentials.from_dict(_credentials) - name = d.pop("name", UNSET) + id = d.pop("id", UNSET) - type = d.pop("type", UNSET) + _access_scope = d.pop("accessScope", UNSET) + access_scope: Union[Unset, AccessScope] + if isinstance(_access_scope, Unset): + access_scope = UNSET + else: + access_scope = AccessScope(_access_scope) clear_if_exists = d.pop("clearIfExists", UNSET) - _config = d.pop("config", UNSET) - config: Union[Unset, DataSourceConfig] - if isinstance(_config, Unset): - config = UNSET + _configuration = d.pop("configuration", UNSET) + configuration: Union[Unset, DataSourceConfig] + if isinstance(_configuration, Unset): + configuration = UNSET else: - config = DataSourceConfig.from_dict(_config) + configuration = DataSourceConfig.from_dict(_configuration) - _credentials_provider = d.pop("credentialsProvider", UNSET) - credentials_provider: Union[Unset, CredentialsProvider] - if isinstance(_credentials_provider, Unset): - 
credentials_provider = UNSET + _consent_type = d.pop("consentType", UNSET) + consent_type: Union[Unset, DataSourceConsentType] + if isinstance(_consent_type, Unset): + consent_type = UNSET else: - credentials_provider = CredentialsProvider.from_dict(_credentials_provider) + consent_type = DataSourceConsentType(_consent_type) data_source_definition = cls( - unique_id=unique_id, - access_scope=access_scope, - attributes=attributes, - authorized_users=authorized_users, - consent_type=consent_type, name=name, type=type, + attributes=attributes, + authorized_users=authorized_users, + credentials=credentials, + id=id, + access_scope=access_scope, clear_if_exists=clear_if_exists, - config=config, - credentials_provider=credentials_provider, + configuration=configuration, + consent_type=consent_type, ) data_source_definition.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/data_source_metadata.py b/src/tuneinsight/api/sdk/models/data_source_metadata.py index b820d01..a3a65aa 100644 --- a/src/tuneinsight/api/sdk/models/data_source_metadata.py +++ b/src/tuneinsight/api/sdk/models/data_source_metadata.py @@ -16,18 +16,17 @@ class DataSourceMetadata: """metadata about a datasource Attributes: - metadata_available (Union[Unset, bool]): whether or not the datasource supports returning metadata stores_templates (Union[Unset, bool]): whether the data source stores template tables. 
tables (Union[Unset, List['DataSourceTable']]): + metadata_available (Union[Unset, bool]): whether or not the datasource supports returning metadata """ - metadata_available: Union[Unset, bool] = UNSET stores_templates: Union[Unset, bool] = UNSET tables: Union[Unset, List["DataSourceTable"]] = UNSET + metadata_available: Union[Unset, bool] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - metadata_available = self.metadata_available stores_templates = self.stores_templates tables: Union[Unset, List[Dict[str, Any]]] = UNSET if not isinstance(self.tables, Unset): @@ -37,15 +36,17 @@ def to_dict(self) -> Dict[str, Any]: tables.append(tables_item) + metadata_available = self.metadata_available + field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if metadata_available is not UNSET: - field_dict["metadataAvailable"] = metadata_available if stores_templates is not UNSET: field_dict["storesTemplates"] = stores_templates if tables is not UNSET: field_dict["tables"] = tables + if metadata_available is not UNSET: + field_dict["metadataAvailable"] = metadata_available return field_dict @@ -54,8 +55,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.data_source_table import DataSourceTable d = src_dict.copy() - metadata_available = d.pop("metadataAvailable", UNSET) - stores_templates = d.pop("storesTemplates", UNSET) tables = [] @@ -65,10 +64,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: tables.append(tables_item) + metadata_available = d.pop("metadataAvailable", UNSET) + data_source_metadata = cls( - metadata_available=metadata_available, stores_templates=stores_templates, tables=tables, + metadata_available=metadata_available, ) data_source_metadata.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/data_source_query.py b/src/tuneinsight/api/sdk/models/data_source_query.py index 
9d80444..719fafd 100644 --- a/src/tuneinsight/api/sdk/models/data_source_query.py +++ b/src/tuneinsight/api/sdk/models/data_source_query.py @@ -16,47 +16,46 @@ class DataSourceQuery: """schema used for the query Attributes: - database_query (Union[Unset, str]): query used to retrieve data from a database data source (typically in SQL - format) - select (Union[Unset, Select]): api_json_path (Union[Unset, str]): JSONPath used for API data sources (if given, will be used to parse the API response) api_path_query (Union[Unset, str]): Query path for the API data source URL (e.g. https://example.com+{apiPathQuery}) api_request_body (Union[Unset, str]): request body used for API data sources (if given, the request will use POST with this request body) + database_query (Union[Unset, str]): query used to retrieve data from a database data source (typically in SQL + format) + select (Union[Unset, Select]): """ - database_query: Union[Unset, str] = UNSET - select: Union[Unset, "Select"] = UNSET api_json_path: Union[Unset, str] = UNSET api_path_query: Union[Unset, str] = UNSET api_request_body: Union[Unset, str] = UNSET + database_query: Union[Unset, str] = UNSET + select: Union[Unset, "Select"] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: + api_json_path = self.api_json_path + api_path_query = self.api_path_query + api_request_body = self.api_request_body database_query = self.database_query select: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.select, Unset): select = self.select.to_dict() - api_json_path = self.api_json_path - api_path_query = self.api_path_query - api_request_body = self.api_request_body - field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if database_query is not UNSET: - field_dict["databaseQuery"] = database_query - if select is not UNSET: - field_dict["select"] = select if api_json_path is not UNSET: 
field_dict["apiJsonPath"] = api_json_path if api_path_query is not UNSET: field_dict["apiPathQuery"] = api_path_query if api_request_body is not UNSET: field_dict["apiRequestBody"] = api_request_body + if database_query is not UNSET: + field_dict["databaseQuery"] = database_query + if select is not UNSET: + field_dict["select"] = select return field_dict @@ -65,6 +64,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.select import Select d = src_dict.copy() + api_json_path = d.pop("apiJsonPath", UNSET) + + api_path_query = d.pop("apiPathQuery", UNSET) + + api_request_body = d.pop("apiRequestBody", UNSET) + database_query = d.pop("databaseQuery", UNSET) _select = d.pop("select", UNSET) @@ -74,18 +79,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: select = Select.from_dict(_select) - api_json_path = d.pop("apiJsonPath", UNSET) - - api_path_query = d.pop("apiPathQuery", UNSET) - - api_request_body = d.pop("apiRequestBody", UNSET) - data_source_query = cls( - database_query=database_query, - select=select, api_json_path=api_json_path, api_path_query=api_path_query, api_request_body=api_request_body, + database_query=database_query, + select=select, ) data_source_query.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/data_source_type.py b/src/tuneinsight/api/sdk/models/data_source_type.py new file mode 100644 index 0000000..ce8c892 --- /dev/null +++ b/src/tuneinsight/api/sdk/models/data_source_type.py @@ -0,0 +1,10 @@ +from enum import Enum + + +class DataSourceType(str, Enum): + LOCAL = "local" + DATABASE = "database" + API = "api" + + def __str__(self) -> str: + return str(self.value) diff --git a/src/tuneinsight/api/sdk/models/data_source_types_info.py b/src/tuneinsight/api/sdk/models/data_source_types_info.py index bd728e8..2475403 100644 --- a/src/tuneinsight/api/sdk/models/data_source_types_info.py +++ b/src/tuneinsight/api/sdk/models/data_source_types_info.py @@ -1,8 +1,10 @@ -from typing 
import Any, Dict, List, Type, TypeVar, Union, cast +from typing import Any, Dict, List, Type, TypeVar, Union import attr +from ..models.data_source_type import DataSourceType from ..models.database_type import DatabaseType +from ..models.local_data_source_type import LocalDataSourceType from ..types import UNSET, Unset T = TypeVar("T", bound="DataSourceTypesInfo") @@ -13,17 +15,25 @@ class DataSourceTypesInfo: """information about the available datasources Attributes: + data_source_types (Union[Unset, List[DataSourceType]]): list of available datasource types database_types (Union[Unset, List[DatabaseType]]): list of supported database types - local_formats (Union[Unset, List[str]]): list of supported format for local datasources - data_source_types (Union[Unset, List[str]]): list of available datasource types + local_formats (Union[Unset, List[LocalDataSourceType]]): list of supported format for local datasources """ + data_source_types: Union[Unset, List[DataSourceType]] = UNSET database_types: Union[Unset, List[DatabaseType]] = UNSET - local_formats: Union[Unset, List[str]] = UNSET - data_source_types: Union[Unset, List[str]] = UNSET + local_formats: Union[Unset, List[LocalDataSourceType]] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: + data_source_types: Union[Unset, List[str]] = UNSET + if not isinstance(self.data_source_types, Unset): + data_source_types = [] + for data_source_types_item_data in self.data_source_types: + data_source_types_item = data_source_types_item_data.value + + data_source_types.append(data_source_types_item) + database_types: Union[Unset, List[str]] = UNSET if not isinstance(self.database_types, Unset): database_types = [] @@ -34,27 +44,34 @@ def to_dict(self) -> Dict[str, Any]: local_formats: Union[Unset, List[str]] = UNSET if not isinstance(self.local_formats, Unset): - local_formats = self.local_formats + local_formats = [] + for local_formats_item_data in 
self.local_formats: + local_formats_item = local_formats_item_data.value - data_source_types: Union[Unset, List[str]] = UNSET - if not isinstance(self.data_source_types, Unset): - data_source_types = self.data_source_types + local_formats.append(local_formats_item) field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) + if data_source_types is not UNSET: + field_dict["dataSourceTypes"] = data_source_types if database_types is not UNSET: field_dict["databaseTypes"] = database_types if local_formats is not UNSET: field_dict["localFormats"] = local_formats - if data_source_types is not UNSET: - field_dict["dataSourceTypes"] = data_source_types return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() + data_source_types = [] + _data_source_types = d.pop("dataSourceTypes", UNSET) + for data_source_types_item_data in _data_source_types or []: + data_source_types_item = DataSourceType(data_source_types_item_data) + + data_source_types.append(data_source_types_item) + database_types = [] _database_types = d.pop("databaseTypes", UNSET) for database_types_item_data in _database_types or []: @@ -62,14 +79,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: database_types.append(database_types_item) - local_formats = cast(List[str], d.pop("localFormats", UNSET)) + local_formats = [] + _local_formats = d.pop("localFormats", UNSET) + for local_formats_item_data in _local_formats or []: + local_formats_item = LocalDataSourceType(local_formats_item_data) - data_source_types = cast(List[str], d.pop("dataSourceTypes", UNSET)) + local_formats.append(local_formats_item) data_source_types_info = cls( + data_source_types=data_source_types, database_types=database_types, local_formats=local_formats, - data_source_types=data_source_types, ) data_source_types_info.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/dataset_statistics.py 
b/src/tuneinsight/api/sdk/models/dataset_statistics.py index 16fcf36..e71c4dd 100644 --- a/src/tuneinsight/api/sdk/models/dataset_statistics.py +++ b/src/tuneinsight/api/sdk/models/dataset_statistics.py @@ -23,6 +23,8 @@ class DatasetStatistics: """ Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -33,14 +35,7 @@ class DatasetStatistics: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -49,99 +44,104 @@ class DatasetStatistics: up to 16 million. 
For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. + preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. 
local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. - end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. 
statistics (Union[Unset, List['StatisticDefinition']]): list of statistics to be extracted from the dataset """ type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET statistics: Union[Unset, List["StatisticDefinition"]] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = 
UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - 
data_source_parameters = self.data_source_parameters.to_dict() - + wait = self.wait statistics: Union[Unset, List[Dict[str, Any]]] = UNSET if not isinstance(self.statistics, Unset): statistics = [] @@ -157,46 +157,46 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not 
UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait if statistics is not UNSET: field_dict["statistics"] = statistics @@ -213,6 +213,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -220,6 +222,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -227,24 +240,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = 
d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -257,31 +264,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) - - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, 
Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) + wait = d.pop("wait", UNSET) statistics = [] _statistics = d.pop("statistics", UNSET) @@ -292,26 +292,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: dataset_statistics = cls( type=type, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, + wait=wait, statistics=statistics, ) diff --git a/src/tuneinsight/api/sdk/models/deviation_squares.py b/src/tuneinsight/api/sdk/models/deviation_squares.py index f9676f7..eff6fea 100644 --- a/src/tuneinsight/api/sdk/models/deviation_squares.py +++ b/src/tuneinsight/api/sdk/models/deviation_squares.py @@ -13,27 +13,27 @@ class DeviationSquares: """ Attributes: type (PreprocessingOperationType): type of preprocessing operation + count (Union[Unset, float]): dataset count used for computing the variance, if < 2 then the sum of squares will + be divided by 1 input_ (Union[Unset, str]): column to use as input mean (Union[Unset, float]): mean to compute the deviation from output (Union[Unset, str]): column 
to use as output - count (Union[Unset, float]): dataset count used for computing the variance, if < 2 then the sum of squares will - be divided by 1 """ type: PreprocessingOperationType + count: Union[Unset, float] = UNSET input_: Union[Unset, str] = UNSET mean: Union[Unset, float] = UNSET output: Union[Unset, str] = UNSET - count: Union[Unset, float] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value + count = self.count input_ = self.input_ mean = self.mean output = self.output - count = self.count field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) @@ -42,14 +42,14 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) + if count is not UNSET: + field_dict["count"] = count if input_ is not UNSET: field_dict["input"] = input_ if mean is not UNSET: field_dict["mean"] = mean if output is not UNSET: field_dict["output"] = output - if count is not UNSET: - field_dict["count"] = count return field_dict @@ -58,20 +58,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = PreprocessingOperationType(d.pop("type")) + count = d.pop("count", UNSET) + input_ = d.pop("input", UNSET) mean = d.pop("mean", UNSET) output = d.pop("output", UNSET) - count = d.pop("count", UNSET) - deviation_squares = cls( type=type, + count=count, input_=input_, mean=mean, output=output, - count=count, ) deviation_squares.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/distributed_join.py b/src/tuneinsight/api/sdk/models/distributed_join.py index 919e58b..89a59a9 100644 --- a/src/tuneinsight/api/sdk/models/distributed_join.py +++ b/src/tuneinsight/api/sdk/models/distributed_join.py @@ -22,6 +22,8 @@ class DistributedJoin: """ Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. 
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -32,14 +34,7 @@ class DistributedJoin: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -48,102 +43,111 @@ class DistributedJoin: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. 
+ preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. 
- end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. + join_columns (Union[Unset, List[str]]): missing_patterns (Union[Unset, List[str]]): target_columns (Union[Unset, List[str]]): - join_columns (Union[Unset, List[str]]): """ type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: 
Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET + join_columns: Union[Unset, List[str]] = UNSET missing_patterns: Union[Unset, List[str]] = UNSET target_columns: Union[Unset, List[str]] = UNSET - join_columns: Union[Unset, List[str]] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = 
self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() + wait = self.wait + join_columns: Union[Unset, List[str]] = UNSET + if not isinstance(self.join_columns, Unset): + join_columns = self.join_columns missing_patterns: Union[Unset, List[str]] = UNSET if not isinstance(self.missing_patterns, Unset): @@ -153,10 +157,6 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.target_columns, Unset): target_columns = self.target_columns - join_columns: Union[Unset, List[str]] = UNSET - if not isinstance(self.join_columns, Unset): - join_columns = self.join_columns - field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -164,52 +164,52 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if 
preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + 
field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait + if join_columns is not UNSET: + field_dict["joinColumns"] = join_columns if missing_patterns is not UNSET: field_dict["missingPatterns"] = missing_patterns if target_columns is not UNSET: field_dict["targetColumns"] = target_columns - if join_columns is not UNSET: - field_dict["joinColumns"] = join_columns return field_dict @@ -223,6 +223,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -230,6 +232,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -237,24 +250,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, 
Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -267,63 +274,56 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) + wait = d.pop("wait", UNSET) - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - data_source_parameters = 
ComputationDataSourceParameters.from_dict(_data_source_parameters) + join_columns = cast(List[str], d.pop("joinColumns", UNSET)) missing_patterns = cast(List[str], d.pop("missingPatterns", UNSET)) target_columns = cast(List[str], d.pop("targetColumns", UNSET)) - join_columns = cast(List[str], d.pop("joinColumns", UNSET)) - distributed_join = cls( type=type, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, + wait=wait, + join_columns=join_columns, missing_patterns=missing_patterns, target_columns=target_columns, - join_columns=join_columns, ) distributed_join.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/dp_policy.py b/src/tuneinsight/api/sdk/models/dp_policy.py index 7aeaf95..58c315d 100644 --- a/src/tuneinsight/api/sdk/models/dp_policy.py +++ b/src/tuneinsight/api/sdk/models/dp_policy.py @@ -6,7 +6,6 @@ if TYPE_CHECKING: from ..models.execution_quota_parameters import ExecutionQuotaParameters - from ..models.noise_parameters import NoiseParameters from ..models.threshold import Threshold @@ -18,67 +17,46 @@ class DPPolicy: """represents the disclosure prevention 
policy that enables toggling various disclosure prevention mechanisms Attributes: - min_frequencies (Union[Unset, Threshold]): represents a threshold, which can be made relative of the dataset + max_column_count (Union[Unset, Threshold]): represents a threshold, which can be made relative of the dataset size - noise_parameters (Union[Unset, NoiseParameters]): parameters for adding differential privacy noise to the - computation's encrypted output (deprecated?) + max_factors (Union[Unset, Threshold]): represents a threshold, which can be made relative of the dataset size noisy_global_size (Union[Unset, bool]): when computing the global size, whether noise is used or not. If so, each node adds discrete noise to its input to the encrypted aggregation use_differential_privacy (Union[Unset, bool]): whether to use Differential Privacy to protect the privacy of the results. - authorized_variables (Union[Unset, List[str]]): constraint on the set of variables that can be used as input, in - order to prevent misuse of variables that are out of context of the project. - if > 0 variables are defined here, then the dataset will automatically drop any variables that do not belong to - this set. - Warning: this mechanism is only effective when the data selection parameters (data source queries) are fixed, - and therefore - returned variables cannot be aliased (for example using aliases in SQL SELECT statements) to evade this trap. execution_quota_parameters (Union[Unset, ExecutionQuotaParameters]): Execution quota settings. The unit of the execution quota depends on the computation and other policies. If differential privacy is applied, it is in terms of the the epsilon value (ϵ) of the privacy budget. If the computation is a private set intersection, each query consumes budget equal to the size of the querying set. Otherwise, a unit represents one computation. 
- max_column_count (Union[Unset, Threshold]): represents a threshold, which can be made relative of the dataset - size - max_factors (Union[Unset, Threshold]): represents a threshold, which can be made relative of the dataset size min_dataset_size (Union[Unset, int]): minimum size of the dataset used as local input (checked both before and after the preprocessing operations are run) + min_frequencies (Union[Unset, Threshold]): represents a threshold, which can be made relative of the dataset + size min_global_dataset_size (Union[Unset, int]): minimum size of the global / collective dataset. It is collectively computed using the encrypted aggregation + authorized_variables (Union[Unset, List[str]]): constraint on the set of variables that can be used as input, in + order to prevent misuse of variables that are out of context of the project. + if > 0 variables are defined here, then the dataset will automatically drop any variables that do not belong to + this set. + Warning: this mechanism is only effective when the data selection parameters (data source queries) are fixed, + and therefore + returned variables cannot be aliased (for example using aliases in SQL SELECT statements) to evade this trap. 
""" - min_frequencies: Union[Unset, "Threshold"] = UNSET - noise_parameters: Union[Unset, "NoiseParameters"] = UNSET - noisy_global_size: Union[Unset, bool] = UNSET - use_differential_privacy: Union[Unset, bool] = UNSET - authorized_variables: Union[Unset, List[str]] = UNSET - execution_quota_parameters: Union[Unset, "ExecutionQuotaParameters"] = UNSET max_column_count: Union[Unset, "Threshold"] = UNSET max_factors: Union[Unset, "Threshold"] = UNSET + noisy_global_size: Union[Unset, bool] = UNSET + use_differential_privacy: Union[Unset, bool] = False + execution_quota_parameters: Union[Unset, "ExecutionQuotaParameters"] = UNSET min_dataset_size: Union[Unset, int] = UNSET + min_frequencies: Union[Unset, "Threshold"] = UNSET min_global_dataset_size: Union[Unset, int] = UNSET + authorized_variables: Union[Unset, List[str]] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - min_frequencies: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.min_frequencies, Unset): - min_frequencies = self.min_frequencies.to_dict() - - noise_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.noise_parameters, Unset): - noise_parameters = self.noise_parameters.to_dict() - - noisy_global_size = self.noisy_global_size - use_differential_privacy = self.use_differential_privacy - authorized_variables: Union[Unset, List[str]] = UNSET - if not isinstance(self.authorized_variables, Unset): - authorized_variables = self.authorized_variables - - execution_quota_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.execution_quota_parameters, Unset): - execution_quota_parameters = self.execution_quota_parameters.to_dict() - max_column_count: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.max_column_count, Unset): max_column_count = self.max_column_count.to_dict() @@ -87,62 +65,70 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.max_factors, Unset): 
max_factors = self.max_factors.to_dict() + noisy_global_size = self.noisy_global_size + use_differential_privacy = self.use_differential_privacy + execution_quota_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.execution_quota_parameters, Unset): + execution_quota_parameters = self.execution_quota_parameters.to_dict() + min_dataset_size = self.min_dataset_size + min_frequencies: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.min_frequencies, Unset): + min_frequencies = self.min_frequencies.to_dict() + min_global_dataset_size = self.min_global_dataset_size + authorized_variables: Union[Unset, List[str]] = UNSET + if not isinstance(self.authorized_variables, Unset): + authorized_variables = self.authorized_variables field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if min_frequencies is not UNSET: - field_dict["minFrequencies"] = min_frequencies - if noise_parameters is not UNSET: - field_dict["noiseParameters"] = noise_parameters + if max_column_count is not UNSET: + field_dict["maxColumnCount"] = max_column_count + if max_factors is not UNSET: + field_dict["maxFactors"] = max_factors if noisy_global_size is not UNSET: field_dict["noisyGlobalSize"] = noisy_global_size if use_differential_privacy is not UNSET: field_dict["useDifferentialPrivacy"] = use_differential_privacy - if authorized_variables is not UNSET: - field_dict["authorizedVariables"] = authorized_variables if execution_quota_parameters is not UNSET: field_dict["executionQuotaParameters"] = execution_quota_parameters - if max_column_count is not UNSET: - field_dict["maxColumnCount"] = max_column_count - if max_factors is not UNSET: - field_dict["maxFactors"] = max_factors if min_dataset_size is not UNSET: field_dict["minDatasetSize"] = min_dataset_size + if min_frequencies is not UNSET: + field_dict["minFrequencies"] = min_frequencies if min_global_dataset_size is not UNSET: field_dict["minGlobalDatasetSize"] = 
min_global_dataset_size + if authorized_variables is not UNSET: + field_dict["authorizedVariables"] = authorized_variables return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.execution_quota_parameters import ExecutionQuotaParameters - from ..models.noise_parameters import NoiseParameters from ..models.threshold import Threshold d = src_dict.copy() - _min_frequencies = d.pop("minFrequencies", UNSET) - min_frequencies: Union[Unset, Threshold] - if isinstance(_min_frequencies, Unset): - min_frequencies = UNSET + _max_column_count = d.pop("maxColumnCount", UNSET) + max_column_count: Union[Unset, Threshold] + if isinstance(_max_column_count, Unset): + max_column_count = UNSET else: - min_frequencies = Threshold.from_dict(_min_frequencies) + max_column_count = Threshold.from_dict(_max_column_count) - _noise_parameters = d.pop("noiseParameters", UNSET) - noise_parameters: Union[Unset, NoiseParameters] - if isinstance(_noise_parameters, Unset): - noise_parameters = UNSET + _max_factors = d.pop("maxFactors", UNSET) + max_factors: Union[Unset, Threshold] + if isinstance(_max_factors, Unset): + max_factors = UNSET else: - noise_parameters = NoiseParameters.from_dict(_noise_parameters) + max_factors = Threshold.from_dict(_max_factors) noisy_global_size = d.pop("noisyGlobalSize", UNSET) use_differential_privacy = d.pop("useDifferentialPrivacy", UNSET) - authorized_variables = cast(List[str], d.pop("authorizedVariables", UNSET)) - _execution_quota_parameters = d.pop("executionQuotaParameters", UNSET) execution_quota_parameters: Union[Unset, ExecutionQuotaParameters] if isinstance(_execution_quota_parameters, Unset): @@ -150,35 +136,29 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: execution_quota_parameters = ExecutionQuotaParameters.from_dict(_execution_quota_parameters) - _max_column_count = d.pop("maxColumnCount", UNSET) - max_column_count: Union[Unset, Threshold] - if isinstance(_max_column_count, 
Unset): - max_column_count = UNSET - else: - max_column_count = Threshold.from_dict(_max_column_count) + min_dataset_size = d.pop("minDatasetSize", UNSET) - _max_factors = d.pop("maxFactors", UNSET) - max_factors: Union[Unset, Threshold] - if isinstance(_max_factors, Unset): - max_factors = UNSET + _min_frequencies = d.pop("minFrequencies", UNSET) + min_frequencies: Union[Unset, Threshold] + if isinstance(_min_frequencies, Unset): + min_frequencies = UNSET else: - max_factors = Threshold.from_dict(_max_factors) - - min_dataset_size = d.pop("minDatasetSize", UNSET) + min_frequencies = Threshold.from_dict(_min_frequencies) min_global_dataset_size = d.pop("minGlobalDatasetSize", UNSET) + authorized_variables = cast(List[str], d.pop("authorizedVariables", UNSET)) + dp_policy = cls( - min_frequencies=min_frequencies, - noise_parameters=noise_parameters, + max_column_count=max_column_count, + max_factors=max_factors, noisy_global_size=noisy_global_size, use_differential_privacy=use_differential_privacy, - authorized_variables=authorized_variables, execution_quota_parameters=execution_quota_parameters, - max_column_count=max_column_count, - max_factors=max_factors, min_dataset_size=min_dataset_size, + min_frequencies=min_frequencies, min_global_dataset_size=min_global_dataset_size, + authorized_variables=authorized_variables, ) dp_policy.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/dummy.py b/src/tuneinsight/api/sdk/models/dummy.py index 5387862..7e23cef 100644 --- a/src/tuneinsight/api/sdk/models/dummy.py +++ b/src/tuneinsight/api/sdk/models/dummy.py @@ -22,6 +22,8 @@ class Dummy: """ Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. 
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -32,14 +34,7 @@ class Dummy: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -48,29 +43,34 @@ class Dummy: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. 
+ preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. 
- end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. error_in_constructor (Union[Unset, bool]): error_in_start (Union[Unset, bool]): panic_in_constructor (Union[Unset, bool]): @@ -78,28 +78,28 @@ class Dummy: """ type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] 
= UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET error_in_constructor: Union[Unset, bool] = UNSET error_in_start: Union[Unset, bool] = UNSET panic_in_constructor: Union[Unset, bool] = UNSET @@ -109,44 +109,44 @@ class Dummy: def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = 
self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() - + wait = self.wait error_in_constructor = self.error_in_constructor error_in_start = self.error_in_start panic_in_constructor = self.panic_in_constructor @@ -159,46 +159,46 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = 
local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait if error_in_constructor is not UNSET: field_dict["errorInConstructor"] = error_in_constructor if error_in_start is not UNSET: @@ -220,6 +220,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = 
ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -227,6 +229,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -234,24 +247,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = 
d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -264,31 +271,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) - - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) + wait = d.pop("wait", UNSET) error_in_constructor = d.pop("errorInConstructor", UNSET) @@ -300,26 +300,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: dummy = cls( type=type, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + 
timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, + wait=wait, error_in_constructor=error_in_constructor, error_in_start=error_in_start, panic_in_constructor=panic_in_constructor, diff --git a/src/tuneinsight/api/sdk/models/encrypted_aggregation.py b/src/tuneinsight/api/sdk/models/encrypted_aggregation.py index 3b56a17..6698430 100644 --- a/src/tuneinsight/api/sdk/models/encrypted_aggregation.py +++ b/src/tuneinsight/api/sdk/models/encrypted_aggregation.py @@ -22,6 +22,8 @@ class EncryptedAggregation: """ Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -32,14 +34,7 @@ class EncryptedAggregation: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. 
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -48,32 +43,34 @@ class EncryptedAggregation: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. + preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. 
This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. - end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation - count_column (Union[Unset, str]): The column, if any, that is a count column (of 1s). 
Used in DP mode to improve - accuracy. - features (Union[Unset, str]): Shared identifier of a data object. + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. lower_bounds (Union[Unset, List[float]]): Lower bounds on the values in each column of the aggregation. Used in DP mode for clipping and sensitivity. nb_features (Union[Unset, int]): Number of columns of the dataset @@ -81,83 +78,84 @@ class EncryptedAggregation: DP mode for clipping and sensitivity. aggregate_columns (Union[Unset, List[str]]): The columns on which the data should be aggregated aggregate_features (Union[Unset, bool]): If true, sum the columns together into one number + count_column (Union[Unset, str]): The column, if any, that is a count column (of 1s). Used in DP mode to improve + accuracy. + features (Union[Unset, str]): Shared identifier of a data object. """ type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] 
= UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET - count_column: Union[Unset, str] = UNSET - features: Union[Unset, str] = UNSET + wait: Union[Unset, bool] = UNSET lower_bounds: Union[Unset, List[float]] = UNSET nb_features: Union[Unset, int] = UNSET upper_bounds: Union[Unset, List[float]] = UNSET aggregate_columns: Union[Unset, List[str]] = UNSET aggregate_features: Union[Unset, bool] = UNSET + count_column: Union[Unset, str] = UNSET + features: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = 
self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() - - count_column = self.count_column - features = self.features + wait = self.wait lower_bounds: Union[Unset, List[float]] = UNSET if not isinstance(self.lower_bounds, Unset): lower_bounds = self.lower_bounds @@ -172,6 +170,8 @@ def to_dict(self) -> Dict[str, Any]: aggregate_columns = self.aggregate_columns aggregate_features = self.aggregate_features + count_column = self.count_column + features = self.features field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) @@ -180,50 +180,46 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if 
dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout 
- if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters - if count_column is not UNSET: - field_dict["countColumn"] = count_column - if features is not UNSET: - field_dict["features"] = features + if wait is not UNSET: + field_dict["wait"] = wait if lower_bounds is not UNSET: field_dict["lowerBounds"] = lower_bounds if nb_features is not UNSET: @@ -234,6 +230,10 @@ def to_dict(self) -> Dict[str, Any]: field_dict["aggregateColumns"] = aggregate_columns if aggregate_features is not UNSET: field_dict["aggregateFeatures"] = aggregate_features + if count_column is not UNSET: + field_dict["countColumn"] = count_column + if features is not UNSET: + field_dict["features"] = features return field_dict @@ -247,6 +247,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -254,6 +256,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -261,24 +274,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - 
_dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -291,35 +298,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) - - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - 
data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - - count_column = d.pop("countColumn", UNSET) - - features = d.pop("features", UNSET) + wait = d.pop("wait", UNSET) lower_bounds = cast(List[float], d.pop("lowerBounds", UNSET)) @@ -331,35 +327,39 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: aggregate_features = d.pop("aggregateFeatures", UNSET) + count_column = d.pop("countColumn", UNSET) + + features = d.pop("features", UNSET) + encrypted_aggregation = cls( type=type, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, - count_column=count_column, - features=features, + wait=wait, lower_bounds=lower_bounds, nb_features=nb_features, upper_bounds=upper_bounds, aggregate_columns=aggregate_columns, aggregate_features=aggregate_features, + count_column=count_column, + features=features, ) encrypted_aggregation.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/encrypted_mean.py b/src/tuneinsight/api/sdk/models/encrypted_mean.py index 8a4f26c..e49cbc1 100644 --- 
a/src/tuneinsight/api/sdk/models/encrypted_mean.py +++ b/src/tuneinsight/api/sdk/models/encrypted_mean.py @@ -22,6 +22,8 @@ class EncryptedMean: """ Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -32,14 +34,7 @@ class EncryptedMean: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -48,32 +43,34 @@ class EncryptedMean: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. 
- cohort_id (Union[Unset, str]): Unique identifier of a data object. + preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. 
local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. - end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation - participant (Union[Unset, str]): optional column that is used to identify the name of the participant. - If empty, the name of the instance will be used instead. - variables (Union[Unset, List[str]]): list of variables to compute the mean on. + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. grouping_keys (Union[Unset, List[str]]): This parameter is used to specify the composite keys for grouping the aggregated values. For example, when the groupingKeys are set to [id, name], the aggregation will be performed separately @@ -84,90 +81,92 @@ class EncryptedMean: aggregated group. outlier_threshold (Union[Unset, float]): the threshold T to use to filter out outlier values. A value x will be considered an outlier if abs(x - mean) > T * STD. + participant (Union[Unset, str]): optional column that is used to identify the name of the participant. 
+ If empty, the name of the instance will be used instead. + variables (Union[Unset, List[str]]): list of variables to compute the mean on. """ type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET - participant: Union[Unset, str] = UNSET - variables: Union[Unset, List[str]] = UNSET + wait: Union[Unset, bool] = UNSET grouping_keys: Union[Unset, List[str]] = UNSET min_participants: Union[Unset, int] = UNSET outlier_threshold: Union[Unset, float] = UNSET + 
participant: Union[Unset, str] = UNSET + variables: Union[Unset, List[str]] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = 
self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() - - participant = self.participant - variables: Union[Unset, List[str]] = UNSET - if not isinstance(self.variables, Unset): - variables = self.variables - + wait = self.wait grouping_keys: Union[Unset, List[str]] = UNSET if not isinstance(self.grouping_keys, Unset): grouping_keys = self.grouping_keys min_participants = self.min_participants outlier_threshold = self.outlier_threshold + participant = self.participant + variables: Union[Unset, List[str]] = UNSET + if not isinstance(self.variables, Unset): + variables = self.variables field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) @@ -176,56 +175,56 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + 
field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters - if participant is not UNSET: - field_dict["participant"] = participant - if variables is not UNSET: - field_dict["variables"] = variables + if wait is not UNSET: + field_dict["wait"] = wait if grouping_keys is not UNSET: field_dict["groupingKeys"] = grouping_keys if min_participants is not UNSET: field_dict["minParticipants"] = min_participants if outlier_threshold is not UNSET: field_dict["outlierThreshold"] = outlier_threshold + if participant is not UNSET: + field_dict["participant"] = participant + if variables is not UNSET: + 
field_dict["variables"] = variables return field_dict @@ -239,6 +238,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -246,6 +247,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -253,24 +265,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - 
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -283,35 +289,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) - - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - - participant = d.pop("participant", UNSET) - - variables = cast(List[str], d.pop("variables", UNSET)) + wait = d.pop("wait", UNSET) grouping_keys = cast(List[str], d.pop("groupingKeys", UNSET)) @@ -319,33 +314,37 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: outlier_threshold = d.pop("outlierThreshold", UNSET) + participant = d.pop("participant", UNSET) + + variables = cast(List[str], d.pop("variables", UNSET)) + encrypted_mean = cls( type=type, - input_clipping_method=input_clipping_method, - 
preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, - participant=participant, - variables=variables, + wait=wait, grouping_keys=grouping_keys, min_participants=min_participants, outlier_threshold=outlier_threshold, + participant=participant, + variables=variables, ) encrypted_mean.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/encrypted_prediction.py b/src/tuneinsight/api/sdk/models/encrypted_prediction.py index 9fa50f8..bf0a9ab 100644 --- a/src/tuneinsight/api/sdk/models/encrypted_prediction.py +++ b/src/tuneinsight/api/sdk/models/encrypted_prediction.py @@ -22,6 +22,8 @@ class EncryptedPrediction: """ Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. 
@@ -32,14 +34,7 @@ class EncryptedPrediction: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -48,29 +43,36 @@ class EncryptedPrediction: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. 
+ preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. 
- end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. + label_columns (Union[Unset, List[str]]): specified label columns of the original dataset if the computation + specifies to return the ground truth labels alongside model (Union[Unset, str]): Unique identifier of a data object. 
only_root_prediction (Union[Unset, bool]): if true and the computation is not local, then the prediction is only computed by the root node and the ct is broadcast to other nodes for later key switch @@ -79,81 +81,83 @@ class EncryptedPrediction: columns from the input dataset are used include_ground_truth_labels (Union[Unset, bool]): if true, then the result should contain the associated ground truth labels - label_columns (Union[Unset, List[str]]): specified label columns of the original dataset if the computation - specifies to return the ground truth labels alongside """ type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = 
UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET + label_columns: Union[Unset, List[str]] = UNSET model: Union[Unset, str] = UNSET only_root_prediction: Union[Unset, bool] = UNSET data: Union[Unset, str] = UNSET feature_columns: Union[Unset, List[str]] = UNSET include_ground_truth_labels: Union[Unset, bool] = UNSET - label_columns: Union[Unset, List[str]] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = 
self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() + wait = self.wait + label_columns: Union[Unset, List[str]] = UNSET + if not isinstance(self.label_columns, Unset): + label_columns = self.label_columns model = self.model only_root_prediction = self.only_root_prediction @@ -163,9 +167,6 @@ def to_dict(self) -> Dict[str, Any]: feature_columns = self.feature_columns include_ground_truth_labels = self.include_ground_truth_labels - label_columns: Union[Unset, List[str]] = UNSET - if not isinstance(self.label_columns, Unset): - label_columns = self.label_columns field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) @@ -174,46 +175,48 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = 
input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait + if label_columns is not UNSET: + field_dict["labelColumns"] = label_columns if model is not UNSET: 
field_dict["model"] = model if only_root_prediction is not UNSET: @@ -224,8 +227,6 @@ def to_dict(self) -> Dict[str, Any]: field_dict["featureColumns"] = feature_columns if include_ground_truth_labels is not UNSET: field_dict["includeGroundTruthLabels"] = include_ground_truth_labels - if label_columns is not UNSET: - field_dict["labelColumns"] = label_columns return field_dict @@ -239,6 +240,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -246,6 +249,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -253,24 +267,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = 
DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -283,31 +291,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) + wait = d.pop("wait", UNSET) - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) + label_columns = cast(List[str], d.pop("labelColumns", UNSET)) model = d.pop("model", UNSET) @@ -319,36 +322,34 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: include_ground_truth_labels = 
d.pop("includeGroundTruthLabels", UNSET) - label_columns = cast(List[str], d.pop("labelColumns", UNSET)) - encrypted_prediction = cls( type=type, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, + wait=wait, + label_columns=label_columns, model=model, only_root_prediction=only_root_prediction, data=data, feature_columns=feature_columns, include_ground_truth_labels=include_ground_truth_labels, - label_columns=label_columns, ) encrypted_prediction.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/encrypted_regression.py b/src/tuneinsight/api/sdk/models/encrypted_regression.py index 828f625..63ecef9 100644 --- a/src/tuneinsight/api/sdk/models/encrypted_regression.py +++ b/src/tuneinsight/api/sdk/models/encrypted_regression.py @@ -23,6 +23,8 @@ class EncryptedRegression: """ Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. 
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -33,14 +35,7 @@ class EncryptedRegression: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -49,29 +44,34 @@ class EncryptedRegression: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. 
+ preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. 
- end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. feature_columns (Union[Unset, List[str]]): specified columns from the input dataset corresponding to the features label_columns (Union[Unset, List[str]]): specified columns from the input dataset corresponding to the labels @@ -80,28 +80,28 @@ class EncryptedRegression: """ type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + 
end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET feature_columns: Union[Unset, List[str]] = UNSET label_columns: Union[Unset, List[str]] = UNSET params: Union[Unset, "EncryptedRegressionParams"] = UNSET @@ -111,44 +111,44 @@ class EncryptedRegression: def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = 
self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() - + wait = self.wait feature_columns: Union[Unset, List[str]] = UNSET if not isinstance(self.feature_columns, Unset): feature_columns = self.feature_columns @@ -170,46 +170,46 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + 
field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait if feature_columns is not UNSET: field_dict["featureColumns"] = 
feature_columns if label_columns is not UNSET: @@ -232,6 +232,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -239,6 +241,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -246,24 +259,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = 
d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -276,31 +283,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) - - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) + wait = d.pop("wait", UNSET) feature_columns = cast(List[str], d.pop("featureColumns", UNSET)) @@ -317,26 +317,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: encrypted_regression = cls( type=type, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + 
preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, + wait=wait, feature_columns=feature_columns, label_columns=label_columns, params=params, diff --git a/src/tuneinsight/api/sdk/models/encrypted_regression_params.py b/src/tuneinsight/api/sdk/models/encrypted_regression_params.py index 157597a..1929248 100644 --- a/src/tuneinsight/api/sdk/models/encrypted_regression_params.py +++ b/src/tuneinsight/api/sdk/models/encrypted_regression_params.py @@ -18,73 +18,74 @@ class EncryptedRegressionParams: """Parameters for the encrypted regression. Attributes: - network_iteration_count (Union[Unset, int]): The global maximum number of iteration. Default: 1. - seed (Union[Unset, float]): The seed to sample the initial weights. - learning_rate (Union[Unset, float]): The learning rate of the regression. Default: 0.02. - linear (Union[Unset, EncryptedRegressionParamsLinear]): Parameters specific for the linear regression. + elastic_rate (Union[Unset, float]): The elastic rate of the regression. Default: 0.85. local_batch_size (Union[Unset, int]): The batch size in each local iteration. Default: 64. - momentum (Union[Unset, float]): The momentum rate of the regression. Default: 0.92. + seed (Union[Unset, float]): The seed to sample the initial weights. approximation_params (Union[Unset, ApproximationParams]): parameters for polynomial approximation - elastic_rate (Union[Unset, float]): The elastic rate of the regression. Default: 0.85. 
+ linear (Union[Unset, EncryptedRegressionParamsLinear]): Parameters specific for the linear regression. local_iteration_count (Union[Unset, int]): The maximum number of local iterations. Default: 1. + momentum (Union[Unset, float]): The momentum rate of the regression. Default: 0.92. + network_iteration_count (Union[Unset, int]): The global maximum number of iteration. Default: 1. type (Union[Unset, RegressionType]): type of the regression + learning_rate (Union[Unset, float]): The learning rate of the regression. Default: 0.02. """ - network_iteration_count: Union[Unset, int] = 1 - seed: Union[Unset, float] = 0.0 - learning_rate: Union[Unset, float] = 0.02 - linear: Union[Unset, "EncryptedRegressionParamsLinear"] = UNSET + elastic_rate: Union[Unset, float] = 0.85 local_batch_size: Union[Unset, int] = 64 - momentum: Union[Unset, float] = 0.92 + seed: Union[Unset, float] = 0.0 approximation_params: Union[Unset, "ApproximationParams"] = UNSET - elastic_rate: Union[Unset, float] = 0.85 + linear: Union[Unset, "EncryptedRegressionParamsLinear"] = UNSET local_iteration_count: Union[Unset, int] = 1 + momentum: Union[Unset, float] = 0.92 + network_iteration_count: Union[Unset, int] = 1 type: Union[Unset, RegressionType] = UNSET + learning_rate: Union[Unset, float] = 0.02 additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - network_iteration_count = self.network_iteration_count - seed = self.seed - learning_rate = self.learning_rate - linear: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.linear, Unset): - linear = self.linear.to_dict() - + elastic_rate = self.elastic_rate local_batch_size = self.local_batch_size - momentum = self.momentum + seed = self.seed approximation_params: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.approximation_params, Unset): approximation_params = self.approximation_params.to_dict() - elastic_rate = self.elastic_rate + linear: Union[Unset, Dict[str, Any]] 
= UNSET + if not isinstance(self.linear, Unset): + linear = self.linear.to_dict() + local_iteration_count = self.local_iteration_count + momentum = self.momentum + network_iteration_count = self.network_iteration_count type: Union[Unset, str] = UNSET if not isinstance(self.type, Unset): type = self.type.value + learning_rate = self.learning_rate + field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if network_iteration_count is not UNSET: - field_dict["networkIterationCount"] = network_iteration_count - if seed is not UNSET: - field_dict["seed"] = seed - if learning_rate is not UNSET: - field_dict["learningRate"] = learning_rate - if linear is not UNSET: - field_dict["linear"] = linear + if elastic_rate is not UNSET: + field_dict["elasticRate"] = elastic_rate if local_batch_size is not UNSET: field_dict["localBatchSize"] = local_batch_size - if momentum is not UNSET: - field_dict["momentum"] = momentum + if seed is not UNSET: + field_dict["seed"] = seed if approximation_params is not UNSET: field_dict["approximationParams"] = approximation_params - if elastic_rate is not UNSET: - field_dict["elasticRate"] = elastic_rate + if linear is not UNSET: + field_dict["linear"] = linear if local_iteration_count is not UNSET: field_dict["localIterationCount"] = local_iteration_count + if momentum is not UNSET: + field_dict["momentum"] = momentum + if network_iteration_count is not UNSET: + field_dict["networkIterationCount"] = network_iteration_count if type is not UNSET: field_dict["type"] = type + if learning_rate is not UNSET: + field_dict["learningRate"] = learning_rate return field_dict @@ -94,11 +95,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.encrypted_regression_params_linear import EncryptedRegressionParamsLinear d = src_dict.copy() - network_iteration_count = d.pop("networkIterationCount", UNSET) + elastic_rate = d.pop("elasticRate", UNSET) + + local_batch_size = 
d.pop("localBatchSize", UNSET) seed = d.pop("seed", UNSET) - learning_rate = d.pop("learningRate", UNSET) + _approximation_params = d.pop("approximationParams", UNSET) + approximation_params: Union[Unset, ApproximationParams] + if isinstance(_approximation_params, Unset): + approximation_params = UNSET + else: + approximation_params = ApproximationParams.from_dict(_approximation_params) _linear = d.pop("linear", UNSET) linear: Union[Unset, EncryptedRegressionParamsLinear] @@ -107,20 +115,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: linear = EncryptedRegressionParamsLinear.from_dict(_linear) - local_batch_size = d.pop("localBatchSize", UNSET) + local_iteration_count = d.pop("localIterationCount", UNSET) momentum = d.pop("momentum", UNSET) - _approximation_params = d.pop("approximationParams", UNSET) - approximation_params: Union[Unset, ApproximationParams] - if isinstance(_approximation_params, Unset): - approximation_params = UNSET - else: - approximation_params = ApproximationParams.from_dict(_approximation_params) - - elastic_rate = d.pop("elasticRate", UNSET) - - local_iteration_count = d.pop("localIterationCount", UNSET) + network_iteration_count = d.pop("networkIterationCount", UNSET) _type = d.pop("type", UNSET) type: Union[Unset, RegressionType] @@ -129,17 +128,19 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: type = RegressionType(_type) + learning_rate = d.pop("learningRate", UNSET) + encrypted_regression_params = cls( - network_iteration_count=network_iteration_count, - seed=seed, - learning_rate=learning_rate, - linear=linear, + elastic_rate=elastic_rate, local_batch_size=local_batch_size, - momentum=momentum, + seed=seed, approximation_params=approximation_params, - elastic_rate=elastic_rate, + linear=linear, local_iteration_count=local_iteration_count, + momentum=momentum, + network_iteration_count=network_iteration_count, type=type, + learning_rate=learning_rate, ) 
encrypted_regression_params.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/encryption.py b/src/tuneinsight/api/sdk/models/encryption.py index a0761d5..47e4e98 100644 --- a/src/tuneinsight/api/sdk/models/encryption.py +++ b/src/tuneinsight/api/sdk/models/encryption.py @@ -11,38 +11,38 @@ class Encryption: """ Attributes: - cryptosystem (Union[Unset, str]): key (Union[Unset, str]): + cryptosystem (Union[Unset, str]): """ - cryptosystem: Union[Unset, str] = UNSET key: Union[Unset, str] = UNSET + cryptosystem: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - cryptosystem = self.cryptosystem key = self.key + cryptosystem = self.cryptosystem field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if cryptosystem is not UNSET: - field_dict["cryptosystem"] = cryptosystem if key is not UNSET: field_dict["key"] = key + if cryptosystem is not UNSET: + field_dict["cryptosystem"] = cryptosystem return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - cryptosystem = d.pop("cryptosystem", UNSET) - key = d.pop("key", UNSET) + cryptosystem = d.pop("cryptosystem", UNSET) + encryption = cls( - cryptosystem=cryptosystem, key=key, + cryptosystem=cryptosystem, ) encryption.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/execution_quota_parameters.py b/src/tuneinsight/api/sdk/models/execution_quota_parameters.py index 4119f32..a65f865 100644 --- a/src/tuneinsight/api/sdk/models/execution_quota_parameters.py +++ b/src/tuneinsight/api/sdk/models/execution_quota_parameters.py @@ -23,24 +23,32 @@ class ExecutionQuotaParameters: Otherwise, a unit represents one computation. Attributes: + allocation (Union[Unset, float]): quota allocated initially. 
+ allocation_interval (Union[Unset, Duration]): definition of a date-independent time interval increment (Union[Unset, float]): value incremented after each allocation interval + local_computations_use_budget (Union[Unset, bool]): whether local computations consume the execution quota max_allocation (Union[Unset, float]): maximum value that can be taken by the execution quota scope (Union[Unset, ExecutionQuotaParametersScope]): scope of the quota start (Union[Unset, datetime.datetime]): date time at which the quota is effective - allocation (Union[Unset, float]): quota allocated initially. - allocation_interval (Union[Unset, Duration]): definition of a date-independent time interval """ + allocation: Union[Unset, float] = UNSET + allocation_interval: Union[Unset, "Duration"] = UNSET increment: Union[Unset, float] = UNSET + local_computations_use_budget: Union[Unset, bool] = False max_allocation: Union[Unset, float] = UNSET scope: Union[Unset, ExecutionQuotaParametersScope] = UNSET start: Union[Unset, datetime.datetime] = UNSET - allocation: Union[Unset, float] = UNSET - allocation_interval: Union[Unset, "Duration"] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: + allocation = self.allocation + allocation_interval: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.allocation_interval, Unset): + allocation_interval = self.allocation_interval.to_dict() + increment = self.increment + local_computations_use_budget = self.local_computations_use_budget max_allocation = self.max_allocation scope: Union[Unset, str] = UNSET if not isinstance(self.scope, Unset): @@ -50,26 +58,23 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.start, Unset): start = self.start.isoformat() - allocation = self.allocation - allocation_interval: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.allocation_interval, Unset): - allocation_interval = self.allocation_interval.to_dict() - 
field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) + if allocation is not UNSET: + field_dict["allocation"] = allocation + if allocation_interval is not UNSET: + field_dict["allocationInterval"] = allocation_interval if increment is not UNSET: field_dict["increment"] = increment + if local_computations_use_budget is not UNSET: + field_dict["localComputationsUseBudget"] = local_computations_use_budget if max_allocation is not UNSET: field_dict["maxAllocation"] = max_allocation if scope is not UNSET: field_dict["scope"] = scope if start is not UNSET: field_dict["start"] = start - if allocation is not UNSET: - field_dict["allocation"] = allocation - if allocation_interval is not UNSET: - field_dict["allocationInterval"] = allocation_interval return field_dict @@ -78,8 +83,19 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.duration import Duration d = src_dict.copy() + allocation = d.pop("allocation", UNSET) + + _allocation_interval = d.pop("allocationInterval", UNSET) + allocation_interval: Union[Unset, Duration] + if isinstance(_allocation_interval, Unset): + allocation_interval = UNSET + else: + allocation_interval = Duration.from_dict(_allocation_interval) + increment = d.pop("increment", UNSET) + local_computations_use_budget = d.pop("localComputationsUseBudget", UNSET) + max_allocation = d.pop("maxAllocation", UNSET) _scope = d.pop("scope", UNSET) @@ -96,22 +112,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: start = isoparse(_start) - allocation = d.pop("allocation", UNSET) - - _allocation_interval = d.pop("allocationInterval", UNSET) - allocation_interval: Union[Unset, Duration] - if isinstance(_allocation_interval, Unset): - allocation_interval = UNSET - else: - allocation_interval = Duration.from_dict(_allocation_interval) - execution_quota_parameters = cls( + allocation=allocation, + allocation_interval=allocation_interval, increment=increment, + 
local_computations_use_budget=local_computations_use_budget, max_allocation=max_allocation, scope=scope, start=start, - allocation=allocation, - allocation_interval=allocation_interval, ) execution_quota_parameters.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/filter_.py b/src/tuneinsight/api/sdk/models/filter_.py index 6f67ac4..e15c4d3 100644 --- a/src/tuneinsight/api/sdk/models/filter_.py +++ b/src/tuneinsight/api/sdk/models/filter_.py @@ -16,17 +16,17 @@ class Filter: type (PreprocessingOperationType): type of preprocessing operation col_name (str): name of column to filter on value (str): value with which to compare + values (Union[Unset, List[str]]): list of values to pass in when comparison type is 'isin'. comparator (Union[Unset, ComparisonType]): type of comparison numerical (Union[Unset, bool]): indicate whether the comparison is on numerical values - values (Union[Unset, List[str]]): list of values to pass in when comparison type is 'isin'. """ type: PreprocessingOperationType col_name: str value: str + values: Union[Unset, List[str]] = UNSET comparator: Union[Unset, ComparisonType] = UNSET numerical: Union[Unset, bool] = UNSET - values: Union[Unset, List[str]] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: @@ -34,14 +34,15 @@ def to_dict(self) -> Dict[str, Any]: col_name = self.col_name value = self.value + values: Union[Unset, List[str]] = UNSET + if not isinstance(self.values, Unset): + values = self.values + comparator: Union[Unset, str] = UNSET if not isinstance(self.comparator, Unset): comparator = self.comparator.value numerical = self.numerical - values: Union[Unset, List[str]] = UNSET - if not isinstance(self.values, Unset): - values = self.values field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) @@ -52,12 +53,12 @@ def to_dict(self) -> Dict[str, Any]: "value": value, } ) + if values is not UNSET: + field_dict["values"] = 
values if comparator is not UNSET: field_dict["comparator"] = comparator if numerical is not UNSET: field_dict["numerical"] = numerical - if values is not UNSET: - field_dict["values"] = values return field_dict @@ -70,6 +71,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: value = d.pop("value") + values = cast(List[str], d.pop("values", UNSET)) + _comparator = d.pop("comparator", UNSET) comparator: Union[Unset, ComparisonType] if isinstance(_comparator, Unset): @@ -79,15 +82,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: numerical = d.pop("numerical", UNSET) - values = cast(List[str], d.pop("values", UNSET)) - filter_ = cls( type=type, col_name=col_name, value=value, + values=values, comparator=comparator, numerical=numerical, - values=values, ) filter_.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/float_matrix.py b/src/tuneinsight/api/sdk/models/float_matrix.py index deb17ad..4bc40e1 100644 --- a/src/tuneinsight/api/sdk/models/float_matrix.py +++ b/src/tuneinsight/api/sdk/models/float_matrix.py @@ -20,16 +20,16 @@ class FloatMatrix: columns (List[str]): Name of the columns of the matrix data (List[List[float]]): 2d array of float values contextual_info (Union[Unset, ResultContextualInfo]): contextual information about the content retrieved - column_count (Union[Unset, int]): row_count (Union[Unset, int]): + column_count (Union[Unset, int]): """ type: ContentType columns: List[str] data: List[List[float]] contextual_info: Union[Unset, "ResultContextualInfo"] = UNSET - column_count: Union[Unset, int] = UNSET row_count: Union[Unset, int] = UNSET + column_count: Union[Unset, int] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: @@ -47,8 +47,8 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.contextual_info, Unset): contextual_info = self.contextual_info.to_dict() - column_count = self.column_count row_count = self.row_count + 
column_count = self.column_count field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) @@ -61,10 +61,10 @@ def to_dict(self) -> Dict[str, Any]: ) if contextual_info is not UNSET: field_dict["contextualInfo"] = contextual_info - if column_count is not UNSET: - field_dict["columnCount"] = column_count if row_count is not UNSET: field_dict["rowCount"] = row_count + if column_count is not UNSET: + field_dict["columnCount"] = column_count return field_dict @@ -91,17 +91,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: contextual_info = ResultContextualInfo.from_dict(_contextual_info) - column_count = d.pop("columnCount", UNSET) - row_count = d.pop("rowCount", UNSET) + column_count = d.pop("columnCount", UNSET) + float_matrix = cls( type=type, columns=columns, data=data, contextual_info=contextual_info, - column_count=column_count, row_count=row_count, + column_count=column_count, ) float_matrix.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/get_network_metadata_response_200.py b/src/tuneinsight/api/sdk/models/get_network_metadata_response_200.py index 03974ae..6820d98 100644 --- a/src/tuneinsight/api/sdk/models/get_network_metadata_response_200.py +++ b/src/tuneinsight/api/sdk/models/get_network_metadata_response_200.py @@ -16,7 +16,6 @@ class GetNetworkMetadataResponse200: """ Attributes: - dpo_authorization_enabled (Union[Unset, bool]): Indicates if collective projects require authorization. networks (Union[Unset, List['Network']]): nodes (Union[Unset, List['Node']]): warnings (Union[Unset, List[str]]): @@ -24,18 +23,18 @@ class GetNetworkMetadataResponse200: source queries can be composed of multiple queries. default_topology (Union[Unset, str]): Indicates the default topology of the network used when creating a project. Values can be "star" or "tree". + dpo_authorization_enabled (Union[Unset, bool]): Indicates if collective projects require authorization. 
""" - dpo_authorization_enabled: Union[Unset, bool] = UNSET networks: Union[Unset, List["Network"]] = UNSET nodes: Union[Unset, List["Node"]] = UNSET warnings: Union[Unset, List[str]] = UNSET compound_queries_enabled: Union[Unset, bool] = UNSET default_topology: Union[Unset, str] = UNSET + dpo_authorization_enabled: Union[Unset, bool] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - dpo_authorization_enabled = self.dpo_authorization_enabled networks: Union[Unset, List[Dict[str, Any]]] = UNSET if not isinstance(self.networks, Unset): networks = [] @@ -58,12 +57,11 @@ def to_dict(self) -> Dict[str, Any]: compound_queries_enabled = self.compound_queries_enabled default_topology = self.default_topology + dpo_authorization_enabled = self.dpo_authorization_enabled field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if dpo_authorization_enabled is not UNSET: - field_dict["dpoAuthorizationEnabled"] = dpo_authorization_enabled if networks is not UNSET: field_dict["networks"] = networks if nodes is not UNSET: @@ -74,6 +72,8 @@ def to_dict(self) -> Dict[str, Any]: field_dict["compoundQueriesEnabled"] = compound_queries_enabled if default_topology is not UNSET: field_dict["default-topology"] = default_topology + if dpo_authorization_enabled is not UNSET: + field_dict["dpoAuthorizationEnabled"] = dpo_authorization_enabled return field_dict @@ -83,8 +83,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.node import Node d = src_dict.copy() - dpo_authorization_enabled = d.pop("dpoAuthorizationEnabled", UNSET) - networks = [] _networks = d.pop("networks", UNSET) for networks_item_data in _networks or []: @@ -105,13 +103,15 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: default_topology = d.pop("default-topology", UNSET) + dpo_authorization_enabled = d.pop("dpoAuthorizationEnabled", UNSET) + 
get_network_metadata_response_200 = cls( - dpo_authorization_enabled=dpo_authorization_enabled, networks=networks, nodes=nodes, warnings=warnings, compound_queries_enabled=compound_queries_enabled, default_topology=default_topology, + dpo_authorization_enabled=dpo_authorization_enabled, ) get_network_metadata_response_200.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/gwas.py b/src/tuneinsight/api/sdk/models/gwas.py index ece1b64..03a0fec 100644 --- a/src/tuneinsight/api/sdk/models/gwas.py +++ b/src/tuneinsight/api/sdk/models/gwas.py @@ -24,6 +24,8 @@ class GWAS: """ Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -34,14 +36,7 @@ class GWAS: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. 
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -50,107 +45,117 @@ class GWAS: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. + preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. 
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. - end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. 
+ matching_params (Union[Unset, MatchingParams]): parameters relevant for matching + target_label (Union[Unset, str]): target to use from the clinical datasets variants_organization (Union[Unset, str]): organization holding the variants covariates (Union[Unset, List[str]]): list of columns holding the covariate values locus_range (Union[Unset, LocusRange]): range specification for locus genomic positions - matching_params (Union[Unset, MatchingParams]): parameters relevant for matching - target_label (Union[Unset, str]): target to use from the clinical datasets """ type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET 
release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET + matching_params: Union[Unset, "MatchingParams"] = UNSET + target_label: Union[Unset, str] = UNSET variants_organization: Union[Unset, str] = UNSET covariates: Union[Unset, List[str]] = UNSET locus_range: Union[Unset, "LocusRange"] = UNSET - matching_params: Union[Unset, "MatchingParams"] = UNSET - target_label: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): 
local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() + wait = self.wait + matching_params: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.matching_params, Unset): + matching_params = self.matching_params.to_dict() + target_label = self.target_label variants_organization = self.variants_organization covariates: Union[Unset, List[str]] = UNSET if not isinstance(self.covariates, Unset): @@ -160,12 +165,6 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.locus_range, Unset): locus_range = self.locus_range.to_dict() - matching_params: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.matching_params, Unset): - matching_params = self.matching_params.to_dict() - - target_label = self.target_label - field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -173,56 +172,56 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if 
dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - 
field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait + if matching_params is not UNSET: + field_dict["matchingParams"] = matching_params + if target_label is not UNSET: + field_dict["targetLabel"] = target_label if variants_organization is not UNSET: field_dict["variantsOrganization"] = variants_organization if covariates is not UNSET: field_dict["covariates"] = covariates if locus_range is not UNSET: field_dict["locusRange"] = locus_range - if matching_params is not UNSET: - field_dict["matchingParams"] = matching_params - if target_label is not UNSET: - field_dict["targetLabel"] = target_label return field_dict @@ -238,6 +237,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -245,6 +246,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -252,24 +264,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - 
if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -282,31 +288,33 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) + wait = d.pop("wait", UNSET) - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET + _matching_params = 
d.pop("matchingParams", UNSET) + matching_params: Union[Unset, MatchingParams] + if isinstance(_matching_params, Unset): + matching_params = UNSET else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) + matching_params = MatchingParams.from_dict(_matching_params) + + target_label = d.pop("targetLabel", UNSET) variants_organization = d.pop("variantsOrganization", UNSET) @@ -319,42 +327,33 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: locus_range = LocusRange.from_dict(_locus_range) - _matching_params = d.pop("matchingParams", UNSET) - matching_params: Union[Unset, MatchingParams] - if isinstance(_matching_params, Unset): - matching_params = UNSET - else: - matching_params = MatchingParams.from_dict(_matching_params) - - target_label = d.pop("targetLabel", UNSET) - gwas = cls( type=type, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, + wait=wait, + matching_params=matching_params, + target_label=target_label, variants_organization=variants_organization, covariates=covariates, locus_range=locus_range, - 
matching_params=matching_params, - target_label=target_label, ) gwas.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/hybrid_fl.py b/src/tuneinsight/api/sdk/models/hybrid_fl.py index c2abbed..97e759e 100644 --- a/src/tuneinsight/api/sdk/models/hybrid_fl.py +++ b/src/tuneinsight/api/sdk/models/hybrid_fl.py @@ -23,6 +23,8 @@ class HybridFL: """ Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -33,14 +35,7 @@ class HybridFL: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. 
If provided, the computation will automatically deduce @@ -49,109 +44,114 @@ class HybridFL: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. + preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. 
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. - end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. 
+ task_id (Union[Unset, str]): learning_params (Union[Unset, HybridFLLearningParams]): Hyperparameters for the Hybrid Federated Learning task_def (Union[Unset, str]): - task_id (Union[Unset, str]): """ type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET + task_id: Union[Unset, str] = UNSET learning_params: Union[Unset, "HybridFLLearningParams"] = UNSET task_def: Union[Unset, str] = UNSET - task_id: Union[Unset, str] = UNSET 
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + 
local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() - + wait = self.wait + task_id = self.task_id learning_params: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.learning_params, Unset): learning_params = self.learning_params.to_dict() task_def = self.task_def - task_id = self.task_id field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) @@ -160,52 +160,52 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout 
if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait + if task_id is not UNSET: + field_dict["taskId"] = task_id if learning_params is not UNSET: field_dict["learningParams"] = learning_params if task_def is not UNSET: field_dict["taskDef"] = task_def - if task_id is not UNSET: - field_dict["taskId"] = task_id return field_dict @@ -220,6 +220,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -227,6 +229,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if 
isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -234,24 +247,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -264,31 +271,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) 
+ dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) + wait = d.pop("wait", UNSET) - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) + task_id = d.pop("taskId", UNSET) _learning_params = d.pop("learningParams", UNSET) learning_params: Union[Unset, HybridFLLearningParams] @@ -299,33 +301,31 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: task_def = d.pop("taskDef", UNSET) - task_id = d.pop("taskId", UNSET) - hybrid_fl = cls( type=type, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - 
data_source_parameters=data_source_parameters, + wait=wait, + task_id=task_id, learning_params=learning_params, task_def=task_def, - task_id=task_id, ) hybrid_fl.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/hybrid_fl_learning_params.py b/src/tuneinsight/api/sdk/models/hybrid_fl_learning_params.py index 62b1406..ee87653 100644 --- a/src/tuneinsight/api/sdk/models/hybrid_fl_learning_params.py +++ b/src/tuneinsight/api/sdk/models/hybrid_fl_learning_params.py @@ -13,99 +13,102 @@ class HybridFLLearningParams: """Hyperparameters for the Hybrid Federated Learning Attributes: - add_noise (Union[Unset, bool]): Whether to add differential privacy or not to the HybridFL Default: True. - gradient_clipping (Union[Unset, float]): Gradient clipping to apply for the training and the noise computation - learning_rate (Union[Unset, float]): Learning rate of the optimizer in the python-server - local_epochs (Union[Unset, int]): Number of local epochs of the Hybrid FL between aggregations + num_workers (Union[Unset, int]): Number of workers loading the data for training in the python-server + strategy (Union[Unset, AggregationStrategy]): weighting aggregation strategy Default: + AggregationStrategy.CONSTANT. + use_clipping_factor (Union[Unset, bool]): If set to true, gradient clipping is adjusted specifically at each + layer Default: True. batch_size (Union[Unset, int]): Batch size for the training in the python-server delta (Union[Unset, float]): Delta parameter of the differential privacy in HybridFL encrypt_aggregation (Union[Unset, bool]): Whether to to the aggregation encrypted or not in HybridFL Default: True. 
+ momentum (Union[Unset, float]): Momentum of the optimizer in the python-server + learning_rate (Union[Unset, float]): Learning rate of the optimizer in the python-server + local_epochs (Union[Unset, int]): Number of local epochs of the Hybrid FL between aggregations + add_noise (Union[Unset, bool]): Whether to add differential privacy or not to the HybridFL Default: True. epsilon (Union[Unset, float]): Epsilon parameter of the differential privacy in HybridFL fl_rounds (Union[Unset, int]): Number of federated rounds of the Hybrid FL - momentum (Union[Unset, float]): Momentum of the optimizer in the python-server - num_workers (Union[Unset, int]): Number of workers loading the data for training in the python-server - strategy (Union[Unset, AggregationStrategy]): weighting aggregation strategy Default: - AggregationStrategy.CONSTANT. - use_clipping_factor (Union[Unset, bool]): If set to true, gradient clipping is adjusted specifically at each - layer Default: True. + gradient_clipping (Union[Unset, float]): Gradient clipping to apply for the training and the noise computation """ - add_noise: Union[Unset, bool] = True - gradient_clipping: Union[Unset, float] = UNSET - learning_rate: Union[Unset, float] = UNSET - local_epochs: Union[Unset, int] = UNSET + num_workers: Union[Unset, int] = UNSET + strategy: Union[Unset, AggregationStrategy] = AggregationStrategy.CONSTANT + use_clipping_factor: Union[Unset, bool] = True batch_size: Union[Unset, int] = UNSET delta: Union[Unset, float] = UNSET encrypt_aggregation: Union[Unset, bool] = True + momentum: Union[Unset, float] = UNSET + learning_rate: Union[Unset, float] = UNSET + local_epochs: Union[Unset, int] = UNSET + add_noise: Union[Unset, bool] = True epsilon: Union[Unset, float] = UNSET fl_rounds: Union[Unset, int] = UNSET - momentum: Union[Unset, float] = UNSET - num_workers: Union[Unset, int] = UNSET - strategy: Union[Unset, AggregationStrategy] = AggregationStrategy.CONSTANT - use_clipping_factor: Union[Unset, bool] 
= True + gradient_clipping: Union[Unset, float] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - add_noise = self.add_noise - gradient_clipping = self.gradient_clipping - learning_rate = self.learning_rate - local_epochs = self.local_epochs - batch_size = self.batch_size - delta = self.delta - encrypt_aggregation = self.encrypt_aggregation - epsilon = self.epsilon - fl_rounds = self.fl_rounds - momentum = self.momentum num_workers = self.num_workers strategy: Union[Unset, str] = UNSET if not isinstance(self.strategy, Unset): strategy = self.strategy.value use_clipping_factor = self.use_clipping_factor + batch_size = self.batch_size + delta = self.delta + encrypt_aggregation = self.encrypt_aggregation + momentum = self.momentum + learning_rate = self.learning_rate + local_epochs = self.local_epochs + add_noise = self.add_noise + epsilon = self.epsilon + fl_rounds = self.fl_rounds + gradient_clipping = self.gradient_clipping field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if add_noise is not UNSET: - field_dict["addNoise"] = add_noise - if gradient_clipping is not UNSET: - field_dict["gradientClipping"] = gradient_clipping - if learning_rate is not UNSET: - field_dict["learningRate"] = learning_rate - if local_epochs is not UNSET: - field_dict["localEpochs"] = local_epochs + if num_workers is not UNSET: + field_dict["numWorkers"] = num_workers + if strategy is not UNSET: + field_dict["strategy"] = strategy + if use_clipping_factor is not UNSET: + field_dict["useClippingFactor"] = use_clipping_factor if batch_size is not UNSET: field_dict["batchSize"] = batch_size if delta is not UNSET: field_dict["delta"] = delta if encrypt_aggregation is not UNSET: field_dict["encryptAggregation"] = encrypt_aggregation + if momentum is not UNSET: + field_dict["momentum"] = momentum + if learning_rate is not UNSET: + field_dict["learningRate"] = learning_rate 
+ if local_epochs is not UNSET: + field_dict["localEpochs"] = local_epochs + if add_noise is not UNSET: + field_dict["addNoise"] = add_noise if epsilon is not UNSET: field_dict["epsilon"] = epsilon if fl_rounds is not UNSET: field_dict["flRounds"] = fl_rounds - if momentum is not UNSET: - field_dict["momentum"] = momentum - if num_workers is not UNSET: - field_dict["numWorkers"] = num_workers - if strategy is not UNSET: - field_dict["strategy"] = strategy - if use_clipping_factor is not UNSET: - field_dict["useClippingFactor"] = use_clipping_factor + if gradient_clipping is not UNSET: + field_dict["gradientClipping"] = gradient_clipping return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - add_noise = d.pop("addNoise", UNSET) - - gradient_clipping = d.pop("gradientClipping", UNSET) + num_workers = d.pop("numWorkers", UNSET) - learning_rate = d.pop("learningRate", UNSET) + _strategy = d.pop("strategy", UNSET) + strategy: Union[Unset, AggregationStrategy] + if isinstance(_strategy, Unset): + strategy = UNSET + else: + strategy = AggregationStrategy(_strategy) - local_epochs = d.pop("localEpochs", UNSET) + use_clipping_factor = d.pop("useClippingFactor", UNSET) batch_size = d.pop("batchSize", UNSET) @@ -113,37 +116,34 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: encrypt_aggregation = d.pop("encryptAggregation", UNSET) - epsilon = d.pop("epsilon", UNSET) + momentum = d.pop("momentum", UNSET) - fl_rounds = d.pop("flRounds", UNSET) + learning_rate = d.pop("learningRate", UNSET) - momentum = d.pop("momentum", UNSET) + local_epochs = d.pop("localEpochs", UNSET) - num_workers = d.pop("numWorkers", UNSET) + add_noise = d.pop("addNoise", UNSET) - _strategy = d.pop("strategy", UNSET) - strategy: Union[Unset, AggregationStrategy] - if isinstance(_strategy, Unset): - strategy = UNSET - else: - strategy = AggregationStrategy(_strategy) + epsilon = d.pop("epsilon", UNSET) - use_clipping_factor = 
d.pop("useClippingFactor", UNSET) + fl_rounds = d.pop("flRounds", UNSET) + + gradient_clipping = d.pop("gradientClipping", UNSET) hybrid_fl_learning_params = cls( - add_noise=add_noise, - gradient_clipping=gradient_clipping, - learning_rate=learning_rate, - local_epochs=local_epochs, + num_workers=num_workers, + strategy=strategy, + use_clipping_factor=use_clipping_factor, batch_size=batch_size, delta=delta, encrypt_aggregation=encrypt_aggregation, + momentum=momentum, + learning_rate=learning_rate, + local_epochs=local_epochs, + add_noise=add_noise, epsilon=epsilon, fl_rounds=fl_rounds, - momentum=momentum, - num_workers=num_workers, - strategy=strategy, - use_clipping_factor=use_clipping_factor, + gradient_clipping=gradient_clipping, ) hybrid_fl_learning_params.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/key_switched_computation.py b/src/tuneinsight/api/sdk/models/key_switched_computation.py index b4e1905..7011eae 100644 --- a/src/tuneinsight/api/sdk/models/key_switched_computation.py +++ b/src/tuneinsight/api/sdk/models/key_switched_computation.py @@ -23,6 +23,8 @@ class KeySwitchedComputation: """ Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -33,14 +35,7 @@ class KeySwitchedComputation: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. 
- preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -49,113 +44,119 @@ class KeySwitchedComputation: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. + preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. 
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. - end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. 
- data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. + target_public_key (Union[Unset, str]): Unique identifier of a data object. + computation (Union[Unset, ComputationDefinition]): Generic computation. decrypt_results (Union[Unset, bool]): if true, the key-switched results are decrypted using either the specified secret key or the secret key from the session secret_key (Union[Unset, str]): Unique identifier of a data object. - target_public_key (Union[Unset, str]): Unique identifier of a data object. - computation (Union[Unset, ComputationDefinition]): Generic computation. """ type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: 
Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET - decrypt_results: Union[Unset, bool] = UNSET - secret_key: Union[Unset, str] = UNSET + wait: Union[Unset, bool] = UNSET target_public_key: Union[Unset, str] = UNSET computation: Union[Unset, "ComputationDefinition"] = UNSET + decrypt_results: Union[Unset, bool] = UNSET + secret_key: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = 
self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() - - decrypt_results = self.decrypt_results - secret_key = self.secret_key + wait = self.wait target_public_key = self.target_public_key computation: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.computation, Unset): computation = self.computation.to_dict() + decrypt_results = self.decrypt_results + secret_key = self.secret_key + field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -163,54 +164,54 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = 
input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters - if decrypt_results is not UNSET: - field_dict["decryptResults"] = decrypt_results - if secret_key is not UNSET: - field_dict["secretKey"] = secret_key + if 
wait is not UNSET: + field_dict["wait"] = wait if target_public_key is not UNSET: field_dict["targetPublicKey"] = target_public_key if computation is not UNSET: field_dict["computation"] = computation + if decrypt_results is not UNSET: + field_dict["decryptResults"] = decrypt_results + if secret_key is not UNSET: + field_dict["secretKey"] = secret_key return field_dict @@ -225,6 +226,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -232,6 +235,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -239,24 +253,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = 
DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -269,35 +277,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) - - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - - decrypt_results = d.pop("decryptResults", UNSET) - - secret_key = d.pop("secretKey", UNSET) + wait = d.pop("wait", UNSET) target_public_key = d.pop("targetPublicKey", UNSET) @@ -308,32 +305,36 @@ def from_dict(cls: Type[T], src_dict: Dict[str, 
Any]) -> T: else: computation = ComputationDefinition.from_dict(_computation) + decrypt_results = d.pop("decryptResults", UNSET) + + secret_key = d.pop("secretKey", UNSET) + key_switched_computation = cls( type=type, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, - decrypt_results=decrypt_results, - secret_key=secret_key, + wait=wait, target_public_key=target_public_key, computation=computation, + decrypt_results=decrypt_results, + secret_key=secret_key, ) key_switched_computation.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/local_data_selection.py b/src/tuneinsight/api/sdk/models/local_data_selection.py index 3c818ed..08785c5 100644 --- a/src/tuneinsight/api/sdk/models/local_data_selection.py +++ b/src/tuneinsight/api/sdk/models/local_data_selection.py @@ -21,58 +21,51 @@ class LocalDataSelection: """selection to retrieve data from the datasource and preprocess it Attributes: - preview_content_disabled (Union[Unset, None, bool]): whether to disable previewing the content (metadata only) - store_in_database (Union[Unset, None, bool]): whether to store the selection 
in the database - type (Union[Unset, DataSelectionType]): - visible_to_network (Union[Unset, None, bool]): whether the data selection parameters are viewable by other - instances in the network. data_selection (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource from each node before the computation description (Union[Unset, str]): optional description for the selection name (Union[Unset, str]): name given to the selection preprocessing (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters applied to the input retrieved from the datasource, if applicable + preview_content_disabled (Union[Unset, None, bool]): whether to disable previewing the content (metadata only) + store_in_database (Union[Unset, None, bool]): whether to store the selection in the database + type (Union[Unset, DataSelectionType]): + visible_to_network (Union[Unset, None, bool]): whether the data selection parameters are viewable by other + instances in the network. + num_local_records (Union[Unset, int]): holds the total number of local records from the selection (only + displayed when the selection is saved to the database) query (Union[Unset, Query]): Data source query + remote_instance_id (Union[Unset, str]): the name of the remote instance id this selection was retrieved from. 
+ created_at (Union[Unset, str]): + data_source (Union[Unset, DataSource]): + preview (Union[Unset, DataSourceQueryPreview]): preview of a datasource query remote (Union[Unset, bool]): whether the selection was fetched remotely updated_at (Union[Unset, str]): created_by_user (Union[Unset, str]): creator of the selection id (Union[Unset, str]): id of the selection - num_local_records (Union[Unset, int]): holds the total number of local records from the selection (only - displayed when the selection is saved to the database) - preview (Union[Unset, DataSourceQueryPreview]): preview of a datasource query - created_at (Union[Unset, str]): - data_source (Union[Unset, DataSource]): - remote_instance_id (Union[Unset, str]): the name of the remote instance id this selection was retrieved from. """ - preview_content_disabled: Union[Unset, None, bool] = UNSET - store_in_database: Union[Unset, None, bool] = UNSET - type: Union[Unset, DataSelectionType] = UNSET - visible_to_network: Union[Unset, None, bool] = UNSET data_selection: Union[Unset, "ComputationDataSourceParameters"] = UNSET description: Union[Unset, str] = UNSET name: Union[Unset, str] = UNSET preprocessing: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + preview_content_disabled: Union[Unset, None, bool] = UNSET + store_in_database: Union[Unset, None, bool] = UNSET + type: Union[Unset, DataSelectionType] = UNSET + visible_to_network: Union[Unset, None, bool] = UNSET + num_local_records: Union[Unset, int] = UNSET query: Union[Unset, "Query"] = UNSET + remote_instance_id: Union[Unset, str] = UNSET + created_at: Union[Unset, str] = UNSET + data_source: Union[Unset, "DataSource"] = UNSET + preview: Union[Unset, "DataSourceQueryPreview"] = UNSET remote: Union[Unset, bool] = UNSET updated_at: Union[Unset, str] = UNSET created_by_user: Union[Unset, str] = UNSET id: Union[Unset, str] = UNSET - num_local_records: Union[Unset, int] = UNSET - preview: Union[Unset, "DataSourceQueryPreview"] = UNSET - created_at: 
Union[Unset, str] = UNSET - data_source: Union[Unset, "DataSource"] = UNSET - remote_instance_id: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - preview_content_disabled = self.preview_content_disabled - store_in_database = self.store_in_database - type: Union[Unset, str] = UNSET - if not isinstance(self.type, Unset): - type = self.type.value - - visible_to_network = self.visible_to_network data_selection: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.data_selection, Unset): data_selection = self.data_selection.to_dict() @@ -83,37 +76,36 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.preprocessing, Unset): preprocessing = self.preprocessing.to_dict() + preview_content_disabled = self.preview_content_disabled + store_in_database = self.store_in_database + type: Union[Unset, str] = UNSET + if not isinstance(self.type, Unset): + type = self.type.value + + visible_to_network = self.visible_to_network + num_local_records = self.num_local_records query: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.query, Unset): query = self.query.to_dict() - remote = self.remote - updated_at = self.updated_at - created_by_user = self.created_by_user - id = self.id - num_local_records = self.num_local_records - preview: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.preview, Unset): - preview = self.preview.to_dict() - + remote_instance_id = self.remote_instance_id created_at = self.created_at data_source: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.data_source, Unset): data_source = self.data_source.to_dict() - remote_instance_id = self.remote_instance_id + preview: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.preview, Unset): + preview = self.preview.to_dict() + + remote = self.remote + updated_at = self.updated_at + created_by_user = self.created_by_user + id = self.id field_dict: Dict[str, Any] = {} 
field_dict.update(self.additional_properties) field_dict.update({}) - if preview_content_disabled is not UNSET: - field_dict["previewContentDisabled"] = preview_content_disabled - if store_in_database is not UNSET: - field_dict["storeInDatabase"] = store_in_database - if type is not UNSET: - field_dict["type"] = type - if visible_to_network is not UNSET: - field_dict["visibleToNetwork"] = visible_to_network if data_selection is not UNSET: field_dict["dataSelection"] = data_selection if description is not UNSET: @@ -122,8 +114,26 @@ def to_dict(self) -> Dict[str, Any]: field_dict["name"] = name if preprocessing is not UNSET: field_dict["preprocessing"] = preprocessing + if preview_content_disabled is not UNSET: + field_dict["previewContentDisabled"] = preview_content_disabled + if store_in_database is not UNSET: + field_dict["storeInDatabase"] = store_in_database + if type is not UNSET: + field_dict["type"] = type + if visible_to_network is not UNSET: + field_dict["visibleToNetwork"] = visible_to_network + if num_local_records is not UNSET: + field_dict["numLocalRecords"] = num_local_records if query is not UNSET: field_dict["query"] = query + if remote_instance_id is not UNSET: + field_dict["remoteInstanceId"] = remote_instance_id + if created_at is not UNSET: + field_dict["createdAt"] = created_at + if data_source is not UNSET: + field_dict["dataSource"] = data_source + if preview is not UNSET: + field_dict["preview"] = preview if remote is not UNSET: field_dict["remote"] = remote if updated_at is not UNSET: @@ -132,16 +142,6 @@ def to_dict(self) -> Dict[str, Any]: field_dict["createdByUser"] = created_by_user if id is not UNSET: field_dict["id"] = id - if num_local_records is not UNSET: - field_dict["numLocalRecords"] = num_local_records - if preview is not UNSET: - field_dict["preview"] = preview - if created_at is not UNSET: - field_dict["createdAt"] = created_at - if data_source is not UNSET: - field_dict["dataSource"] = data_source - if remote_instance_id is 
not UNSET: - field_dict["remoteInstanceId"] = remote_instance_id return field_dict @@ -154,19 +154,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.query import Query d = src_dict.copy() - preview_content_disabled = d.pop("previewContentDisabled", UNSET) - - store_in_database = d.pop("storeInDatabase", UNSET) - - _type = d.pop("type", UNSET) - type: Union[Unset, DataSelectionType] - if isinstance(_type, Unset): - type = UNSET - else: - type = DataSelectionType(_type) - - visible_to_network = d.pop("visibleToNetwork", UNSET) - _data_selection = d.pop("dataSelection", UNSET) data_selection: Union[Unset, ComputationDataSourceParameters] if isinstance(_data_selection, Unset): @@ -185,6 +172,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing = ComputationPreprocessingParameters.from_dict(_preprocessing) + preview_content_disabled = d.pop("previewContentDisabled", UNSET) + + store_in_database = d.pop("storeInDatabase", UNSET) + + _type = d.pop("type", UNSET) + type: Union[Unset, DataSelectionType] + if isinstance(_type, Unset): + type = UNSET + else: + type = DataSelectionType(_type) + + visible_to_network = d.pop("visibleToNetwork", UNSET) + + num_local_records = d.pop("numLocalRecords", UNSET) + _query = d.pop("query", UNSET) query: Union[Unset, Query] if isinstance(_query, Unset): @@ -192,15 +194,16 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: query = Query.from_dict(_query) - remote = d.pop("remote", UNSET) - - updated_at = d.pop("updatedAt", UNSET) - - created_by_user = d.pop("createdByUser", UNSET) + remote_instance_id = d.pop("remoteInstanceId", UNSET) - id = d.pop("id", UNSET) + created_at = d.pop("createdAt", UNSET) - num_local_records = d.pop("numLocalRecords", UNSET) + _data_source = d.pop("dataSource", UNSET) + data_source: Union[Unset, DataSource] + if isinstance(_data_source, Unset): + data_source = UNSET + else: + data_source = DataSource.from_dict(_data_source) 
_preview = d.pop("preview", UNSET) preview: Union[Unset, DataSourceQueryPreview] @@ -209,36 +212,33 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preview = DataSourceQueryPreview.from_dict(_preview) - created_at = d.pop("createdAt", UNSET) + remote = d.pop("remote", UNSET) - _data_source = d.pop("dataSource", UNSET) - data_source: Union[Unset, DataSource] - if isinstance(_data_source, Unset): - data_source = UNSET - else: - data_source = DataSource.from_dict(_data_source) + updated_at = d.pop("updatedAt", UNSET) - remote_instance_id = d.pop("remoteInstanceId", UNSET) + created_by_user = d.pop("createdByUser", UNSET) + + id = d.pop("id", UNSET) local_data_selection = cls( - preview_content_disabled=preview_content_disabled, - store_in_database=store_in_database, - type=type, - visible_to_network=visible_to_network, data_selection=data_selection, description=description, name=name, preprocessing=preprocessing, + preview_content_disabled=preview_content_disabled, + store_in_database=store_in_database, + type=type, + visible_to_network=visible_to_network, + num_local_records=num_local_records, query=query, + remote_instance_id=remote_instance_id, + created_at=created_at, + data_source=data_source, + preview=preview, remote=remote, updated_at=updated_at, created_by_user=created_by_user, id=id, - num_local_records=num_local_records, - preview=preview, - created_at=created_at, - data_source=data_source, - remote_instance_id=remote_instance_id, ) local_data_selection.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/local_data_selection_definition.py b/src/tuneinsight/api/sdk/models/local_data_selection_definition.py index dc6707e..6a8a401 100644 --- a/src/tuneinsight/api/sdk/models/local_data_selection_definition.py +++ b/src/tuneinsight/api/sdk/models/local_data_selection_definition.py @@ -18,37 +18,30 @@ class LocalDataSelectionDefinition: """datasource selection definition. 
A selection is a "query" or data selection definition to run on the datasource Attributes: - preview_content_disabled (Union[Unset, None, bool]): whether to disable previewing the content (metadata only) - store_in_database (Union[Unset, None, bool]): whether to store the selection in the database - type (Union[Unset, DataSelectionType]): - visible_to_network (Union[Unset, None, bool]): whether the data selection parameters are viewable by other - instances in the network. data_selection (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource from each node before the computation description (Union[Unset, str]): optional description for the selection name (Union[Unset, str]): name given to the selection preprocessing (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters applied to the input retrieved from the datasource, if applicable + preview_content_disabled (Union[Unset, None, bool]): whether to disable previewing the content (metadata only) + store_in_database (Union[Unset, None, bool]): whether to store the selection in the database + type (Union[Unset, DataSelectionType]): + visible_to_network (Union[Unset, None, bool]): whether the data selection parameters are viewable by other + instances in the network. 
""" - preview_content_disabled: Union[Unset, None, bool] = UNSET - store_in_database: Union[Unset, None, bool] = UNSET - type: Union[Unset, DataSelectionType] = UNSET - visible_to_network: Union[Unset, None, bool] = UNSET data_selection: Union[Unset, "ComputationDataSourceParameters"] = UNSET description: Union[Unset, str] = UNSET name: Union[Unset, str] = UNSET preprocessing: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + preview_content_disabled: Union[Unset, None, bool] = UNSET + store_in_database: Union[Unset, None, bool] = UNSET + type: Union[Unset, DataSelectionType] = UNSET + visible_to_network: Union[Unset, None, bool] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - preview_content_disabled = self.preview_content_disabled - store_in_database = self.store_in_database - type: Union[Unset, str] = UNSET - if not isinstance(self.type, Unset): - type = self.type.value - - visible_to_network = self.visible_to_network data_selection: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.data_selection, Unset): data_selection = self.data_selection.to_dict() @@ -59,17 +52,17 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.preprocessing, Unset): preprocessing = self.preprocessing.to_dict() + preview_content_disabled = self.preview_content_disabled + store_in_database = self.store_in_database + type: Union[Unset, str] = UNSET + if not isinstance(self.type, Unset): + type = self.type.value + + visible_to_network = self.visible_to_network + field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if preview_content_disabled is not UNSET: - field_dict["previewContentDisabled"] = preview_content_disabled - if store_in_database is not UNSET: - field_dict["storeInDatabase"] = store_in_database - if type is not UNSET: - field_dict["type"] = type - if visible_to_network is not UNSET: - field_dict["visibleToNetwork"] = 
visible_to_network if data_selection is not UNSET: field_dict["dataSelection"] = data_selection if description is not UNSET: @@ -78,6 +71,14 @@ def to_dict(self) -> Dict[str, Any]: field_dict["name"] = name if preprocessing is not UNSET: field_dict["preprocessing"] = preprocessing + if preview_content_disabled is not UNSET: + field_dict["previewContentDisabled"] = preview_content_disabled + if store_in_database is not UNSET: + field_dict["storeInDatabase"] = store_in_database + if type is not UNSET: + field_dict["type"] = type + if visible_to_network is not UNSET: + field_dict["visibleToNetwork"] = visible_to_network return field_dict @@ -87,19 +88,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.computation_preprocessing_parameters import ComputationPreprocessingParameters d = src_dict.copy() - preview_content_disabled = d.pop("previewContentDisabled", UNSET) - - store_in_database = d.pop("storeInDatabase", UNSET) - - _type = d.pop("type", UNSET) - type: Union[Unset, DataSelectionType] - if isinstance(_type, Unset): - type = UNSET - else: - type = DataSelectionType(_type) - - visible_to_network = d.pop("visibleToNetwork", UNSET) - _data_selection = d.pop("dataSelection", UNSET) data_selection: Union[Unset, ComputationDataSourceParameters] if isinstance(_data_selection, Unset): @@ -118,15 +106,28 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing = ComputationPreprocessingParameters.from_dict(_preprocessing) + preview_content_disabled = d.pop("previewContentDisabled", UNSET) + + store_in_database = d.pop("storeInDatabase", UNSET) + + _type = d.pop("type", UNSET) + type: Union[Unset, DataSelectionType] + if isinstance(_type, Unset): + type = UNSET + else: + type = DataSelectionType(_type) + + visible_to_network = d.pop("visibleToNetwork", UNSET) + local_data_selection_definition = cls( - preview_content_disabled=preview_content_disabled, - store_in_database=store_in_database, - type=type, - 
visible_to_network=visible_to_network, data_selection=data_selection, description=description, name=name, preprocessing=preprocessing, + preview_content_disabled=preview_content_disabled, + store_in_database=store_in_database, + type=type, + visible_to_network=visible_to_network, ) local_data_selection_definition.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/local_data_source_type.py b/src/tuneinsight/api/sdk/models/local_data_source_type.py new file mode 100644 index 0000000..4729e4b --- /dev/null +++ b/src/tuneinsight/api/sdk/models/local_data_source_type.py @@ -0,0 +1,9 @@ +from enum import Enum + + +class LocalDataSourceType(str, Enum): + CSV = "CSV" + JSON = "JSON" + + def __str__(self) -> str: + return str(self.value) diff --git a/src/tuneinsight/api/sdk/models/log.py b/src/tuneinsight/api/sdk/models/log.py index 2d898fa..ffcfcbd 100644 --- a/src/tuneinsight/api/sdk/models/log.py +++ b/src/tuneinsight/api/sdk/models/log.py @@ -12,46 +12,46 @@ class Log: """Definition of an audit log Attributes: + created_at (Union[Unset, str]): user (Union[Unset, str]): ID of user who generated the log value (Union[Unset, str]): - created_at (Union[Unset, str]): """ + created_at: Union[Unset, str] = UNSET user: Union[Unset, str] = UNSET value: Union[Unset, str] = UNSET - created_at: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: + created_at = self.created_at user = self.user value = self.value - created_at = self.created_at field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) + if created_at is not UNSET: + field_dict["createdAt"] = created_at if user is not UNSET: field_dict["user"] = user if value is not UNSET: field_dict["value"] = value - if created_at is not UNSET: - field_dict["createdAt"] = created_at return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() 
+ created_at = d.pop("createdAt", UNSET) + user = d.pop("user", UNSET) value = d.pop("value", UNSET) - created_at = d.pop("createdAt", UNSET) - log = cls( + created_at=created_at, user=user, value=value, - created_at=created_at, ) log.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/measurement.py b/src/tuneinsight/api/sdk/models/measurement.py index 5f35803..bc67e78 100644 --- a/src/tuneinsight/api/sdk/models/measurement.py +++ b/src/tuneinsight/api/sdk/models/measurement.py @@ -12,30 +12,32 @@ class Measurement: """measurement done during a specific part of a computation Attributes: + start (Union[Unset, str]): start time of the measurement. (RFC 3339 Nano format) allocated (Union[Unset, int]): total number of bytes allocated during this part. description (Union[Unset, str]): description of the computation part. end (Union[Unset, str]): end time of the measurement. (RFC 3339 Nano format) name (Union[Unset, str]): name of the computation part. - start (Union[Unset, str]): start time of the measurement. 
(RFC 3339 Nano format) """ + start: Union[Unset, str] = UNSET allocated: Union[Unset, int] = UNSET description: Union[Unset, str] = UNSET end: Union[Unset, str] = UNSET name: Union[Unset, str] = UNSET - start: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: + start = self.start allocated = self.allocated description = self.description end = self.end name = self.name - start = self.start field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) + if start is not UNSET: + field_dict["start"] = start if allocated is not UNSET: field_dict["allocated"] = allocated if description is not UNSET: @@ -44,14 +46,14 @@ def to_dict(self) -> Dict[str, Any]: field_dict["end"] = end if name is not UNSET: field_dict["name"] = name - if start is not UNSET: - field_dict["start"] = start return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() + start = d.pop("start", UNSET) + allocated = d.pop("allocated", UNSET) description = d.pop("description", UNSET) @@ -60,14 +62,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: name = d.pop("name", UNSET) - start = d.pop("start", UNSET) - measurement = cls( + start=start, allocated=allocated, description=description, end=end, name=name, - start=start, ) measurement.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/model.py b/src/tuneinsight/api/sdk/models/model.py index 4fc16da..b08a022 100644 --- a/src/tuneinsight/api/sdk/models/model.py +++ b/src/tuneinsight/api/sdk/models/model.py @@ -20,80 +20,81 @@ class Model: """Machine learning model metadata definition Attributes: - created_at (Union[Unset, str]): + type (Union[Unset, ModelType]): whether the model is local (plaintext) or collective (ciphertext) + computation_id (Union[Unset, str]): Computation that created this model if collective model data_object (Union[Unset, 
DataObject]): A data object definition. + metadata (Union[Unset, ModelMetadata]): public metadata about the model + model_id (Union[Unset, str]): Unique identifier of a model. model_params (Union[Unset, ModelParams]): detailed parameters about the model, only returned when getting specific model name (Union[Unset, str]): common name for the model - type (Union[Unset, ModelType]): whether the model is local (plaintext) or collective (ciphertext) - updated_at (Union[Unset, str]): - computation_id (Union[Unset, str]): Computation that created this model if collective model - model_id (Union[Unset, str]): Unique identifier of a model. training_algorithm (Union[Unset, TrainingAlgorithm]): the algorithm used to train the model - metadata (Union[Unset, ModelMetadata]): public metadata about the model + created_at (Union[Unset, str]): + updated_at (Union[Unset, str]): """ - created_at: Union[Unset, str] = UNSET - data_object: Union[Unset, "DataObject"] = UNSET - model_params: Union[Unset, "ModelParams"] = UNSET - name: Union[Unset, str] = UNSET type: Union[Unset, ModelType] = UNSET - updated_at: Union[Unset, str] = UNSET computation_id: Union[Unset, str] = UNSET + data_object: Union[Unset, "DataObject"] = UNSET + metadata: Union[Unset, "ModelMetadata"] = UNSET model_id: Union[Unset, str] = UNSET + model_params: Union[Unset, "ModelParams"] = UNSET + name: Union[Unset, str] = UNSET training_algorithm: Union[Unset, TrainingAlgorithm] = UNSET - metadata: Union[Unset, "ModelMetadata"] = UNSET + created_at: Union[Unset, str] = UNSET + updated_at: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - created_at = self.created_at + type: Union[Unset, str] = UNSET + if not isinstance(self.type, Unset): + type = self.type.value + + computation_id = self.computation_id data_object: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.data_object, Unset): data_object = 
self.data_object.to_dict() + metadata: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.metadata, Unset): + metadata = self.metadata.to_dict() + + model_id = self.model_id model_params: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.model_params, Unset): model_params = self.model_params.to_dict() name = self.name - type: Union[Unset, str] = UNSET - if not isinstance(self.type, Unset): - type = self.type.value - - updated_at = self.updated_at - computation_id = self.computation_id - model_id = self.model_id training_algorithm: Union[Unset, str] = UNSET if not isinstance(self.training_algorithm, Unset): training_algorithm = self.training_algorithm.value - metadata: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.metadata, Unset): - metadata = self.metadata.to_dict() + created_at = self.created_at + updated_at = self.updated_at field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if created_at is not UNSET: - field_dict["createdAt"] = created_at - if data_object is not UNSET: - field_dict["dataObject"] = data_object - if model_params is not UNSET: - field_dict["modelParams"] = model_params - if name is not UNSET: - field_dict["name"] = name if type is not UNSET: field_dict["type"] = type - if updated_at is not UNSET: - field_dict["updatedAt"] = updated_at if computation_id is not UNSET: field_dict["computationId"] = computation_id + if data_object is not UNSET: + field_dict["dataObject"] = data_object + if metadata is not UNSET: + field_dict["metadata"] = metadata if model_id is not UNSET: field_dict["modelID"] = model_id + if model_params is not UNSET: + field_dict["modelParams"] = model_params + if name is not UNSET: + field_dict["name"] = name if training_algorithm is not UNSET: field_dict["trainingAlgorithm"] = training_algorithm - if metadata is not UNSET: - field_dict["metadata"] = metadata + if created_at is not UNSET: + field_dict["createdAt"] = created_at + if updated_at 
is not UNSET: + field_dict["updatedAt"] = updated_at return field_dict @@ -104,7 +105,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.model_params import ModelParams d = src_dict.copy() - created_at = d.pop("createdAt", UNSET) + _type = d.pop("type", UNSET) + type: Union[Unset, ModelType] + if isinstance(_type, Unset): + type = UNSET + else: + type = ModelType(_type) + + computation_id = d.pop("computationId", UNSET) _data_object = d.pop("dataObject", UNSET) data_object: Union[Unset, DataObject] @@ -113,6 +121,15 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: data_object = DataObject.from_dict(_data_object) + _metadata = d.pop("metadata", UNSET) + metadata: Union[Unset, ModelMetadata] + if isinstance(_metadata, Unset): + metadata = UNSET + else: + metadata = ModelMetadata.from_dict(_metadata) + + model_id = d.pop("modelID", UNSET) + _model_params = d.pop("modelParams", UNSET) model_params: Union[Unset, ModelParams] if isinstance(_model_params, Unset): @@ -122,19 +139,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: name = d.pop("name", UNSET) - _type = d.pop("type", UNSET) - type: Union[Unset, ModelType] - if isinstance(_type, Unset): - type = UNSET - else: - type = ModelType(_type) - - updated_at = d.pop("updatedAt", UNSET) - - computation_id = d.pop("computationId", UNSET) - - model_id = d.pop("modelID", UNSET) - _training_algorithm = d.pop("trainingAlgorithm", UNSET) training_algorithm: Union[Unset, TrainingAlgorithm] if isinstance(_training_algorithm, Unset): @@ -142,24 +146,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: training_algorithm = TrainingAlgorithm(_training_algorithm) - _metadata = d.pop("metadata", UNSET) - metadata: Union[Unset, ModelMetadata] - if isinstance(_metadata, Unset): - metadata = UNSET - else: - metadata = ModelMetadata.from_dict(_metadata) + created_at = d.pop("createdAt", UNSET) + + updated_at = d.pop("updatedAt", UNSET) model = cls( - 
created_at=created_at, - data_object=data_object, - model_params=model_params, - name=name, type=type, - updated_at=updated_at, computation_id=computation_id, + data_object=data_object, + metadata=metadata, model_id=model_id, + model_params=model_params, + name=name, training_algorithm=training_algorithm, - metadata=metadata, + created_at=created_at, + updated_at=updated_at, ) model.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/model_definition.py b/src/tuneinsight/api/sdk/models/model_definition.py index e6ca752..0503cbf 100644 --- a/src/tuneinsight/api/sdk/models/model_definition.py +++ b/src/tuneinsight/api/sdk/models/model_definition.py @@ -17,21 +17,22 @@ class ModelDefinition: """Definition of a model to upload Attributes: + name (str): common name to give to the model prediction_params (PredictionParams): subset of parameters required for only the prediction weights (List[List[float]]): Plaintext weights of the model as a float matrix - name (str): common name to give to the model - project_id (Union[Unset, str]): Unique identifier of a project. metadata (Union[Unset, ModelMetadata]): public metadata about the model + project_id (Union[Unset, str]): Unique identifier of a project. 
""" + name: str prediction_params: "PredictionParams" weights: List[List[float]] - name: str - project_id: Union[Unset, str] = UNSET metadata: Union[Unset, "ModelMetadata"] = UNSET + project_id: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: + name = self.name prediction_params = self.prediction_params.to_dict() weights = [] @@ -40,25 +41,25 @@ def to_dict(self) -> Dict[str, Any]: weights.append(weights_item) - name = self.name - project_id = self.project_id metadata: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.metadata, Unset): metadata = self.metadata.to_dict() + project_id = self.project_id + field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( { + "name": name, "predictionParams": prediction_params, "weights": weights, - "name": name, } ) - if project_id is not UNSET: - field_dict["projectId"] = project_id if metadata is not UNSET: field_dict["metadata"] = metadata + if project_id is not UNSET: + field_dict["projectId"] = project_id return field_dict @@ -68,6 +69,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.prediction_params import PredictionParams d = src_dict.copy() + name = d.pop("name") + prediction_params = PredictionParams.from_dict(d.pop("predictionParams")) weights = [] @@ -77,10 +80,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: weights.append(weights_item) - name = d.pop("name") - - project_id = d.pop("projectId", UNSET) - _metadata = d.pop("metadata", UNSET) metadata: Union[Unset, ModelMetadata] if isinstance(_metadata, Unset): @@ -88,12 +87,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: metadata = ModelMetadata.from_dict(_metadata) + project_id = d.pop("projectId", UNSET) + model_definition = cls( + name=name, prediction_params=prediction_params, weights=weights, - name=name, - project_id=project_id, metadata=metadata, + 
project_id=project_id, ) model_definition.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/model_metadata.py b/src/tuneinsight/api/sdk/models/model_metadata.py index ce54394..048eb87 100644 --- a/src/tuneinsight/api/sdk/models/model_metadata.py +++ b/src/tuneinsight/api/sdk/models/model_metadata.py @@ -12,68 +12,68 @@ class ModelMetadata: """public metadata about the model Attributes: + classes (Union[Unset, List[str]]): optional labels for classes + description (Union[Unset, str]): optional description for the model features (Union[Unset, List[str]]): optional labels for features num_classes (Union[Unset, int]): number classes num_features (Union[Unset, int]): number of features - classes (Union[Unset, List[str]]): optional labels for classes - description (Union[Unset, str]): optional description for the model """ + classes: Union[Unset, List[str]] = UNSET + description: Union[Unset, str] = UNSET features: Union[Unset, List[str]] = UNSET num_classes: Union[Unset, int] = UNSET num_features: Union[Unset, int] = UNSET - classes: Union[Unset, List[str]] = UNSET - description: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: + classes: Union[Unset, List[str]] = UNSET + if not isinstance(self.classes, Unset): + classes = self.classes + + description = self.description features: Union[Unset, List[str]] = UNSET if not isinstance(self.features, Unset): features = self.features num_classes = self.num_classes num_features = self.num_features - classes: Union[Unset, List[str]] = UNSET - if not isinstance(self.classes, Unset): - classes = self.classes - - description = self.description field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) + if classes is not UNSET: + field_dict["classes"] = classes + if description is not UNSET: + field_dict["description"] = description if features is not UNSET: field_dict["features"] = 
features if num_classes is not UNSET: field_dict["numClasses"] = num_classes if num_features is not UNSET: field_dict["numFeatures"] = num_features - if classes is not UNSET: - field_dict["classes"] = classes - if description is not UNSET: - field_dict["description"] = description return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() + classes = cast(List[str], d.pop("classes", UNSET)) + + description = d.pop("description", UNSET) + features = cast(List[str], d.pop("features", UNSET)) num_classes = d.pop("numClasses", UNSET) num_features = d.pop("numFeatures", UNSET) - classes = cast(List[str], d.pop("classes", UNSET)) - - description = d.pop("description", UNSET) - model_metadata = cls( + classes=classes, + description=description, features=features, num_classes=num_classes, num_features=num_features, - classes=classes, - description=description, ) model_metadata.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/network.py b/src/tuneinsight/api/sdk/models/network.py index 9bf05b9..999cc73 100644 --- a/src/tuneinsight/api/sdk/models/network.py +++ b/src/tuneinsight/api/sdk/models/network.py @@ -18,22 +18,21 @@ class Network: """Network that represents a set of nodes Attributes: - name (Union[Unset, str]): nodes (Union[Unset, List['Node']]): topology (Union[Unset, Topology]): Network Topologies. 'star' or 'tree'. In star topology all nodes are connected to a central node. In tree topology all nodes are connected and aware of each other. 
visibility_type (Union[Unset, NetworkVisibilityType]): represents the type of visibility leaf nodes have in a network + name (Union[Unset, str]): """ - name: Union[Unset, str] = UNSET nodes: Union[Unset, List["Node"]] = UNSET topology: Union[Unset, Topology] = UNSET visibility_type: Union[Unset, NetworkVisibilityType] = UNSET + name: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - name = self.name nodes: Union[Unset, List[Dict[str, Any]]] = UNSET if not isinstance(self.nodes, Unset): nodes = [] @@ -50,17 +49,19 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.visibility_type, Unset): visibility_type = self.visibility_type.value + name = self.name + field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if name is not UNSET: - field_dict["name"] = name if nodes is not UNSET: field_dict["nodes"] = nodes if topology is not UNSET: field_dict["topology"] = topology if visibility_type is not UNSET: field_dict["visibilityType"] = visibility_type + if name is not UNSET: + field_dict["name"] = name return field_dict @@ -69,8 +70,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.node import Node d = src_dict.copy() - name = d.pop("name", UNSET) - nodes = [] _nodes = d.pop("nodes", UNSET) for nodes_item_data in _nodes or []: @@ -92,11 +91,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: visibility_type = NetworkVisibilityType(_visibility_type) + name = d.pop("name", UNSET) + network = cls( - name=name, nodes=nodes, topology=topology, visibility_type=visibility_type, + name=name, ) network.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/node.py b/src/tuneinsight/api/sdk/models/node.py index 3c90e1d..50568f0 100644 --- a/src/tuneinsight/api/sdk/models/node.py +++ b/src/tuneinsight/api/sdk/models/node.py @@ -16,68 +16,69 @@ class Node: """Node or agent of 
the network Attributes: + is_in_network (Union[Unset, bool]): True if the node can be found in the instance's network. If False, then the + node information cannot be completed. + name (Union[Unset, str]): + api_path (Union[Unset, str]): + current (Union[Unset, bool]): True if this node is the current one (root node). is_contributor (Union[Unset, bool]): Indicates if this instance does contribute data. + organization (Union[Unset, Organization]): Organization taking part in a project url (Union[Unset, str]): - api_path (Union[Unset, str]): certificate (Union[Unset, str]): Certificate of the node, in base64-encoded DER format. - current (Union[Unset, bool]): True if this node is the current one (root node). has_user_management (Union[Unset, bool]): True if the node has the user management APIs enabled. - is_in_network (Union[Unset, bool]): True if the node can be found in the instance's network. If False, then the - node information cannot be completed. is_root (Union[Unset, bool]): True if the node is the root node in a tree topology network. 
- name (Union[Unset, str]): - organization (Union[Unset, Organization]): Organization taking part in a project """ + is_in_network: Union[Unset, bool] = UNSET + name: Union[Unset, str] = UNSET + api_path: Union[Unset, str] = UNSET + current: Union[Unset, bool] = UNSET is_contributor: Union[Unset, bool] = UNSET + organization: Union[Unset, "Organization"] = UNSET url: Union[Unset, str] = UNSET - api_path: Union[Unset, str] = UNSET certificate: Union[Unset, str] = UNSET - current: Union[Unset, bool] = UNSET has_user_management: Union[Unset, bool] = UNSET - is_in_network: Union[Unset, bool] = UNSET is_root: Union[Unset, bool] = UNSET - name: Union[Unset, str] = UNSET - organization: Union[Unset, "Organization"] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - is_contributor = self.is_contributor - url = self.url - api_path = self.api_path - certificate = self.certificate - current = self.current - has_user_management = self.has_user_management is_in_network = self.is_in_network - is_root = self.is_root name = self.name + api_path = self.api_path + current = self.current + is_contributor = self.is_contributor organization: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.organization, Unset): organization = self.organization.to_dict() + url = self.url + certificate = self.certificate + has_user_management = self.has_user_management + is_root = self.is_root + field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) + if is_in_network is not UNSET: + field_dict["isInNetwork"] = is_in_network + if name is not UNSET: + field_dict["name"] = name + if api_path is not UNSET: + field_dict["apiPath"] = api_path + if current is not UNSET: + field_dict["current"] = current if is_contributor is not UNSET: field_dict["isContributor"] = is_contributor + if organization is not UNSET: + field_dict["organization"] = organization if url is not UNSET: 
field_dict["url"] = url - if api_path is not UNSET: - field_dict["apiPath"] = api_path if certificate is not UNSET: field_dict["certificate"] = certificate - if current is not UNSET: - field_dict["current"] = current if has_user_management is not UNSET: field_dict["hasUserManagement"] = has_user_management - if is_in_network is not UNSET: - field_dict["isInNetwork"] = is_in_network if is_root is not UNSET: field_dict["isRoot"] = is_root - if name is not UNSET: - field_dict["name"] = name - if organization is not UNSET: - field_dict["organization"] = organization return field_dict @@ -86,23 +87,15 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.organization import Organization d = src_dict.copy() - is_contributor = d.pop("isContributor", UNSET) + is_in_network = d.pop("isInNetwork", UNSET) - url = d.pop("url", UNSET) + name = d.pop("name", UNSET) api_path = d.pop("apiPath", UNSET) - certificate = d.pop("certificate", UNSET) - current = d.pop("current", UNSET) - has_user_management = d.pop("hasUserManagement", UNSET) - - is_in_network = d.pop("isInNetwork", UNSET) - - is_root = d.pop("isRoot", UNSET) - - name = d.pop("name", UNSET) + is_contributor = d.pop("isContributor", UNSET) _organization = d.pop("organization", UNSET) organization: Union[Unset, Organization] @@ -111,17 +104,25 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: organization = Organization.from_dict(_organization) + url = d.pop("url", UNSET) + + certificate = d.pop("certificate", UNSET) + + has_user_management = d.pop("hasUserManagement", UNSET) + + is_root = d.pop("isRoot", UNSET) + node = cls( + is_in_network=is_in_network, + name=name, + api_path=api_path, + current=current, is_contributor=is_contributor, + organization=organization, url=url, - api_path=api_path, certificate=certificate, - current=current, has_user_management=has_user_management, - is_in_network=is_in_network, is_root=is_root, - name=name, - organization=organization, ) 
node.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/node_status.py b/src/tuneinsight/api/sdk/models/node_status.py index f6b1a93..101856d 100644 --- a/src/tuneinsight/api/sdk/models/node_status.py +++ b/src/tuneinsight/api/sdk/models/node_status.py @@ -12,46 +12,46 @@ class NodeStatus: """Network Status of a node Attributes: - version (Union[Unset, str]): Version of the node node (Union[Unset, str]): URL of the node status (Union[Unset, str]): Status (ok/nok) + version (Union[Unset, str]): Version of the node """ - version: Union[Unset, str] = UNSET node: Union[Unset, str] = UNSET status: Union[Unset, str] = UNSET + version: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - version = self.version node = self.node status = self.status + version = self.version field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if version is not UNSET: - field_dict["version"] = version if node is not UNSET: field_dict["node"] = node if status is not UNSET: field_dict["status"] = status + if version is not UNSET: + field_dict["version"] = version return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - version = d.pop("version", UNSET) - node = d.pop("node", UNSET) status = d.pop("status", UNSET) + version = d.pop("version", UNSET) + node_status = cls( - version=version, node=node, status=status, + version=version, ) node_status.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/noise_distributions.py b/src/tuneinsight/api/sdk/models/noise_distributions.py new file mode 100644 index 0000000..c295327 --- /dev/null +++ b/src/tuneinsight/api/sdk/models/noise_distributions.py @@ -0,0 +1,9 @@ +from enum import Enum + + +class NoiseDistributions(str, Enum): + LAPLACE = "laplace" + GAUSSIAN = "gaussian" + + def __str__(self) -> str: + return str(self.value) 
diff --git a/src/tuneinsight/api/sdk/models/organization.py b/src/tuneinsight/api/sdk/models/organization.py index b18f92a..91c6eed 100644 --- a/src/tuneinsight/api/sdk/models/organization.py +++ b/src/tuneinsight/api/sdk/models/organization.py @@ -17,25 +17,23 @@ class Organization: """Organization taking part in a project Attributes: - group (Union[Unset, str]): Name of the corresponding keycloak group - name (Union[Unset, str]): Name of the organization authorization_status (Union[Unset, AuthorizationStatus]): Authorization status of the project coordinates (Union[Unset, OrganizationCoordinates]): Coordinates of the organization. (Decimal degrees, WGS84) country (Union[Unset, str]): Country code of the organization. (Lower case two-letter ISO 3166-1 alpha-2) data_officer (Union[Unset, str]): Name of the data officer in charge in the organization + group (Union[Unset, str]): Name of the corresponding keycloak group + name (Union[Unset, str]): Name of the organization """ - group: Union[Unset, str] = UNSET - name: Union[Unset, str] = UNSET authorization_status: Union[Unset, AuthorizationStatus] = UNSET coordinates: Union[Unset, "OrganizationCoordinates"] = UNSET country: Union[Unset, str] = UNSET data_officer: Union[Unset, str] = UNSET + group: Union[Unset, str] = UNSET + name: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - group = self.group - name = self.name authorization_status: Union[Unset, str] = UNSET if not isinstance(self.authorization_status, Unset): authorization_status = self.authorization_status.value @@ -46,14 +44,12 @@ def to_dict(self) -> Dict[str, Any]: country = self.country data_officer = self.data_officer + group = self.group + name = self.name field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if group is not UNSET: - field_dict["group"] = group - if name is not UNSET: - field_dict["name"] = name if 
authorization_status is not UNSET: field_dict["authorizationStatus"] = authorization_status if coordinates is not UNSET: @@ -62,6 +58,10 @@ def to_dict(self) -> Dict[str, Any]: field_dict["country"] = country if data_officer is not UNSET: field_dict["dataOfficer"] = data_officer + if group is not UNSET: + field_dict["group"] = group + if name is not UNSET: + field_dict["name"] = name return field_dict @@ -70,10 +70,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.organization_coordinates import OrganizationCoordinates d = src_dict.copy() - group = d.pop("group", UNSET) - - name = d.pop("name", UNSET) - _authorization_status = d.pop("authorizationStatus", UNSET) authorization_status: Union[Unset, AuthorizationStatus] if isinstance(_authorization_status, Unset): @@ -92,13 +88,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: data_officer = d.pop("dataOfficer", UNSET) + group = d.pop("group", UNSET) + + name = d.pop("name", UNSET) + organization = cls( - group=group, - name=name, authorization_status=authorization_status, coordinates=coordinates, country=country, data_officer=data_officer, + group=group, + name=name, ) organization.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/post_data_object_json_body.py b/src/tuneinsight/api/sdk/models/post_data_object_json_body.py index 60e7100..68a92b6 100644 --- a/src/tuneinsight/api/sdk/models/post_data_object_json_body.py +++ b/src/tuneinsight/api/sdk/models/post_data_object_json_body.py @@ -18,111 +18,111 @@ class PostDataObjectJsonBody: """ Attributes: - query (Union[Unset, str]): - visibility_status (Union[Unset, DataObjectVisibilityStatus]): type of visibility set to the dataobject - json_path (Union[Unset, str]): JsonPath expression to retrieve data from within JSON-structured data. - key_info (Union[Unset, KeyInfo]): information about keys - public_key (Union[Unset, str]): Unique identifier of a data object. 
- data_object_id (Union[Unset, str]): Unique identifier of a data object. data_object_shared_id (Union[Unset, str]): Shared identifier of a data object. - method (Union[Unset, DataObjectCreationMethod]): Method of creation: from a data source or by - encrypting/decrypting a data object, or simply create a new one + data_source_id (Union[Unset, str]): Data source adapting into data object private_key (Union[Unset, str]): Unique identifier of a data object. project_id (Union[Unset, str]): Unique identifier of a project. - type (Union[Unset, DataObjectType]): type of the dataobject - columns (Union[Unset, List[str]]): - data_source_id (Union[Unset, str]): Data source adapting into data object shared (Union[Unset, bool]): whether the dataobject is meant to be used as a collective input - encrypted (Union[Unset, bool]): indicator whether or not the uploaded dataobject is encrypted + visibility_status (Union[Unset, DataObjectVisibilityStatus]): type of visibility set to the dataobject + columns (Union[Unset, List[str]]): + data_object_id (Union[Unset, str]): Unique identifier of a data object. + query (Union[Unset, str]): + type (Union[Unset, DataObjectType]): type of the dataobject session_id (Union[Unset, str]): Unique identifier of a session + encrypted (Union[Unset, bool]): indicator whether or not the uploaded dataobject is encrypted + json_path (Union[Unset, str]): JsonPath expression to retrieve data from within JSON-structured data. + key_info (Union[Unset, KeyInfo]): information about keys + method (Union[Unset, DataObjectCreationMethod]): Method of creation: from a data source or by + encrypting/decrypting a data object, or simply create a new one + public_key (Union[Unset, str]): Unique identifier of a data object. 
""" - query: Union[Unset, str] = UNSET - visibility_status: Union[Unset, DataObjectVisibilityStatus] = UNSET - json_path: Union[Unset, str] = UNSET - key_info: Union[Unset, "KeyInfo"] = UNSET - public_key: Union[Unset, str] = UNSET - data_object_id: Union[Unset, str] = UNSET data_object_shared_id: Union[Unset, str] = UNSET - method: Union[Unset, DataObjectCreationMethod] = UNSET + data_source_id: Union[Unset, str] = UNSET private_key: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - type: Union[Unset, DataObjectType] = UNSET - columns: Union[Unset, List[str]] = UNSET - data_source_id: Union[Unset, str] = UNSET shared: Union[Unset, bool] = UNSET - encrypted: Union[Unset, bool] = UNSET + visibility_status: Union[Unset, DataObjectVisibilityStatus] = UNSET + columns: Union[Unset, List[str]] = UNSET + data_object_id: Union[Unset, str] = UNSET + query: Union[Unset, str] = UNSET + type: Union[Unset, DataObjectType] = UNSET session_id: Union[Unset, str] = UNSET + encrypted: Union[Unset, bool] = UNSET + json_path: Union[Unset, str] = UNSET + key_info: Union[Unset, "KeyInfo"] = UNSET + method: Union[Unset, DataObjectCreationMethod] = UNSET + public_key: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - query = self.query + data_object_shared_id = self.data_object_shared_id + data_source_id = self.data_source_id + private_key = self.private_key + project_id = self.project_id + shared = self.shared visibility_status: Union[Unset, str] = UNSET if not isinstance(self.visibility_status, Unset): visibility_status = self.visibility_status.value + columns: Union[Unset, List[str]] = UNSET + if not isinstance(self.columns, Unset): + columns = self.columns + + data_object_id = self.data_object_id + query = self.query + type: Union[Unset, str] = UNSET + if not isinstance(self.type, Unset): + type = self.type.value + + session_id = self.session_id + encrypted = self.encrypted 
json_path = self.json_path key_info: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.key_info, Unset): key_info = self.key_info.to_dict() - public_key = self.public_key - data_object_id = self.data_object_id - data_object_shared_id = self.data_object_shared_id method: Union[Unset, str] = UNSET if not isinstance(self.method, Unset): method = self.method.value - private_key = self.private_key - project_id = self.project_id - type: Union[Unset, str] = UNSET - if not isinstance(self.type, Unset): - type = self.type.value - - columns: Union[Unset, List[str]] = UNSET - if not isinstance(self.columns, Unset): - columns = self.columns - - data_source_id = self.data_source_id - shared = self.shared - encrypted = self.encrypted - session_id = self.session_id + public_key = self.public_key field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if query is not UNSET: - field_dict["query"] = query - if visibility_status is not UNSET: - field_dict["visibilityStatus"] = visibility_status - if json_path is not UNSET: - field_dict["jsonPath"] = json_path - if key_info is not UNSET: - field_dict["keyInfo"] = key_info - if public_key is not UNSET: - field_dict["publicKey"] = public_key - if data_object_id is not UNSET: - field_dict["dataObjectId"] = data_object_id if data_object_shared_id is not UNSET: field_dict["dataObjectSharedId"] = data_object_shared_id - if method is not UNSET: - field_dict["method"] = method + if data_source_id is not UNSET: + field_dict["dataSourceId"] = data_source_id if private_key is not UNSET: field_dict["privateKey"] = private_key if project_id is not UNSET: field_dict["projectId"] = project_id - if type is not UNSET: - field_dict["type"] = type - if columns is not UNSET: - field_dict["columns"] = columns - if data_source_id is not UNSET: - field_dict["dataSourceId"] = data_source_id if shared is not UNSET: field_dict["shared"] = shared - if encrypted is not UNSET: - field_dict["encrypted"] = 
encrypted + if visibility_status is not UNSET: + field_dict["visibilityStatus"] = visibility_status + if columns is not UNSET: + field_dict["columns"] = columns + if data_object_id is not UNSET: + field_dict["dataObjectId"] = data_object_id + if query is not UNSET: + field_dict["query"] = query + if type is not UNSET: + field_dict["type"] = type if session_id is not UNSET: field_dict["sessionId"] = session_id + if encrypted is not UNSET: + field_dict["encrypted"] = encrypted + if json_path is not UNSET: + field_dict["jsonPath"] = json_path + if key_info is not UNSET: + field_dict["keyInfo"] = key_info + if method is not UNSET: + field_dict["method"] = method + if public_key is not UNSET: + field_dict["publicKey"] = public_key return field_dict @@ -131,7 +131,15 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.key_info import KeyInfo d = src_dict.copy() - query = d.pop("query", UNSET) + data_object_shared_id = d.pop("dataObjectSharedId", UNSET) + + data_source_id = d.pop("dataSourceId", UNSET) + + private_key = d.pop("privateKey", UNSET) + + project_id = d.pop("projectId", UNSET) + + shared = d.pop("shared", UNSET) _visibility_status = d.pop("visibilityStatus", UNSET) visibility_status: Union[Unset, DataObjectVisibilityStatus] @@ -140,6 +148,23 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: visibility_status = DataObjectVisibilityStatus(_visibility_status) + columns = cast(List[str], d.pop("columns", UNSET)) + + data_object_id = d.pop("dataObjectId", UNSET) + + query = d.pop("query", UNSET) + + _type = d.pop("type", UNSET) + type: Union[Unset, DataObjectType] + if isinstance(_type, Unset): + type = UNSET + else: + type = DataObjectType(_type) + + session_id = d.pop("sessionId", UNSET) + + encrypted = d.pop("encrypted", UNSET) + json_path = d.pop("jsonPath", UNSET) _key_info = d.pop("keyInfo", UNSET) @@ -149,12 +174,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: key_info = 
KeyInfo.from_dict(_key_info) - public_key = d.pop("publicKey", UNSET) - - data_object_id = d.pop("dataObjectId", UNSET) - - data_object_shared_id = d.pop("dataObjectSharedId", UNSET) - _method = d.pop("method", UNSET) method: Union[Unset, DataObjectCreationMethod] if isinstance(_method, Unset): @@ -162,44 +181,25 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: method = DataObjectCreationMethod(_method) - private_key = d.pop("privateKey", UNSET) - - project_id = d.pop("projectId", UNSET) - - _type = d.pop("type", UNSET) - type: Union[Unset, DataObjectType] - if isinstance(_type, Unset): - type = UNSET - else: - type = DataObjectType(_type) - - columns = cast(List[str], d.pop("columns", UNSET)) - - data_source_id = d.pop("dataSourceId", UNSET) - - shared = d.pop("shared", UNSET) - - encrypted = d.pop("encrypted", UNSET) - - session_id = d.pop("sessionId", UNSET) + public_key = d.pop("publicKey", UNSET) post_data_object_json_body = cls( - query=query, - visibility_status=visibility_status, - json_path=json_path, - key_info=key_info, - public_key=public_key, - data_object_id=data_object_id, data_object_shared_id=data_object_shared_id, - method=method, + data_source_id=data_source_id, private_key=private_key, project_id=project_id, - type=type, - columns=columns, - data_source_id=data_source_id, shared=shared, - encrypted=encrypted, + visibility_status=visibility_status, + columns=columns, + data_object_id=data_object_id, + query=query, + type=type, session_id=session_id, + encrypted=encrypted, + json_path=json_path, + key_info=key_info, + method=method, + public_key=public_key, ) post_data_object_json_body.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/post_llm_request_json_body_prompt_args.py b/src/tuneinsight/api/sdk/models/post_llm_request_json_body_prompt_args.py index b68d389..72c7896 100644 --- a/src/tuneinsight/api/sdk/models/post_llm_request_json_body_prompt_args.py +++ 
b/src/tuneinsight/api/sdk/models/post_llm_request_json_body_prompt_args.py @@ -12,36 +12,40 @@ class PostLlmRequestJsonBodyPromptArgs: """Parameters of the prompt as a dict Attributes: + computation_labels (Union[Unset, str]): (AI-Explainer) Labels of the computation to explain + computation_results (Union[Unset, str]): (AI-Explainer) Results of the computation to explain computation_type (Union[Unset, str]): (AI-Explainer) Type of the computation to explain query (Union[Unset, str]): User's additional query rdf_filter (Union[Unset, str]): (SPARQL) Filter for SPARQL relations and subclasses to include rdf_schema (Union[Unset, str]): (SPARQL) SPARQL schema sql_schema (Union[Unset, str]): (SQL) SQL schema - computation_labels (Union[Unset, str]): (AI-Explainer) Labels of the computation to explain - computation_results (Union[Unset, str]): (AI-Explainer) Results of the computation to explain """ + computation_labels: Union[Unset, str] = UNSET + computation_results: Union[Unset, str] = UNSET computation_type: Union[Unset, str] = UNSET query: Union[Unset, str] = UNSET rdf_filter: Union[Unset, str] = UNSET rdf_schema: Union[Unset, str] = UNSET sql_schema: Union[Unset, str] = UNSET - computation_labels: Union[Unset, str] = UNSET - computation_results: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: + computation_labels = self.computation_labels + computation_results = self.computation_results computation_type = self.computation_type query = self.query rdf_filter = self.rdf_filter rdf_schema = self.rdf_schema sql_schema = self.sql_schema - computation_labels = self.computation_labels - computation_results = self.computation_results field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) + if computation_labels is not UNSET: + field_dict["computation_labels"] = computation_labels + if computation_results is not UNSET: + 
field_dict["computation_results"] = computation_results if computation_type is not UNSET: field_dict["computation_type"] = computation_type if query is not UNSET: @@ -52,16 +56,16 @@ def to_dict(self) -> Dict[str, Any]: field_dict["rdf_schema"] = rdf_schema if sql_schema is not UNSET: field_dict["sql_schema"] = sql_schema - if computation_labels is not UNSET: - field_dict["computation_labels"] = computation_labels - if computation_results is not UNSET: - field_dict["computation_results"] = computation_results return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() + computation_labels = d.pop("computation_labels", UNSET) + + computation_results = d.pop("computation_results", UNSET) + computation_type = d.pop("computation_type", UNSET) query = d.pop("query", UNSET) @@ -72,18 +76,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: sql_schema = d.pop("sql_schema", UNSET) - computation_labels = d.pop("computation_labels", UNSET) - - computation_results = d.pop("computation_results", UNSET) - post_llm_request_json_body_prompt_args = cls( + computation_labels=computation_labels, + computation_results=computation_results, computation_type=computation_type, query=query, rdf_filter=rdf_filter, rdf_schema=rdf_schema, sql_schema=sql_schema, - computation_labels=computation_labels, - computation_results=computation_results, ) post_llm_request_json_body_prompt_args.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/post_user_response_201.py b/src/tuneinsight/api/sdk/models/post_user_response_201.py index 11bf7b8..a06c861 100644 --- a/src/tuneinsight/api/sdk/models/post_user_response_201.py +++ b/src/tuneinsight/api/sdk/models/post_user_response_201.py @@ -11,38 +11,38 @@ class PostUserResponse201: """ Attributes: - email (Union[Unset, str]): User email id (Union[Unset, str]): User id + email (Union[Unset, str]): User email """ - email: Union[Unset, str] = UNSET id: Union[Unset, str] = UNSET 
+ email: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - email = self.email id = self.id + email = self.email field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if email is not UNSET: - field_dict["email"] = email if id is not UNSET: field_dict["id"] = id + if email is not UNSET: + field_dict["email"] = email return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - email = d.pop("email", UNSET) - id = d.pop("id", UNSET) + email = d.pop("email", UNSET) + post_user_response_201 = cls( - email=email, id=id, + email=email, ) post_user_response_201.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/prediction_params.py b/src/tuneinsight/api/sdk/models/prediction_params.py index d14a3fe..2c42515 100644 --- a/src/tuneinsight/api/sdk/models/prediction_params.py +++ b/src/tuneinsight/api/sdk/models/prediction_params.py @@ -17,30 +17,30 @@ class PredictionParams: """subset of parameters required for only the prediction Attributes: - approximation_params (Union[Unset, ApproximationParams]): parameters for polynomial approximation regression_type (Union[Unset, RegressionType]): type of the regression + approximation_params (Union[Unset, ApproximationParams]): parameters for polynomial approximation """ - approximation_params: Union[Unset, "ApproximationParams"] = UNSET regression_type: Union[Unset, RegressionType] = UNSET + approximation_params: Union[Unset, "ApproximationParams"] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - approximation_params: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.approximation_params, Unset): - approximation_params = self.approximation_params.to_dict() - regression_type: Union[Unset, str] = UNSET if not isinstance(self.regression_type, Unset): 
regression_type = self.regression_type.value + approximation_params: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.approximation_params, Unset): + approximation_params = self.approximation_params.to_dict() + field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if approximation_params is not UNSET: - field_dict["approximationParams"] = approximation_params if regression_type is not UNSET: field_dict["regressionType"] = regression_type + if approximation_params is not UNSET: + field_dict["approximationParams"] = approximation_params return field_dict @@ -49,13 +49,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.approximation_params import ApproximationParams d = src_dict.copy() - _approximation_params = d.pop("approximationParams", UNSET) - approximation_params: Union[Unset, ApproximationParams] - if isinstance(_approximation_params, Unset): - approximation_params = UNSET - else: - approximation_params = ApproximationParams.from_dict(_approximation_params) - _regression_type = d.pop("regressionType", UNSET) regression_type: Union[Unset, RegressionType] if isinstance(_regression_type, Unset): @@ -63,9 +56,16 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: regression_type = RegressionType(_regression_type) + _approximation_params = d.pop("approximationParams", UNSET) + approximation_params: Union[Unset, ApproximationParams] + if isinstance(_approximation_params, Unset): + approximation_params = UNSET + else: + approximation_params = ApproximationParams.from_dict(_approximation_params) + prediction_params = cls( - approximation_params=approximation_params, regression_type=regression_type, + approximation_params=approximation_params, ) prediction_params.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/privacy_summary.py b/src/tuneinsight/api/sdk/models/privacy_summary.py index b14a724..afcea80 100644 --- 
a/src/tuneinsight/api/sdk/models/privacy_summary.py +++ b/src/tuneinsight/api/sdk/models/privacy_summary.py @@ -19,24 +19,20 @@ class PrivacySummary: """Privacy summary for a project Attributes: - authorization_status (Union[Unset, AuthorizationStatus]): Authorization status of the project computation (Union[Unset, PrivacySummaryComputation]): Description of the computation that will be run for the project data_source (Union[Unset, DataSource]): execution_quota (Union[Unset, ExecutionQuota]): stores information about the status of the execution quota + authorization_status (Union[Unset, AuthorizationStatus]): Authorization status of the project """ - authorization_status: Union[Unset, AuthorizationStatus] = UNSET computation: Union[Unset, "PrivacySummaryComputation"] = UNSET data_source: Union[Unset, "DataSource"] = UNSET execution_quota: Union[Unset, "ExecutionQuota"] = UNSET + authorization_status: Union[Unset, AuthorizationStatus] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - authorization_status: Union[Unset, str] = UNSET - if not isinstance(self.authorization_status, Unset): - authorization_status = self.authorization_status.value - computation: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.computation, Unset): computation = self.computation.to_dict() @@ -49,17 +45,21 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.execution_quota, Unset): execution_quota = self.execution_quota.to_dict() + authorization_status: Union[Unset, str] = UNSET + if not isinstance(self.authorization_status, Unset): + authorization_status = self.authorization_status.value + field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if authorization_status is not UNSET: - field_dict["authorizationStatus"] = authorization_status if computation is not UNSET: field_dict["computation"] = computation if data_source is not UNSET: 
field_dict["dataSource"] = data_source if execution_quota is not UNSET: field_dict["executionQuota"] = execution_quota + if authorization_status is not UNSET: + field_dict["authorizationStatus"] = authorization_status return field_dict @@ -70,13 +70,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.privacy_summary_computation import PrivacySummaryComputation d = src_dict.copy() - _authorization_status = d.pop("authorizationStatus", UNSET) - authorization_status: Union[Unset, AuthorizationStatus] - if isinstance(_authorization_status, Unset): - authorization_status = UNSET - else: - authorization_status = AuthorizationStatus(_authorization_status) - _computation = d.pop("computation", UNSET) computation: Union[Unset, PrivacySummaryComputation] if isinstance(_computation, Unset): @@ -98,11 +91,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: execution_quota = ExecutionQuota.from_dict(_execution_quota) + _authorization_status = d.pop("authorizationStatus", UNSET) + authorization_status: Union[Unset, AuthorizationStatus] + if isinstance(_authorization_status, Unset): + authorization_status = UNSET + else: + authorization_status = AuthorizationStatus(_authorization_status) + privacy_summary = cls( - authorization_status=authorization_status, computation=computation, data_source=data_source, execution_quota=execution_quota, + authorization_status=authorization_status, ) privacy_summary.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/private_search.py b/src/tuneinsight/api/sdk/models/private_search.py index 0750540..af412b8 100644 --- a/src/tuneinsight/api/sdk/models/private_search.py +++ b/src/tuneinsight/api/sdk/models/private_search.py @@ -22,6 +22,8 @@ class PrivateSearch: """ Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. 
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -32,14 +34,7 @@ class PrivateSearch: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -48,56 +43,61 @@ class PrivateSearch: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. 
+ preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. 
- end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. pir_dataset_object_id (Union[Unset, str]): Unique identifier of a data object. pir_search_object_id (Union[Unset, str]): Unique identifier of a data object. 
""" type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET pir_dataset_object_id: Union[Unset, str] = UNSET pir_search_object_id: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) @@ -105,44 +105,44 @@ class PrivateSearch: def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not 
isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - 
data_source_parameters = self.data_source_parameters.to_dict() - + wait = self.wait pir_dataset_object_id = self.pir_dataset_object_id pir_search_object_id = self.pir_search_object_id @@ -153,46 +153,46 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + 
field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait if pir_dataset_object_id is not UNSET: field_dict["pirDatasetObjectId"] = pir_dataset_object_id if pir_search_object_id is not UNSET: @@ -210,6 +210,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -217,6 +219,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -224,24 +237,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = 
ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -254,31 +261,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) - - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: 
Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) + wait = d.pop("wait", UNSET) pir_dataset_object_id = d.pop("pirDatasetObjectId", UNSET) @@ -286,26 +286,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: private_search = cls( type=type, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, + wait=wait, pir_dataset_object_id=pir_dataset_object_id, pir_search_object_id=pir_search_object_id, ) diff --git a/src/tuneinsight/api/sdk/models/private_search_database.py b/src/tuneinsight/api/sdk/models/private_search_database.py index 1f0ec70..038a7bf 100644 --- a/src/tuneinsight/api/sdk/models/private_search_database.py +++ b/src/tuneinsight/api/sdk/models/private_search_database.py @@ -12,55 +12,55 @@ class PrivateSearchDatabase: """Database used by private search Attributes: - cryptosystem_params (Union[Unset, str]): cryptosystem parameters (b64-encoded) database_id (Union[Unset, str]): Unique identifier of a 
private search database. database_index (Union[Unset, str]): private search database hash index (b64-encoded) database_params (Union[Unset, str]): private search database parameters (b64-encoded), returned on GET /private- search-databases/ + cryptosystem_params (Union[Unset, str]): cryptosystem parameters (b64-encoded) """ - cryptosystem_params: Union[Unset, str] = UNSET database_id: Union[Unset, str] = UNSET database_index: Union[Unset, str] = UNSET database_params: Union[Unset, str] = UNSET + cryptosystem_params: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - cryptosystem_params = self.cryptosystem_params database_id = self.database_id database_index = self.database_index database_params = self.database_params + cryptosystem_params = self.cryptosystem_params field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if cryptosystem_params is not UNSET: - field_dict["cryptosystemParams"] = cryptosystem_params if database_id is not UNSET: field_dict["databaseID"] = database_id if database_index is not UNSET: field_dict["databaseIndex"] = database_index if database_params is not UNSET: field_dict["databaseParams"] = database_params + if cryptosystem_params is not UNSET: + field_dict["cryptosystemParams"] = cryptosystem_params return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - cryptosystem_params = d.pop("cryptosystemParams", UNSET) - database_id = d.pop("databaseID", UNSET) database_index = d.pop("databaseIndex", UNSET) database_params = d.pop("databaseParams", UNSET) + cryptosystem_params = d.pop("cryptosystemParams", UNSET) + private_search_database = cls( - cryptosystem_params=cryptosystem_params, database_id=database_id, database_index=database_index, database_params=database_params, + cryptosystem_params=cryptosystem_params, ) 
private_search_database.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/private_search_setup.py b/src/tuneinsight/api/sdk/models/private_search_setup.py index 85520f9..e70d873 100644 --- a/src/tuneinsight/api/sdk/models/private_search_setup.py +++ b/src/tuneinsight/api/sdk/models/private_search_setup.py @@ -23,6 +23,8 @@ class PrivateSearchSetup: Attributes: type (ComputationType): Type of the computation. keys (str): (required) name of the column from the dataset which stores the keys of the database + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -33,14 +35,7 @@ class PrivateSearchSetup: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. 
maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -49,29 +44,34 @@ class PrivateSearchSetup: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. + preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. 
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. - end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. values (Union[Unset, List[str]]): name of the columns from the dataset which stores the values of the database. If empty, the computation will set this parameter to the column names of the dataset after dropping the keys column. 
@@ -79,28 +79,28 @@ class PrivateSearchSetup: type: ComputationType keys: str + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET values: Union[Unset, List[str]] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) @@ -108,44 +108,44 @@ def to_dict(self) -> Dict[str, Any]: type = self.type.value keys = self.keys + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not 
isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - 
data_source_parameters = self.data_source_parameters.to_dict() - + wait = self.wait values: Union[Unset, List[str]] = UNSET if not isinstance(self.values, Unset): values = self.values @@ -158,46 +158,46 @@ def to_dict(self) -> Dict[str, Any]: "keys": keys, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + 
field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait if values is not UNSET: field_dict["values"] = values @@ -215,6 +215,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: keys = d.pop("keys") + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -222,6 +224,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -229,24 +242,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - 
dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -259,57 +266,50 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) - - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - 
data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) + wait = d.pop("wait", UNSET) values = cast(List[str], d.pop("values", UNSET)) private_search_setup = cls( type=type, keys=keys, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, + wait=wait, values=values, ) diff --git a/src/tuneinsight/api/sdk/models/project.py b/src/tuneinsight/api/sdk/models/project.py index 0e98f25..9c3740b 100644 --- a/src/tuneinsight/api/sdk/models/project.py +++ b/src/tuneinsight/api/sdk/models/project.py @@ -4,9 +4,9 @@ from ..models.authorization_status import AuthorizationStatus from ..models.client import Client -from ..models.project_base_workflow_type import ProjectBaseWorkflowType from ..models.project_status import ProjectStatus from ..models.topology import Topology +from ..models.workflow_type import WorkflowType from ..types import UNSET, Unset if TYPE_CHECKING: @@ -27,168 +27,162 @@ class Project: """Project entity definition. 
Attributes: - local (Union[Unset, None, bool]): True if the project's computation should run only with local data (not - configured the network) + authorization_status (Union[Unset, AuthorizationStatus]): Authorization status of the project + created_with_client (Union[Unset, Client]): Type of client that communicates with the agent API + description (Union[Unset, None, str]): allow_shared_edit (Union[Unset, bool]): True if this project can be modified after being shared. Modifications of a shared project will be broadcasted to the network - authorized_users (Union[Unset, List[str]]): The IDs of the users who can run the project - end_to_end_encrypted (Union[Unset, None, bool]): whether results are always end to end encrypted and decrypted - on the client side + created_by_node (Union[Unset, str]): ID of node where the project was first created + locked (Union[Unset, None, bool]): True if the project is read-only (likely because it has already been shared) + unrestricted_access (Union[Unset, None, bool]): when set to true, then all users from the same organization are + authorized to access the project (view / edit depends on the roles) + workflow_type (Union[Unset, WorkflowType]): type of the workflow UI in the frontend + computation_definition (Union[Unset, ComputationDefinition]): Generic computation. + shared (Union[Unset, bool]): True if the project has once been shared across the participants policy (Union[Unset, ComputationPolicy]): policy to validate a specific computation + created_by_user (Union[Unset, str]): ID of user who created the project + name (Union[Unset, str]): + network_id (Union[Unset, str]): id to uniquely identify the network + query (Union[Unset, DataSourceQuery]): schema used for the query topology (Union[Unset, Topology]): Network Topologies. 'star' or 'tree'. In star topology all nodes are connected to a central node. In tree topology all nodes are connected and aware of each other. 
allow_clear_query (Union[Unset, bool]): [Dangerous, can lead to cross code data share] True if it is allowed for a client to query the data source all participants of the project and return the clear text result - network_id (Union[Unset, str]): id to uniquely identify the network + local (Union[Unset, None, bool]): True if the project's computation should run only with local data (not + configured the network) run_async (Union[Unset, bool]): flag indicating if computation should be run asynchronously - shared (Union[Unset, bool]): True if the project has once been shared across the participants - unrestricted_access (Union[Unset, None, bool]): when set to true, then all users from the same organization are - authorized to access the project (view / edit depends on the roles) - data_source_id (Union[Unset, None, str]): Unique identifier of a data source. - hide_leaf_participants (Union[Unset, None, bool]): whether leaf project participants are not shown to other leaf - participants when the project is in a star topology. - query_timeout (Union[Unset, int]): Timeout for the data source queries Default: 30. - created_with_client (Union[Unset, Client]): Type of client that communicates with the agent API + unique_id (Union[Unset, str]): Unique identifier of a project. data_source_auto_match (Union[Unset, bool]): whether or not to automatically assign the first matching datasource when the project is shared with other nodes + end_to_end_encrypted (Union[Unset, None, bool]): whether results are always end to end encrypted and decrypted + on the client side + hide_leaf_participants (Union[Unset, None, bool]): whether leaf project participants are not shown to other leaf + participants when the project is in a star topology. + local_data_selection_definition (Union[Unset, LocalDataSelectionDefinition]): datasource selection definition. 
A + selection is a "query" or data selection definition to run on the datasource min_contributors (Union[Unset, None, int]): minimum number of participants that contribute with their data required to run computations within this project + query_timeout (Union[Unset, int]): Timeout for the data source queries Default: 30. + workflow_json (Union[Unset, str]): JSON representation of the workflow UI in the frontend + authorized_users (Union[Unset, List[str]]): The IDs of the users who can run the project + dpia (Union[Unset, str]): non_contributor (Union[Unset, None, bool]): indicates that the current project participant takes part in the distributed computations but does not have any input data. By default this field is set according to the instance's configuration. - query (Union[Unset, DataSourceQuery]): schema used for the query - unique_id (Union[Unset, str]): Unique identifier of a project. - workflow_json (Union[Unset, str]): JSON representation of the workflow UI in the frontend - authorization_status (Union[Unset, AuthorizationStatus]): Authorization status of the project - computation_definition (Union[Unset, ComputationDefinition]): Generic computation. - dpia (Union[Unset, str]): - local_data_selection_definition (Union[Unset, LocalDataSelectionDefinition]): datasource selection definition. A - selection is a "query" or data selection definition to run on the datasource - locked (Union[Unset, None, bool]): True if the project is read-only (likely because it has already been shared) - name (Union[Unset, str]): - created_by_node (Union[Unset, str]): ID of node where the project was first created - created_by_user (Union[Unset, str]): ID of user who created the project - description (Union[Unset, None, str]): - workflow_type (Union[Unset, ProjectBaseWorkflowType]): type of the workflow UI in the frontend + data_source_id (Union[Unset, None, str]): Unique identifier of a data source. 
+ computations (Union[Unset, List['Computation']]): List of computations of the project + created_at (Union[Unset, str]): + error (Union[Unset, str]): Description of a potential error that happened during the project lifespan + participants (Union[Unset, List['Participant']]): List of participants in the project + privacy_summary (Union[Unset, PrivacySummary]): Privacy summary for a project status (Union[Unset, ProjectStatus]): Stages of a project workflow updated_at (Union[Unset, str]): workflow_description (Union[Unset, str]): dynamically generated markdown description of the distributed workflow that is currently configured with the project. Not to be confused with the project description which is set by the user that has created the project for informative purposes. - computations (Union[Unset, List['Computation']]): List of computations of the project - created_at (Union[Unset, str]): - error (Union[Unset, str]): Description of a potential error that happened during the project lifespan - participants (Union[Unset, List['Participant']]): List of participants in the project - privacy_summary (Union[Unset, PrivacySummary]): Privacy summary for a project """ - local: Union[Unset, None, bool] = UNSET + authorization_status: Union[Unset, AuthorizationStatus] = UNSET + created_with_client: Union[Unset, Client] = UNSET + description: Union[Unset, None, str] = UNSET allow_shared_edit: Union[Unset, bool] = UNSET - authorized_users: Union[Unset, List[str]] = UNSET - end_to_end_encrypted: Union[Unset, None, bool] = UNSET + created_by_node: Union[Unset, str] = UNSET + locked: Union[Unset, None, bool] = UNSET + unrestricted_access: Union[Unset, None, bool] = UNSET + workflow_type: Union[Unset, WorkflowType] = UNSET + computation_definition: Union[Unset, "ComputationDefinition"] = UNSET + shared: Union[Unset, bool] = UNSET policy: Union[Unset, "ComputationPolicy"] = UNSET + created_by_user: Union[Unset, str] = UNSET + name: Union[Unset, str] = UNSET + network_id: 
Union[Unset, str] = UNSET + query: Union[Unset, "DataSourceQuery"] = UNSET topology: Union[Unset, Topology] = UNSET allow_clear_query: Union[Unset, bool] = UNSET - network_id: Union[Unset, str] = UNSET + local: Union[Unset, None, bool] = UNSET run_async: Union[Unset, bool] = UNSET - shared: Union[Unset, bool] = UNSET - unrestricted_access: Union[Unset, None, bool] = UNSET - data_source_id: Union[Unset, None, str] = UNSET - hide_leaf_participants: Union[Unset, None, bool] = UNSET - query_timeout: Union[Unset, int] = 30 - created_with_client: Union[Unset, Client] = UNSET + unique_id: Union[Unset, str] = UNSET data_source_auto_match: Union[Unset, bool] = UNSET + end_to_end_encrypted: Union[Unset, None, bool] = UNSET + hide_leaf_participants: Union[Unset, None, bool] = UNSET + local_data_selection_definition: Union[Unset, "LocalDataSelectionDefinition"] = UNSET min_contributors: Union[Unset, None, int] = UNSET - non_contributor: Union[Unset, None, bool] = UNSET - query: Union[Unset, "DataSourceQuery"] = UNSET - unique_id: Union[Unset, str] = UNSET + query_timeout: Union[Unset, int] = 30 workflow_json: Union[Unset, str] = UNSET - authorization_status: Union[Unset, AuthorizationStatus] = UNSET - computation_definition: Union[Unset, "ComputationDefinition"] = UNSET + authorized_users: Union[Unset, List[str]] = UNSET dpia: Union[Unset, str] = UNSET - local_data_selection_definition: Union[Unset, "LocalDataSelectionDefinition"] = UNSET - locked: Union[Unset, None, bool] = UNSET - name: Union[Unset, str] = UNSET - created_by_node: Union[Unset, str] = UNSET - created_by_user: Union[Unset, str] = UNSET - description: Union[Unset, None, str] = UNSET - workflow_type: Union[Unset, ProjectBaseWorkflowType] = UNSET - status: Union[Unset, ProjectStatus] = UNSET - updated_at: Union[Unset, str] = UNSET - workflow_description: Union[Unset, str] = UNSET + non_contributor: Union[Unset, None, bool] = UNSET + data_source_id: Union[Unset, None, str] = UNSET computations: Union[Unset, 
List["Computation"]] = UNSET created_at: Union[Unset, str] = UNSET error: Union[Unset, str] = UNSET participants: Union[Unset, List["Participant"]] = UNSET privacy_summary: Union[Unset, "PrivacySummary"] = UNSET + status: Union[Unset, ProjectStatus] = UNSET + updated_at: Union[Unset, str] = UNSET + workflow_description: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - local = self.local + authorization_status: Union[Unset, str] = UNSET + if not isinstance(self.authorization_status, Unset): + authorization_status = self.authorization_status.value + + created_with_client: Union[Unset, str] = UNSET + if not isinstance(self.created_with_client, Unset): + created_with_client = self.created_with_client.value + + description = self.description allow_shared_edit = self.allow_shared_edit - authorized_users: Union[Unset, List[str]] = UNSET - if not isinstance(self.authorized_users, Unset): - authorized_users = self.authorized_users + created_by_node = self.created_by_node + locked = self.locked + unrestricted_access = self.unrestricted_access + workflow_type: Union[Unset, str] = UNSET + if not isinstance(self.workflow_type, Unset): + workflow_type = self.workflow_type.value - end_to_end_encrypted = self.end_to_end_encrypted + computation_definition: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.computation_definition, Unset): + computation_definition = self.computation_definition.to_dict() + + shared = self.shared policy: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.policy, Unset): policy = self.policy.to_dict() + created_by_user = self.created_by_user + name = self.name + network_id = self.network_id + query: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.query, Unset): + query = self.query.to_dict() + topology: Union[Unset, str] = UNSET if not isinstance(self.topology, Unset): topology = self.topology.value allow_clear_query = 
self.allow_clear_query - network_id = self.network_id + local = self.local run_async = self.run_async - shared = self.shared - unrestricted_access = self.unrestricted_access - data_source_id = self.data_source_id - hide_leaf_participants = self.hide_leaf_participants - query_timeout = self.query_timeout - created_with_client: Union[Unset, str] = UNSET - if not isinstance(self.created_with_client, Unset): - created_with_client = self.created_with_client.value - - data_source_auto_match = self.data_source_auto_match - min_contributors = self.min_contributors - non_contributor = self.non_contributor - query: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.query, Unset): - query = self.query.to_dict() - unique_id = self.unique_id - workflow_json = self.workflow_json - authorization_status: Union[Unset, str] = UNSET - if not isinstance(self.authorization_status, Unset): - authorization_status = self.authorization_status.value - - computation_definition: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.computation_definition, Unset): - computation_definition = self.computation_definition.to_dict() - - dpia = self.dpia + data_source_auto_match = self.data_source_auto_match + end_to_end_encrypted = self.end_to_end_encrypted + hide_leaf_participants = self.hide_leaf_participants local_data_selection_definition: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_data_selection_definition, Unset): local_data_selection_definition = self.local_data_selection_definition.to_dict() - locked = self.locked - name = self.name - created_by_node = self.created_by_node - created_by_user = self.created_by_user - description = self.description - workflow_type: Union[Unset, str] = UNSET - if not isinstance(self.workflow_type, Unset): - workflow_type = self.workflow_type.value - - status: Union[Unset, str] = UNSET - if not isinstance(self.status, Unset): - status = self.status.value + min_contributors = self.min_contributors + query_timeout = 
self.query_timeout + workflow_json = self.workflow_json + authorized_users: Union[Unset, List[str]] = UNSET + if not isinstance(self.authorized_users, Unset): + authorized_users = self.authorized_users - updated_at = self.updated_at - workflow_description = self.workflow_description + dpia = self.dpia + non_contributor = self.non_contributor + data_source_id = self.data_source_id computations: Union[Unset, List[Dict[str, Any]]] = UNSET if not isinstance(self.computations, Unset): computations = [] @@ -211,77 +205,78 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.privacy_summary, Unset): privacy_summary = self.privacy_summary.to_dict() + status: Union[Unset, str] = UNSET + if not isinstance(self.status, Unset): + status = self.status.value + + updated_at = self.updated_at + workflow_description = self.workflow_description + field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if local is not UNSET: - field_dict["local"] = local + if authorization_status is not UNSET: + field_dict["authorizationStatus"] = authorization_status + if created_with_client is not UNSET: + field_dict["createdWithClient"] = created_with_client + if description is not UNSET: + field_dict["description"] = description if allow_shared_edit is not UNSET: field_dict["allowSharedEdit"] = allow_shared_edit - if authorized_users is not UNSET: - field_dict["authorizedUsers"] = authorized_users - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted + if created_by_node is not UNSET: + field_dict["createdByNode"] = created_by_node + if locked is not UNSET: + field_dict["locked"] = locked + if unrestricted_access is not UNSET: + field_dict["unrestrictedAccess"] = unrestricted_access + if workflow_type is not UNSET: + field_dict["workflowType"] = workflow_type + if computation_definition is not UNSET: + field_dict["computationDefinition"] = computation_definition + if shared is not UNSET: + 
field_dict["shared"] = shared if policy is not UNSET: field_dict["policy"] = policy + if created_by_user is not UNSET: + field_dict["createdByUser"] = created_by_user + if name is not UNSET: + field_dict["name"] = name + if network_id is not UNSET: + field_dict["networkId"] = network_id + if query is not UNSET: + field_dict["query"] = query if topology is not UNSET: field_dict["topology"] = topology if allow_clear_query is not UNSET: field_dict["allowClearQuery"] = allow_clear_query - if network_id is not UNSET: - field_dict["networkId"] = network_id + if local is not UNSET: + field_dict["local"] = local if run_async is not UNSET: field_dict["runAsync"] = run_async - if shared is not UNSET: - field_dict["shared"] = shared - if unrestricted_access is not UNSET: - field_dict["unrestrictedAccess"] = unrestricted_access - if data_source_id is not UNSET: - field_dict["dataSourceId"] = data_source_id - if hide_leaf_participants is not UNSET: - field_dict["hideLeafParticipants"] = hide_leaf_participants - if query_timeout is not UNSET: - field_dict["queryTimeout"] = query_timeout - if created_with_client is not UNSET: - field_dict["createdWithClient"] = created_with_client + if unique_id is not UNSET: + field_dict["uniqueId"] = unique_id if data_source_auto_match is not UNSET: field_dict["dataSourceAutoMatch"] = data_source_auto_match + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if hide_leaf_participants is not UNSET: + field_dict["hideLeafParticipants"] = hide_leaf_participants + if local_data_selection_definition is not UNSET: + field_dict["localDataSelectionDefinition"] = local_data_selection_definition if min_contributors is not UNSET: field_dict["minContributors"] = min_contributors - if non_contributor is not UNSET: - field_dict["nonContributor"] = non_contributor - if query is not UNSET: - field_dict["query"] = query - if unique_id is not UNSET: - field_dict["uniqueId"] = unique_id + if query_timeout is not 
UNSET: + field_dict["queryTimeout"] = query_timeout if workflow_json is not UNSET: field_dict["workflowJSON"] = workflow_json - if authorization_status is not UNSET: - field_dict["authorizationStatus"] = authorization_status - if computation_definition is not UNSET: - field_dict["computationDefinition"] = computation_definition + if authorized_users is not UNSET: + field_dict["authorizedUsers"] = authorized_users if dpia is not UNSET: field_dict["dpia"] = dpia - if local_data_selection_definition is not UNSET: - field_dict["localDataSelectionDefinition"] = local_data_selection_definition - if locked is not UNSET: - field_dict["locked"] = locked - if name is not UNSET: - field_dict["name"] = name - if created_by_node is not UNSET: - field_dict["createdByNode"] = created_by_node - if created_by_user is not UNSET: - field_dict["createdByUser"] = created_by_user - if description is not UNSET: - field_dict["description"] = description - if workflow_type is not UNSET: - field_dict["workflowType"] = workflow_type - if status is not UNSET: - field_dict["status"] = status - if updated_at is not UNSET: - field_dict["updatedAt"] = updated_at - if workflow_description is not UNSET: - field_dict["workflowDescription"] = workflow_description + if non_contributor is not UNSET: + field_dict["nonContributor"] = non_contributor + if data_source_id is not UNSET: + field_dict["dataSourceId"] = data_source_id if computations is not UNSET: field_dict["computations"] = computations if created_at is not UNSET: @@ -292,6 +287,12 @@ def to_dict(self) -> Dict[str, Any]: field_dict["participants"] = participants if privacy_summary is not UNSET: field_dict["privacySummary"] = privacy_summary + if status is not UNSET: + field_dict["status"] = status + if updated_at is not UNSET: + field_dict["updatedAt"] = updated_at + if workflow_description is not UNSET: + field_dict["workflowDescription"] = workflow_description return field_dict @@ -306,56 +307,58 @@ def from_dict(cls: Type[T], src_dict: 
Dict[str, Any]) -> T: from ..models.privacy_summary import PrivacySummary d = src_dict.copy() - local = d.pop("local", UNSET) - - allow_shared_edit = d.pop("allowSharedEdit", UNSET) - - authorized_users = cast(List[str], d.pop("authorizedUsers", UNSET)) - - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - - _policy = d.pop("policy", UNSET) - policy: Union[Unset, ComputationPolicy] - if isinstance(_policy, Unset): - policy = UNSET + _authorization_status = d.pop("authorizationStatus", UNSET) + authorization_status: Union[Unset, AuthorizationStatus] + if isinstance(_authorization_status, Unset): + authorization_status = UNSET else: - policy = ComputationPolicy.from_dict(_policy) + authorization_status = AuthorizationStatus(_authorization_status) - _topology = d.pop("topology", UNSET) - topology: Union[Unset, Topology] - if isinstance(_topology, Unset): - topology = UNSET + _created_with_client = d.pop("createdWithClient", UNSET) + created_with_client: Union[Unset, Client] + if isinstance(_created_with_client, Unset): + created_with_client = UNSET else: - topology = Topology(_topology) + created_with_client = Client(_created_with_client) - allow_clear_query = d.pop("allowClearQuery", UNSET) + description = d.pop("description", UNSET) - network_id = d.pop("networkId", UNSET) + allow_shared_edit = d.pop("allowSharedEdit", UNSET) - run_async = d.pop("runAsync", UNSET) + created_by_node = d.pop("createdByNode", UNSET) - shared = d.pop("shared", UNSET) + locked = d.pop("locked", UNSET) unrestricted_access = d.pop("unrestrictedAccess", UNSET) - data_source_id = d.pop("dataSourceId", UNSET) + _workflow_type = d.pop("workflowType", UNSET) + workflow_type: Union[Unset, WorkflowType] + if isinstance(_workflow_type, Unset): + workflow_type = UNSET + else: + workflow_type = WorkflowType(_workflow_type) - hide_leaf_participants = d.pop("hideLeafParticipants", UNSET) + _computation_definition = d.pop("computationDefinition", UNSET) + computation_definition: Union[Unset, 
ComputationDefinition] + if isinstance(_computation_definition, Unset): + computation_definition = UNSET + else: + computation_definition = ComputationDefinition.from_dict(_computation_definition) - query_timeout = d.pop("queryTimeout", UNSET) + shared = d.pop("shared", UNSET) - _created_with_client = d.pop("createdWithClient", UNSET) - created_with_client: Union[Unset, Client] - if isinstance(_created_with_client, Unset): - created_with_client = UNSET + _policy = d.pop("policy", UNSET) + policy: Union[Unset, ComputationPolicy] + if isinstance(_policy, Unset): + policy = UNSET else: - created_with_client = Client(_created_with_client) + policy = ComputationPolicy.from_dict(_policy) - data_source_auto_match = d.pop("dataSourceAutoMatch", UNSET) + created_by_user = d.pop("createdByUser", UNSET) - min_contributors = d.pop("minContributors", UNSET) + name = d.pop("name", UNSET) - non_contributor = d.pop("nonContributor", UNSET) + network_id = d.pop("networkId", UNSET) _query = d.pop("query", UNSET) query: Union[Unset, DataSourceQuery] @@ -364,25 +367,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: query = DataSourceQuery.from_dict(_query) - unique_id = d.pop("uniqueId", UNSET) + _topology = d.pop("topology", UNSET) + topology: Union[Unset, Topology] + if isinstance(_topology, Unset): + topology = UNSET + else: + topology = Topology(_topology) - workflow_json = d.pop("workflowJSON", UNSET) + allow_clear_query = d.pop("allowClearQuery", UNSET) - _authorization_status = d.pop("authorizationStatus", UNSET) - authorization_status: Union[Unset, AuthorizationStatus] - if isinstance(_authorization_status, Unset): - authorization_status = UNSET - else: - authorization_status = AuthorizationStatus(_authorization_status) + local = d.pop("local", UNSET) - _computation_definition = d.pop("computationDefinition", UNSET) - computation_definition: Union[Unset, ComputationDefinition] - if isinstance(_computation_definition, Unset): - computation_definition = 
UNSET - else: - computation_definition = ComputationDefinition.from_dict(_computation_definition) + run_async = d.pop("runAsync", UNSET) - dpia = d.pop("dpia", UNSET) + unique_id = d.pop("uniqueId", UNSET) + + data_source_auto_match = d.pop("dataSourceAutoMatch", UNSET) + + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + + hide_leaf_participants = d.pop("hideLeafParticipants", UNSET) _local_data_selection_definition = d.pop("localDataSelectionDefinition", UNSET) local_data_selection_definition: Union[Unset, LocalDataSelectionDefinition] @@ -391,33 +395,19 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: local_data_selection_definition = LocalDataSelectionDefinition.from_dict(_local_data_selection_definition) - locked = d.pop("locked", UNSET) - - name = d.pop("name", UNSET) - - created_by_node = d.pop("createdByNode", UNSET) + min_contributors = d.pop("minContributors", UNSET) - created_by_user = d.pop("createdByUser", UNSET) + query_timeout = d.pop("queryTimeout", UNSET) - description = d.pop("description", UNSET) + workflow_json = d.pop("workflowJSON", UNSET) - _workflow_type = d.pop("workflowType", UNSET) - workflow_type: Union[Unset, ProjectBaseWorkflowType] - if isinstance(_workflow_type, Unset): - workflow_type = UNSET - else: - workflow_type = ProjectBaseWorkflowType(_workflow_type) + authorized_users = cast(List[str], d.pop("authorizedUsers", UNSET)) - _status = d.pop("status", UNSET) - status: Union[Unset, ProjectStatus] - if isinstance(_status, Unset): - status = UNSET - else: - status = ProjectStatus(_status) + dpia = d.pop("dpia", UNSET) - updated_at = d.pop("updatedAt", UNSET) + non_contributor = d.pop("nonContributor", UNSET) - workflow_description = d.pop("workflowDescription", UNSET) + data_source_id = d.pop("dataSourceId", UNSET) computations = [] _computations = d.pop("computations", UNSET) @@ -444,46 +434,57 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: privacy_summary = 
PrivacySummary.from_dict(_privacy_summary) + _status = d.pop("status", UNSET) + status: Union[Unset, ProjectStatus] + if isinstance(_status, Unset): + status = UNSET + else: + status = ProjectStatus(_status) + + updated_at = d.pop("updatedAt", UNSET) + + workflow_description = d.pop("workflowDescription", UNSET) + project = cls( - local=local, + authorization_status=authorization_status, + created_with_client=created_with_client, + description=description, allow_shared_edit=allow_shared_edit, - authorized_users=authorized_users, - end_to_end_encrypted=end_to_end_encrypted, + created_by_node=created_by_node, + locked=locked, + unrestricted_access=unrestricted_access, + workflow_type=workflow_type, + computation_definition=computation_definition, + shared=shared, policy=policy, + created_by_user=created_by_user, + name=name, + network_id=network_id, + query=query, topology=topology, allow_clear_query=allow_clear_query, - network_id=network_id, + local=local, run_async=run_async, - shared=shared, - unrestricted_access=unrestricted_access, - data_source_id=data_source_id, - hide_leaf_participants=hide_leaf_participants, - query_timeout=query_timeout, - created_with_client=created_with_client, + unique_id=unique_id, data_source_auto_match=data_source_auto_match, + end_to_end_encrypted=end_to_end_encrypted, + hide_leaf_participants=hide_leaf_participants, + local_data_selection_definition=local_data_selection_definition, min_contributors=min_contributors, - non_contributor=non_contributor, - query=query, - unique_id=unique_id, + query_timeout=query_timeout, workflow_json=workflow_json, - authorization_status=authorization_status, - computation_definition=computation_definition, + authorized_users=authorized_users, dpia=dpia, - local_data_selection_definition=local_data_selection_definition, - locked=locked, - name=name, - created_by_node=created_by_node, - created_by_user=created_by_user, - description=description, - workflow_type=workflow_type, - status=status, - 
updated_at=updated_at, - workflow_description=workflow_description, + non_contributor=non_contributor, + data_source_id=data_source_id, computations=computations, created_at=created_at, error=error, participants=participants, privacy_summary=privacy_summary, + status=status, + updated_at=updated_at, + workflow_description=workflow_description, ) project.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/project_base.py b/src/tuneinsight/api/sdk/models/project_base.py index 3f67bfe..99f10ab 100644 --- a/src/tuneinsight/api/sdk/models/project_base.py +++ b/src/tuneinsight/api/sdk/models/project_base.py @@ -4,8 +4,8 @@ from ..models.authorization_status import AuthorizationStatus from ..models.client import Client -from ..models.project_base_workflow_type import ProjectBaseWorkflowType from ..models.topology import Topology +from ..models.workflow_type import WorkflowType from ..types import UNSET, Unset if TYPE_CHECKING: @@ -23,208 +23,209 @@ class ProjectBase: """Common fields of a project (for get, patch and post) Attributes: - local (Union[Unset, None, bool]): True if the project's computation should run only with local data (not - configured the network) + authorization_status (Union[Unset, AuthorizationStatus]): Authorization status of the project + created_with_client (Union[Unset, Client]): Type of client that communicates with the agent API + description (Union[Unset, None, str]): allow_shared_edit (Union[Unset, bool]): True if this project can be modified after being shared. 
Modifications of a shared project will be broadcasted to the network - authorized_users (Union[Unset, List[str]]): The IDs of the users who can run the project - end_to_end_encrypted (Union[Unset, None, bool]): whether results are always end to end encrypted and decrypted - on the client side + created_by_node (Union[Unset, str]): ID of node where the project was first created + locked (Union[Unset, None, bool]): True if the project is read-only (likely because it has already been shared) + unrestricted_access (Union[Unset, None, bool]): when set to true, then all users from the same organization are + authorized to access the project (view / edit depends on the roles) + workflow_type (Union[Unset, WorkflowType]): type of the workflow UI in the frontend + computation_definition (Union[Unset, ComputationDefinition]): Generic computation. + shared (Union[Unset, bool]): True if the project has once been shared across the participants policy (Union[Unset, ComputationPolicy]): policy to validate a specific computation + created_by_user (Union[Unset, str]): ID of user who created the project + name (Union[Unset, str]): + network_id (Union[Unset, str]): id to uniquely identify the network + query (Union[Unset, DataSourceQuery]): schema used for the query topology (Union[Unset, Topology]): Network Topologies. 'star' or 'tree'. In star topology all nodes are connected to a central node. In tree topology all nodes are connected and aware of each other. 
allow_clear_query (Union[Unset, bool]): [Dangerous, can lead to cross code data share] True if it is allowed for a client to query the data source all participants of the project and return the clear text result - network_id (Union[Unset, str]): id to uniquely identify the network + local (Union[Unset, None, bool]): True if the project's computation should run only with local data (not + configured the network) run_async (Union[Unset, bool]): flag indicating if computation should be run asynchronously - shared (Union[Unset, bool]): True if the project has once been shared across the participants - unrestricted_access (Union[Unset, None, bool]): when set to true, then all users from the same organization are - authorized to access the project (view / edit depends on the roles) - data_source_id (Union[Unset, None, str]): Unique identifier of a data source. - hide_leaf_participants (Union[Unset, None, bool]): whether leaf project participants are not shown to other leaf - participants when the project is in a star topology. - query_timeout (Union[Unset, int]): Timeout for the data source queries Default: 30. - created_with_client (Union[Unset, Client]): Type of client that communicates with the agent API + unique_id (Union[Unset, str]): Unique identifier of a project. data_source_auto_match (Union[Unset, bool]): whether or not to automatically assign the first matching datasource when the project is shared with other nodes + end_to_end_encrypted (Union[Unset, None, bool]): whether results are always end to end encrypted and decrypted + on the client side + hide_leaf_participants (Union[Unset, None, bool]): whether leaf project participants are not shown to other leaf + participants when the project is in a star topology. + local_data_selection_definition (Union[Unset, LocalDataSelectionDefinition]): datasource selection definition. 
A + selection is a "query" or data selection definition to run on the datasource min_contributors (Union[Unset, None, int]): minimum number of participants that contribute with their data required to run computations within this project + query_timeout (Union[Unset, int]): Timeout for the data source queries Default: 30. + workflow_json (Union[Unset, str]): JSON representation of the workflow UI in the frontend + authorized_users (Union[Unset, List[str]]): The IDs of the users who can run the project + dpia (Union[Unset, str]): non_contributor (Union[Unset, None, bool]): indicates that the current project participant takes part in the distributed computations but does not have any input data. By default this field is set according to the instance's configuration. - query (Union[Unset, DataSourceQuery]): schema used for the query - unique_id (Union[Unset, str]): Unique identifier of a project. - workflow_json (Union[Unset, str]): JSON representation of the workflow UI in the frontend - authorization_status (Union[Unset, AuthorizationStatus]): Authorization status of the project - computation_definition (Union[Unset, ComputationDefinition]): Generic computation. - dpia (Union[Unset, str]): - local_data_selection_definition (Union[Unset, LocalDataSelectionDefinition]): datasource selection definition. A - selection is a "query" or data selection definition to run on the datasource - locked (Union[Unset, None, bool]): True if the project is read-only (likely because it has already been shared) - name (Union[Unset, str]): - created_by_node (Union[Unset, str]): ID of node where the project was first created - created_by_user (Union[Unset, str]): ID of user who created the project - description (Union[Unset, None, str]): - workflow_type (Union[Unset, ProjectBaseWorkflowType]): type of the workflow UI in the frontend + data_source_id (Union[Unset, None, str]): Unique identifier of a data source. 
""" - local: Union[Unset, None, bool] = UNSET + authorization_status: Union[Unset, AuthorizationStatus] = UNSET + created_with_client: Union[Unset, Client] = UNSET + description: Union[Unset, None, str] = UNSET allow_shared_edit: Union[Unset, bool] = UNSET - authorized_users: Union[Unset, List[str]] = UNSET - end_to_end_encrypted: Union[Unset, None, bool] = UNSET + created_by_node: Union[Unset, str] = UNSET + locked: Union[Unset, None, bool] = UNSET + unrestricted_access: Union[Unset, None, bool] = UNSET + workflow_type: Union[Unset, WorkflowType] = UNSET + computation_definition: Union[Unset, "ComputationDefinition"] = UNSET + shared: Union[Unset, bool] = UNSET policy: Union[Unset, "ComputationPolicy"] = UNSET + created_by_user: Union[Unset, str] = UNSET + name: Union[Unset, str] = UNSET + network_id: Union[Unset, str] = UNSET + query: Union[Unset, "DataSourceQuery"] = UNSET topology: Union[Unset, Topology] = UNSET allow_clear_query: Union[Unset, bool] = UNSET - network_id: Union[Unset, str] = UNSET + local: Union[Unset, None, bool] = UNSET run_async: Union[Unset, bool] = UNSET - shared: Union[Unset, bool] = UNSET - unrestricted_access: Union[Unset, None, bool] = UNSET - data_source_id: Union[Unset, None, str] = UNSET - hide_leaf_participants: Union[Unset, None, bool] = UNSET - query_timeout: Union[Unset, int] = 30 - created_with_client: Union[Unset, Client] = UNSET + unique_id: Union[Unset, str] = UNSET data_source_auto_match: Union[Unset, bool] = UNSET + end_to_end_encrypted: Union[Unset, None, bool] = UNSET + hide_leaf_participants: Union[Unset, None, bool] = UNSET + local_data_selection_definition: Union[Unset, "LocalDataSelectionDefinition"] = UNSET min_contributors: Union[Unset, None, int] = UNSET - non_contributor: Union[Unset, None, bool] = UNSET - query: Union[Unset, "DataSourceQuery"] = UNSET - unique_id: Union[Unset, str] = UNSET + query_timeout: Union[Unset, int] = 30 workflow_json: Union[Unset, str] = UNSET - authorization_status: Union[Unset, 
AuthorizationStatus] = UNSET - computation_definition: Union[Unset, "ComputationDefinition"] = UNSET + authorized_users: Union[Unset, List[str]] = UNSET dpia: Union[Unset, str] = UNSET - local_data_selection_definition: Union[Unset, "LocalDataSelectionDefinition"] = UNSET - locked: Union[Unset, None, bool] = UNSET - name: Union[Unset, str] = UNSET - created_by_node: Union[Unset, str] = UNSET - created_by_user: Union[Unset, str] = UNSET - description: Union[Unset, None, str] = UNSET - workflow_type: Union[Unset, ProjectBaseWorkflowType] = UNSET + non_contributor: Union[Unset, None, bool] = UNSET + data_source_id: Union[Unset, None, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - local = self.local + authorization_status: Union[Unset, str] = UNSET + if not isinstance(self.authorization_status, Unset): + authorization_status = self.authorization_status.value + + created_with_client: Union[Unset, str] = UNSET + if not isinstance(self.created_with_client, Unset): + created_with_client = self.created_with_client.value + + description = self.description allow_shared_edit = self.allow_shared_edit - authorized_users: Union[Unset, List[str]] = UNSET - if not isinstance(self.authorized_users, Unset): - authorized_users = self.authorized_users + created_by_node = self.created_by_node + locked = self.locked + unrestricted_access = self.unrestricted_access + workflow_type: Union[Unset, str] = UNSET + if not isinstance(self.workflow_type, Unset): + workflow_type = self.workflow_type.value - end_to_end_encrypted = self.end_to_end_encrypted + computation_definition: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.computation_definition, Unset): + computation_definition = self.computation_definition.to_dict() + + shared = self.shared policy: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.policy, Unset): policy = self.policy.to_dict() + created_by_user = self.created_by_user + 
name = self.name + network_id = self.network_id + query: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.query, Unset): + query = self.query.to_dict() + topology: Union[Unset, str] = UNSET if not isinstance(self.topology, Unset): topology = self.topology.value allow_clear_query = self.allow_clear_query - network_id = self.network_id + local = self.local run_async = self.run_async - shared = self.shared - unrestricted_access = self.unrestricted_access - data_source_id = self.data_source_id + unique_id = self.unique_id + data_source_auto_match = self.data_source_auto_match + end_to_end_encrypted = self.end_to_end_encrypted hide_leaf_participants = self.hide_leaf_participants - query_timeout = self.query_timeout - created_with_client: Union[Unset, str] = UNSET - if not isinstance(self.created_with_client, Unset): - created_with_client = self.created_with_client.value + local_data_selection_definition: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.local_data_selection_definition, Unset): + local_data_selection_definition = self.local_data_selection_definition.to_dict() - data_source_auto_match = self.data_source_auto_match min_contributors = self.min_contributors - non_contributor = self.non_contributor - query: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.query, Unset): - query = self.query.to_dict() - - unique_id = self.unique_id + query_timeout = self.query_timeout workflow_json = self.workflow_json - authorization_status: Union[Unset, str] = UNSET - if not isinstance(self.authorization_status, Unset): - authorization_status = self.authorization_status.value - - computation_definition: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.computation_definition, Unset): - computation_definition = self.computation_definition.to_dict() + authorized_users: Union[Unset, List[str]] = UNSET + if not isinstance(self.authorized_users, Unset): + authorized_users = self.authorized_users dpia = self.dpia - 
local_data_selection_definition: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.local_data_selection_definition, Unset): - local_data_selection_definition = self.local_data_selection_definition.to_dict() - - locked = self.locked - name = self.name - created_by_node = self.created_by_node - created_by_user = self.created_by_user - description = self.description - workflow_type: Union[Unset, str] = UNSET - if not isinstance(self.workflow_type, Unset): - workflow_type = self.workflow_type.value + non_contributor = self.non_contributor + data_source_id = self.data_source_id field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if local is not UNSET: - field_dict["local"] = local + if authorization_status is not UNSET: + field_dict["authorizationStatus"] = authorization_status + if created_with_client is not UNSET: + field_dict["createdWithClient"] = created_with_client + if description is not UNSET: + field_dict["description"] = description if allow_shared_edit is not UNSET: field_dict["allowSharedEdit"] = allow_shared_edit - if authorized_users is not UNSET: - field_dict["authorizedUsers"] = authorized_users - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted + if created_by_node is not UNSET: + field_dict["createdByNode"] = created_by_node + if locked is not UNSET: + field_dict["locked"] = locked + if unrestricted_access is not UNSET: + field_dict["unrestrictedAccess"] = unrestricted_access + if workflow_type is not UNSET: + field_dict["workflowType"] = workflow_type + if computation_definition is not UNSET: + field_dict["computationDefinition"] = computation_definition + if shared is not UNSET: + field_dict["shared"] = shared if policy is not UNSET: field_dict["policy"] = policy + if created_by_user is not UNSET: + field_dict["createdByUser"] = created_by_user + if name is not UNSET: + field_dict["name"] = name + if network_id is not UNSET: + 
field_dict["networkId"] = network_id + if query is not UNSET: + field_dict["query"] = query if topology is not UNSET: field_dict["topology"] = topology if allow_clear_query is not UNSET: field_dict["allowClearQuery"] = allow_clear_query - if network_id is not UNSET: - field_dict["networkId"] = network_id + if local is not UNSET: + field_dict["local"] = local if run_async is not UNSET: field_dict["runAsync"] = run_async - if shared is not UNSET: - field_dict["shared"] = shared - if unrestricted_access is not UNSET: - field_dict["unrestrictedAccess"] = unrestricted_access - if data_source_id is not UNSET: - field_dict["dataSourceId"] = data_source_id - if hide_leaf_participants is not UNSET: - field_dict["hideLeafParticipants"] = hide_leaf_participants - if query_timeout is not UNSET: - field_dict["queryTimeout"] = query_timeout - if created_with_client is not UNSET: - field_dict["createdWithClient"] = created_with_client + if unique_id is not UNSET: + field_dict["uniqueId"] = unique_id if data_source_auto_match is not UNSET: field_dict["dataSourceAutoMatch"] = data_source_auto_match + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if hide_leaf_participants is not UNSET: + field_dict["hideLeafParticipants"] = hide_leaf_participants + if local_data_selection_definition is not UNSET: + field_dict["localDataSelectionDefinition"] = local_data_selection_definition if min_contributors is not UNSET: field_dict["minContributors"] = min_contributors - if non_contributor is not UNSET: - field_dict["nonContributor"] = non_contributor - if query is not UNSET: - field_dict["query"] = query - if unique_id is not UNSET: - field_dict["uniqueId"] = unique_id + if query_timeout is not UNSET: + field_dict["queryTimeout"] = query_timeout if workflow_json is not UNSET: field_dict["workflowJSON"] = workflow_json - if authorization_status is not UNSET: - field_dict["authorizationStatus"] = authorization_status - if computation_definition is 
not UNSET: - field_dict["computationDefinition"] = computation_definition + if authorized_users is not UNSET: + field_dict["authorizedUsers"] = authorized_users if dpia is not UNSET: field_dict["dpia"] = dpia - if local_data_selection_definition is not UNSET: - field_dict["localDataSelectionDefinition"] = local_data_selection_definition - if locked is not UNSET: - field_dict["locked"] = locked - if name is not UNSET: - field_dict["name"] = name - if created_by_node is not UNSET: - field_dict["createdByNode"] = created_by_node - if created_by_user is not UNSET: - field_dict["createdByUser"] = created_by_user - if description is not UNSET: - field_dict["description"] = description - if workflow_type is not UNSET: - field_dict["workflowType"] = workflow_type + if non_contributor is not UNSET: + field_dict["nonContributor"] = non_contributor + if data_source_id is not UNSET: + field_dict["dataSourceId"] = data_source_id return field_dict @@ -236,56 +237,58 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.local_data_selection_definition import LocalDataSelectionDefinition d = src_dict.copy() - local = d.pop("local", UNSET) - - allow_shared_edit = d.pop("allowSharedEdit", UNSET) - - authorized_users = cast(List[str], d.pop("authorizedUsers", UNSET)) - - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - - _policy = d.pop("policy", UNSET) - policy: Union[Unset, ComputationPolicy] - if isinstance(_policy, Unset): - policy = UNSET + _authorization_status = d.pop("authorizationStatus", UNSET) + authorization_status: Union[Unset, AuthorizationStatus] + if isinstance(_authorization_status, Unset): + authorization_status = UNSET else: - policy = ComputationPolicy.from_dict(_policy) + authorization_status = AuthorizationStatus(_authorization_status) - _topology = d.pop("topology", UNSET) - topology: Union[Unset, Topology] - if isinstance(_topology, Unset): - topology = UNSET + _created_with_client = d.pop("createdWithClient", UNSET) + 
created_with_client: Union[Unset, Client] + if isinstance(_created_with_client, Unset): + created_with_client = UNSET else: - topology = Topology(_topology) + created_with_client = Client(_created_with_client) - allow_clear_query = d.pop("allowClearQuery", UNSET) + description = d.pop("description", UNSET) - network_id = d.pop("networkId", UNSET) + allow_shared_edit = d.pop("allowSharedEdit", UNSET) - run_async = d.pop("runAsync", UNSET) + created_by_node = d.pop("createdByNode", UNSET) - shared = d.pop("shared", UNSET) + locked = d.pop("locked", UNSET) unrestricted_access = d.pop("unrestrictedAccess", UNSET) - data_source_id = d.pop("dataSourceId", UNSET) + _workflow_type = d.pop("workflowType", UNSET) + workflow_type: Union[Unset, WorkflowType] + if isinstance(_workflow_type, Unset): + workflow_type = UNSET + else: + workflow_type = WorkflowType(_workflow_type) - hide_leaf_participants = d.pop("hideLeafParticipants", UNSET) + _computation_definition = d.pop("computationDefinition", UNSET) + computation_definition: Union[Unset, ComputationDefinition] + if isinstance(_computation_definition, Unset): + computation_definition = UNSET + else: + computation_definition = ComputationDefinition.from_dict(_computation_definition) - query_timeout = d.pop("queryTimeout", UNSET) + shared = d.pop("shared", UNSET) - _created_with_client = d.pop("createdWithClient", UNSET) - created_with_client: Union[Unset, Client] - if isinstance(_created_with_client, Unset): - created_with_client = UNSET + _policy = d.pop("policy", UNSET) + policy: Union[Unset, ComputationPolicy] + if isinstance(_policy, Unset): + policy = UNSET else: - created_with_client = Client(_created_with_client) + policy = ComputationPolicy.from_dict(_policy) - data_source_auto_match = d.pop("dataSourceAutoMatch", UNSET) + created_by_user = d.pop("createdByUser", UNSET) - min_contributors = d.pop("minContributors", UNSET) + name = d.pop("name", UNSET) - non_contributor = d.pop("nonContributor", UNSET) + network_id = 
d.pop("networkId", UNSET) _query = d.pop("query", UNSET) query: Union[Unset, DataSourceQuery] @@ -294,25 +297,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: query = DataSourceQuery.from_dict(_query) - unique_id = d.pop("uniqueId", UNSET) + _topology = d.pop("topology", UNSET) + topology: Union[Unset, Topology] + if isinstance(_topology, Unset): + topology = UNSET + else: + topology = Topology(_topology) - workflow_json = d.pop("workflowJSON", UNSET) + allow_clear_query = d.pop("allowClearQuery", UNSET) - _authorization_status = d.pop("authorizationStatus", UNSET) - authorization_status: Union[Unset, AuthorizationStatus] - if isinstance(_authorization_status, Unset): - authorization_status = UNSET - else: - authorization_status = AuthorizationStatus(_authorization_status) + local = d.pop("local", UNSET) - _computation_definition = d.pop("computationDefinition", UNSET) - computation_definition: Union[Unset, ComputationDefinition] - if isinstance(_computation_definition, Unset): - computation_definition = UNSET - else: - computation_definition = ComputationDefinition.from_dict(_computation_definition) + run_async = d.pop("runAsync", UNSET) - dpia = d.pop("dpia", UNSET) + unique_id = d.pop("uniqueId", UNSET) + + data_source_auto_match = d.pop("dataSourceAutoMatch", UNSET) + + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + + hide_leaf_participants = d.pop("hideLeafParticipants", UNSET) _local_data_selection_definition = d.pop("localDataSelectionDefinition", UNSET) local_data_selection_definition: Union[Unset, LocalDataSelectionDefinition] @@ -321,55 +325,52 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: local_data_selection_definition = LocalDataSelectionDefinition.from_dict(_local_data_selection_definition) - locked = d.pop("locked", UNSET) + min_contributors = d.pop("minContributors", UNSET) - name = d.pop("name", UNSET) + query_timeout = d.pop("queryTimeout", UNSET) - created_by_node = d.pop("createdByNode", 
UNSET) + workflow_json = d.pop("workflowJSON", UNSET) - created_by_user = d.pop("createdByUser", UNSET) + authorized_users = cast(List[str], d.pop("authorizedUsers", UNSET)) - description = d.pop("description", UNSET) + dpia = d.pop("dpia", UNSET) - _workflow_type = d.pop("workflowType", UNSET) - workflow_type: Union[Unset, ProjectBaseWorkflowType] - if isinstance(_workflow_type, Unset): - workflow_type = UNSET - else: - workflow_type = ProjectBaseWorkflowType(_workflow_type) + non_contributor = d.pop("nonContributor", UNSET) + + data_source_id = d.pop("dataSourceId", UNSET) project_base = cls( - local=local, + authorization_status=authorization_status, + created_with_client=created_with_client, + description=description, allow_shared_edit=allow_shared_edit, - authorized_users=authorized_users, - end_to_end_encrypted=end_to_end_encrypted, + created_by_node=created_by_node, + locked=locked, + unrestricted_access=unrestricted_access, + workflow_type=workflow_type, + computation_definition=computation_definition, + shared=shared, policy=policy, + created_by_user=created_by_user, + name=name, + network_id=network_id, + query=query, topology=topology, allow_clear_query=allow_clear_query, - network_id=network_id, + local=local, run_async=run_async, - shared=shared, - unrestricted_access=unrestricted_access, - data_source_id=data_source_id, - hide_leaf_participants=hide_leaf_participants, - query_timeout=query_timeout, - created_with_client=created_with_client, + unique_id=unique_id, data_source_auto_match=data_source_auto_match, + end_to_end_encrypted=end_to_end_encrypted, + hide_leaf_participants=hide_leaf_participants, + local_data_selection_definition=local_data_selection_definition, min_contributors=min_contributors, - non_contributor=non_contributor, - query=query, - unique_id=unique_id, + query_timeout=query_timeout, workflow_json=workflow_json, - authorization_status=authorization_status, - computation_definition=computation_definition, + 
authorized_users=authorized_users, dpia=dpia, - local_data_selection_definition=local_data_selection_definition, - locked=locked, - name=name, - created_by_node=created_by_node, - created_by_user=created_by_user, - description=description, - workflow_type=workflow_type, + non_contributor=non_contributor, + data_source_id=data_source_id, ) project_base.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/project_definition.py b/src/tuneinsight/api/sdk/models/project_definition.py index 643ec1c..61d29f3 100644 --- a/src/tuneinsight/api/sdk/models/project_definition.py +++ b/src/tuneinsight/api/sdk/models/project_definition.py @@ -4,8 +4,9 @@ from ..models.authorization_status import AuthorizationStatus from ..models.client import Client -from ..models.project_base_workflow_type import ProjectBaseWorkflowType +from ..models.data_source_type import DataSourceType from ..models.topology import Topology +from ..models.workflow_type import WorkflowType from ..types import UNSET, Unset if TYPE_CHECKING: @@ -22,152 +23,154 @@ class ProjectDefinition: """ Attributes: - local (Union[Unset, None, bool]): True if the project's computation should run only with local data (not - configured the network) + authorization_status (Union[Unset, AuthorizationStatus]): Authorization status of the project + created_with_client (Union[Unset, Client]): Type of client that communicates with the agent API + description (Union[Unset, None, str]): allow_shared_edit (Union[Unset, bool]): True if this project can be modified after being shared. 
Modifications of a shared project will be broadcasted to the network - authorized_users (Union[Unset, List[str]]): The IDs of the users who can run the project - end_to_end_encrypted (Union[Unset, None, bool]): whether results are always end to end encrypted and decrypted - on the client side + created_by_node (Union[Unset, str]): ID of node where the project was first created + locked (Union[Unset, None, bool]): True if the project is read-only (likely because it has already been shared) + unrestricted_access (Union[Unset, None, bool]): when set to true, then all users from the same organization are + authorized to access the project (view / edit depends on the roles) + workflow_type (Union[Unset, WorkflowType]): type of the workflow UI in the frontend + computation_definition (Union[Unset, ComputationDefinition]): Generic computation. + shared (Union[Unset, bool]): True if the project has once been shared across the participants policy (Union[Unset, ComputationPolicy]): policy to validate a specific computation + created_by_user (Union[Unset, str]): ID of user who created the project + name (Union[Unset, str]): + network_id (Union[Unset, str]): id to uniquely identify the network + query (Union[Unset, DataSourceQuery]): schema used for the query topology (Union[Unset, Topology]): Network Topologies. 'star' or 'tree'. In star topology all nodes are connected to a central node. In tree topology all nodes are connected and aware of each other. 
allow_clear_query (Union[Unset, bool]): [Dangerous, can lead to cross code data share] True if it is allowed for a client to query the data source all participants of the project and return the clear text result - network_id (Union[Unset, str]): id to uniquely identify the network + local (Union[Unset, None, bool]): True if the project's computation should run only with local data (not + configured the network) run_async (Union[Unset, bool]): flag indicating if computation should be run asynchronously - shared (Union[Unset, bool]): True if the project has once been shared across the participants - unrestricted_access (Union[Unset, None, bool]): when set to true, then all users from the same organization are - authorized to access the project (view / edit depends on the roles) - data_source_id (Union[Unset, None, str]): Unique identifier of a data source. - hide_leaf_participants (Union[Unset, None, bool]): whether leaf project participants are not shown to other leaf - participants when the project is in a star topology. - query_timeout (Union[Unset, int]): Timeout for the data source queries Default: 30. - created_with_client (Union[Unset, Client]): Type of client that communicates with the agent API + unique_id (Union[Unset, str]): Unique identifier of a project. data_source_auto_match (Union[Unset, bool]): whether or not to automatically assign the first matching datasource when the project is shared with other nodes + end_to_end_encrypted (Union[Unset, None, bool]): whether results are always end to end encrypted and decrypted + on the client side + hide_leaf_participants (Union[Unset, None, bool]): whether leaf project participants are not shown to other leaf + participants when the project is in a star topology. + local_data_selection_definition (Union[Unset, LocalDataSelectionDefinition]): datasource selection definition. 
A + selection is a "query" or data selection definition to run on the datasource min_contributors (Union[Unset, None, int]): minimum number of participants that contribute with their data required to run computations within this project + query_timeout (Union[Unset, int]): Timeout for the data source queries Default: 30. + workflow_json (Union[Unset, str]): JSON representation of the workflow UI in the frontend + authorized_users (Union[Unset, List[str]]): The IDs of the users who can run the project + dpia (Union[Unset, str]): non_contributor (Union[Unset, None, bool]): indicates that the current project participant takes part in the distributed computations but does not have any input data. By default this field is set according to the instance's configuration. - query (Union[Unset, DataSourceQuery]): schema used for the query - unique_id (Union[Unset, str]): Unique identifier of a project. - workflow_json (Union[Unset, str]): JSON representation of the workflow UI in the frontend - authorization_status (Union[Unset, AuthorizationStatus]): Authorization status of the project - computation_definition (Union[Unset, ComputationDefinition]): Generic computation. - dpia (Union[Unset, str]): - local_data_selection_definition (Union[Unset, LocalDataSelectionDefinition]): datasource selection definition. A - selection is a "query" or data selection definition to run on the datasource - locked (Union[Unset, None, bool]): True if the project is read-only (likely because it has already been shared) - name (Union[Unset, str]): - created_by_node (Union[Unset, str]): ID of node where the project was first created - created_by_user (Union[Unset, str]): ID of user who created the project - description (Union[Unset, None, str]): - workflow_type (Union[Unset, ProjectBaseWorkflowType]): type of the workflow UI in the frontend + data_source_id (Union[Unset, None, str]): Unique identifier of a data source. broadcast (Union[Unset, bool]): Temporary field. Always set to false. 
Only used for server-server communication - data_source_type (Union[Unset, str]): Type of the data source to share to other nodes to match with their data - source of the same type + data_source_type (Union[Unset, DataSourceType]): participants (Union[Unset, None, List[str]]): List of nodes involved in the project's collaboration """ - local: Union[Unset, None, bool] = UNSET + authorization_status: Union[Unset, AuthorizationStatus] = UNSET + created_with_client: Union[Unset, Client] = UNSET + description: Union[Unset, None, str] = UNSET allow_shared_edit: Union[Unset, bool] = UNSET - authorized_users: Union[Unset, List[str]] = UNSET - end_to_end_encrypted: Union[Unset, None, bool] = UNSET + created_by_node: Union[Unset, str] = UNSET + locked: Union[Unset, None, bool] = UNSET + unrestricted_access: Union[Unset, None, bool] = UNSET + workflow_type: Union[Unset, WorkflowType] = UNSET + computation_definition: Union[Unset, "ComputationDefinition"] = UNSET + shared: Union[Unset, bool] = UNSET policy: Union[Unset, "ComputationPolicy"] = UNSET + created_by_user: Union[Unset, str] = UNSET + name: Union[Unset, str] = UNSET + network_id: Union[Unset, str] = UNSET + query: Union[Unset, "DataSourceQuery"] = UNSET topology: Union[Unset, Topology] = UNSET allow_clear_query: Union[Unset, bool] = UNSET - network_id: Union[Unset, str] = UNSET + local: Union[Unset, None, bool] = UNSET run_async: Union[Unset, bool] = UNSET - shared: Union[Unset, bool] = UNSET - unrestricted_access: Union[Unset, None, bool] = UNSET - data_source_id: Union[Unset, None, str] = UNSET - hide_leaf_participants: Union[Unset, None, bool] = UNSET - query_timeout: Union[Unset, int] = 30 - created_with_client: Union[Unset, Client] = UNSET + unique_id: Union[Unset, str] = UNSET data_source_auto_match: Union[Unset, bool] = UNSET + end_to_end_encrypted: Union[Unset, None, bool] = UNSET + hide_leaf_participants: Union[Unset, None, bool] = UNSET + local_data_selection_definition: Union[Unset, 
"LocalDataSelectionDefinition"] = UNSET min_contributors: Union[Unset, None, int] = UNSET - non_contributor: Union[Unset, None, bool] = UNSET - query: Union[Unset, "DataSourceQuery"] = UNSET - unique_id: Union[Unset, str] = UNSET + query_timeout: Union[Unset, int] = 30 workflow_json: Union[Unset, str] = UNSET - authorization_status: Union[Unset, AuthorizationStatus] = UNSET - computation_definition: Union[Unset, "ComputationDefinition"] = UNSET + authorized_users: Union[Unset, List[str]] = UNSET dpia: Union[Unset, str] = UNSET - local_data_selection_definition: Union[Unset, "LocalDataSelectionDefinition"] = UNSET - locked: Union[Unset, None, bool] = UNSET - name: Union[Unset, str] = UNSET - created_by_node: Union[Unset, str] = UNSET - created_by_user: Union[Unset, str] = UNSET - description: Union[Unset, None, str] = UNSET - workflow_type: Union[Unset, ProjectBaseWorkflowType] = UNSET + non_contributor: Union[Unset, None, bool] = UNSET + data_source_id: Union[Unset, None, str] = UNSET broadcast: Union[Unset, bool] = UNSET - data_source_type: Union[Unset, str] = UNSET + data_source_type: Union[Unset, DataSourceType] = UNSET participants: Union[Unset, None, List[str]] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - local = self.local + authorization_status: Union[Unset, str] = UNSET + if not isinstance(self.authorization_status, Unset): + authorization_status = self.authorization_status.value + + created_with_client: Union[Unset, str] = UNSET + if not isinstance(self.created_with_client, Unset): + created_with_client = self.created_with_client.value + + description = self.description allow_shared_edit = self.allow_shared_edit - authorized_users: Union[Unset, List[str]] = UNSET - if not isinstance(self.authorized_users, Unset): - authorized_users = self.authorized_users + created_by_node = self.created_by_node + locked = self.locked + unrestricted_access = self.unrestricted_access + 
workflow_type: Union[Unset, str] = UNSET + if not isinstance(self.workflow_type, Unset): + workflow_type = self.workflow_type.value - end_to_end_encrypted = self.end_to_end_encrypted + computation_definition: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.computation_definition, Unset): + computation_definition = self.computation_definition.to_dict() + + shared = self.shared policy: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.policy, Unset): policy = self.policy.to_dict() + created_by_user = self.created_by_user + name = self.name + network_id = self.network_id + query: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.query, Unset): + query = self.query.to_dict() + topology: Union[Unset, str] = UNSET if not isinstance(self.topology, Unset): topology = self.topology.value allow_clear_query = self.allow_clear_query - network_id = self.network_id + local = self.local run_async = self.run_async - shared = self.shared - unrestricted_access = self.unrestricted_access - data_source_id = self.data_source_id - hide_leaf_participants = self.hide_leaf_participants - query_timeout = self.query_timeout - created_with_client: Union[Unset, str] = UNSET - if not isinstance(self.created_with_client, Unset): - created_with_client = self.created_with_client.value - - data_source_auto_match = self.data_source_auto_match - min_contributors = self.min_contributors - non_contributor = self.non_contributor - query: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.query, Unset): - query = self.query.to_dict() - unique_id = self.unique_id - workflow_json = self.workflow_json - authorization_status: Union[Unset, str] = UNSET - if not isinstance(self.authorization_status, Unset): - authorization_status = self.authorization_status.value - - computation_definition: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.computation_definition, Unset): - computation_definition = self.computation_definition.to_dict() - - dpia = 
self.dpia + data_source_auto_match = self.data_source_auto_match + end_to_end_encrypted = self.end_to_end_encrypted + hide_leaf_participants = self.hide_leaf_participants local_data_selection_definition: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_data_selection_definition, Unset): local_data_selection_definition = self.local_data_selection_definition.to_dict() - locked = self.locked - name = self.name - created_by_node = self.created_by_node - created_by_user = self.created_by_user - description = self.description - workflow_type: Union[Unset, str] = UNSET - if not isinstance(self.workflow_type, Unset): - workflow_type = self.workflow_type.value + min_contributors = self.min_contributors + query_timeout = self.query_timeout + workflow_json = self.workflow_json + authorized_users: Union[Unset, List[str]] = UNSET + if not isinstance(self.authorized_users, Unset): + authorized_users = self.authorized_users + dpia = self.dpia + non_contributor = self.non_contributor + data_source_id = self.data_source_id broadcast = self.broadcast - data_source_type = self.data_source_type + data_source_type: Union[Unset, str] = UNSET + if not isinstance(self.data_source_type, Unset): + data_source_type = self.data_source_type.value + participants: Union[Unset, None, List[str]] = UNSET if not isinstance(self.participants, Unset): if self.participants is None: @@ -178,68 +181,68 @@ def to_dict(self) -> Dict[str, Any]: field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if local is not UNSET: - field_dict["local"] = local + if authorization_status is not UNSET: + field_dict["authorizationStatus"] = authorization_status + if created_with_client is not UNSET: + field_dict["createdWithClient"] = created_with_client + if description is not UNSET: + field_dict["description"] = description if allow_shared_edit is not UNSET: field_dict["allowSharedEdit"] = allow_shared_edit - if authorized_users is not UNSET: - 
field_dict["authorizedUsers"] = authorized_users - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted + if created_by_node is not UNSET: + field_dict["createdByNode"] = created_by_node + if locked is not UNSET: + field_dict["locked"] = locked + if unrestricted_access is not UNSET: + field_dict["unrestrictedAccess"] = unrestricted_access + if workflow_type is not UNSET: + field_dict["workflowType"] = workflow_type + if computation_definition is not UNSET: + field_dict["computationDefinition"] = computation_definition + if shared is not UNSET: + field_dict["shared"] = shared if policy is not UNSET: field_dict["policy"] = policy + if created_by_user is not UNSET: + field_dict["createdByUser"] = created_by_user + if name is not UNSET: + field_dict["name"] = name + if network_id is not UNSET: + field_dict["networkId"] = network_id + if query is not UNSET: + field_dict["query"] = query if topology is not UNSET: field_dict["topology"] = topology if allow_clear_query is not UNSET: field_dict["allowClearQuery"] = allow_clear_query - if network_id is not UNSET: - field_dict["networkId"] = network_id + if local is not UNSET: + field_dict["local"] = local if run_async is not UNSET: field_dict["runAsync"] = run_async - if shared is not UNSET: - field_dict["shared"] = shared - if unrestricted_access is not UNSET: - field_dict["unrestrictedAccess"] = unrestricted_access - if data_source_id is not UNSET: - field_dict["dataSourceId"] = data_source_id - if hide_leaf_participants is not UNSET: - field_dict["hideLeafParticipants"] = hide_leaf_participants - if query_timeout is not UNSET: - field_dict["queryTimeout"] = query_timeout - if created_with_client is not UNSET: - field_dict["createdWithClient"] = created_with_client + if unique_id is not UNSET: + field_dict["uniqueId"] = unique_id if data_source_auto_match is not UNSET: field_dict["dataSourceAutoMatch"] = data_source_auto_match + if end_to_end_encrypted is not UNSET: + 
field_dict["endToEndEncrypted"] = end_to_end_encrypted + if hide_leaf_participants is not UNSET: + field_dict["hideLeafParticipants"] = hide_leaf_participants + if local_data_selection_definition is not UNSET: + field_dict["localDataSelectionDefinition"] = local_data_selection_definition if min_contributors is not UNSET: field_dict["minContributors"] = min_contributors - if non_contributor is not UNSET: - field_dict["nonContributor"] = non_contributor - if query is not UNSET: - field_dict["query"] = query - if unique_id is not UNSET: - field_dict["uniqueId"] = unique_id + if query_timeout is not UNSET: + field_dict["queryTimeout"] = query_timeout if workflow_json is not UNSET: field_dict["workflowJSON"] = workflow_json - if authorization_status is not UNSET: - field_dict["authorizationStatus"] = authorization_status - if computation_definition is not UNSET: - field_dict["computationDefinition"] = computation_definition + if authorized_users is not UNSET: + field_dict["authorizedUsers"] = authorized_users if dpia is not UNSET: field_dict["dpia"] = dpia - if local_data_selection_definition is not UNSET: - field_dict["localDataSelectionDefinition"] = local_data_selection_definition - if locked is not UNSET: - field_dict["locked"] = locked - if name is not UNSET: - field_dict["name"] = name - if created_by_node is not UNSET: - field_dict["createdByNode"] = created_by_node - if created_by_user is not UNSET: - field_dict["createdByUser"] = created_by_user - if description is not UNSET: - field_dict["description"] = description - if workflow_type is not UNSET: - field_dict["workflowType"] = workflow_type + if non_contributor is not UNSET: + field_dict["nonContributor"] = non_contributor + if data_source_id is not UNSET: + field_dict["dataSourceId"] = data_source_id if broadcast is not UNSET: field_dict["broadcast"] = broadcast if data_source_type is not UNSET: @@ -257,56 +260,58 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from 
..models.local_data_selection_definition import LocalDataSelectionDefinition d = src_dict.copy() - local = d.pop("local", UNSET) - - allow_shared_edit = d.pop("allowSharedEdit", UNSET) - - authorized_users = cast(List[str], d.pop("authorizedUsers", UNSET)) - - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - - _policy = d.pop("policy", UNSET) - policy: Union[Unset, ComputationPolicy] - if isinstance(_policy, Unset): - policy = UNSET + _authorization_status = d.pop("authorizationStatus", UNSET) + authorization_status: Union[Unset, AuthorizationStatus] + if isinstance(_authorization_status, Unset): + authorization_status = UNSET else: - policy = ComputationPolicy.from_dict(_policy) + authorization_status = AuthorizationStatus(_authorization_status) - _topology = d.pop("topology", UNSET) - topology: Union[Unset, Topology] - if isinstance(_topology, Unset): - topology = UNSET + _created_with_client = d.pop("createdWithClient", UNSET) + created_with_client: Union[Unset, Client] + if isinstance(_created_with_client, Unset): + created_with_client = UNSET else: - topology = Topology(_topology) + created_with_client = Client(_created_with_client) - allow_clear_query = d.pop("allowClearQuery", UNSET) + description = d.pop("description", UNSET) - network_id = d.pop("networkId", UNSET) + allow_shared_edit = d.pop("allowSharedEdit", UNSET) - run_async = d.pop("runAsync", UNSET) + created_by_node = d.pop("createdByNode", UNSET) - shared = d.pop("shared", UNSET) + locked = d.pop("locked", UNSET) unrestricted_access = d.pop("unrestrictedAccess", UNSET) - data_source_id = d.pop("dataSourceId", UNSET) + _workflow_type = d.pop("workflowType", UNSET) + workflow_type: Union[Unset, WorkflowType] + if isinstance(_workflow_type, Unset): + workflow_type = UNSET + else: + workflow_type = WorkflowType(_workflow_type) - hide_leaf_participants = d.pop("hideLeafParticipants", UNSET) + _computation_definition = d.pop("computationDefinition", UNSET) + computation_definition: 
Union[Unset, ComputationDefinition] + if isinstance(_computation_definition, Unset): + computation_definition = UNSET + else: + computation_definition = ComputationDefinition.from_dict(_computation_definition) - query_timeout = d.pop("queryTimeout", UNSET) + shared = d.pop("shared", UNSET) - _created_with_client = d.pop("createdWithClient", UNSET) - created_with_client: Union[Unset, Client] - if isinstance(_created_with_client, Unset): - created_with_client = UNSET + _policy = d.pop("policy", UNSET) + policy: Union[Unset, ComputationPolicy] + if isinstance(_policy, Unset): + policy = UNSET else: - created_with_client = Client(_created_with_client) + policy = ComputationPolicy.from_dict(_policy) - data_source_auto_match = d.pop("dataSourceAutoMatch", UNSET) + created_by_user = d.pop("createdByUser", UNSET) - min_contributors = d.pop("minContributors", UNSET) + name = d.pop("name", UNSET) - non_contributor = d.pop("nonContributor", UNSET) + network_id = d.pop("networkId", UNSET) _query = d.pop("query", UNSET) query: Union[Unset, DataSourceQuery] @@ -315,25 +320,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: query = DataSourceQuery.from_dict(_query) - unique_id = d.pop("uniqueId", UNSET) + _topology = d.pop("topology", UNSET) + topology: Union[Unset, Topology] + if isinstance(_topology, Unset): + topology = UNSET + else: + topology = Topology(_topology) - workflow_json = d.pop("workflowJSON", UNSET) + allow_clear_query = d.pop("allowClearQuery", UNSET) - _authorization_status = d.pop("authorizationStatus", UNSET) - authorization_status: Union[Unset, AuthorizationStatus] - if isinstance(_authorization_status, Unset): - authorization_status = UNSET - else: - authorization_status = AuthorizationStatus(_authorization_status) + local = d.pop("local", UNSET) - _computation_definition = d.pop("computationDefinition", UNSET) - computation_definition: Union[Unset, ComputationDefinition] - if isinstance(_computation_definition, Unset): - 
computation_definition = UNSET - else: - computation_definition = ComputationDefinition.from_dict(_computation_definition) + run_async = d.pop("runAsync", UNSET) - dpia = d.pop("dpia", UNSET) + unique_id = d.pop("uniqueId", UNSET) + + data_source_auto_match = d.pop("dataSourceAutoMatch", UNSET) + + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + + hide_leaf_participants = d.pop("hideLeafParticipants", UNSET) _local_data_selection_definition = d.pop("localDataSelectionDefinition", UNSET) local_data_selection_definition: Union[Unset, LocalDataSelectionDefinition] @@ -342,61 +348,63 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: local_data_selection_definition = LocalDataSelectionDefinition.from_dict(_local_data_selection_definition) - locked = d.pop("locked", UNSET) + min_contributors = d.pop("minContributors", UNSET) - name = d.pop("name", UNSET) + query_timeout = d.pop("queryTimeout", UNSET) - created_by_node = d.pop("createdByNode", UNSET) + workflow_json = d.pop("workflowJSON", UNSET) - created_by_user = d.pop("createdByUser", UNSET) + authorized_users = cast(List[str], d.pop("authorizedUsers", UNSET)) - description = d.pop("description", UNSET) + dpia = d.pop("dpia", UNSET) - _workflow_type = d.pop("workflowType", UNSET) - workflow_type: Union[Unset, ProjectBaseWorkflowType] - if isinstance(_workflow_type, Unset): - workflow_type = UNSET - else: - workflow_type = ProjectBaseWorkflowType(_workflow_type) + non_contributor = d.pop("nonContributor", UNSET) + + data_source_id = d.pop("dataSourceId", UNSET) broadcast = d.pop("broadcast", UNSET) - data_source_type = d.pop("dataSourceType", UNSET) + _data_source_type = d.pop("dataSourceType", UNSET) + data_source_type: Union[Unset, DataSourceType] + if isinstance(_data_source_type, Unset): + data_source_type = UNSET + else: + data_source_type = DataSourceType(_data_source_type) participants = cast(List[str], d.pop("participants", UNSET)) project_definition = cls( - local=local, + 
authorization_status=authorization_status, + created_with_client=created_with_client, + description=description, allow_shared_edit=allow_shared_edit, - authorized_users=authorized_users, - end_to_end_encrypted=end_to_end_encrypted, + created_by_node=created_by_node, + locked=locked, + unrestricted_access=unrestricted_access, + workflow_type=workflow_type, + computation_definition=computation_definition, + shared=shared, policy=policy, + created_by_user=created_by_user, + name=name, + network_id=network_id, + query=query, topology=topology, allow_clear_query=allow_clear_query, - network_id=network_id, + local=local, run_async=run_async, - shared=shared, - unrestricted_access=unrestricted_access, - data_source_id=data_source_id, - hide_leaf_participants=hide_leaf_participants, - query_timeout=query_timeout, - created_with_client=created_with_client, + unique_id=unique_id, data_source_auto_match=data_source_auto_match, + end_to_end_encrypted=end_to_end_encrypted, + hide_leaf_participants=hide_leaf_participants, + local_data_selection_definition=local_data_selection_definition, min_contributors=min_contributors, - non_contributor=non_contributor, - query=query, - unique_id=unique_id, + query_timeout=query_timeout, workflow_json=workflow_json, - authorization_status=authorization_status, - computation_definition=computation_definition, + authorized_users=authorized_users, dpia=dpia, - local_data_selection_definition=local_data_selection_definition, - locked=locked, - name=name, - created_by_node=created_by_node, - created_by_user=created_by_user, - description=description, - workflow_type=workflow_type, + non_contributor=non_contributor, + data_source_id=data_source_id, broadcast=broadcast, data_source_type=data_source_type, participants=participants, diff --git a/src/tuneinsight/api/sdk/models/query.py b/src/tuneinsight/api/sdk/models/query.py index 7808758..2293b07 100644 --- a/src/tuneinsight/api/sdk/models/query.py +++ b/src/tuneinsight/api/sdk/models/query.py @@ 
-17,66 +17,66 @@ class Query: """Data source query Attributes: - created_by_user (Union[Unset, str]): ID of user who created the project - project_id (Union[Unset, str]): Unique identifier of a project. query_string (Union[Unset, str]): String of the query e.g. SQL or JSON results (Union[Unset, QueryResults]): result dataobject IDs + error (Union[Unset, str]): Error message, in case status of the query is error. + created_by_user (Union[Unset, str]): ID of user who created the project + id (Union[Unset, str]): + project_id (Union[Unset, str]): Unique identifier of a project. status (Union[Unset, QueryStatus]): updated_at (Union[Unset, str]): created_at (Union[Unset, str]): - error (Union[Unset, str]): Error message, in case status of the query is error. - id (Union[Unset, str]): """ - created_by_user: Union[Unset, str] = UNSET - project_id: Union[Unset, str] = UNSET query_string: Union[Unset, str] = UNSET results: Union[Unset, "QueryResults"] = UNSET + error: Union[Unset, str] = UNSET + created_by_user: Union[Unset, str] = UNSET + id: Union[Unset, str] = UNSET + project_id: Union[Unset, str] = UNSET status: Union[Unset, QueryStatus] = UNSET updated_at: Union[Unset, str] = UNSET created_at: Union[Unset, str] = UNSET - error: Union[Unset, str] = UNSET - id: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - created_by_user = self.created_by_user - project_id = self.project_id query_string = self.query_string results: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = self.results.to_dict() + error = self.error + created_by_user = self.created_by_user + id = self.id + project_id = self.project_id status: Union[Unset, str] = UNSET if not isinstance(self.status, Unset): status = self.status.value updated_at = self.updated_at created_at = self.created_at - error = self.error - id = self.id field_dict: Dict[str, Any] = {} 
field_dict.update(self.additional_properties) field_dict.update({}) - if created_by_user is not UNSET: - field_dict["createdByUser"] = created_by_user - if project_id is not UNSET: - field_dict["projectId"] = project_id if query_string is not UNSET: field_dict["queryString"] = query_string if results is not UNSET: field_dict["results"] = results + if error is not UNSET: + field_dict["error"] = error + if created_by_user is not UNSET: + field_dict["createdByUser"] = created_by_user + if id is not UNSET: + field_dict["id"] = id + if project_id is not UNSET: + field_dict["projectId"] = project_id if status is not UNSET: field_dict["status"] = status if updated_at is not UNSET: field_dict["updatedAt"] = updated_at if created_at is not UNSET: field_dict["createdAt"] = created_at - if error is not UNSET: - field_dict["error"] = error - if id is not UNSET: - field_dict["id"] = id return field_dict @@ -85,10 +85,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.query_results import QueryResults d = src_dict.copy() - created_by_user = d.pop("createdByUser", UNSET) - - project_id = d.pop("projectId", UNSET) - query_string = d.pop("queryString", UNSET) _results = d.pop("results", UNSET) @@ -98,6 +94,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: results = QueryResults.from_dict(_results) + error = d.pop("error", UNSET) + + created_by_user = d.pop("createdByUser", UNSET) + + id = d.pop("id", UNSET) + + project_id = d.pop("projectId", UNSET) + _status = d.pop("status", UNSET) status: Union[Unset, QueryStatus] if isinstance(_status, Unset): @@ -109,20 +113,16 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: created_at = d.pop("createdAt", UNSET) - error = d.pop("error", UNSET) - - id = d.pop("id", UNSET) - query = cls( - created_by_user=created_by_user, - project_id=project_id, query_string=query_string, results=results, + error=error, + created_by_user=created_by_user, + id=id, + project_id=project_id, 
status=status, updated_at=updated_at, created_at=created_at, - error=error, - id=id, ) query.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/realm_role.py b/src/tuneinsight/api/sdk/models/realm_role.py index 3d5abc7..6952149 100644 --- a/src/tuneinsight/api/sdk/models/realm_role.py +++ b/src/tuneinsight/api/sdk/models/realm_role.py @@ -11,37 +11,33 @@ class RealmRole: """ Attributes: - id (Union[Unset, str]): - name (Union[Unset, str]): client_role (Union[Unset, bool]): composite (Union[Unset, bool]): container_id (Union[Unset, str]): description (Union[Unset, str]): + id (Union[Unset, str]): + name (Union[Unset, str]): """ - id: Union[Unset, str] = UNSET - name: Union[Unset, str] = UNSET client_role: Union[Unset, bool] = UNSET composite: Union[Unset, bool] = UNSET container_id: Union[Unset, str] = UNSET description: Union[Unset, str] = UNSET + id: Union[Unset, str] = UNSET + name: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - id = self.id - name = self.name client_role = self.client_role composite = self.composite container_id = self.container_id description = self.description + id = self.id + name = self.name field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if id is not UNSET: - field_dict["id"] = id - if name is not UNSET: - field_dict["name"] = name if client_role is not UNSET: field_dict["clientRole"] = client_role if composite is not UNSET: @@ -50,16 +46,16 @@ def to_dict(self) -> Dict[str, Any]: field_dict["containerId"] = container_id if description is not UNSET: field_dict["description"] = description + if id is not UNSET: + field_dict["id"] = id + if name is not UNSET: + field_dict["name"] = name return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - id = d.pop("id", UNSET) - - name = d.pop("name", UNSET) - client_role = 
d.pop("clientRole", UNSET) composite = d.pop("composite", UNSET) @@ -68,13 +64,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: description = d.pop("description", UNSET) + id = d.pop("id", UNSET) + + name = d.pop("name", UNSET) + realm_role = cls( - id=id, - name=name, client_role=client_role, composite=composite, container_id=container_id, description=description, + id=id, + name=name, ) realm_role.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/relin_key_gen.py b/src/tuneinsight/api/sdk/models/relin_key_gen.py index 1cc12cd..ce055f4 100644 --- a/src/tuneinsight/api/sdk/models/relin_key_gen.py +++ b/src/tuneinsight/api/sdk/models/relin_key_gen.py @@ -22,6 +22,8 @@ class RelinKeyGen: """ Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -32,14 +34,7 @@ class RelinKeyGen: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. 
+ run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -48,96 +43,102 @@ class RelinKeyGen: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. + preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. 
- run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. - end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. 
""" type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not 
isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() + wait = self.wait field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) @@ 
-146,46 +147,46 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - 
if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait return field_dict @@ -199,6 +200,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -206,6 +209,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -213,24 +227,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, 
ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -243,54 +251,47 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) - - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) + wait = d.pop("wait", UNSET) relin_key_gen = cls( type=type, - input_clipping_method=input_clipping_method, - 
preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, + wait=wait, ) relin_key_gen.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/result.py b/src/tuneinsight/api/sdk/models/result.py index 97322e2..0eff13f 100644 --- a/src/tuneinsight/api/sdk/models/result.py +++ b/src/tuneinsight/api/sdk/models/result.py @@ -16,43 +16,64 @@ class Result: """ Attributes: + is_large (Union[Unset, None, bool]): format to display the result shared (Union[Unset, None, bool]): if set to true, the result is shared with users from the same project tags (Union[Unset, List[str]]): title (Union[Unset, str]): title given to the result - is_large (Union[Unset, None, bool]): format to display the result + updated_at (Union[Unset, str]): + collective_encrypted (Union[Unset, None, bool]): + end_to_end_encrypted (Union[Unset, bool]): + metadata (Union[Unset, ResultMetadata]): various metadata field along with the result to provide additional + context + switching_key_id (Union[Unset, str]): Unique identifier of a data object. + switching_params (Union[Unset, str]): + original_ciphertext_id (Union[Unset, str]): Unique identifier of a data object. 
+ owner (Union[Unset, str]): computation_id (Union[Unset, str]): Identifier of a computation, unique across all computing nodes. computation_type (Union[Unset, ComputationType]): Type of the computation. created_at (Union[Unset, str]): data_object_id (Union[Unset, str]): Unique identifier of a data object. id (Union[Unset, str]): Unique identifier of a result. - metadata (Union[Unset, ResultMetadata]): various metadata field along with the result to provide additional - context - owner (Union[Unset, str]): - updated_at (Union[Unset, str]): """ + is_large: Union[Unset, None, bool] = UNSET shared: Union[Unset, None, bool] = UNSET tags: Union[Unset, List[str]] = UNSET title: Union[Unset, str] = UNSET - is_large: Union[Unset, None, bool] = UNSET + updated_at: Union[Unset, str] = UNSET + collective_encrypted: Union[Unset, None, bool] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + metadata: Union[Unset, "ResultMetadata"] = UNSET + switching_key_id: Union[Unset, str] = UNSET + switching_params: Union[Unset, str] = UNSET + original_ciphertext_id: Union[Unset, str] = UNSET + owner: Union[Unset, str] = UNSET computation_id: Union[Unset, str] = UNSET computation_type: Union[Unset, ComputationType] = UNSET created_at: Union[Unset, str] = UNSET data_object_id: Union[Unset, str] = UNSET id: Union[Unset, str] = UNSET - metadata: Union[Unset, "ResultMetadata"] = UNSET - owner: Union[Unset, str] = UNSET - updated_at: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: + is_large = self.is_large shared = self.shared tags: Union[Unset, List[str]] = UNSET if not isinstance(self.tags, Unset): tags = self.tags title = self.title - is_large = self.is_large + updated_at = self.updated_at + collective_encrypted = self.collective_encrypted + end_to_end_encrypted = self.end_to_end_encrypted + metadata: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.metadata, Unset): + metadata = 
self.metadata.to_dict() + + switching_key_id = self.switching_key_id + switching_params = self.switching_params + original_ciphertext_id = self.original_ciphertext_id + owner = self.owner computation_id = self.computation_id computation_type: Union[Unset, str] = UNSET if not isinstance(self.computation_type, Unset): @@ -61,24 +82,34 @@ def to_dict(self) -> Dict[str, Any]: created_at = self.created_at data_object_id = self.data_object_id id = self.id - metadata: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.metadata, Unset): - metadata = self.metadata.to_dict() - - owner = self.owner - updated_at = self.updated_at field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) + if is_large is not UNSET: + field_dict["isLarge"] = is_large if shared is not UNSET: field_dict["shared"] = shared if tags is not UNSET: field_dict["tags"] = tags if title is not UNSET: field_dict["title"] = title - if is_large is not UNSET: - field_dict["isLarge"] = is_large + if updated_at is not UNSET: + field_dict["updatedAt"] = updated_at + if collective_encrypted is not UNSET: + field_dict["collectiveEncrypted"] = collective_encrypted + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if metadata is not UNSET: + field_dict["metadata"] = metadata + if switching_key_id is not UNSET: + field_dict["switchingKeyId"] = switching_key_id + if switching_params is not UNSET: + field_dict["switchingParams"] = switching_params + if original_ciphertext_id is not UNSET: + field_dict["originalCiphertextID"] = original_ciphertext_id + if owner is not UNSET: + field_dict["owner"] = owner if computation_id is not UNSET: field_dict["computationId"] = computation_id if computation_type is not UNSET: @@ -89,12 +120,6 @@ def to_dict(self) -> Dict[str, Any]: field_dict["dataObjectId"] = data_object_id if id is not UNSET: field_dict["id"] = id - if metadata is not UNSET: - field_dict["metadata"] = metadata - if 
owner is not UNSET: - field_dict["owner"] = owner - if updated_at is not UNSET: - field_dict["updatedAt"] = updated_at return field_dict @@ -103,13 +128,34 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.result_metadata import ResultMetadata d = src_dict.copy() + is_large = d.pop("isLarge", UNSET) + shared = d.pop("shared", UNSET) tags = cast(List[str], d.pop("tags", UNSET)) title = d.pop("title", UNSET) - is_large = d.pop("isLarge", UNSET) + updated_at = d.pop("updatedAt", UNSET) + + collective_encrypted = d.pop("collectiveEncrypted", UNSET) + + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + + _metadata = d.pop("metadata", UNSET) + metadata: Union[Unset, ResultMetadata] + if isinstance(_metadata, Unset): + metadata = UNSET + else: + metadata = ResultMetadata.from_dict(_metadata) + + switching_key_id = d.pop("switchingKeyId", UNSET) + + switching_params = d.pop("switchingParams", UNSET) + + original_ciphertext_id = d.pop("originalCiphertextID", UNSET) + + owner = d.pop("owner", UNSET) computation_id = d.pop("computationId", UNSET) @@ -126,30 +172,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: id = d.pop("id", UNSET) - _metadata = d.pop("metadata", UNSET) - metadata: Union[Unset, ResultMetadata] - if isinstance(_metadata, Unset): - metadata = UNSET - else: - metadata = ResultMetadata.from_dict(_metadata) - - owner = d.pop("owner", UNSET) - - updated_at = d.pop("updatedAt", UNSET) - result = cls( + is_large=is_large, shared=shared, tags=tags, title=title, - is_large=is_large, + updated_at=updated_at, + collective_encrypted=collective_encrypted, + end_to_end_encrypted=end_to_end_encrypted, + metadata=metadata, + switching_key_id=switching_key_id, + switching_params=switching_params, + original_ciphertext_id=original_ciphertext_id, + owner=owner, computation_id=computation_id, computation_type=computation_type, created_at=created_at, data_object_id=data_object_id, id=id, - metadata=metadata, - owner=owner, - 
updated_at=updated_at, ) result.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/result_content.py b/src/tuneinsight/api/sdk/models/result_content.py index 43ca66d..ac9e753 100644 --- a/src/tuneinsight/api/sdk/models/result_content.py +++ b/src/tuneinsight/api/sdk/models/result_content.py @@ -18,21 +18,17 @@ class ResultContent: """result along with content and computation details Attributes: - result (Union[Unset, Result]): computation (Union[Unset, Computation]): Metadata of a computation. content (Union[Unset, Content]): Content that can be retrieved and displayed for the user + result (Union[Unset, Result]): """ - result: Union[Unset, "Result"] = UNSET computation: Union[Unset, "Computation"] = UNSET content: Union[Unset, "Content"] = UNSET + result: Union[Unset, "Result"] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - result: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.result, Unset): - result = self.result.to_dict() - computation: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.computation, Unset): computation = self.computation.to_dict() @@ -41,15 +37,19 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.content, Unset): content = self.content.to_dict() + result: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.result, Unset): + result = self.result.to_dict() + field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if result is not UNSET: - field_dict["result"] = result if computation is not UNSET: field_dict["computation"] = computation if content is not UNSET: field_dict["content"] = content + if result is not UNSET: + field_dict["result"] = result return field_dict @@ -60,13 +60,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.result import Result d = src_dict.copy() - _result = d.pop("result", UNSET) - result: Union[Unset, Result] 
- if isinstance(_result, Unset): - result = UNSET - else: - result = Result.from_dict(_result) - _computation = d.pop("computation", UNSET) computation: Union[Unset, Computation] if isinstance(_computation, Unset): @@ -81,10 +74,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: content = Content.from_dict(_content) + _result = d.pop("result", UNSET) + result: Union[Unset, Result] + if isinstance(_result, Unset): + result = UNSET + else: + result = Result.from_dict(_result) + result_content = cls( - result=result, computation=computation, content=content, + result=result, ) result_content.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/result_definition.py b/src/tuneinsight/api/sdk/models/result_definition.py index 9f9c1de..90e0f28 100644 --- a/src/tuneinsight/api/sdk/models/result_definition.py +++ b/src/tuneinsight/api/sdk/models/result_definition.py @@ -12,57 +12,57 @@ class ResultDefinition: """user-defined computation result fields Attributes: + is_large (Union[Unset, None, bool]): format to display the result shared (Union[Unset, None, bool]): if set to true, the result is shared with users from the same project tags (Union[Unset, List[str]]): title (Union[Unset, str]): title given to the result - is_large (Union[Unset, None, bool]): format to display the result """ + is_large: Union[Unset, None, bool] = UNSET shared: Union[Unset, None, bool] = UNSET tags: Union[Unset, List[str]] = UNSET title: Union[Unset, str] = UNSET - is_large: Union[Unset, None, bool] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: + is_large = self.is_large shared = self.shared tags: Union[Unset, List[str]] = UNSET if not isinstance(self.tags, Unset): tags = self.tags title = self.title - is_large = self.is_large field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) + if is_large is not UNSET: + field_dict["isLarge"] = is_large 
if shared is not UNSET: field_dict["shared"] = shared if tags is not UNSET: field_dict["tags"] = tags if title is not UNSET: field_dict["title"] = title - if is_large is not UNSET: - field_dict["isLarge"] = is_large return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() + is_large = d.pop("isLarge", UNSET) + shared = d.pop("shared", UNSET) tags = cast(List[str], d.pop("tags", UNSET)) title = d.pop("title", UNSET) - is_large = d.pop("isLarge", UNSET) - result_definition = cls( + is_large=is_large, shared=shared, tags=tags, title=title, - is_large=is_large, ) result_definition.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/result_metadata.py b/src/tuneinsight/api/sdk/models/result_metadata.py index fdfb0d5..a12f77c 100644 --- a/src/tuneinsight/api/sdk/models/result_metadata.py +++ b/src/tuneinsight/api/sdk/models/result_metadata.py @@ -1,7 +1,8 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, Dict, List, Type, TypeVar, Union, cast import attr +from ..models.noise_distributions import NoiseDistributions from ..types import UNSET, Unset T = TypeVar("T", bound="ResultMetadata") @@ -12,31 +13,48 @@ class ResultMetadata: """various metadata field along with the result to provide additional context Attributes: - noise_bound (Union[Unset, float]): numerical bound on the amount of noise added to the result, on a 95% - confidence interval. 
+ noise_scale (Union[Unset, List[float]]): standard deviation of the noise added on each entry in the results + noise_type (Union[Unset, NoiseDistributions]): the distribution of the noise added on each entry in the results """ - noise_bound: Union[Unset, float] = UNSET + noise_scale: Union[Unset, List[float]] = UNSET + noise_type: Union[Unset, NoiseDistributions] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - noise_bound = self.noise_bound + noise_scale: Union[Unset, List[float]] = UNSET + if not isinstance(self.noise_scale, Unset): + noise_scale = self.noise_scale + + noise_type: Union[Unset, str] = UNSET + if not isinstance(self.noise_type, Unset): + noise_type = self.noise_type.value field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if noise_bound is not UNSET: - field_dict["noiseBound"] = noise_bound + if noise_scale is not UNSET: + field_dict["noiseScale"] = noise_scale + if noise_type is not UNSET: + field_dict["noiseType"] = noise_type return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - noise_bound = d.pop("noiseBound", UNSET) + noise_scale = cast(List[float], d.pop("noiseScale", UNSET)) + + _noise_type = d.pop("noiseType", UNSET) + noise_type: Union[Unset, NoiseDistributions] + if isinstance(_noise_type, Unset): + noise_type = UNSET + else: + noise_type = NoiseDistributions(_noise_type) result_metadata = cls( - noise_bound=noise_bound, + noise_scale=noise_scale, + noise_type=noise_type, ) result_metadata.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/result_release.py b/src/tuneinsight/api/sdk/models/result_release.py new file mode 100644 index 0000000..063a55b --- /dev/null +++ b/src/tuneinsight/api/sdk/models/result_release.py @@ -0,0 +1,58 @@ +from typing import Any, Dict, List, Type, TypeVar, Union + +import attr + +from ..types import UNSET, 
Unset + +T = TypeVar("T", bound="ResultRelease") + + +@attr.s(auto_attribs=True) +class ResultRelease: + """required data to re-encrypt a result + + Attributes: + public_key (Union[Unset, str]): public key to which the result is re-encrypted + """ + + public_key: Union[Unset, str] = UNSET + additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + public_key = self.public_key + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if public_key is not UNSET: + field_dict["publicKey"] = public_key + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + d = src_dict.copy() + public_key = d.pop("publicKey", UNSET) + + result_release = cls( + public_key=public_key, + ) + + result_release.additional_properties = d + return result_release + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/src/tuneinsight/api/sdk/models/rot_key_gen.py b/src/tuneinsight/api/sdk/models/rot_key_gen.py index 9da55f4..59cb243 100644 --- a/src/tuneinsight/api/sdk/models/rot_key_gen.py +++ b/src/tuneinsight/api/sdk/models/rot_key_gen.py @@ -23,6 +23,8 @@ class RotKeyGen: """ Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. 
input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -33,14 +35,7 @@ class RotKeyGen: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -49,99 +44,104 @@ class RotKeyGen: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. 
+ preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. 
- end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. rotations (Union[Unset, List['RotKeyGenRotationsItem']]): """ type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET 
project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET rotations: Union[Unset, List["RotKeyGenRotationsItem"]] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = 
self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() - + wait = self.wait rotations: Union[Unset, List[Dict[str, Any]]] = UNSET if not isinstance(self.rotations, Unset): rotations = [] @@ -157,46 +157,46 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - 
field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait if rotations is not UNSET: field_dict["rotations"] = rotations @@ -213,6 +213,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if 
isinstance(_input_clipping_method, Unset): @@ -220,6 +222,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -227,24 +240,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -257,31 +264,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) 
- _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) - - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) + wait = d.pop("wait", UNSET) rotations = [] _rotations = d.pop("rotations", UNSET) @@ -292,26 +292,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: rot_key_gen = cls( type=type, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, 
- input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, + wait=wait, rotations=rotations, ) diff --git a/src/tuneinsight/api/sdk/models/rot_key_gen_rotations_item.py b/src/tuneinsight/api/sdk/models/rot_key_gen_rotations_item.py index 89ca32a..d031113 100644 --- a/src/tuneinsight/api/sdk/models/rot_key_gen_rotations_item.py +++ b/src/tuneinsight/api/sdk/models/rot_key_gen_rotations_item.py @@ -11,38 +11,38 @@ class RotKeyGenRotationsItem: """ Attributes: - side (Union[Unset, bool]): value (Union[Unset, int]): + side (Union[Unset, bool]): """ - side: Union[Unset, bool] = UNSET value: Union[Unset, int] = UNSET + side: Union[Unset, bool] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - side = self.side value = self.value + side = self.side field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if side is not UNSET: - field_dict["side"] = side if value is not UNSET: field_dict["value"] = value + if side is not UNSET: + field_dict["side"] = side return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - side = d.pop("side", UNSET) - value = d.pop("value", UNSET) + side = d.pop("side", UNSET) + rot_key_gen_rotations_item = cls( - side=side, value=value, + side=side, ) rot_key_gen_rotations_item.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/run_project_parameters.py b/src/tuneinsight/api/sdk/models/run_project_parameters.py index 9fcc0bc..41c52be 100644 --- a/src/tuneinsight/api/sdk/models/run_project_parameters.py +++ b/src/tuneinsight/api/sdk/models/run_project_parameters.py @@ -17,15 +17,18 @@ class RunProjectParameters: """parameters used to launch the project with. 
Attributes: + wait (Union[Unset, None, bool]): whether to run the computation synchronously computation_definition (Union[Unset, ComputationDefinition]): Generic computation. run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) """ + wait: Union[Unset, None, bool] = UNSET computation_definition: Union[Unset, "ComputationDefinition"] = UNSET run_mode: Union[Unset, RunMode] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: + wait = self.wait computation_definition: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.computation_definition, Unset): computation_definition = self.computation_definition.to_dict() @@ -37,6 +40,8 @@ def to_dict(self) -> Dict[str, Any]: field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) + if wait is not UNSET: + field_dict["wait"] = wait if computation_definition is not UNSET: field_dict["computationDefinition"] = computation_definition if run_mode is not UNSET: @@ -49,6 +54,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.computation_definition import ComputationDefinition d = src_dict.copy() + wait = d.pop("wait", UNSET) + _computation_definition = d.pop("computationDefinition", UNSET) computation_definition: Union[Unset, ComputationDefinition] if isinstance(_computation_definition, Unset): @@ -64,6 +71,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: run_mode = RunMode(_run_mode) run_project_parameters = cls( + wait=wait, computation_definition=computation_definition, run_mode=run_mode, ) diff --git a/src/tuneinsight/api/sdk/models/s3_parameters.py b/src/tuneinsight/api/sdk/models/s3_parameters.py index e8e8948..482e96b 100644 --- a/src/tuneinsight/api/sdk/models/s3_parameters.py +++ b/src/tuneinsight/api/sdk/models/s3_parameters.py @@ -12,30 +12,32 @@ class S3Parameters: """parameters for the remote 
s3-compatible storage Attributes: + secret_access_key (Union[Unset, str]): s3 secret access key url (Union[Unset, str]): s3 endpoint access_key_id (Union[Unset, str]): s3 access key id bucket (Union[Unset, str]): s3 bucket region (Union[Unset, str]): s3 region - secret_access_key (Union[Unset, str]): s3 secret access key """ + secret_access_key: Union[Unset, str] = UNSET url: Union[Unset, str] = UNSET access_key_id: Union[Unset, str] = UNSET bucket: Union[Unset, str] = UNSET region: Union[Unset, str] = UNSET - secret_access_key: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: + secret_access_key = self.secret_access_key url = self.url access_key_id = self.access_key_id bucket = self.bucket region = self.region - secret_access_key = self.secret_access_key field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) + if secret_access_key is not UNSET: + field_dict["secretAccessKey"] = secret_access_key if url is not UNSET: field_dict["url"] = url if access_key_id is not UNSET: @@ -44,14 +46,14 @@ def to_dict(self) -> Dict[str, Any]: field_dict["bucket"] = bucket if region is not UNSET: field_dict["region"] = region - if secret_access_key is not UNSET: - field_dict["secretAccessKey"] = secret_access_key return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() + secret_access_key = d.pop("secretAccessKey", UNSET) + url = d.pop("url", UNSET) access_key_id = d.pop("accessKeyID", UNSET) @@ -60,14 +62,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: region = d.pop("region", UNSET) - secret_access_key = d.pop("secretAccessKey", UNSET) - s3_parameters = cls( + secret_access_key=secret_access_key, url=url, access_key_id=access_key_id, bucket=bucket, region=region, - secret_access_key=secret_access_key, ) s3_parameters.additional_properties = d diff --git 
a/src/tuneinsight/api/sdk/models/sample_extraction.py b/src/tuneinsight/api/sdk/models/sample_extraction.py index 1a11b5a..137e7b8 100644 --- a/src/tuneinsight/api/sdk/models/sample_extraction.py +++ b/src/tuneinsight/api/sdk/models/sample_extraction.py @@ -22,6 +22,8 @@ class SampleExtraction: """ Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -32,14 +34,7 @@ class SampleExtraction: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. 
If provided, the computation will automatically deduce @@ -48,56 +43,61 @@ class SampleExtraction: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. + preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. 
+ encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. - end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. 
sample_size (Union[Unset, int]): size of the sample as number of rows seed (Union[Unset, str]): seed to use for the sampling """ type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET sample_size: Union[Unset, int] = UNSET seed: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) @@ -105,44 +105,44 @@ class SampleExtraction: def to_dict(self) -> Dict[str, Any]: type = self.type.value + 
dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] 
= UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() - + wait = self.wait sample_size = self.sample_size seed = self.seed @@ -153,46 +153,46 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not 
UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait if sample_size is not UNSET: field_dict["sampleSize"] = sample_size if seed is not UNSET: @@ -210,6 +210,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -217,6 +219,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -224,24 +237,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = 
ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -254,31 +261,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) - - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: 
Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) + wait = d.pop("wait", UNSET) sample_size = d.pop("sampleSize", UNSET) @@ -286,26 +286,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: sample_extraction = cls( type=type, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, + wait=wait, sample_size=sample_size, seed=seed, ) diff --git a/src/tuneinsight/api/sdk/models/session.py b/src/tuneinsight/api/sdk/models/session.py index 7469c43..238bb9e 100644 --- a/src/tuneinsight/api/sdk/models/session.py +++ b/src/tuneinsight/api/sdk/models/session.py @@ -12,30 +12,32 @@ class Session: """basic information about a session returned from POST/GET Attributes: + network_id (Union[Unset, str]): network of the session params (Union[Unset, str]): b64 encoded marshaled parameters scheme (Union[Unset, str]): cryptographic scheme used, comes from the cryptolib collective_key (Union[Unset, str]): Unique identifier of a 
data object. id (Union[Unset, str]): Unique identifier of a session - network_id (Union[Unset, str]): network of the session """ + network_id: Union[Unset, str] = UNSET params: Union[Unset, str] = UNSET scheme: Union[Unset, str] = UNSET collective_key: Union[Unset, str] = UNSET id: Union[Unset, str] = UNSET - network_id: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: + network_id = self.network_id params = self.params scheme = self.scheme collective_key = self.collective_key id = self.id - network_id = self.network_id field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) + if network_id is not UNSET: + field_dict["networkId"] = network_id if params is not UNSET: field_dict["params"] = params if scheme is not UNSET: @@ -44,14 +46,14 @@ def to_dict(self) -> Dict[str, Any]: field_dict["collectiveKey"] = collective_key if id is not UNSET: field_dict["id"] = id - if network_id is not UNSET: - field_dict["networkId"] = network_id return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() + network_id = d.pop("networkId", UNSET) + params = d.pop("params", UNSET) scheme = d.pop("scheme", UNSET) @@ -60,14 +62,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: id = d.pop("id", UNSET) - network_id = d.pop("networkId", UNSET) - session = cls( + network_id=network_id, params=params, scheme=scheme, collective_key=collective_key, id=id, - network_id=network_id, ) session.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/set_intersection.py b/src/tuneinsight/api/sdk/models/set_intersection.py index 778143d..a9001dd 100644 --- a/src/tuneinsight/api/sdk/models/set_intersection.py +++ b/src/tuneinsight/api/sdk/models/set_intersection.py @@ -24,6 +24,8 @@ class SetIntersection: """ Attributes: type (ComputationType): Type of the computation. 
+ dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -34,14 +36,7 @@ class SetIntersection: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -50,29 +45,34 @@ class SetIntersection: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. 
+ preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. 
- end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. encrypted_results (Union[Unset, bool]): if true, then the resulting matches are kept encrypted fuzzy_params (Union[Unset, FuzzyMatchingParameters]): hide_matching_origin (Union[Unset, bool]): if true, then the matches are aggregated before being decrypted, @@ -82,28 +82,28 @@ class SetIntersection: """ type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, 
"ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET encrypted_results: Union[Unset, bool] = UNSET fuzzy_params: Union[Unset, "FuzzyMatchingParameters"] = UNSET hide_matching_origin: Union[Unset, bool] = UNSET @@ -114,44 +114,44 @@ class SetIntersection: def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + 
data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() - + wait = self.wait encrypted_results = self.encrypted_results fuzzy_params: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.fuzzy_params, Unset): @@ -173,46 +173,46 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + 
field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait if encrypted_results is not UNSET: field_dict["encryptedResults"] = 
encrypted_results if fuzzy_params is not UNSET: @@ -237,6 +237,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -244,6 +246,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -251,24 +264,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value 
= d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -281,31 +288,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) - - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) + wait = d.pop("wait", UNSET) encrypted_results = d.pop("encryptedResults", UNSET) @@ -329,26 +329,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: set_intersection = cls( type=type, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + 
preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, + wait=wait, encrypted_results=encrypted_results, fuzzy_params=fuzzy_params, hide_matching_origin=hide_matching_origin, diff --git a/src/tuneinsight/api/sdk/models/settings.py b/src/tuneinsight/api/sdk/models/settings.py new file mode 100644 index 0000000..c2734c9 --- /dev/null +++ b/src/tuneinsight/api/sdk/models/settings.py @@ -0,0 +1,96 @@ +from typing import Any, Dict, List, Type, TypeVar, Union + +import attr + +from ..models.workflow_type import WorkflowType +from ..types import UNSET, Unset + +T = TypeVar("T", bound="Settings") + + +@attr.s(auto_attribs=True) +class Settings: + """instance settings that is configurable by the administrator. + + Attributes: + authorized_project_types (Union[Unset, List[WorkflowType]]): array of project types that are available for + selection when creating a new project. + default_data_source (Union[Unset, None, str]): Unique identifier of a data source. + selectable_data_source (Union[Unset, None, bool]): whether or not the datasource of the project can be modified. + set_project_policies (Union[Unset, None, bool]): whether policies can be set for projects. 
+ """ + + authorized_project_types: Union[Unset, List[WorkflowType]] = UNSET + default_data_source: Union[Unset, None, str] = UNSET + selectable_data_source: Union[Unset, None, bool] = UNSET + set_project_policies: Union[Unset, None, bool] = UNSET + additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + authorized_project_types: Union[Unset, List[str]] = UNSET + if not isinstance(self.authorized_project_types, Unset): + authorized_project_types = [] + for authorized_project_types_item_data in self.authorized_project_types: + authorized_project_types_item = authorized_project_types_item_data.value + + authorized_project_types.append(authorized_project_types_item) + + default_data_source = self.default_data_source + selectable_data_source = self.selectable_data_source + set_project_policies = self.set_project_policies + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if authorized_project_types is not UNSET: + field_dict["authorizedProjectTypes"] = authorized_project_types + if default_data_source is not UNSET: + field_dict["defaultDataSource"] = default_data_source + if selectable_data_source is not UNSET: + field_dict["selectableDataSource"] = selectable_data_source + if set_project_policies is not UNSET: + field_dict["setProjectPolicies"] = set_project_policies + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + d = src_dict.copy() + authorized_project_types = [] + _authorized_project_types = d.pop("authorizedProjectTypes", UNSET) + for authorized_project_types_item_data in _authorized_project_types or []: + authorized_project_types_item = WorkflowType(authorized_project_types_item_data) + + authorized_project_types.append(authorized_project_types_item) + + default_data_source = d.pop("defaultDataSource", UNSET) + + selectable_data_source = d.pop("selectableDataSource", UNSET) + + 
set_project_policies = d.pop("setProjectPolicies", UNSET) + + settings = cls( + authorized_project_types=authorized_project_types, + default_data_source=default_data_source, + selectable_data_source=selectable_data_source, + set_project_policies=set_project_policies, + ) + + settings.additional_properties = d + return settings + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/src/tuneinsight/api/sdk/models/setup_session.py b/src/tuneinsight/api/sdk/models/setup_session.py index a2c09a2..2adc055 100644 --- a/src/tuneinsight/api/sdk/models/setup_session.py +++ b/src/tuneinsight/api/sdk/models/setup_session.py @@ -23,6 +23,8 @@ class SetupSession: """ Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -33,14 +35,7 @@ class SetupSession: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. 
- dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -49,57 +44,62 @@ class SetupSession: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. + preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. 
This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. - end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. 
relinearization_key (Union[Unset, bool]): whether or not to generate the relinearization key target_computation (Union[Unset, ComputationDefinition]): Generic computation. target_scheme_context (Union[Unset, str]): base64 encoded scheme context """ type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET relinearization_key: Union[Unset, bool] = UNSET target_computation: Union[Unset, "ComputationDefinition"] = UNSET 
target_scheme_context: Union[Unset, str] = UNSET @@ -108,44 +108,44 @@ class SetupSession: def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = 
self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() - + wait = self.wait relinearization_key = self.relinearization_key target_computation: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.target_computation, Unset): @@ -160,46 +160,46 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id 
is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait if relinearization_key is not UNSET: field_dict["relinearizationKey"] = relinearization_key if target_computation is not UNSET: @@ -220,6 +220,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -227,6 +229,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) 
preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -234,24 +247,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -264,31 +271,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = 
d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) - - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) + wait = d.pop("wait", UNSET) relinearization_key = d.pop("relinearizationKey", UNSET) @@ -303,26 +303,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: setup_session = cls( type=type, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, + wait=wait, relinearization_key=relinearization_key, target_computation=target_computation, target_scheme_context=target_scheme_context, diff --git a/src/tuneinsight/api/sdk/models/statistic_result.py b/src/tuneinsight/api/sdk/models/statistic_result.py index ea08f79..d3c53e9 100644 --- a/src/tuneinsight/api/sdk/models/statistic_result.py +++ 
b/src/tuneinsight/api/sdk/models/statistic_result.py @@ -18,25 +18,25 @@ class StatisticResult: filter_ (Union[Unset, Filter]): name (Union[Unset, str]): given name of the statistic variable (Union[Unset, str]): target variable in the dataset from the which the statistic is computed + iqr (Union[Unset, None, float]): + max_ (Union[Unset, None, float]): + mean (Union[Unset, None, float]): median (Union[Unset, None, float]): min_ (Union[Unset, None, float]): quantiles (Union[Unset, List[float]]): variance (Union[Unset, None, float]): - iqr (Union[Unset, None, float]): - max_ (Union[Unset, None, float]): - mean (Union[Unset, None, float]): """ filter_: Union[Unset, "Filter"] = UNSET name: Union[Unset, str] = UNSET variable: Union[Unset, str] = UNSET + iqr: Union[Unset, None, float] = UNSET + max_: Union[Unset, None, float] = UNSET + mean: Union[Unset, None, float] = UNSET median: Union[Unset, None, float] = UNSET min_: Union[Unset, None, float] = UNSET quantiles: Union[Unset, List[float]] = UNSET variance: Union[Unset, None, float] = UNSET - iqr: Union[Unset, None, float] = UNSET - max_: Union[Unset, None, float] = UNSET - mean: Union[Unset, None, float] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: @@ -46,6 +46,9 @@ def to_dict(self) -> Dict[str, Any]: name = self.name variable = self.variable + iqr = self.iqr + max_ = self.max_ + mean = self.mean median = self.median min_ = self.min_ quantiles: Union[Unset, List[float]] = UNSET @@ -53,9 +56,6 @@ def to_dict(self) -> Dict[str, Any]: quantiles = self.quantiles variance = self.variance - iqr = self.iqr - max_ = self.max_ - mean = self.mean field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) @@ -66,6 +66,12 @@ def to_dict(self) -> Dict[str, Any]: field_dict["name"] = name if variable is not UNSET: field_dict["variable"] = variable + if iqr is not UNSET: + field_dict["IQR"] = iqr + if max_ is not UNSET: + field_dict["max"] 
= max_ + if mean is not UNSET: + field_dict["mean"] = mean if median is not UNSET: field_dict["median"] = median if min_ is not UNSET: @@ -74,12 +80,6 @@ def to_dict(self) -> Dict[str, Any]: field_dict["quantiles"] = quantiles if variance is not UNSET: field_dict["variance"] = variance - if iqr is not UNSET: - field_dict["IQR"] = iqr - if max_ is not UNSET: - field_dict["max"] = max_ - if mean is not UNSET: - field_dict["mean"] = mean return field_dict @@ -99,6 +99,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: variable = d.pop("variable", UNSET) + iqr = d.pop("IQR", UNSET) + + max_ = d.pop("max", UNSET) + + mean = d.pop("mean", UNSET) + median = d.pop("median", UNSET) min_ = d.pop("min", UNSET) @@ -107,23 +113,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: variance = d.pop("variance", UNSET) - iqr = d.pop("IQR", UNSET) - - max_ = d.pop("max", UNSET) - - mean = d.pop("mean", UNSET) - statistic_result = cls( filter_=filter_, name=name, variable=variable, + iqr=iqr, + max_=max_, + mean=mean, median=median, min_=min_, quantiles=quantiles, variance=variance, - iqr=iqr, - max_=max_, - mean=mean, ) statistic_result.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/statistical_aggregation.py b/src/tuneinsight/api/sdk/models/statistical_aggregation.py index 4a0c4ab..dd3c62f 100644 --- a/src/tuneinsight/api/sdk/models/statistical_aggregation.py +++ b/src/tuneinsight/api/sdk/models/statistical_aggregation.py @@ -23,6 +23,8 @@ class StatisticalAggregation: """ Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. 
@@ -33,14 +35,7 @@ class StatisticalAggregation: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -49,29 +44,34 @@ class StatisticalAggregation: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. 
+ preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. 
- end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. aggregation_columns (Union[Unset, List[str]]): list of columns where all data is aggregated binning_operations (Union[Unset, List['BinningOperation']]): list of binning operations to apply before aggregating the results @@ -79,28 +79,28 @@ class StatisticalAggregation: """ type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + 
end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET aggregation_columns: Union[Unset, List[str]] = UNSET binning_operations: Union[Unset, List["BinningOperation"]] = UNSET include_dataset_length: Union[Unset, bool] = UNSET @@ -109,44 +109,44 @@ class StatisticalAggregation: def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = 
self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() - + wait = self.wait aggregation_columns: Union[Unset, List[str]] = UNSET if not isinstance(self.aggregation_columns, Unset): aggregation_columns = self.aggregation_columns @@ -168,46 +168,46 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + 
field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait if aggregation_columns is not UNSET: field_dict["aggregationColumns"] = 
aggregation_columns if binning_operations is not UNSET: @@ -228,6 +228,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -235,6 +237,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -242,24 +255,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - 
maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -272,31 +279,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) - - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) + wait = d.pop("wait", UNSET) aggregation_columns = cast(List[str], d.pop("aggregationColumns", UNSET)) @@ -311,26 +311,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: statistical_aggregation = cls( type=type, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - 
cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, + wait=wait, aggregation_columns=aggregation_columns, binning_operations=binning_operations, include_dataset_length=include_dataset_length, diff --git a/src/tuneinsight/api/sdk/models/storage_definition.py b/src/tuneinsight/api/sdk/models/storage_definition.py index eabb425..9cc2552 100644 --- a/src/tuneinsight/api/sdk/models/storage_definition.py +++ b/src/tuneinsight/api/sdk/models/storage_definition.py @@ -17,7 +17,6 @@ class StorageDefinition: """specification of the storage operation Attributes: - backup_definition (Union[Unset, BackupDefinition]): backup parameters current_key (Union[Unset, str]): currently used b64-formatted encryption key, needs to be specified when running 'decrypt' or 'rotate' encrypt_unencrypted (Union[Unset, bool]): when performing a rotation, if true, then unencrypted values get @@ -25,20 +24,17 @@ class StorageDefinition: new_key (Union[Unset, str]): new b64-formatted key to use on the storage, needs to be specified when running 'encrypt' or 'rotate' operation (Union[Unset, StorageOperation]): operation to perform on the storage + backup_definition (Union[Unset, BackupDefinition]): backup parameters """ - backup_definition: Union[Unset, "BackupDefinition"] = UNSET current_key: Union[Unset, str] = UNSET encrypt_unencrypted: Union[Unset, bool] = UNSET new_key: Union[Unset, str] = UNSET operation: Union[Unset, StorageOperation] = UNSET + 
backup_definition: Union[Unset, "BackupDefinition"] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - backup_definition: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.backup_definition, Unset): - backup_definition = self.backup_definition.to_dict() - current_key = self.current_key encrypt_unencrypted = self.encrypt_unencrypted new_key = self.new_key @@ -46,11 +42,13 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.operation, Unset): operation = self.operation.value + backup_definition: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.backup_definition, Unset): + backup_definition = self.backup_definition.to_dict() + field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if backup_definition is not UNSET: - field_dict["backupDefinition"] = backup_definition if current_key is not UNSET: field_dict["currentKey"] = current_key if encrypt_unencrypted is not UNSET: @@ -59,6 +57,8 @@ def to_dict(self) -> Dict[str, Any]: field_dict["newKey"] = new_key if operation is not UNSET: field_dict["operation"] = operation + if backup_definition is not UNSET: + field_dict["backupDefinition"] = backup_definition return field_dict @@ -67,13 +67,6 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.backup_definition import BackupDefinition d = src_dict.copy() - _backup_definition = d.pop("backupDefinition", UNSET) - backup_definition: Union[Unset, BackupDefinition] - if isinstance(_backup_definition, Unset): - backup_definition = UNSET - else: - backup_definition = BackupDefinition.from_dict(_backup_definition) - current_key = d.pop("currentKey", UNSET) encrypt_unencrypted = d.pop("encryptUnencrypted", UNSET) @@ -87,12 +80,19 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: operation = StorageOperation(_operation) + _backup_definition = d.pop("backupDefinition", UNSET) + 
backup_definition: Union[Unset, BackupDefinition] + if isinstance(_backup_definition, Unset): + backup_definition = UNSET + else: + backup_definition = BackupDefinition.from_dict(_backup_definition) + storage_definition = cls( - backup_definition=backup_definition, current_key=current_key, encrypt_unencrypted=encrypt_unencrypted, new_key=new_key, operation=operation, + backup_definition=backup_definition, ) storage_definition.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/survival.py b/src/tuneinsight/api/sdk/models/survival.py index e0f2dce..d73f2f2 100644 --- a/src/tuneinsight/api/sdk/models/survival.py +++ b/src/tuneinsight/api/sdk/models/survival.py @@ -17,6 +17,10 @@ class Survival: """ Attributes: type (PreprocessingOperationType): type of preprocessing operation + event_col (Union[Unset, str]): the name of the column that stores the event status for each sample Default: + 'event'. + event_val (Union[Unset, str]): the event value indicating a survival event (i.e. death) + interval (Union[Unset, Duration]): definition of a date-independent time interval num_frames (Union[Unset, int]): the number of time frames to take into account starting from the start of the survival start_event (Union[Unset, str]): the event column that must contain the timestamps of the start of the trial @@ -24,35 +28,32 @@ class Survival: stored must be integers Default: 'duration'. end_event (Union[Unset, str]): the column that must contain the timestamps of the end event (can be empty if no event happened) - event_col (Union[Unset, str]): the name of the column that stores the event status for each sample Default: - 'event'. - event_val (Union[Unset, str]): the event value indicating a survival event (i.e. 
death) - interval (Union[Unset, Duration]): definition of a date-independent time interval """ type: PreprocessingOperationType + event_col: Union[Unset, str] = "event" + event_val: Union[Unset, str] = UNSET + interval: Union[Unset, "Duration"] = UNSET num_frames: Union[Unset, int] = UNSET start_event: Union[Unset, str] = UNSET duration_col: Union[Unset, str] = "duration" end_event: Union[Unset, str] = UNSET - event_col: Union[Unset, str] = "event" - event_val: Union[Unset, str] = UNSET - interval: Union[Unset, "Duration"] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value - num_frames = self.num_frames - start_event = self.start_event - duration_col = self.duration_col - end_event = self.end_event event_col = self.event_col event_val = self.event_val interval: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.interval, Unset): interval = self.interval.to_dict() + num_frames = self.num_frames + start_event = self.start_event + duration_col = self.duration_col + end_event = self.end_event + field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -60,6 +61,12 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) + if event_col is not UNSET: + field_dict["eventCol"] = event_col + if event_val is not UNSET: + field_dict["eventVal"] = event_val + if interval is not UNSET: + field_dict["interval"] = interval if num_frames is not UNSET: field_dict["numFrames"] = num_frames if start_event is not UNSET: @@ -68,12 +75,6 @@ def to_dict(self) -> Dict[str, Any]: field_dict["durationCol"] = duration_col if end_event is not UNSET: field_dict["endEvent"] = end_event - if event_col is not UNSET: - field_dict["eventCol"] = event_col - if event_val is not UNSET: - field_dict["eventVal"] = event_val - if interval is not UNSET: - field_dict["interval"] = interval return field_dict @@ -84,14 +85,6 @@ def from_dict(cls: Type[T], src_dict: 
Dict[str, Any]) -> T: d = src_dict.copy() type = PreprocessingOperationType(d.pop("type")) - num_frames = d.pop("numFrames", UNSET) - - start_event = d.pop("startEvent", UNSET) - - duration_col = d.pop("durationCol", UNSET) - - end_event = d.pop("endEvent", UNSET) - event_col = d.pop("eventCol", UNSET) event_val = d.pop("eventVal", UNSET) @@ -103,15 +96,23 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: interval = Duration.from_dict(_interval) + num_frames = d.pop("numFrames", UNSET) + + start_event = d.pop("startEvent", UNSET) + + duration_col = d.pop("durationCol", UNSET) + + end_event = d.pop("endEvent", UNSET) + survival = cls( type=type, + event_col=event_col, + event_val=event_val, + interval=interval, num_frames=num_frames, start_event=start_event, duration_col=duration_col, end_event=end_event, - event_col=event_col, - event_val=event_val, - interval=interval, ) survival.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/survival_aggregation.py b/src/tuneinsight/api/sdk/models/survival_aggregation.py index f943571..50b211f 100644 --- a/src/tuneinsight/api/sdk/models/survival_aggregation.py +++ b/src/tuneinsight/api/sdk/models/survival_aggregation.py @@ -25,6 +25,8 @@ class SurvivalAggregation: """ Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -35,14 +37,7 @@ class SurvivalAggregation: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. 
- preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -51,29 +46,36 @@ class SurvivalAggregation: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. - cohort_id (Union[Unset, str]): Unique identifier of a data object. + preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. 
+ timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. - end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. 
- data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. + encrypted_matching (Union[Unset, bool]): if true, then the resulting matches are kept encrypted before + aggregating the survival data (slower) matching_columns (Union[Unset, List['MatchingColumn']]): The columns on which the data should be matched matching_organization (Union[Unset, str]): when secure matching is enabled, the organization with whom to match records with @@ -81,82 +83,81 @@ class SurvivalAggregation: organization subgroups (Union[Unset, List['SurvivalAggregationSubgroupsItem']]): list of filters to create survival subgroups survival_parameters (Union[Unset, Survival]): - encrypted_matching (Union[Unset, bool]): if true, then the resulting matches are kept encrypted before - aggregating the survival data (slower) """ type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET 
project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + wait: Union[Unset, bool] = UNSET + encrypted_matching: Union[Unset, bool] = UNSET matching_columns: Union[Unset, List["MatchingColumn"]] = UNSET matching_organization: Union[Unset, str] = UNSET secure_matching: Union[Unset, bool] = UNSET subgroups: Union[Unset, List["SurvivalAggregationSubgroupsItem"]] = UNSET survival_parameters: Union[Unset, "Survival"] = UNSET - encrypted_matching: Union[Unset, bool] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + 
data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() - + wait = self.wait + encrypted_matching = self.encrypted_matching matching_columns: Union[Unset, List[Dict[str, Any]]] = UNSET if not isinstance(self.matching_columns, Unset): matching_columns = [] @@ -179,8 +180,6 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.survival_parameters, Unset): survival_parameters = self.survival_parameters.to_dict() - encrypted_matching = self.encrypted_matching - field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -188,46 +187,48 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: 
- field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if 
release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters + if wait is not UNSET: + field_dict["wait"] = wait + if encrypted_matching is not UNSET: + field_dict["encryptedMatching"] = encrypted_matching if matching_columns is not UNSET: field_dict["matchingColumns"] = matching_columns if matching_organization is not UNSET: @@ -238,8 +239,6 @@ def to_dict(self) -> Dict[str, Any]: field_dict["subgroups"] = subgroups if survival_parameters is not UNSET: field_dict["survivalParameters"] = survival_parameters - if encrypted_matching is not UNSET: - field_dict["encryptedMatching"] = encrypted_matching return field_dict @@ -256,6 +255,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -263,6 +264,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -270,24 +282,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = 
ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -300,31 +306,26 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) + wait = d.pop("wait", UNSET) - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - 
data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) + encrypted_matching = d.pop("encryptedMatching", UNSET) matching_columns = [] _matching_columns = d.pop("matchingColumns", UNSET) @@ -351,36 +352,34 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: survival_parameters = Survival.from_dict(_survival_parameters) - encrypted_matching = d.pop("encryptedMatching", UNSET) - survival_aggregation = cls( type=type, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, + wait=wait, + encrypted_matching=encrypted_matching, matching_columns=matching_columns, matching_organization=matching_organization, secure_matching=secure_matching, subgroups=subgroups, survival_parameters=survival_parameters, - encrypted_matching=encrypted_matching, ) survival_aggregation.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/time_diff.py b/src/tuneinsight/api/sdk/models/time_diff.py 
index f4b456f..63a4199 100644 --- a/src/tuneinsight/api/sdk/models/time_diff.py +++ b/src/tuneinsight/api/sdk/models/time_diff.py @@ -17,33 +17,32 @@ class TimeDiff: """ Attributes: type (PreprocessingOperationType): type of preprocessing operation + output (Union[Unset, str]): the output column that stores the numerical values for the time difference + start (Union[Unset, str]): column that contains timestamps representing the start of the measured difference end (Union[Unset, str]): column that contains timestamps representing the end of the measured difference filter_na (Union[Unset, bool]): whether or not to filter null values interval (Union[Unset, Duration]): definition of a date-independent time interval - output (Union[Unset, str]): the output column that stores the numerical values for the time difference - start (Union[Unset, str]): column that contains timestamps representing the start of the measured difference """ type: PreprocessingOperationType + output: Union[Unset, str] = UNSET + start: Union[Unset, str] = UNSET end: Union[Unset, str] = UNSET filter_na: Union[Unset, bool] = UNSET interval: Union[Unset, "Duration"] = UNSET - output: Union[Unset, str] = UNSET - start: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value + output = self.output + start = self.start end = self.end filter_na = self.filter_na interval: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.interval, Unset): interval = self.interval.to_dict() - output = self.output - start = self.start - field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -51,16 +50,16 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) + if output is not UNSET: + field_dict["output"] = output + if start is not UNSET: + field_dict["start"] = start if end is not UNSET: field_dict["end"] = end if filter_na is not UNSET: field_dict["filterNA"] = 
filter_na if interval is not UNSET: field_dict["interval"] = interval - if output is not UNSET: - field_dict["output"] = output - if start is not UNSET: - field_dict["start"] = start return field_dict @@ -71,6 +70,10 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = PreprocessingOperationType(d.pop("type")) + output = d.pop("output", UNSET) + + start = d.pop("start", UNSET) + end = d.pop("end", UNSET) filter_na = d.pop("filterNA", UNSET) @@ -82,17 +85,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: interval = Duration.from_dict(_interval) - output = d.pop("output", UNSET) - - start = d.pop("start", UNSET) - time_diff = cls( type=type, + output=output, + start=start, end=end, filter_na=filter_na, interval=interval, - output=output, - start=start, ) time_diff.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/user_definition.py b/src/tuneinsight/api/sdk/models/user_definition.py index 71a0865..80069ed 100644 --- a/src/tuneinsight/api/sdk/models/user_definition.py +++ b/src/tuneinsight/api/sdk/models/user_definition.py @@ -18,81 +18,68 @@ class UserDefinition: """ Attributes: + first_name (Union[Unset, str]): + attributes (Union[Unset, UserDefinitionAttributes]): created_timestamp (Union[Unset, int]): - email_verified (Union[Unset, bool]): - federation_link (Union[Unset, str]): - username (Union[Unset, str]): - client_roles (Union[Unset, UserDefinitionClientRoles]): + enabled (Union[Unset, bool]): email (Union[Unset, str]): groups (Union[Unset, List[str]]): - required_actions (Union[Unset, List[str]]): - service_account_client_id (Union[Unset, str]): + last_name (Union[Unset, str]): totp (Union[Unset, bool]): access (Union[Unset, UserDefinitionAccess]): - enabled (Union[Unset, bool]): - first_name (Union[Unset, str]): - id (Union[Unset, str]): - last_name (Union[Unset, str]): - attributes (Union[Unset, UserDefinitionAttributes]): - realm_roles (Union[Unset, List[str]]): + client_roles 
(Union[Unset, UserDefinitionClientRoles]): disableable_credential_types (Union[Unset, List['UserDefinitionDisableableCredentialTypesItem']]): + federation_link (Union[Unset, str]): + realm_roles (Union[Unset, List[str]]): + service_account_client_id (Union[Unset, str]): + username (Union[Unset, str]): + email_verified (Union[Unset, bool]): + id (Union[Unset, str]): + required_actions (Union[Unset, List[str]]): """ + first_name: Union[Unset, str] = UNSET + attributes: Union[Unset, "UserDefinitionAttributes"] = UNSET created_timestamp: Union[Unset, int] = UNSET - email_verified: Union[Unset, bool] = UNSET - federation_link: Union[Unset, str] = UNSET - username: Union[Unset, str] = UNSET - client_roles: Union[Unset, "UserDefinitionClientRoles"] = UNSET + enabled: Union[Unset, bool] = UNSET email: Union[Unset, str] = UNSET groups: Union[Unset, List[str]] = UNSET - required_actions: Union[Unset, List[str]] = UNSET - service_account_client_id: Union[Unset, str] = UNSET + last_name: Union[Unset, str] = UNSET totp: Union[Unset, bool] = UNSET access: Union[Unset, "UserDefinitionAccess"] = UNSET - enabled: Union[Unset, bool] = UNSET - first_name: Union[Unset, str] = UNSET - id: Union[Unset, str] = UNSET - last_name: Union[Unset, str] = UNSET - attributes: Union[Unset, "UserDefinitionAttributes"] = UNSET - realm_roles: Union[Unset, List[str]] = UNSET + client_roles: Union[Unset, "UserDefinitionClientRoles"] = UNSET disableable_credential_types: Union[Unset, List["UserDefinitionDisableableCredentialTypesItem"]] = UNSET + federation_link: Union[Unset, str] = UNSET + realm_roles: Union[Unset, List[str]] = UNSET + service_account_client_id: Union[Unset, str] = UNSET + username: Union[Unset, str] = UNSET + email_verified: Union[Unset, bool] = UNSET + id: Union[Unset, str] = UNSET + required_actions: Union[Unset, List[str]] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - created_timestamp = 
self.created_timestamp - email_verified = self.email_verified - federation_link = self.federation_link - username = self.username - client_roles: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.client_roles, Unset): - client_roles = self.client_roles.to_dict() + first_name = self.first_name + attributes: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.attributes, Unset): + attributes = self.attributes.to_dict() + created_timestamp = self.created_timestamp + enabled = self.enabled email = self.email groups: Union[Unset, List[str]] = UNSET if not isinstance(self.groups, Unset): groups = self.groups - required_actions: Union[Unset, List[str]] = UNSET - if not isinstance(self.required_actions, Unset): - required_actions = self.required_actions - - service_account_client_id = self.service_account_client_id + last_name = self.last_name totp = self.totp access: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.access, Unset): access = self.access.to_dict() - enabled = self.enabled - first_name = self.first_name - id = self.id - last_name = self.last_name - attributes: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.attributes, Unset): - attributes = self.attributes.to_dict() - - realm_roles: Union[Unset, List[str]] = UNSET - if not isinstance(self.realm_roles, Unset): - realm_roles = self.realm_roles + client_roles: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.client_roles, Unset): + client_roles = self.client_roles.to_dict() disableable_credential_types: Union[Unset, List[Dict[str, Any]]] = UNSET if not isinstance(self.disableable_credential_types, Unset): @@ -102,45 +89,58 @@ def to_dict(self) -> Dict[str, Any]: disableable_credential_types.append(disableable_credential_types_item) + federation_link = self.federation_link + realm_roles: Union[Unset, List[str]] = UNSET + if not isinstance(self.realm_roles, Unset): + realm_roles = self.realm_roles + + service_account_client_id = 
self.service_account_client_id + username = self.username + email_verified = self.email_verified + id = self.id + required_actions: Union[Unset, List[str]] = UNSET + if not isinstance(self.required_actions, Unset): + required_actions = self.required_actions + field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) + if first_name is not UNSET: + field_dict["firstName"] = first_name + if attributes is not UNSET: + field_dict["attributes"] = attributes if created_timestamp is not UNSET: field_dict["createdTimestamp"] = created_timestamp - if email_verified is not UNSET: - field_dict["emailVerified"] = email_verified - if federation_link is not UNSET: - field_dict["federationLink"] = federation_link - if username is not UNSET: - field_dict["username"] = username - if client_roles is not UNSET: - field_dict["clientRoles"] = client_roles + if enabled is not UNSET: + field_dict["enabled"] = enabled if email is not UNSET: field_dict["email"] = email if groups is not UNSET: field_dict["groups"] = groups - if required_actions is not UNSET: - field_dict["requiredActions"] = required_actions - if service_account_client_id is not UNSET: - field_dict["serviceAccountClientID"] = service_account_client_id + if last_name is not UNSET: + field_dict["lastName"] = last_name if totp is not UNSET: field_dict["totp"] = totp if access is not UNSET: field_dict["access"] = access - if enabled is not UNSET: - field_dict["enabled"] = enabled - if first_name is not UNSET: - field_dict["firstName"] = first_name - if id is not UNSET: - field_dict["id"] = id - if last_name is not UNSET: - field_dict["lastName"] = last_name - if attributes is not UNSET: - field_dict["attributes"] = attributes - if realm_roles is not UNSET: - field_dict["realmRoles"] = realm_roles + if client_roles is not UNSET: + field_dict["clientRoles"] = client_roles if disableable_credential_types is not UNSET: field_dict["disableableCredentialTypes"] = disableable_credential_types 
+ if federation_link is not UNSET: + field_dict["federationLink"] = federation_link + if realm_roles is not UNSET: + field_dict["realmRoles"] = realm_roles + if service_account_client_id is not UNSET: + field_dict["serviceAccountClientID"] = service_account_client_id + if username is not UNSET: + field_dict["username"] = username + if email_verified is not UNSET: + field_dict["emailVerified"] = email_verified + if id is not UNSET: + field_dict["id"] = id + if required_actions is not UNSET: + field_dict["requiredActions"] = required_actions return field_dict @@ -154,28 +154,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: ) d = src_dict.copy() - created_timestamp = d.pop("createdTimestamp", UNSET) - - email_verified = d.pop("emailVerified", UNSET) + first_name = d.pop("firstName", UNSET) - federation_link = d.pop("federationLink", UNSET) + _attributes = d.pop("attributes", UNSET) + attributes: Union[Unset, UserDefinitionAttributes] + if isinstance(_attributes, Unset): + attributes = UNSET + else: + attributes = UserDefinitionAttributes.from_dict(_attributes) - username = d.pop("username", UNSET) + created_timestamp = d.pop("createdTimestamp", UNSET) - _client_roles = d.pop("clientRoles", UNSET) - client_roles: Union[Unset, UserDefinitionClientRoles] - if isinstance(_client_roles, Unset): - client_roles = UNSET - else: - client_roles = UserDefinitionClientRoles.from_dict(_client_roles) + enabled = d.pop("enabled", UNSET) email = d.pop("email", UNSET) groups = cast(List[str], d.pop("groups", UNSET)) - required_actions = cast(List[str], d.pop("requiredActions", UNSET)) - - service_account_client_id = d.pop("serviceAccountClientID", UNSET) + last_name = d.pop("lastName", UNSET) totp = d.pop("totp", UNSET) @@ -186,22 +182,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: access = UserDefinitionAccess.from_dict(_access) - enabled = d.pop("enabled", UNSET) - - first_name = d.pop("firstName", UNSET) - - id = d.pop("id", UNSET) - - 
last_name = d.pop("lastName", UNSET) - - _attributes = d.pop("attributes", UNSET) - attributes: Union[Unset, UserDefinitionAttributes] - if isinstance(_attributes, Unset): - attributes = UNSET + _client_roles = d.pop("clientRoles", UNSET) + client_roles: Union[Unset, UserDefinitionClientRoles] + if isinstance(_client_roles, Unset): + client_roles = UNSET else: - attributes = UserDefinitionAttributes.from_dict(_attributes) - - realm_roles = cast(List[str], d.pop("realmRoles", UNSET)) + client_roles = UserDefinitionClientRoles.from_dict(_client_roles) disableable_credential_types = [] _disableable_credential_types = d.pop("disableableCredentialTypes", UNSET) @@ -212,25 +198,39 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: disableable_credential_types.append(disableable_credential_types_item) + federation_link = d.pop("federationLink", UNSET) + + realm_roles = cast(List[str], d.pop("realmRoles", UNSET)) + + service_account_client_id = d.pop("serviceAccountClientID", UNSET) + + username = d.pop("username", UNSET) + + email_verified = d.pop("emailVerified", UNSET) + + id = d.pop("id", UNSET) + + required_actions = cast(List[str], d.pop("requiredActions", UNSET)) + user_definition = cls( + first_name=first_name, + attributes=attributes, created_timestamp=created_timestamp, - email_verified=email_verified, - federation_link=federation_link, - username=username, - client_roles=client_roles, + enabled=enabled, email=email, groups=groups, - required_actions=required_actions, - service_account_client_id=service_account_client_id, + last_name=last_name, totp=totp, access=access, - enabled=enabled, - first_name=first_name, - id=id, - last_name=last_name, - attributes=attributes, - realm_roles=realm_roles, + client_roles=client_roles, disableable_credential_types=disableable_credential_types, + federation_link=federation_link, + realm_roles=realm_roles, + service_account_client_id=service_account_client_id, + username=username, + email_verified=email_verified, + 
id=id, + required_actions=required_actions, ) user_definition.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/user_list_query.py b/src/tuneinsight/api/sdk/models/user_list_query.py index 0aec4d4..87907ad 100644 --- a/src/tuneinsight/api/sdk/models/user_list_query.py +++ b/src/tuneinsight/api/sdk/models/user_list_query.py @@ -11,134 +11,134 @@ class UserListQuery: """ Attributes: - last_name (Union[Unset, str]): - q (Union[Unset, str]): - username (Union[Unset, str]): + brief_representation (Union[Unset, bool]): + exact (Union[Unset, bool]): first (Union[Unset, int]): first_name (Union[Unset, str]): - max_ (Union[Unset, int]): - email (Union[Unset, str]): + idp_alias (Union[Unset, str]): + username (Union[Unset, str]): enabled (Union[Unset, bool]): - exact (Union[Unset, bool]): idp_user_id (Union[Unset, str]): - search (Union[Unset, str]): - brief_representation (Union[Unset, bool]): + max_ (Union[Unset, int]): + q (Union[Unset, str]): email_verified (Union[Unset, bool]): - idp_alias (Union[Unset, str]): + email (Union[Unset, str]): + last_name (Union[Unset, str]): + search (Union[Unset, str]): """ - last_name: Union[Unset, str] = UNSET - q: Union[Unset, str] = UNSET - username: Union[Unset, str] = UNSET + brief_representation: Union[Unset, bool] = UNSET + exact: Union[Unset, bool] = UNSET first: Union[Unset, int] = UNSET first_name: Union[Unset, str] = UNSET - max_: Union[Unset, int] = UNSET - email: Union[Unset, str] = UNSET + idp_alias: Union[Unset, str] = UNSET + username: Union[Unset, str] = UNSET enabled: Union[Unset, bool] = UNSET - exact: Union[Unset, bool] = UNSET idp_user_id: Union[Unset, str] = UNSET - search: Union[Unset, str] = UNSET - brief_representation: Union[Unset, bool] = UNSET + max_: Union[Unset, int] = UNSET + q: Union[Unset, str] = UNSET email_verified: Union[Unset, bool] = UNSET - idp_alias: Union[Unset, str] = UNSET + email: Union[Unset, str] = UNSET + last_name: Union[Unset, str] = UNSET + search: Union[Unset, str] = 
UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - last_name = self.last_name - q = self.q - username = self.username + brief_representation = self.brief_representation + exact = self.exact first = self.first first_name = self.first_name - max_ = self.max_ - email = self.email + idp_alias = self.idp_alias + username = self.username enabled = self.enabled - exact = self.exact idp_user_id = self.idp_user_id - search = self.search - brief_representation = self.brief_representation + max_ = self.max_ + q = self.q email_verified = self.email_verified - idp_alias = self.idp_alias + email = self.email + last_name = self.last_name + search = self.search field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if last_name is not UNSET: - field_dict["lastName"] = last_name - if q is not UNSET: - field_dict["q"] = q - if username is not UNSET: - field_dict["username"] = username + if brief_representation is not UNSET: + field_dict["briefRepresentation"] = brief_representation + if exact is not UNSET: + field_dict["exact"] = exact if first is not UNSET: field_dict["first"] = first if first_name is not UNSET: field_dict["firstName"] = first_name - if max_ is not UNSET: - field_dict["max"] = max_ - if email is not UNSET: - field_dict["email"] = email + if idp_alias is not UNSET: + field_dict["idpAlias"] = idp_alias + if username is not UNSET: + field_dict["username"] = username if enabled is not UNSET: field_dict["enabled"] = enabled - if exact is not UNSET: - field_dict["exact"] = exact if idp_user_id is not UNSET: field_dict["idpUserId"] = idp_user_id - if search is not UNSET: - field_dict["search"] = search - if brief_representation is not UNSET: - field_dict["briefRepresentation"] = brief_representation + if max_ is not UNSET: + field_dict["max"] = max_ + if q is not UNSET: + field_dict["q"] = q if email_verified is not UNSET: field_dict["emailVerified"] = 
email_verified - if idp_alias is not UNSET: - field_dict["idpAlias"] = idp_alias + if email is not UNSET: + field_dict["email"] = email + if last_name is not UNSET: + field_dict["lastName"] = last_name + if search is not UNSET: + field_dict["search"] = search return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - last_name = d.pop("lastName", UNSET) - - q = d.pop("q", UNSET) + brief_representation = d.pop("briefRepresentation", UNSET) - username = d.pop("username", UNSET) + exact = d.pop("exact", UNSET) first = d.pop("first", UNSET) first_name = d.pop("firstName", UNSET) - max_ = d.pop("max", UNSET) + idp_alias = d.pop("idpAlias", UNSET) - email = d.pop("email", UNSET) + username = d.pop("username", UNSET) enabled = d.pop("enabled", UNSET) - exact = d.pop("exact", UNSET) - idp_user_id = d.pop("idpUserId", UNSET) - search = d.pop("search", UNSET) + max_ = d.pop("max", UNSET) - brief_representation = d.pop("briefRepresentation", UNSET) + q = d.pop("q", UNSET) email_verified = d.pop("emailVerified", UNSET) - idp_alias = d.pop("idpAlias", UNSET) + email = d.pop("email", UNSET) + + last_name = d.pop("lastName", UNSET) + + search = d.pop("search", UNSET) user_list_query = cls( - last_name=last_name, - q=q, - username=username, + brief_representation=brief_representation, + exact=exact, first=first, first_name=first_name, - max_=max_, - email=email, + idp_alias=idp_alias, + username=username, enabled=enabled, - exact=exact, idp_user_id=idp_user_id, - search=search, - brief_representation=brief_representation, + max_=max_, + q=q, email_verified=email_verified, - idp_alias=idp_alias, + email=email, + last_name=last_name, + search=search, ) user_list_query.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/v_binned_aggregation.py b/src/tuneinsight/api/sdk/models/v_binned_aggregation.py index ebf8f01..94cccff 100644 --- a/src/tuneinsight/api/sdk/models/v_binned_aggregation.py +++ 
b/src/tuneinsight/api/sdk/models/v_binned_aggregation.py @@ -23,6 +23,8 @@ class VBinnedAggregation: """ Attributes: type (ComputationType): Type of the computation. + dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP + mode. Default: -1.0. input_clipping_method (Union[Unset, ComputationDefinitionInputClippingMethod]): Optional method used for clipping before encrypting values when running aggregation-based workflows. The bounds are deduced based on the cryptographic parameters used for the aggregation. @@ -33,14 +35,7 @@ class VBinnedAggregation: (default) - error: if some values are out of bounds, then the computation is aborted. Default: ComputationDefinitionInputClippingMethod.WARNING. - preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters - applied to the input retrieved from the datasource, if applicable - wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. - dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various - disclosure prevention mechanisms - dp_epsilon (Union[Unset, float]): If positive, the privacy budget used by this computation. Used only in DP - mode. Default: -1.0. - join_id (Union[Unset, str]): Unique identifier of a data object. + run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) local_input_id (Union[Unset, str]): Unique identifier of a data object. maximum_aggregated_value (Union[Unset, None, float]): optional upper bound on the total expected value to be aggregated collectively. If provided, the computation will automatically deduce @@ -49,107 +44,110 @@ class VBinnedAggregation: up to 16 million. For example, when using default parameters and running an aggregation with 4 participants, local aggregated values cannot exceed 4 million. 
- cohort_id (Union[Unset, str]): Unique identifier of a data object. + preprocessing_parameters (Union[Unset, ComputationPreprocessingParameters]): dataframe pre-processing parameters + applied to the input retrieved from the datasource, if applicable + data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource + from each node before the computation + end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, + then when release results is set to true and the output + is initially encrypted with a network collective key, then it is key switched to + the initiating user's public key. + input_data_object (Union[Unset, str]): Shared identifier of a data object. + timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. local_input (Union[Unset, LocalInput]): If a local input is provided, the node initiating the computation will use it instead of querying the datasource. This data is *not* shared to other nodes, only used for the duration of the computation. The local input columns/values must be in the form {: [, , ...], ...} owner (Union[Unset, str]): The username of the end user who requested the computation. project_id (Union[Unset, str]): Unique identifier of a project. - run_mode (Union[Unset, RunMode]): Defines the mode in which to run a computation (local, collective, or both) + dp_policy (Union[Unset, DPPolicy]): represents the disclosure prevention policy that enables toggling various + disclosure prevention mechanisms + cohort_id (Union[Unset, str]): Unique identifier of a data object. + encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. + join_id (Union[Unset, str]): Unique identifier of a data object. 
local (Union[Unset, bool]): True if the project's computation should run only with local data (not configured the network) - encrypted (Union[Unset, bool]): True if computation result should be encrypted with the collective public key. - end_to_end_encrypted (Union[Unset, bool]): if the end to end encrypted mode is set to true, - then when release results is set to true and the output - is initially encrypted with a network collective key, then it is key switched to - the initiating user's public key. - input_data_object (Union[Unset, str]): Shared identifier of a data object. release_results (Union[Unset, bool]): flag to set to true if the computation should directly release the output results. If set, then encrypted results are automatically key switched and decrypted and a Result entity is saved - timeout (Union[Unset, int]): The maximum amount of time in seconds the computation is allowed to run. - data_source_parameters (Union[Unset, ComputationDataSourceParameters]): Parameters used to query the datasource - from each node before the computation - aggregation_column (Union[Unset, str]): the column on which to aggregate - binning_column (Union[Unset, str]): the column on which to bin the data + wait (Union[Unset, bool]): Whether to wait synchronously for the computation result. 
binning_parameters (Union[Unset, BinningParameters]): parameters used to bin data identifiable_columns (Union[Unset, List[str]]): + aggregation_column (Union[Unset, str]): the column on which to aggregate + binning_column (Union[Unset, str]): the column on which to bin the data """ type: ComputationType + dp_epsilon: Union[Unset, float] = -1.0 input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] = ( ComputationDefinitionInputClippingMethod.WARNING ) - preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET - wait: Union[Unset, bool] = UNSET - dp_policy: Union[Unset, "DPPolicy"] = UNSET - dp_epsilon: Union[Unset, float] = -1.0 - join_id: Union[Unset, str] = UNSET + run_mode: Union[Unset, RunMode] = UNSET local_input_id: Union[Unset, str] = UNSET maximum_aggregated_value: Union[Unset, None, float] = UNSET - cohort_id: Union[Unset, str] = UNSET + preprocessing_parameters: Union[Unset, "ComputationPreprocessingParameters"] = UNSET + data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET + end_to_end_encrypted: Union[Unset, bool] = UNSET + input_data_object: Union[Unset, str] = UNSET + timeout: Union[Unset, int] = UNSET local_input: Union[Unset, "LocalInput"] = UNSET owner: Union[Unset, str] = UNSET project_id: Union[Unset, str] = UNSET - run_mode: Union[Unset, RunMode] = UNSET - local: Union[Unset, bool] = UNSET + dp_policy: Union[Unset, "DPPolicy"] = UNSET + cohort_id: Union[Unset, str] = UNSET encrypted: Union[Unset, bool] = UNSET - end_to_end_encrypted: Union[Unset, bool] = UNSET - input_data_object: Union[Unset, str] = UNSET + join_id: Union[Unset, str] = UNSET + local: Union[Unset, bool] = UNSET release_results: Union[Unset, bool] = UNSET - timeout: Union[Unset, int] = UNSET - data_source_parameters: Union[Unset, "ComputationDataSourceParameters"] = UNSET - aggregation_column: Union[Unset, str] = UNSET - binning_column: Union[Unset, str] = UNSET + wait: Union[Unset, bool] = UNSET 
binning_parameters: Union[Unset, "BinningParameters"] = UNSET identifiable_columns: Union[Unset, List[str]] = UNSET + aggregation_column: Union[Unset, str] = UNSET + binning_column: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: type = self.type.value + dp_epsilon = self.dp_epsilon input_clipping_method: Union[Unset, str] = UNSET if not isinstance(self.input_clipping_method, Unset): input_clipping_method = self.input_clipping_method.value + run_mode: Union[Unset, str] = UNSET + if not isinstance(self.run_mode, Unset): + run_mode = self.run_mode.value + + local_input_id = self.local_input_id + maximum_aggregated_value = self.maximum_aggregated_value preprocessing_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.preprocessing_parameters, Unset): preprocessing_parameters = self.preprocessing_parameters.to_dict() - wait = self.wait - dp_policy: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.dp_policy, Unset): - dp_policy = self.dp_policy.to_dict() + data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.data_source_parameters, Unset): + data_source_parameters = self.data_source_parameters.to_dict() - dp_epsilon = self.dp_epsilon - join_id = self.join_id - local_input_id = self.local_input_id - maximum_aggregated_value = self.maximum_aggregated_value - cohort_id = self.cohort_id + end_to_end_encrypted = self.end_to_end_encrypted + input_data_object = self.input_data_object + timeout = self.timeout local_input: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.local_input, Unset): local_input = self.local_input.to_dict() owner = self.owner project_id = self.project_id - run_mode: Union[Unset, str] = UNSET - if not isinstance(self.run_mode, Unset): - run_mode = self.run_mode.value + dp_policy: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.dp_policy, Unset): + dp_policy = 
self.dp_policy.to_dict() - local = self.local + cohort_id = self.cohort_id encrypted = self.encrypted - end_to_end_encrypted = self.end_to_end_encrypted - input_data_object = self.input_data_object + join_id = self.join_id + local = self.local release_results = self.release_results - timeout = self.timeout - data_source_parameters: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.data_source_parameters, Unset): - data_source_parameters = self.data_source_parameters.to_dict() - - aggregation_column = self.aggregation_column - binning_column = self.binning_column + wait = self.wait binning_parameters: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.binning_parameters, Unset): binning_parameters = self.binning_parameters.to_dict() @@ -158,6 +156,9 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.identifiable_columns, Unset): identifiable_columns = self.identifiable_columns + aggregation_column = self.aggregation_column + binning_column = self.binning_column + field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -165,54 +166,54 @@ def to_dict(self) -> Dict[str, Any]: "type": type, } ) - if input_clipping_method is not UNSET: - field_dict["inputClippingMethod"] = input_clipping_method - if preprocessing_parameters is not UNSET: - field_dict["preprocessingParameters"] = preprocessing_parameters - if wait is not UNSET: - field_dict["wait"] = wait - if dp_policy is not UNSET: - field_dict["DPPolicy"] = dp_policy if dp_epsilon is not UNSET: field_dict["dpEpsilon"] = dp_epsilon - if join_id is not UNSET: - field_dict["joinId"] = join_id + if input_clipping_method is not UNSET: + field_dict["inputClippingMethod"] = input_clipping_method + if run_mode is not UNSET: + field_dict["runMode"] = run_mode if local_input_id is not UNSET: field_dict["localInputID"] = local_input_id if maximum_aggregated_value is not UNSET: field_dict["maximumAggregatedValue"] = maximum_aggregated_value - if cohort_id 
is not UNSET: - field_dict["cohortId"] = cohort_id + if preprocessing_parameters is not UNSET: + field_dict["preprocessingParameters"] = preprocessing_parameters + if data_source_parameters is not UNSET: + field_dict["dataSourceParameters"] = data_source_parameters + if end_to_end_encrypted is not UNSET: + field_dict["endToEndEncrypted"] = end_to_end_encrypted + if input_data_object is not UNSET: + field_dict["inputDataObject"] = input_data_object + if timeout is not UNSET: + field_dict["timeout"] = timeout if local_input is not UNSET: field_dict["localInput"] = local_input if owner is not UNSET: field_dict["owner"] = owner if project_id is not UNSET: field_dict["projectId"] = project_id - if run_mode is not UNSET: - field_dict["runMode"] = run_mode - if local is not UNSET: - field_dict["local"] = local + if dp_policy is not UNSET: + field_dict["DPPolicy"] = dp_policy + if cohort_id is not UNSET: + field_dict["cohortId"] = cohort_id if encrypted is not UNSET: field_dict["encrypted"] = encrypted - if end_to_end_encrypted is not UNSET: - field_dict["endToEndEncrypted"] = end_to_end_encrypted - if input_data_object is not UNSET: - field_dict["inputDataObject"] = input_data_object + if join_id is not UNSET: + field_dict["joinId"] = join_id + if local is not UNSET: + field_dict["local"] = local if release_results is not UNSET: field_dict["releaseResults"] = release_results - if timeout is not UNSET: - field_dict["timeout"] = timeout - if data_source_parameters is not UNSET: - field_dict["dataSourceParameters"] = data_source_parameters - if aggregation_column is not UNSET: - field_dict["aggregationColumn"] = aggregation_column - if binning_column is not UNSET: - field_dict["binningColumn"] = binning_column + if wait is not UNSET: + field_dict["wait"] = wait if binning_parameters is not UNSET: field_dict["binningParameters"] = binning_parameters if identifiable_columns is not UNSET: field_dict["identifiableColumns"] = identifiable_columns + if aggregation_column is not 
UNSET: + field_dict["aggregationColumn"] = aggregation_column + if binning_column is not UNSET: + field_dict["binningColumn"] = binning_column return field_dict @@ -227,6 +228,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() type = ComputationType(d.pop("type")) + dp_epsilon = d.pop("dpEpsilon", UNSET) + _input_clipping_method = d.pop("inputClippingMethod", UNSET) input_clipping_method: Union[Unset, ComputationDefinitionInputClippingMethod] if isinstance(_input_clipping_method, Unset): @@ -234,6 +237,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: input_clipping_method = ComputationDefinitionInputClippingMethod(_input_clipping_method) + _run_mode = d.pop("runMode", UNSET) + run_mode: Union[Unset, RunMode] + if isinstance(_run_mode, Unset): + run_mode = UNSET + else: + run_mode = RunMode(_run_mode) + + local_input_id = d.pop("localInputID", UNSET) + + maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + _preprocessing_parameters = d.pop("preprocessingParameters", UNSET) preprocessing_parameters: Union[Unset, ComputationPreprocessingParameters] if isinstance(_preprocessing_parameters, Unset): @@ -241,24 +255,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: preprocessing_parameters = ComputationPreprocessingParameters.from_dict(_preprocessing_parameters) - wait = d.pop("wait", UNSET) - - _dp_policy = d.pop("DPPolicy", UNSET) - dp_policy: Union[Unset, DPPolicy] - if isinstance(_dp_policy, Unset): - dp_policy = UNSET + _data_source_parameters = d.pop("dataSourceParameters", UNSET) + data_source_parameters: Union[Unset, ComputationDataSourceParameters] + if isinstance(_data_source_parameters, Unset): + data_source_parameters = UNSET else: - dp_policy = DPPolicy.from_dict(_dp_policy) - - dp_epsilon = d.pop("dpEpsilon", UNSET) - - join_id = d.pop("joinId", UNSET) + data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - local_input_id = 
d.pop("localInputID", UNSET) + end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) - maximum_aggregated_value = d.pop("maximumAggregatedValue", UNSET) + input_data_object = d.pop("inputDataObject", UNSET) - cohort_id = d.pop("cohortId", UNSET) + timeout = d.pop("timeout", UNSET) _local_input = d.pop("localInput", UNSET) local_input: Union[Unset, LocalInput] @@ -271,35 +279,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: project_id = d.pop("projectId", UNSET) - _run_mode = d.pop("runMode", UNSET) - run_mode: Union[Unset, RunMode] - if isinstance(_run_mode, Unset): - run_mode = UNSET + _dp_policy = d.pop("DPPolicy", UNSET) + dp_policy: Union[Unset, DPPolicy] + if isinstance(_dp_policy, Unset): + dp_policy = UNSET else: - run_mode = RunMode(_run_mode) + dp_policy = DPPolicy.from_dict(_dp_policy) - local = d.pop("local", UNSET) + cohort_id = d.pop("cohortId", UNSET) encrypted = d.pop("encrypted", UNSET) - end_to_end_encrypted = d.pop("endToEndEncrypted", UNSET) + join_id = d.pop("joinId", UNSET) - input_data_object = d.pop("inputDataObject", UNSET) + local = d.pop("local", UNSET) release_results = d.pop("releaseResults", UNSET) - timeout = d.pop("timeout", UNSET) - - _data_source_parameters = d.pop("dataSourceParameters", UNSET) - data_source_parameters: Union[Unset, ComputationDataSourceParameters] - if isinstance(_data_source_parameters, Unset): - data_source_parameters = UNSET - else: - data_source_parameters = ComputationDataSourceParameters.from_dict(_data_source_parameters) - - aggregation_column = d.pop("aggregationColumn", UNSET) - - binning_column = d.pop("binningColumn", UNSET) + wait = d.pop("wait", UNSET) _binning_parameters = d.pop("binningParameters", UNSET) binning_parameters: Union[Unset, BinningParameters] @@ -310,32 +307,36 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: identifiable_columns = cast(List[str], d.pop("identifiableColumns", UNSET)) + aggregation_column = d.pop("aggregationColumn", UNSET) + + 
binning_column = d.pop("binningColumn", UNSET) + v_binned_aggregation = cls( type=type, - input_clipping_method=input_clipping_method, - preprocessing_parameters=preprocessing_parameters, - wait=wait, - dp_policy=dp_policy, dp_epsilon=dp_epsilon, - join_id=join_id, + input_clipping_method=input_clipping_method, + run_mode=run_mode, local_input_id=local_input_id, maximum_aggregated_value=maximum_aggregated_value, - cohort_id=cohort_id, + preprocessing_parameters=preprocessing_parameters, + data_source_parameters=data_source_parameters, + end_to_end_encrypted=end_to_end_encrypted, + input_data_object=input_data_object, + timeout=timeout, local_input=local_input, owner=owner, project_id=project_id, - run_mode=run_mode, - local=local, + dp_policy=dp_policy, + cohort_id=cohort_id, encrypted=encrypted, - end_to_end_encrypted=end_to_end_encrypted, - input_data_object=input_data_object, + join_id=join_id, + local=local, release_results=release_results, - timeout=timeout, - data_source_parameters=data_source_parameters, - aggregation_column=aggregation_column, - binning_column=binning_column, + wait=wait, binning_parameters=binning_parameters, identifiable_columns=identifiable_columns, + aggregation_column=aggregation_column, + binning_column=binning_column, ) v_binned_aggregation.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/workflow_item.py b/src/tuneinsight/api/sdk/models/workflow_item.py index 00c6e73..e7bc59b 100644 --- a/src/tuneinsight/api/sdk/models/workflow_item.py +++ b/src/tuneinsight/api/sdk/models/workflow_item.py @@ -16,66 +16,66 @@ class WorkflowItem: """ Attributes: + target_handle (Union[Unset, str]): not used - UI specific + type (Union[Unset, str]): data (Union[Unset, WorkflowItemData]): + id (Union[Unset, str]): + source_handle (Union[Unset, str]): not used - UI specific + target (Union[Unset, str]): not used - UI specific position (Union[Unset, WorkflowItemPosition]): progress (Union[Unset, int]): source (Union[Unset, str]): not 
used - UI specific - source_handle (Union[Unset, str]): not used - UI specific - id (Union[Unset, str]): - target (Union[Unset, str]): not used - UI specific - target_handle (Union[Unset, str]): not used - UI specific - type (Union[Unset, str]): """ + target_handle: Union[Unset, str] = UNSET + type: Union[Unset, str] = UNSET data: Union[Unset, "WorkflowItemData"] = UNSET + id: Union[Unset, str] = UNSET + source_handle: Union[Unset, str] = UNSET + target: Union[Unset, str] = UNSET position: Union[Unset, "WorkflowItemPosition"] = UNSET progress: Union[Unset, int] = UNSET source: Union[Unset, str] = UNSET - source_handle: Union[Unset, str] = UNSET - id: Union[Unset, str] = UNSET - target: Union[Unset, str] = UNSET - target_handle: Union[Unset, str] = UNSET - type: Union[Unset, str] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: + target_handle = self.target_handle + type = self.type data: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.data, Unset): data = self.data.to_dict() + id = self.id + source_handle = self.source_handle + target = self.target position: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.position, Unset): position = self.position.to_dict() progress = self.progress source = self.source - source_handle = self.source_handle - id = self.id - target = self.target - target_handle = self.target_handle - type = self.type field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) + if target_handle is not UNSET: + field_dict["targetHandle"] = target_handle + if type is not UNSET: + field_dict["type"] = type if data is not UNSET: field_dict["data"] = data + if id is not UNSET: + field_dict["id"] = id + if source_handle is not UNSET: + field_dict["sourceHandle"] = source_handle + if target is not UNSET: + field_dict["target"] = target if position is not UNSET: field_dict["position"] = position if progress is not UNSET: 
field_dict["progress"] = progress if source is not UNSET: field_dict["source"] = source - if source_handle is not UNSET: - field_dict["sourceHandle"] = source_handle - if id is not UNSET: - field_dict["id"] = id - if target is not UNSET: - field_dict["target"] = target - if target_handle is not UNSET: - field_dict["targetHandle"] = target_handle - if type is not UNSET: - field_dict["type"] = type return field_dict @@ -85,6 +85,10 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.workflow_item_position import WorkflowItemPosition d = src_dict.copy() + target_handle = d.pop("targetHandle", UNSET) + + type = d.pop("type", UNSET) + _data = d.pop("data", UNSET) data: Union[Unset, WorkflowItemData] if isinstance(_data, Unset): @@ -92,6 +96,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: data = WorkflowItemData.from_dict(_data) + id = d.pop("id", UNSET) + + source_handle = d.pop("sourceHandle", UNSET) + + target = d.pop("target", UNSET) + _position = d.pop("position", UNSET) position: Union[Unset, WorkflowItemPosition] if isinstance(_position, Unset): @@ -103,26 +113,16 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: source = d.pop("source", UNSET) - source_handle = d.pop("sourceHandle", UNSET) - - id = d.pop("id", UNSET) - - target = d.pop("target", UNSET) - - target_handle = d.pop("targetHandle", UNSET) - - type = d.pop("type", UNSET) - workflow_item = cls( + target_handle=target_handle, + type=type, data=data, + id=id, + source_handle=source_handle, + target=target, position=position, progress=progress, source=source, - source_handle=source_handle, - id=id, - target=target, - target_handle=target_handle, - type=type, ) workflow_item.additional_properties = d diff --git a/src/tuneinsight/api/sdk/models/workflow_type.py b/src/tuneinsight/api/sdk/models/workflow_type.py new file mode 100644 index 0000000..2a86e68 --- /dev/null +++ b/src/tuneinsight/api/sdk/models/workflow_type.py @@ -0,0 +1,11 @@ +from enum 
import Enum + + +class WorkflowType(str, Enum): + CUSTOM = "custom" + MAAS = "maas" + IBAN_SEARCH = "iban_search" + FEASIBILITY = "feasibility" + + def __str__(self) -> str: + return str(self.value) diff --git a/src/tuneinsight/client/datasource.py b/src/tuneinsight/client/datasource.py index fa3ae74..0994a38 100644 --- a/src/tuneinsight/client/datasource.py +++ b/src/tuneinsight/client/datasource.py @@ -8,10 +8,16 @@ from tuneinsight.api.sdk.types import File from tuneinsight.api.sdk import Client from tuneinsight.api.sdk import models -from tuneinsight.api.sdk.api.api_datasource import post_data_source -from tuneinsight.api.sdk.api.api_datasource import put_data_source_data -from tuneinsight.api.sdk.api.api_datasource import delete_data_source +from tuneinsight.api.sdk.types import Unset, UNSET +from tuneinsight.api.sdk.api.api_datagen import post_synthetic_dataset +from tuneinsight.api.sdk.api.api_datasource import ( + post_data_source, + put_data_source_data, + delete_data_source, + get_data_source, +) from tuneinsight.api.sdk.api.api_dataobject import post_data_object + from tuneinsight.client.validation import validate_response from tuneinsight.client.dataobject import DataObject @@ -35,6 +41,24 @@ class DataSource: def __init__(self, model: models.DataSource, client: Client): self.model = model self.client = client + self.query_parameters = None + + ## Methods to create a datasource. + + @classmethod + def fetch_from_id(cls, client: Client, datasource_id: str): + """ + Creates a datasource object relating to a datasource in the backend. + + Args: + client (Client): the client to use to interact with the datasource + datasource_id (str): the unique identifier of the datasource. 
+ """ + ds_response: Response[models.DataSource] = get_data_source.sync_detailed( + client=client, data_source_id=datasource_id + ) + validate_response(ds_response) + return cls(model=ds_response.parsed, client=client) @classmethod def _from_definition(cls, client: Client, definition: models.DataSourceDefinition): @@ -64,60 +88,34 @@ def local(cls, client: Client, name: str, clear_if_exists: bool = False): definition = default_datasource_definition() definition.name = name definition.clear_if_exists = clear_if_exists - ds_config_type = models.DataSourceConfigType.LOCALDATASOURCECONFIG - ds_conf = models.LocalDataSourceConfig(type=ds_config_type) - definition.config = ds_conf - definition.type = "local" - + definition.type = models.DataSourceType.LOCAL return cls._from_definition(client, definition=definition) @classmethod def database( cls, client: Client, - config: models.DatabaseConnectionInfo, + config: models.DataSourceConfig, + credentials: models.Credentials, name: str, clear_if_exists: bool = False, - secret_id: str = None, ): """ Creates a new postgres database datasource. Args: client (Client): the client to use to interact with the datasource - config (models.DatabaseConnectionInfo): the database configuration + config (models.DataSourceConfig): the database configuration name (str, optional): the name to give to the datasource. Defaults to "". clear_if_exists (bool, optional): whether to try to clear any existing data source with the same name. - secret_id (str, optional): secret id that stores the database credentials on the KMS connected to the instance. + credentials_id (str, optional): secret id that stores the database credentials on the KMS connected to the instance. 
""" definition = default_datasource_definition() definition.name = name definition.clear_if_exists = clear_if_exists - definition.type = "database" - ds_config_type = models.DataSourceConfigType.DATABASEDATASOURCECONFIG - - cred_id = "db-creds" - if secret_id is None: - credentials = models.Credentials( - username=config.user, password=config.password, id=cred_id - ) - credential_provider = models.LocalCredentialsProvider( - type=models.CredentialsProviderType.LOCALCREDENTIALSPROVIDER, - credentials=[credentials], - ) - else: - credentials = models.AzureKeyVaultCredentialsProviderMappingsItem( - creds_id=cred_id, secret_id=secret_id - ) - credential_provider = models.AzureKeyVaultCredentialsProvider( - type=models.CredentialsProviderType.AZUREKEYVAULTCREDENTIALSPROVIDER, - mappings=[credentials], - ) - ds_config = models.DatabaseDataSourceConfig( - type=ds_config_type, connection_info=config - ) - definition.credentials_provider = credential_provider - definition.config = ds_config + definition.type = models.DataSourceType.DATABASE + definition.configuration = config + definition.credentials = credentials return cls._from_definition(client, definition=definition) @@ -125,7 +123,7 @@ def database( def from_api( cls, client: Client, - api_type: models.APIConnectionInfoType, + api_type: models.APIType, api_url: str, api_token: str, name: str, @@ -146,16 +144,11 @@ def from_api( definition = default_datasource_definition() definition.name = name definition.clear_if_exists = clear_if_exists - definition.type = "api" - - ds_config = models.ApiDataSourceConfig( - type=models.DataSourceConfigType.APIDATASOURCECONFIG + definition.type = models.DataSourceType.API + definition.configuration = models.DataSourceConfig( + api_url=api_url, api_type=api_type, cert=cert ) - ds_config.connection_info = models.APIConnectionInfo( - api_token=api_token, api_url=api_url, type=api_type, cert=cert - ) - definition.config = ds_config - + definition.credentials = 
models.Credentials(api_token=api_token) return cls._from_definition(client, definition=definition) @classmethod @@ -181,7 +174,7 @@ def from_dataframe( def __str__(self): model = self.model - return f"id: {model.unique_id}, name: {model.name}, type: {model.type}, createdAt: {model.created_at}" + return f"id: {model.id}, name: {model.name}, type: {model.type}, createdAt: {model.created_at}" def get_id(self) -> str: """ @@ -190,7 +183,9 @@ def get_id(self) -> str: Returns: str: the id as a a string """ - return self.model.unique_id + return self.model.id + + ## Methods to manipulate a datasource object. def adapt( self, do_type: models.DataObjectType, query: Any = "", json_path: str = "" @@ -235,7 +230,7 @@ def load_csv_data(self, path: str): ) response: Response[models.DataSource] = put_data_source_data.sync_detailed( client=self.client, - data_source_id=self.model.unique_id, + data_source_id=self.model.id, multipart_data=mpd, ) f.close() @@ -254,7 +249,7 @@ def load_dataframe(self, df: pd.DataFrame): data_source_request_data_raw=f.getvalue() ) response: Response[models.DataSource] = put_data_source_data.sync_detailed( - client=self.client, data_source_id=self.model.unique_id, multipart_data=mpd + client=self.client, data_source_id=self.model.id, multipart_data=mpd ) validate_response(response) @@ -282,12 +277,70 @@ def delete(self): Deletes this datasource. """ response: Response[Any] = delete_data_source.sync_detailed( - client=self.client, data_source_id=self.model.unique_id + client=self.client, data_source_id=self.model.id ) validate_response(response) + def synthesize( + self, + table: str = UNSET, + query: str = UNSET, + name: str = UNSET, + num_rows: int = UNSET, + ) -> "DataSource": + """ + Generates a synthetic dataset that mimics this datasource. -## Internal methods to manipulate configurations. 
+ This creates a new database datasource that contains synthetic data + with the same data structure (attributes and data types) as well as + some statistical properties of the data. + + One of table or query must be specified to generate data from a + database datasource. If neither are provided, the datasource name + is used instead, but that can potentially cause issues. + + Args: + table (str, optional): the table for which to generate synthetic data. + query (str, optional): the data query to perform on the data to get + name (str, optional): name of the synthetic datasource. If not provided, + synthetic_{datasource_name} is used instead. + num_rows (int, optional): number of rows to generate. If not provided, + the synthetic dataset will have the same number of rows as this datasource. + """ + if isinstance(table, Unset) and isinstance(query, Unset): + table = self.model.name + if isinstance(name, Unset) and not isinstance(self.model.name, Unset): + name = f"synthetic_{self.model.name}" + response = post_synthetic_dataset.sync_detailed( + client=self.client, + data_source_id=self.model.unique_id, + num_rows=num_rows, + table=table, + query=query, + table_name=name, + ) + validate_response(response) + return DataSource(response.parsed, self.client) + + ## Methods to interact with queries etc. + def set_query(self, query: str): + """ + Sets the database query to use for this datasource. + + When this datasource is used in a project, its query will override the query defined + in the local data selection of the project (if any), but not the query defined in the + computation definition (which take precedence). + + Note that this is specific to the Diapason implementation, and the query is not + persisted on the Tune Insight instance. + + Args + query (str): the SQL query to use to fet the data from the datasource. + """ + self.query_parameters = models.DataSourceQuery(database_query=query) + + +## Internal functions to manipulate configurations. 
def default_datasource_definition() -> models.DataSourceDefinition: @@ -302,17 +355,35 @@ def default_datasource_definition() -> models.DataSourceDefinition: ) -def new_postgres_config( - host: str, port: str, name: str, user: str, password: str -) -> models.DatabaseConnectionInfo: - """Convert a Postgres configuration to a models.DatabaseConnectionInfo.""" - return models.DatabaseConnectionInfo( - type=models.DatabaseType.POSTGRES, +def new_credentials( + username: str = "", password: str = "", token: str = "", credentials_id: str = None +): + """ + Creates a new credentials class with the correct credentials type. + + Args: + username (str, optional): the username. Defaults to "". + password (str, optional): the password. Defaults to "". + token (str, optional): the API token. Defaults to "". + credentials_id (str, optional): the secret ID to fetch the credentials remotely, if set, then the credentials type will be set to Azure Key Vault. Defaults to None. + + Returns: + _type_: _description_ + """ + if credentials_id is None: + return models.Credentials(username=username, password=password, api_token=token) + return models.Credentials( + credentials_id=credentials_id, type=models.CredentialsType.AZUREKEYVAULT + ) + + +def new_postgres_config(host: str, port: str, name: str) -> models.DataSourceConfig: + """Convert a Postgres configuration to a models.DataSourceConfig.""" + return models.DataSourceConfig( + database_type=models.DatabaseType.POSTGRES, host=host, port=port, database=name, - user=user, - password=password, ) @@ -320,15 +391,11 @@ def new_mariadb_config( host: str = "mariadb", port: str = "3306", name: str = "geco_0", - user: str = "geco", - password: str = "geco", -) -> models.DatabaseConnectionInfo: - """Convert a MariaDB configuration to a models.DatabaseConnectionInfo.""" - return models.DatabaseConnectionInfo( - type=models.DatabaseType.MYSQL, +) -> models.DataSourceConfig: + """Convert a MariaDB configuration to a models.DataSourceConfig.""" + 
return models.DataSourceConfig( + database_type=models.DatabaseType.MYSQL, host=host, port=port, database=name, - user=user, - password=password, ) diff --git a/src/tuneinsight/client/diapason.py b/src/tuneinsight/client/diapason.py index 1cc1d51..c9ada68 100644 --- a/src/tuneinsight/client/diapason.py +++ b/src/tuneinsight/client/diapason.py @@ -15,10 +15,7 @@ from tuneinsight.api.sdk.api.api_project import post_project from tuneinsight.api.sdk.api.api_project import get_project from tuneinsight.api.sdk.api.api_project import get_project_list -from tuneinsight.api.sdk.api.api_datasource import ( - get_data_source_list, - get_data_source, -) +from tuneinsight.api.sdk.api.api_datasource import get_data_source_list from tuneinsight.api.sdk.api.api_dataobject import get_data_object from tuneinsight.api.sdk.api.api_infos import get_infos from tuneinsight.api.sdk import models @@ -278,7 +275,7 @@ def new_datasource( def new_api_datasource( self, - api_type: models.APIConnectionInfoType, + api_type: models.APIType, api_url: str, name: str, api_token: str = "", @@ -330,19 +327,19 @@ def new_csv_datasource( def new_database( self, - pg_config: models.DatabaseConnectionInfo, + pg_config: models.DataSourceConfig, name: str, clear_if_exists: bool = False, - secret_id: str = None, + credentials: models.Credentials = models.Credentials(), ) -> DataSource: """ Creates a new Postgres datasource. Args: - config (models.DatabaseConnectionInfo): Postgres configuration. + config (models.DataSourceConfig): Postgres configuration. name (str, required): name of the datasource to be created. clear_if_exists (str, optional): overwrite datasource if it already exists. - secret_id (str, optional): secret id that stores the database credentials on the KMS connected to the instance. + credentials_id (models.Credential, optional): credentials / secret id that stores the database credentials on the KMS connected to the instance. 
Returns: DataSource: the newly created datasource @@ -352,7 +349,7 @@ def new_database( config=pg_config, name=name, clear_if_exists=clear_if_exists, - secret_id=secret_id, + credentials=credentials, ) def get_datasources(self, name: str = "") -> List[DataSource]: @@ -399,11 +396,7 @@ def get_datasource(self, ds_id: str = None, name: str = None) -> DataSource: if name is None: raise ValueError("At least one of ds_id or name must be provided.") return self.get_datasources(name=name)[0] - ds_response: Response[models.DataSource] = get_data_source.sync_detailed( - client=self.client, data_source_id=ds_id - ) - validate_response(ds_response) - return DataSource(model=ds_response.parsed, client=self.client) + return DataSource.fetch_from_id(self.client, ds_id) # Dataobject management. diff --git a/src/tuneinsight/client/project.py b/src/tuneinsight/client/project.py index 8b9e8d2..1445e38 100644 --- a/src/tuneinsight/client/project.py +++ b/src/tuneinsight/client/project.py @@ -72,6 +72,18 @@ class Project: model: models.Project # The underlying model client: Unset # the client used to access the api + datasource: DataSource = None + + def __attrs_post_init__(self): + """Create a datasource object if one is defined in the project model.""" + if ( + not isinstance(self.model.data_source_id, Unset) + and self.model.data_source_id + ): + self.datasource = DataSource.fetch_from_id( + self.client, self.model.data_source_id + ) + # Internal methods. def _refresh(self): @@ -200,8 +212,8 @@ def set_computation_type(self, comp_type: Type): """ Sets the computation type of the project's computation definition. - Args: - comp_type (Type): _description_ + This creates an empty computation definition of the chosen type. Intended for tests. 
+ """ definition = models.ComputationDefinition(type=comp_type) self.set_computation(definition) @@ -263,10 +275,16 @@ def set_input_datasource(self, ds: Union[DataSource, str]): Args: ds (DataSource | str): the datasource to link to the project, or its ID. """ + ds_id = ds if isinstance(ds, DataSource): - ds = ds.get_id() - proj_def = models.ProjectDefinition(data_source_id=ds) + ds_id = ds.get_id() + proj_def = models.ProjectDefinition(data_source_id=ds_id) self._patch(proj_def=proj_def) + # If the user provided an ID, create a datasource from this ID. Otherwise the same object is reused. + if isinstance(ds, DataSource): + self.datasource = ds + else: + self.datasource = DataSource.fetch_from_id(self.client, ds) def set_policy(self, policy: Policy): """ @@ -764,6 +782,16 @@ def get_policy(self) -> Policy: self._refresh() return Policy.from_model(self.model.policy) + @property + def is_differentially_private(self): + """Returns whether differential privacy is enabled for this project.""" + if isinstance(self.model.policy, Unset): + return False + dp_policy = self.model.policy.dp_policy + if isinstance(dp_policy, Unset): + return False + return dp_policy.use_differential_privacy + def get_computations(self) -> List[models.Computation]: return self.model.computations diff --git a/src/tuneinsight/computations/base.py b/src/tuneinsight/computations/base.py index 085d274..21d9c41 100644 --- a/src/tuneinsight/computations/base.py +++ b/src/tuneinsight/computations/base.py @@ -16,7 +16,7 @@ import pandas as pd from tuneinsight.api.sdk import models -from tuneinsight.api.sdk.types import UNSET +from tuneinsight.api.sdk.types import Unset, UNSET from tuneinsight.api.sdk.types import Response from tuneinsight.api.sdk.api.api_computations import ( compute, @@ -135,9 +135,7 @@ def _process_encrypted_results(self, dataobjects): @staticmethod def field_is_set(field: Any) -> bool: """Checks whether a field in a (API models) definition is set.""" - if field is UNSET or field == 
"": - return False - return True + return not (field is UNSET or field == "") @staticmethod def is_done(comp: models.Computation) -> bool: @@ -151,7 +149,7 @@ def is_done(comp: models.Computation) -> bool: def _update_computation_datasource(self, comp: models.ComputationDefinition): """ - Updates the definition of the input computation to have the specified datasource. + Updates the definition of the input computation to have the specified datasource parameters. """ if comp.type in [ models.ComputationType.COLLECTIVEKEYSWITCH, @@ -159,11 +157,22 @@ def _update_computation_datasource(self, comp: models.ComputationDefinition): models.ComputationType.PRIVATESEARCH, ]: return + # The data source query can be set at three levels (in decreasing precedence): + # 1. In this object (computation definition), + # 2. In the Datasource object of the project (project), + # 3. In the local data source (enforced on the server level). + # Check 1.: whether this computation has a query set. if self.datasource.query_set: comp.data_source_parameters = self.datasource.get_parameters() + # Otherwise, initialize empty data source parameters. else: if not self.field_is_set(comp.input_data_object): comp.data_source_parameters = models.ComputationDataSourceParameters() + # And check 2. whether the datasource has a query set. + if self.project.datasource is not None: + ds = self.project.datasource + if ds.query_parameters: + comp.data_source_parameters.data_source_query = ds.query_parameters def _update_computation_fields(self, comp: models.ComputationDefinition): """ @@ -494,6 +503,32 @@ def __init__( """ super().__init__(project) self.model = model_class(type=type, project_id=project.get_id(), **kwargs) + # Check DP compatibility (for now, with a friendly warning). + if project.is_differentially_private: + warn_message = ( + "This project has differential privacy enabled, but %s." + + "This will likely cause an error when running the computation." 
" Contact your administrator for more details."
tuneinsight.computations.types import Type, displayed_types @@ -95,6 +94,27 @@ def __new_threshold( type=threshold_type, ) + def enable_differential_privacy(self): + """ + Enables the use of differential privacy (DP) in this project. + + When using DP, additional randomness is added to the outputs of a project + in order to protect the privacy of data subjects. Only a subset of operations + are allowed when using differential privacy. Each computation exhausts a + fraction of the execution quota (called privacy budget). + + See the documentation for more details. + + """ + self.dp_policy.use_differential_privacy = True + + def disable_differential_privacy(self): + """ + Disables the use of differential privacy (DP) in this project. + + """ + self.dp_policy.use_differential_privacy = False + def set_max_columns( self, relative: bool = False, fixed_value: int = 5, relative_factor: float = 0.2 ): @@ -146,61 +166,45 @@ def set_max_factor( relative, fixed_value, relative_factor ) - def set_output_noise( - self, - eps: float = 1, - sensitivity: float = 1, - discrete: bool = False, - delta: float = 1e-5, - ): - """ - Sets the noise parameters for differential privacy. - - When set, every computation output gets encrypted noise added to it. - If the noise is discrete, Gaussian noise is added, otherwise Laplace noise is used. - - Args: - eps (float, optional): the value for the epsilon (privacy budget parameter). Defaults to 1. - sensitivity (float, optional): the sensitivity parameter should be equal to - the maximum difference expected between two neighboring datasets. Defaults to 1. - discrete (bool, optional): whether or not the noise should be discretized - delta (float,optional): the delta value when the noise is discrete. Defaults to 1e-5. 
- """ - self.dp_policy.noise_parameters = models.NoiseParameters( - epsilon=eps, sensitivity=sensitivity, discrete=discrete, delta=delta - ) - def set_quota( self, - initial_queries: int, + initial: int, reallocation_amount: int = 0, reallocation_interval_hours: int = 24, max_quota: int = None, ): """ - set_quota defines a quota for limiting the workflow executions in the project. - The quota is defined in terms of # of queries. By default, executing any distributed / collective workflow Defines a quota for limiting the workflow executions in the project. - - When running encrypted matching / set intersection workflows, the query cost is equal to the number of matching queries / set size. + By default, The quota is defined in terms of number of queries. Executing any distributed or + collective workflow exhausts some computation quota. Once the quota is exhausted, no (collective) + computation can be run until the quota is refreshed. + + When using Differential Privacy, this is the total privacy budget that can be used in the project. - The quota is defined by specifying an initial amount of queries allocated globally for the project along with an optional reallocation amount and interval. - The quota can also be limited to a maximum amount of queries. + When running encrypted matching or set intersection workflows, the query cost is equal to the number + of matching queries / set size. + + The quota is defined by specifying an initial amount allocated globally for the project, along with + an optional reallocation amount and interval. Args: - initial_queries (int): corresponds to the initial amount of queries that are allocated to the project. - reallocation_amount (int, optional): the amount of queries that are reallocated at every reallocation interval. Defaults to 0. - reallocation_interval_hours (int, optional): the interval in terms of hours at which the queries are reallocated. Defaults to 24. 
- max_quota (int, optional): the limit to the quota, if not specified it will be set to the initial allocated amount. Defaults to None. + initial (int): corresponds to the initial quota allocated to the project. + reallocation_amount (int, optional): the amount reallocated at every + reallocation interval. Defaults to 0. + reallocation_interval_hours (int, optional): the interval in terms of hours + at which the quota is reallocated. Defaults to 24. + max_quota (int, optional): the absolute limit to the quota. If not specified + it will be set to the initial allocated amount. Defaults to None. """ if max_quota is None: - max_quota = initial_queries - start_time = datetime.datetime.utcnow().replace(tzinfo=pytz.utc) + max_quota = initial + start_time = datetime.datetime.now(datetime.timezone.utc) interval = models.Duration( unit=models.TimeUnit.HOURS, value=reallocation_interval_hours ) self.dp_policy.execution_quota_parameters = models.ExecutionQuotaParameters( - allocation=initial_queries, + allocation=initial, increment=reallocation_amount, max_allocation=max_quota, scope=models.ExecutionQuotaParametersScope.PROJECT, @@ -235,7 +239,6 @@ def add_authorized_computation_type(self, computation_type: Type): "minDatasetSize": "min rows", "minFrequencies": "min frequencies", "maxColumnCount": "max columns", - "noiseParameters": "noise", } @@ -386,30 +389,16 @@ def display_dp_policy(dp: models.DPPolicy, r: Renderer = None): "Verifies for each categorical variable that the number of factors does not exceed the threshold" ) - if not isinstance(dp.noise_parameters, Unset): - np = dp.noise_parameters - noise_type = "Laplacian Mechanism" - if np.discrete: - noise_type = "Discretized Gaussian Mechanism" - r.h3(f"Differential Privacy noise ({noise_type})") - r( - "This mechanism ensures that noise is added to the encrypted outputs in order to avoid individual information leakage when releasing the results to the users." 
- ) - - r( - "The amount of noise is controlled through the `epsilon`, `delta` and sensitivity parameters:" - ) - - r("- `epsilon`:", r.code(np.epsilon), "(recommended values are below `1`)") - r("- `delta`:", r.code(np.delta), "(recommended to be set below `1e-5`)") + use_dp = ( + not isinstance(dp.use_differential_privacy, Unset) + and dp.use_differential_privacy + ) + if use_dp: + r.h3("This project uses differential privacy.") r( - "- sensitivity:", - r.code(np.sensitivity), - "(maximum difference of result when computing over two neighboring datasets)", + "Only computations that support differential privacy can be run on this project.", + "Each computation will use some of the budget.", ) - - if np.discrete: - r("- Noise discretization is activated.") if not isinstance(dp.execution_quota_parameters, Unset): bp = dp.execution_quota_parameters r.h4("Query Limiting parameters") @@ -421,18 +410,32 @@ def display_dp_policy(dp: models.DPPolicy, r: Renderer = None): if isinstance(allocated, Unset): allocated = 0 - text = f"Query limits are enforced in this project through a quota that is allocated at the {scope} level." - text += f"Budgets represent the maximum amount of distributed workflows that can for each {scope}." - r(text) + quota = "budget $\\varepsilon$" if use_dp else "quota" r( - f"- A quota of {allocated} queries / computations is initially allocated at the following date `{alloc_start}`." + f"Query limits are enforced in this project through a {quota} that is allocated at the {scope} level.", ) - - if bp.increment > 0 and not isinstance(bp.allocation_interval, Unset): + if use_dp: r( - f"- The quota is reallocated by {bp.increment} queries each {bp.allocation_interval.value} {bp.allocation_interval.unit} and cannot exceed {bp.max_allocation}." + "Each distributed workflow run on this project consumes some user-defined amount of the budget." 
+ ) + else: + r( + f"Quotas represent the maximum amount of distributed workflows that can be run for each {scope}.", ) - r( - "*Note that, depending on the specific workflow, the cost of a single workflow can exceed 1 in terms of quota.*" + f"- A {quota} of {allocated} is initially allocated at the following date `{alloc_start}`." ) + + if ( + not isinstance(bp.increment, Unset) + and bp.increment > 0 + and not isinstance(bp.allocation_interval, Unset) + ): + r( + f"- The {quota} is reallocated by {bp.increment} queries each {bp.allocation_interval.value} {bp.allocation_interval.unit} and cannot exceed {bp.max_allocation}." + ) + + if not use_dp: + r( + "*Note that, depending on the specific workflow, the cost of a single workflow can exceed 1 in terms of quota.*" + ) diff --git a/src/tuneinsight/cryptolib/cryptolib-linux_x86_64.so b/src/tuneinsight/cryptolib/cryptolib-linux_x86_64.so index 351514e..59a638a 100644 Binary files a/src/tuneinsight/cryptolib/cryptolib-linux_x86_64.so and b/src/tuneinsight/cryptolib/cryptolib-linux_x86_64.so differ diff --git a/src/tuneinsight/utils/datagen.py b/src/tuneinsight/utils/datagen.py index 23a47a0..fc31744 100644 --- a/src/tuneinsight/utils/datagen.py +++ b/src/tuneinsight/utils/datagen.py @@ -61,12 +61,14 @@ def generate( # Replace spaces by underscores and remove all non-word characters. table_name = table_name.replace(" ", "_") table_name = "".join(re.findall("[\\w]+", table_name)) + else: + table_name = f"mock_{self.method}" config: str = json.dumps(self.get_config()) response: Response = post_mock_dataset.sync_detailed( client=client, json_body=config, method=self.method, - name=table_name or f"mock_{self.method}", + name=table_name, numrows=num_rows, seed=seed, clear_if_exists=clear_if_exists, @@ -74,6 +76,8 @@ def generate( validate_response(response=response) # The response contains the description of the datasource created by the call. 
self.datasource = DataSource(model=response.parsed, client=client) + # Set the query on the datasource (since we know what table we want). + self.datasource.set_query(f"select * from {table_name}") return self.datasource @property diff --git a/src/tuneinsight/utils/display.py b/src/tuneinsight/utils/display.py index fdd5556..4591058 100644 --- a/src/tuneinsight/utils/display.py +++ b/src/tuneinsight/utils/display.py @@ -91,6 +91,7 @@ def __call__(self, *text, end=""): self.ln(*text, end=end) def end_paragraph(self): + """Ends the current paragraph with a line break.""" if self.use_ipython: self.text("
") else: diff --git a/src/tuneinsight/utils/privacy.py b/src/tuneinsight/utils/privacy.py index c87a246..d9cc8b1 100644 --- a/src/tuneinsight/utils/privacy.py +++ b/src/tuneinsight/utils/privacy.py @@ -2,6 +2,8 @@ from typing import List +from typing import Callable + import numpy as np import pandas as pd import matplotlib.pyplot as plt @@ -9,46 +11,60 @@ from tuneinsight.utils.plots import style_plot -class RatioEstimator: +# TO DO: use noise scales from results metadata. + + +class ConfidenceIntervalEstimator: """ - Compute confidence intervals for the ratio of two values computed with differential privacy. + Compute confidence intervals for a function of Differentially Private outputs. - This class uses simulated samples to estimate various properties of the observed values. + This uses Monte-Carlo sampling to simulate multiple runs of the Laplace mechanism + on a multi-dimensional query that is then aggregated to one number using an + arbitrary function. + This assumes that the outputs are computed with Laplace noise. """ def __init__( self, - numerator: float, - denominator: float, - noise_scale: float, - noise_scale_denominator: float = None, + noisy_answers: np.array, + noise_scales: np.array, + function: Callable[[np.array], float], num_samples: int = int(1e6), ): """ - Create simulated samples of the ratio under DP. + Create simulated samples of a function of the outputs of the Laplace mechanism. Args - numerator: the numerator of the ratio, observed with Laplace noise. - denominator: the denominator of the ratio, observer with Laplace noise. - noise_scale: the scale of the Laplace noise added to the numerator (and the denominator, if not specified). - noise_scale_denominator: the scale of the Laplace noise added to the denominator (if None, noise_scale is used). - num_samples (int, default 1e6): number of samples to use in the Monte-Carlo estimation. 
+ noisy_answers (np.array): the observed output of the mechanism, typically the + result of several queries with added noise. + noise_scales (np.array): the scale of the noise added on each answer. If an array + of length 1 is provided, the noise is assumed to be of the same scale on each. + function: a callable function that maps an array of answers to a single float. The + input array will be one-dimensional. + num_samples (int, default 1e6): number of samples to use in Monte-Carlo estimation. + Only change this number if the code takes too long to run. """ - - laplace_noises = np.random.laplace(loc=0, scale=1, size=(2, num_samples)) - - numerators = numerator + noise_scale * laplace_noises[0, :] - if noise_scale_denominator is None: - noise_scale_denominator = noise_scale - denominators = denominator + noise_scale_denominator * laplace_noises[1, :] - - self.observed = numerator / denominator - self.samples = numerators / denominators + # Convert the inputs to 1-dimensional arrays of same dimension (hopefully). + noisy_answers = np.array(noisy_answers).flatten() + noise_scales = np.array(noise_scales).flatten() + if len(noise_scales) == 1: + noise_scales = np.full(noisy_answers.shape, noise_scales[0]) + assert ( + noise_scales.shape == noisy_answers.shape + ), "Mismatching input dimensions for noise_scales and noisy_answers." + # Simulate the Laplace mechanism multiple times. + laplace_noises = np.random.laplace( + loc=0, scale=noise_scales, size=(num_samples, len(noise_scales)) + ) + samples = np.tile(noisy_answers, (num_samples, 1)) + laplace_noises + # Compute the function on each of these samples. + self.observed = function(noisy_answers) + self.samples = np.array([function(row) for row in samples]) def confidence_intervals(self, p: List[float] = (95, 99)): """ - Estimate confidence intervals for the ratio. + Estimate confidence intervals for the function result. Args p: the probabilities of the confidence interval (in percentages, in [0, 100]). 
@@ -63,11 +79,12 @@ def confidence_intervals(self, p: List[float] = (95, 99)): return pd.DataFrame(results, columns=["Percentage", "CI (min)", "CI (max)"]) - def draw_distribution(self, ci_color="k"): - """Display the shape of this distribution in a matplotlib figure. + def draw_distribution(self, ci_color="k", local=False): + """Display the shape of the distribution of function results in a matplotlib figure. Args ci_color: if not None, the 95% and 99% confidence intervals are displayed in this color. + local: whether the results are from a local or collective computation. """ plt.style.use("bmh") @@ -118,4 +135,50 @@ def draw_distribution(self, ci_color="k"): x_label="Possible values", y_label="Likelihood", size=(8, 6), + local=local, + ) + + +class RatioEstimator(ConfidenceIntervalEstimator): + """Compute confidence intervals for the ratio of two values computed with differential privacy. + + This class uses simulated samples to estimate various properties of the observed values. + + """ + + def __init__( + self, + numerator: float, + denominator: float, + noise_scale: float, + noise_scale_denominator: float = None, + num_samples: int = int(1e6), + ): + """ + Create simulated samples of the ratio under DP. + + Args + numerator: the numerator of the ratio, observed with Laplace noise. + denominator: the denominator of the ratio, observer with Laplace noise. + noise_scale: the scale of the Laplace noise added to the numerator (and the denominator, if not specified). + noise_scale_denominator: the scale of the Laplace noise added to the denominator (if None, noise_scale is used). + num_samples (int, default 1e6): number of samples to use in the Monte-Carlo estimation. 
+ """ + noise_scales = [noise_scale] + if noise_scale_denominator is not None: + noise_scales.append(noise_scale_denominator) + super().__init__( + [numerator, denominator], noise_scales, lambda x: x[0] / x[1], num_samples ) + + +class CountEstimator(ConfidenceIntervalEstimator): + """Confidence intervals for a counting query.""" + + def __init__(self, noisy_count: float, noise_scale: float): + """ + Args + noisy_count (float): the observed noisy count. + noise_scale (float): the scale of the Laplace noise added to the count. + """ + super().__init__([noisy_count], [noise_scale], lambda x: x[0])